
@etothepii/satisfactory-file-parser - npm package version comparison

Comparing version 0.1.17 to 0.1.18

build/parser/satisfactory/objects/ue/FMD5Hash.d.ts


build/parser/byte/byte-reader.class.d.ts

@@ -16,3 +16,4 @@ import { Alignment } from "./alignment.enum";

readBytes(count: number): Uint8Array;
-readHex(hexLength: number): string;
+private uint8ToHexRepresentation;
+readHex(byteLength: number, hexSeparator?: string): string;
readInt8(): number;

@@ -19,0 +20,0 @@ readUint8(): number;

@@ -37,10 +37,8 @@ "use strict";

}
-readHex(hexLength) {
-    let hexPart = [];
-    for (let i = 0; i < hexLength; i++) {
-        let currentHex = String.fromCharCode(this.bufferView.getUint8(this.currentByte++));
-        hexPart.push(currentHex);
-    }
-    return hexPart.join('');
+uint8ToHexRepresentation(byte) {
+    return ('0' + byte.toString(16)).slice(-2);
}
+readHex(byteLength, hexSeparator = '') {
+    return Array.from(this.readBytes(byteLength)).map(byte => this.uint8ToHexRepresentation(byte)).join(hexSeparator);
+}
readInt8() {

@@ -47,0 +45,0 @@ let data = this.bufferView.getInt8(this.currentByte++);
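
For orientation: the old `readHex` decoded each byte as an ASCII character, which only works when the bytes already are hex text; the new one formats every raw byte as two zero-padded hex digits and can join them with a separator. A standalone sketch of the equivalent logic (not the package's class):

```ts
// Equivalent of the new readHex: each byte becomes two hex digits,
// optionally joined with a separator.
function bytesToHex(bytes: Uint8Array, separator = ''): string {
    return Array.from(bytes)
        .map(byte => ('0' + byte.toString(16)).slice(-2))
        .join(separator);
}

bytesToHex(new Uint8Array([0x0f, 0xa0, 0x3c]), ' '); // "0f a0 3c"
```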

@@ -0,5 +1,9 @@

+export declare enum CompressionAlgorithmCode {
+    ZLIB = 3
+}
export type ChunkCompressionInfo = {
+    compressionAlgorithm?: CompressionAlgorithmCode;
    chunkHeaderSize: number;
    packageFileTag: number;
-    maxChunkContentSize: number;
+    maxUncompressedChunkContentSize: number;
};

@@ -6,0 +10,0 @@ export type ChunkSummary = {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CompressionAlgorithmCode = void 0;
var CompressionAlgorithmCode;
(function (CompressionAlgorithmCode) {
CompressionAlgorithmCode[CompressionAlgorithmCode["ZLIB"] = 3] = "ZLIB";
})(CompressionAlgorithmCode = exports.CompressionAlgorithmCode || (exports.CompressionAlgorithmCode = {}));
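
Together with the renamed field, a compression-info object in its default state uses exactly the values this diff assigns elsewhere; a sketch, assuming the two types are imported from the package's file.types module:

```ts
import { ChunkCompressionInfo, CompressionAlgorithmCode } from './file.types';

// Default state before the first chunk header is read: a packageFileTag of 0
// and a maxUncompressedChunkContentSize of 0 mean "not yet known" and are
// filled in from the chunk header (see the getInt32(8, ...) fallbacks below).
const compressionInfo: ChunkCompressionInfo = {
    compressionAlgorithm: CompressionAlgorithmCode.ZLIB,
    chunkHeaderSize: 49, // DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE
    packageFileTag: 0,
    maxUncompressedChunkContentSize: 0,
};
```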

@@ -14,2 +14,4 @@ "use strict";

const posAfterHeader = writer.getBufferPosition();
+save_writer_1.SaveWriter.WriteSaveBodyHash(writer, save.gridHash);
+save_writer_1.SaveWriter.WriteGrids(writer, save.grids);
save_writer_1.SaveWriter.WriteLevels(writer, save, save.header.buildVersion);

@@ -26,3 +28,6 @@ writer.endWriting();

onDecompressedSaveBody(reader.getBuffer());
+const gridHash = reader.readSaveBodyHash();
+save.gridHash = gridHash;
+const grids = reader.readGrids();
+save.grids = grids;
save.levels = reader.readLevels();

@@ -29,0 +34,0 @@ save.compressionInfo = reader.compressionInfo;

@@ -21,3 +21,3 @@ "use strict";

packageFileTag: 0,
-maxChunkContentSize: 0,
+maxUncompressedChunkContentSize: 0,
chunkHeaderSize: save_reader_1.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE

@@ -29,3 +29,3 @@ };

const versionThing = reader.readBytes(2 * 4);
-const probablyDimensions = (0, util_types_1.ParseVec3f)(reader);
+const dimensions = (0, util_types_1.ParseVec3f)(reader);
let itemTypeCount = reader.readInt32();

@@ -47,2 +47,3 @@ const itemCosts = new Array(itemTypeCount).fill(['', 0]);

return {
+    designerDimension: dimensions,
    recipeReferences: recipeRefs,

@@ -66,4 +67,4 @@ itemCosts

}
-if (this.compressionInfo.maxChunkContentSize <= 0) {
-    this.compressionInfo.maxChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
+if (this.compressionInfo.maxUncompressedChunkContentSize <= 0) {
+    this.compressionInfo.maxUncompressedChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
}

@@ -70,0 +71,0 @@ const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);

import { ChunkCompressionInfo } from "../../file.types";
import { SaveComponent } from "../objects/SaveComponent";
import { SaveEntity } from "../objects/SaveEntity";
-import { col4 } from "../structs/util.types";
+import { col4, vec3 } from "../structs/util.types";
export interface BlueprintConfig {

@@ -11,2 +11,3 @@ description: string;

export type BlueprintHeader = {
+    designerDimension?: vec3;
    itemCosts: [string, number][];

@@ -13,0 +14,0 @@ recipeReferences: string[];
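
So a parsed blueprint header can now report the designer's size. An illustrative (and partial; the real `BlueprintHeader` has more fields, and the reference names below are placeholders) value, assuming `vec3` is an `{x, y, z}` triple:

```ts
// Partial, illustrative BlueprintHeader shape after this change.
const header = {
    designerDimension: { x: 32, y: 32, z: 32 }, // new: blueprint designer dimensions (vec3)
    recipeReferences: ['Recipe_Example_C'],     // placeholder recipe reference
    itemCosts: [['Desc_Example_C', 6]] as [string, number][],
};
```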

@@ -129,6 +129,3 @@ import { ByteWriter } from "../../..";

static Serialize(writer: ByteWriter, property: ObjectProperty): void;
-static SerializeValue(writer: ByteWriter, value: {
-    levelName: string;
-    pathName: string;
-}): void;
+static SerializeValue(writer: ByteWriter, value: ObjectReference): void;
}

@@ -249,3 +246,4 @@ export declare class EnumProperty extends BasicProperty {

}
-export type GENERIC_MAP_KEY_TYPE = number | ObjectReference | boolean | GENERIC_STRUCT_PROPERTY_VALUE;
+export type MAP_STRUCT_KEY_PROXY = [number, number, number, number, number, number, number, number, number, number, number, number];
+export type GENERIC_MAP_KEY_TYPE = number | ObjectReference | boolean | GENERIC_STRUCT_PROPERTY_VALUE | MAP_STRUCT_KEY_PROXY;
export type GENERIC_MAP_VALUE_TYPE = number | ObjectReference | boolean | GENERIC_STRUCT_PROPERTY_VALUE;

@@ -255,2 +253,3 @@ export declare class MapProperty extends BasicProperty {

valueType: string;
+structKeyProxy: MAP_STRUCT_KEY_PROXY;
modeType: number;

@@ -260,3 +259,2 @@ modeUnk1: string | undefined;

modeUnk3: string;
-remainingData: number[];
values: [key: GENERIC_MAP_KEY_TYPE, value: GENERIC_MAP_VALUE_TYPE][];

@@ -263,0 +261,0 @@ constructor(keyType: string, valueType: string, ueType: string, index: number);
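
The proxy type exists because the struct keys of the `mSaveData`/`mUnresolvedSaveData` maps are not parsed into a typed structure: 0.1.17 skipped those 12 bytes, while 0.1.18 captures them and writes them back verbatim, so they survive a read/write round trip. A sketch of that idea:

```ts
// Sketch of the round-trip behind MAP_STRUCT_KEY_PROXY: 12 opaque bytes
// are captured on read and written back unchanged on serialize.
const raw = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]);
const structKeyProxy = Array.from(raw);          // read: Array.from(reader.readBytes(12))
const written = Uint8Array.from(structKeyProxy); // write: writer.writeBytesArray(structKeyProxy)
console.assert(written.every((b, i) => b === raw[i]));
```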

@@ -861,3 +861,3 @@ "use strict";

default:
-    throw new Error('Not Implemented.');
+    throw new Error(`Not Implemented SetProperty of ${subtype}.`);
}

@@ -876,4 +876,9 @@ return property;

case "IntProperty":
case "Int32Property":
property.values.forEach(v => Int32Property.SerializeValue(writer, v));
break;
case "UIntProperty":
case "UInt32Property":
property.values.forEach(v => Uint32Property.SerializeValue(writer, v));
break;
case "ObjectProperty":

@@ -889,3 +894,3 @@ property.values.forEach(v => ObjectProperty.SerializeValue(writer, v));

default:
-    throw new Error('Not Implemented.');
+    throw new Error(`Not Implemented SetProperty of ${property.subtype}.`);
}

@@ -900,6 +905,6 @@ }

this.valueType = valueType;
+this.structKeyProxy = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
this.modeType = 0;
this.modeUnk2 = '';
this.modeUnk3 = '';
-this.remainingData = [];
this.values = [];

@@ -920,3 +925,3 @@ }

key = propertyName;
-reader.skipBytes(12);
+property.structKeyProxy = Array.from(reader.readBytes(12));
}

@@ -983,3 +988,58 @@ else {

writer.writeInt32(property.modeType);
-writer.writeBytesArray(property.remainingData);
+writer.writeInt32(property.values.length);
+for (const entry of property.values) {
+    switch (property.keyType) {
+        case 'StructProperty':
+            if (property.name === 'mSaveData' || property.name === 'mUnresolvedSaveData') {
+                writer.writeBytesArray(property.structKeyProxy);
+            }
+            else {
+                (0, exports.SerializeDynamicStructData)(writer, 0, entry[0]);
+            }
+            break;
+        case 'ObjectProperty':
+            ObjectProperty.SerializeValue(writer, entry[0]);
+            break;
+        case 'StrProperty':
+        case 'NameProperty':
+            StrProperty.SerializeValue(writer, entry[0]);
+            break;
+        case 'EnumProperty':
+            EnumProperty.SerializeValue(writer, entry[0]);
+            break;
+        case 'IntProperty':
+        case 'Int32Property':
+            Int32Property.SerializeValue(writer, entry[0]);
+            break;
+        case 'ByteProperty':
+            ByteProperty.SerializeValue(writer, entry[0]);
+            break;
+        default:
+            throw new Error(`not implemented map key type ${property.valueType}`);
+    }
+    switch (property.valueType) {
+        case 'StructProperty':
+            (0, exports.SerializeDynamicStructData)(writer, 0, entry[1]);
+            break;
+        case 'ObjectProperty':
+            ObjectProperty.SerializeValue(writer, entry[1]);
+            break;
+        case 'StrProperty':
+        case 'NameProperty':
+            StrProperty.SerializeValue(writer, entry[1]);
+            break;
+        case 'EnumProperty':
+            EnumProperty.SerializeValue(writer, entry[1]);
+            break;
+        case 'IntProperty':
+        case 'Int32Property':
+            Int32Property.SerializeValue(writer, entry[1]);
+            break;
+        case 'ByteProperty':
+            ByteProperty.SerializeValue(writer, entry[1]);
+            break;
+        default:
+            throw new Error(`not implemented map value type ${property.valueType}`);
+    }
+}
}

@@ -986,0 +1046,0 @@ }

@@ -17,3 +17,3 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";

trailingData: number[];
-unknownType1: number;
+saveOrBlueprintIndicator: number;
unknownType2: number;

@@ -20,0 +20,0 @@ constructor(typePath: string, rootObject: string, instanceName: string);

@@ -13,3 +13,3 @@ "use strict";

this.trailingData = [];
-this.unknownType1 = 0;
+this.saveOrBlueprintIndicator = 0;
this.unknownType2 = 0;

@@ -16,0 +16,0 @@ }

@@ -55,3 +55,3 @@ "use strict";

reader.onProgressCallback(reader.getBufferProgress());
-level.collectables = level_class_1.Level.ReadCollectablesList(reader);
+level.collectables = level_class_1.Level.ReadCollectablesList(reader, undefined);
console.log(`before object contents buffer pos ${reader.getBufferPosition()}`);

@@ -62,3 +62,3 @@ await AsynchronousLevel.StreamReadObjectContents(reader, level.objects, buildVersion, writer, reader.onProgressCallback);

reader.onProgressCallback(reader.getBufferProgress());
-level_class_1.Level.ReadCollectablesList(reader);
+level_class_1.Level.ReadCollectablesList(reader, undefined);
await writer.endLevel();

@@ -65,0 +65,0 @@ return level;

@@ -12,2 +12,3 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";

collectables: ObjectReference[];
+remainingStuffAfterHeaders: Uint8Array;
constructor(name: string);

@@ -21,3 +22,3 @@ static SerializeObjectHeaders(writer: ByteWriter, objects: (SaveEntity | SaveComponent)[]): void;

static SerializeCollectablesList(writer: ByteWriter, collectables: ObjectReference[]): void;
-static ReadCollectablesList(reader: BinaryReadable): ObjectReference[];
+static ReadCollectablesList(reader: BinaryReadable, printSmthWhenItsCollectables?: string | undefined): ObjectReference[];
}

@@ -12,2 +12,3 @@ "use strict";

this.collectables = [];
+this.remainingStuffAfterHeaders = new Uint8Array();
}

@@ -35,5 +36,6 @@ static SerializeObjectHeaders(writer, objects) {

writer.writeInt32(0);
writer.writeInt32(0);
Level.SerializeObjectHeaders(writer, level.objects);
+Level.SerializeCollectablesList(writer, level.collectables);
-writer.writeBinarySizeFromPosition(lenIndicatorHeaderAndCollectableSize, lenIndicatorHeaderAndCollectableSize + 4);
+writer.writeBinarySizeFromPosition(lenIndicatorHeaderAndCollectableSize, lenIndicatorHeaderAndCollectableSize + 8);
Level.SerializeObjectContents(writer, level.objects, buildVersion, level.name);

@@ -45,4 +47,7 @@ Level.SerializeCollectablesList(writer, level.collectables);

writer.writeInt32(0);
writer.writeInt32(0);
+writer.writeInt32(objects.length);
for (const obj of objects) {
+    writer.writeInt32(obj.saveOrBlueprintIndicator);
+    writer.writeInt32(obj.unknownType2);
    const lenReplacementPosition = writer.getBufferPosition();

@@ -58,3 +63,3 @@ writer.writeInt32(0);

}
-writer.writeBinarySizeFromPosition(lenIndicatorEntities, lenIndicatorEntities + 4);
+writer.writeBinarySizeFromPosition(lenIndicatorEntities, lenIndicatorEntities + 8);
}

@@ -95,3 +100,3 @@ static ReadObjectHeaders(reader, objectsList, onProgressCallback) {

const obj = objectsList[i];
-obj.unknownType1 = reader.readInt32();
+obj.saveOrBlueprintIndicator = reader.readInt32();
obj.unknownType2 = reader.readInt32();

@@ -114,3 +119,2 @@ const binarySize = reader.readInt32();

const level = new Level(levelName);
-const levelStartPos = reader.getBufferPosition();
const headersBinLen = reader.readInt32();

@@ -120,9 +124,15 @@ const unk = reader.readInt32();

Level.ReadObjectHeaders(reader, level.objects, reader.onProgressCallback);
-const remainingSize = headersBinLen - (reader.getBufferPosition() - posBeforeHeaders);
+let remainingSize = headersBinLen - (reader.getBufferPosition() - posBeforeHeaders);
if (remainingSize > 0) {
-    reader.readBytes(remainingSize);
+    const doubledCollectablesIgnored = Level.ReadCollectablesList(reader, 'collectables at the header section! ' + levelName);
}
else {
}
+remainingSize = headersBinLen - (reader.getBufferPosition() - posBeforeHeaders);
+if (remainingSize !== 0) {
+    console.warn('remaining size not 0. Save may be corrupt.', remainingSize, levelName);
+}
+const objectContentsBinLen = reader.readInt32();
+const unk2 = reader.readInt32();
reader.onProgressCallback(reader.getBufferProgress());
-level.collectables = Level.ReadCollectablesList(reader);
const posBeforeContents = reader.getBufferPosition();

@@ -135,3 +145,3 @@ Level.ReadObjectContents(levelName, reader, level.objects, buildVersion, reader.onProgressCallback);

reader.onProgressCallback(reader.getBufferProgress());
-level.collectables = Level.ReadCollectablesList(reader);
+level.collectables = Level.ReadCollectablesList(reader, 'collectables 2! ' + levelName);
return level;

@@ -145,11 +155,6 @@ }

}
-static ReadCollectablesList(reader) {
+static ReadCollectablesList(reader, printSmthWhenItsCollectables) {
    const collected = [];
    let countSmthing = reader.readInt32();
    if (countSmthing > 0) {
-        const lookahead = reader.readInt32();
-        if (lookahead === 0) {
-            return collected;
-        }
-        reader.skipBytes(-4);
        for (let i = 0; i < countSmthing; i++) {

@@ -159,2 +164,5 @@ const collectable = ObjectReference_1.ObjectReference.Parse(reader);

}
+if (process.env.NODE_ENV === 'debug' && countSmthing > 0) {
+    console.log(printSmthWhenItsCollectables, countSmthing, collected.map(coll => coll.pathName).join(', '));
+}
}

@@ -161,0 +169,0 @@ return collected;
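
Pieced together from the reads above, `ReadLevel` in 0.1.18 treats each level block as the following layout (names follow the code's own variables; `unk`/`unk2` remain unidentified):

```ts
// Per-level binary layout as consumed by Level.ReadLevel (0.1.18):
//   int32  headersBinLen          // size of the headers + collectables blob
//   int32  unk                    // unknown
//   ...    object headers         // Level.ReadObjectHeaders
//   ...    collectables list      // only if bytes remain before headersBinLen is exhausted
//   int32  objectContentsBinLen   // size of the object contents blob
//   int32  unk2                   // unknown
//   ...    object contents        // Level.ReadObjectContents
//   ...    collectables list      // assigned to level.collectables
```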

import { ChunkCompressionInfo } from "../../file.types";
import { ObjectReference } from "../objects/ObjectReference";
import { Level } from "./level.class";
-import { Grids } from "./save-reader";
+import { ByteArray4, Grids } from "./save-reader";
import { SatisfactorySaveHeader } from "./save.types";
export declare class SatisfactorySave {
    header: SatisfactorySaveHeader;
+    gridHash: ByteArray4;
    grids: Grids;

@@ -9,0 +10,0 @@ levels: Level[];

@@ -6,2 +6,3 @@ "use strict";

constructor(header) {
+    this.gridHash = [0, 0, 0, 0];
    this.grids = {};

@@ -8,0 +9,0 @@ this.levels = [];

@@ -9,8 +9,11 @@ import { ByteReader } from "../../byte/byte-reader.class";

export declare const DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49;
+export type ByteArray4 = [number, number, number, number];
export type Grids = {
    [parentName: string]: {
-        [level: number]: {
-            name: string;
-            size: number;
-        }[];
+        checksum: number;
+        cellSize: number;
+        gridHash: number;
+        children: {
+            [name: string]: number;
+        };
    };
};

@@ -32,2 +35,3 @@ };

};
+readSaveBodyHash: () => ByteArray4;
readGrids: () => Grids;

@@ -34,0 +38,0 @@ readLevels(): Level[];
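
Concretely, the reshaped `Grids` value records per-grid metadata plus a flat map of child cells, instead of grouping `{name, size}` entries by LOD level as 0.1.17 did. An illustrative value (grid and cell names are made up):

```ts
// Illustrative Grids value under the new shape.
const grids = {
    MainGrid: {
        checksum: 305419896, // parentChecksumIdOrSmth in the reader
        cellSize: 16,
        gridHash: 123456789,
        children: {
            // cell instance name -> cell hash (cellHash in the reader)
            MainGrid_L0_X0_Y0: 987654321,
        },
    },
};
```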

"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -11,2 +34,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const parser_error_1 = require("../../error/parser.error");
+const FMD5Hash = __importStar(require("../objects/ue/FMD5Hash"));
const asynchronous_level_class_1 = require("./asynchronous-level.class");

@@ -23,6 +47,6 @@ const level_class_1 = require("./level.class");

packageFileTag: 0,
-maxChunkContentSize: 0,
+maxUncompressedChunkContentSize: 0,
chunkHeaderSize: exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE
};
-this.readGrids = () => {
+this.readSaveBodyHash = () => {
const unk = this.readInt32();

@@ -32,31 +56,26 @@ const cannotBeLevelCountWhatIsIt = this.readInt32();

const unk2 = this.readInt32();
-const hm1 = this.readInt32();
+const gridValidationHash = Array.from(this.readBytes(4));
const unk3 = this.readInt32();
const noneString2 = this.readString();
+return gridValidationHash;
+};
+this.readGrids = () => {
const grids = {};
-const readLevelsList = (childrenCount) => {
-    const hmm = this.readInt32();
+const readGrid = (childrenCount) => {
+    const parentChecksumIdOrSmth = this.readUint32();
    const parentName = this.readString();
-    const whatever1 = this.readInt32();
-    const whatever2 = this.readInt32();
-    grids[parentName] = {};
+    const cellSize = this.readInt32();
+    const gridHash = this.readUint32();
+    grids[parentName] = { checksum: parentChecksumIdOrSmth, children: {}, cellSize, gridHash };
    for (let i = 0; i < childrenCount; i++) {
-        const binaryLenIGuess = this.readUint32();
+        const cellHash = this.readUint32();
        const levelInstanceName = this.readString();
-        const lod = /\_L([0-9]+)\_/.exec(levelInstanceName)[1];
-        const arr = grids[parentName][Number(lod)];
-        if (!arr) {
-            grids[parentName][Number(lod)] = [];
-        }
-        grids[parentName][Number(lod)].push({
-            name: levelInstanceName,
-            size: binaryLenIGuess
-        });
+        grids[parentName].children[levelInstanceName] = cellHash;
    }
};
-readLevelsList(1379);
-readLevelsList(0);
-readLevelsList(972);
-readLevelsList(0);
-readLevelsList(0);
+readGrid(1379);
+readGrid(0);
+readGrid(972);
+readGrid(0);
+readGrid(0);
const unk5 = this.readInt32();

@@ -104,6 +123,13 @@ return grids;

}
+if (this.header.saveHeaderType >= 11) {
+    this.header.partitionEnabledFlag = this.readInt32() === 1;
+}
+if (this.header.saveHeaderType >= 12) {
+    this.header.consistencyHashBytes = FMD5Hash.readMD5Hash(this);
+}
+if (this.header.saveHeaderType >= 13) {
+    this.header.unknownStuff = Array.from(this.readBytes(28));
+    this.header.creativeModeEnabled = this.readInt32() == 1;
+}
if (this.header.saveVersion >= 21) {
    console.log('save version', this.header.saveVersion, 'header type', this.header.saveHeaderType, 'buildversion', this.header.buildVersion);
}

@@ -129,4 +155,4 @@ else {

}
-if (this.compressionInfo.maxChunkContentSize <= 0) {
-    this.compressionInfo.maxChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
+if (this.compressionInfo.maxUncompressedChunkContentSize <= 0) {
+    this.compressionInfo.maxUncompressedChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
}

@@ -187,14 +213,4 @@ const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);

}
-level_class_1.Level.ReadCollectablesList(this);
-this.onProgressCallback(this.getBufferProgress(), `finished reading levels pack of ${levelCount}.`);
-if (this.getBufferPosition() < this.getBufferLength()) {
-    const appendedLevel = new level_class_1.Level(this.header.mapName);
-    levels.push(appendedLevel);
-    level_class_1.Level.ReadObjectHeaders(this, appendedLevel.objects, this.onProgressCallback);
-    const unk = this.readInt32();
-    appendedLevel.collectables = level_class_1.Level.ReadCollectablesList(this);
-    level_class_1.Level.ReadObjectContents(appendedLevel.name, this, appendedLevel.objects, this.header.buildVersion, this.onProgressCallback);
-    level_class_1.Level.ReadCollectablesList(this);
-    level_class_1.Level.ReadCollectablesList(this);
-}
+levels.push(level_class_1.Level.ReadLevel(this, this.header.mapName, this.header.buildVersion));
+const trailingStuffToIgnore = level_class_1.Level.ReadCollectablesList(this, 'collectables 6!');
this.onProgressCallback(this.getBufferProgress(), 'finished parsing.');

@@ -201,0 +217,0 @@ return levels;

@@ -5,3 +5,3 @@ import { Alignment } from "../../byte/alignment.enum";

import { SatisfactorySave } from "./satisfactory-save";
-import { Grids } from "./save-reader";
+import { ByteArray4, Grids } from "./save-reader";
import { SatisfactorySaveHeader } from "./save.types";

@@ -11,2 +11,3 @@ export declare class SaveWriter extends ByteWriter {

static WriteHeader(writer: ByteWriter, header: SatisfactorySaveHeader): void;
+static WriteSaveBodyHash: (writer: ByteWriter, hash: ByteArray4) => void;
static WriteGrids: (writer: ByteWriter, grids: Grids) => void;

@@ -13,0 +14,0 @@ static WriteLevels(writer: ByteWriter, save: SatisfactorySave, buildVersion: number): void;

@@ -11,3 +11,6 @@ "use strict";

const parser_error_1 = require("../../error/parser.error");
+const file_types_1 = require("../../file.types");
+const FMD5Hash_1 = require("../objects/ue/FMD5Hash");
const level_class_1 = require("./level.class");
+const save_reader_1 = require("./save-reader");
class SaveWriter extends byte_writer_class_1.ByteWriter {

@@ -25,3 +28,3 @@ constructor() {

writer.writeInt32(header.playDurationSeconds);
-writer.writeInt64(BigInt(header.saveDateTime));
+writer.writeInt64(BigInt(header.saveDateTime) * 10000n + save_reader_1.SaveReader.EPOCH_TICKS);
writer.writeByte(header.sessionVisibility);
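
The on-disk timestamp is .NET/UE ticks (100 ns units since 0001-01-01), while `header.saveDateTime` holds Unix milliseconds, hence the `* 10000n` plus epoch offset on write. Assuming `SaveReader.EPOCH_TICKS` is the standard 621355968000000000n (ticks from year 1 to the Unix epoch), the conversion and its inverse are:

```ts
// Unix milliseconds <-> .NET/UE ticks (100ns units since 0001-01-01).
const EPOCH_TICKS = 621355968000000000n; // assumed value of SaveReader.EPOCH_TICKS
const TICKS_PER_MS = 10000n;

const saveDateTimeMs = 1690000000000n;                     // example Unix ms value
const ticks = saveDateTimeMs * TICKS_PER_MS + EPOCH_TICKS; // what WriteHeader stores
const roundTripMs = (ticks - EPOCH_TICKS) / TICKS_PER_MS;  // the reader's inverse
```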

@@ -43,4 +46,10 @@ if (header.saveHeaderType >= 7) {

}
+if (header.saveHeaderType >= 11) {
+    writer.writeInt32(header.partitionEnabledFlag ? 1 : 0);
+}
+if (header.saveHeaderType >= 12) {
+    (0, FMD5Hash_1.writeMD5Hash)(writer, header.consistencyHashBytes);
+}
+if (header.saveHeaderType >= 13) {
+    writer.writeBytesArray(header.unknownStuff);
+    writer.writeInt32(header.creativeModeEnabled ? 1 : 0);
+}

@@ -61,3 +70,3 @@ if (header.saveVersion >= 21) {

}
-level_class_1.Level.SerializeCollectablesList(writer, save.trailingCollectedObjects);
+level_class_1.Level.SerializeCollectablesList(writer, save.trailingCollectedObjects ?? []);
}

@@ -70,3 +79,3 @@ static GenerateCompressedChunksFromData(bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, alignment = alignment_enum_1.Alignment.LITTLE_ENDIAN) {

const miniView = new DataView(saveBody.buffer);
-miniView.setInt32(0, totalUncompressedSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
+miniView.setInt32(0, totalUncompressedSize - 4, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
onBinaryBeforeCompressing(saveBody.buffer);

@@ -76,3 +85,3 @@ let handledByte = 0;

while (handledByte < saveBody.byteLength) {
-    const uncompressedContentSize = Math.min(compressionInfo.maxChunkContentSize, saveBody.byteLength - handledByte);
+    const uncompressedContentSize = Math.min(compressionInfo.maxUncompressedChunkContentSize, saveBody.byteLength - handledByte);
    const uncompressedChunk = saveBody.buffer.slice(handledByte, handledByte + uncompressedContentSize);

@@ -90,6 +99,6 @@ let compressedChunk = new Uint8Array(0);

view.setInt32(0, compressionInfo.packageFileTag, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
-view.setInt32(4, 572662306, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
-view.setInt32(8, compressionInfo.maxChunkContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
+view.setInt32(4, 0x22222222, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
+view.setInt32(8, compressionInfo.maxUncompressedChunkContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(12, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
-view.setUint8(16, 3);
+view.setUint8(16, file_types_1.CompressionAlgorithmCode.ZLIB);
view.setInt32(17, compressedChunk.byteLength, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
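
Read off the `setInt32`/`setUint8` offsets, the chunk header now lays out as follows (49 bytes total per `DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE`; fields beyond offset 25 are not visible in this diff):

```ts
// Chunk header fields written above:
//   offset  0: int32  packageFileTag
//   offset  4: int32  0x22222222 (same constant as before; 572662306 in decimal)
//   offset  8: int32  maxUncompressedChunkContentSize
//   offset 12: int32  0
//   offset 16: uint8  compression algorithm (CompressionAlgorithmCode.ZLIB = 3)
//   offset 17: int32  compressed chunk byte length
//   offset 21: int32  0
//   (remaining bytes up to the 49-byte header size are not shown in this diff)
```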

@@ -123,21 +132,24 @@ view.setInt32(21, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);

}
-SaveWriter.WriteGrids = (writer, grids) => {
+SaveWriter.WriteSaveBodyHash = (writer, hash) => {
    writer.writeInt32(0);
    writer.writeInt32(0);
    writer.writeInt32(6);
    writer.writeString('None');
    writer.writeInt32(0);
-    writer.writeInt32(0);
-    writer.writeInt32(0);
+    writer.writeBytesArray(hash);
    writer.writeInt32(1);
    writer.writeString('None');
+};
+SaveWriter.WriteGrids = (writer, grids) => {
    for (const parentEntry of Object.entries(grids)) {
-        writer.writeInt32(0);
+        writer.writeInt32(parentEntry[1].checksum);
        writer.writeString(parentEntry[0]);
-        writer.writeInt32(0);
-        writer.writeInt32(0);
-        for (const child of Object.entries(parentEntry[1]).flatMap(entry => entry[1])) {
-            writer.writeUint32(child.size);
-            writer.writeString(child.name);
+        writer.writeInt32(parentEntry[1].cellSize);
+        writer.writeUint32(parentEntry[1].gridHash);
+        for (const child of Object.entries(parentEntry[1].children)) {
+            writer.writeUint32(child[1]);
+            writer.writeString(child[0]);
        }
    }
+    writer.writeInt32(0);
};
exports.SaveWriter = SaveWriter;

@@ -0,1 +1,2 @@

+import { MD5Hash } from '../objects/ue/FMD5Hash';
export interface ModData {

@@ -24,3 +25,5 @@ Reference: string;

fEditorObjectVersion?: number;
+unknownStuff?: number[];
+partitionEnabledFlag?: boolean;
+consistencyHashBytes?: MD5Hash;
+creativeModeEnabled?: boolean;
}

@@ -27,0 +30,0 @@ export type RoughSaveVersion = '<U6' | 'U6/U7' | 'U8+';

@@ -13,2 +13,4 @@ "use strict";

}
+await writer.writeGridHash(save.gridHash);
+await writer.writeGrids(save.grids);
await writer.openLevels();

@@ -15,0 +17,0 @@ const objectBatchSize = 10000;

@@ -17,3 +17,3 @@ "use strict";

packageFileTag: 0,
-maxChunkContentSize: 0,
+maxUncompressedChunkContentSize: 0,
chunkHeaderSize: SaveStreamReader.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE

@@ -77,4 +77,4 @@ };

}
-if (this.compressionInfo.maxChunkContentSize <= 0) {
-    this.compressionInfo.maxChunkContentSize = this.operatingDataView.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
+if (this.compressionInfo.maxUncompressedChunkContentSize <= 0) {
+    this.compressionInfo.maxUncompressedChunkContentSize = this.operatingDataView.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
}

@@ -81,0 +81,0 @@ const chunkCompressedLength = this.operatingDataView.getInt32(32, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);

@@ -5,2 +5,3 @@ /// <reference types="node" />

import { SaveObject } from "../satisfactory/objects/SaveObject";
+import { ByteArray4, Grids } from '../satisfactory/save/save-reader';
export declare class SaveStreamWriter {

@@ -14,2 +15,4 @@ private writer;

writeCompressionInfo(compressionInfo: ChunkCompressionInfo): Promise<void>;
+writeGridHash(gridHash: ByteArray4): Promise<void>;
+writeGrids(grids: Grids): Promise<void>;
openLevels(): Promise<void>;

@@ -16,0 +19,0 @@ openLevel(levelName: string): Promise<void>;

@@ -33,6 +33,20 @@ "use strict";

}
-async openLevels() {
+async writeGridHash(gridHash) {
    if (this.mode !== 'WROTE_COMPRESSION_INFO') {
-        throw new Error(`Wrong order of commands. Levels have to come after compression info. mode is ${this.mode}.`);
+        throw new Error(`Wrong order of commands. Save hbody hash has to come after compression info. mode is ${this.mode}.`);
    }
+    await this.writer.write(`, "gridHash": ${JSON.stringify(gridHash)}`);
+    this.mode = 'WROTE_GRID_HASH';
+}
+async writeGrids(grids) {
+    if (this.mode !== 'WROTE_GRID_HASH') {
+        throw new Error(`Wrong order of commands. Grids have to come after save body hash info. mode is ${this.mode}.`);
+    }
+    await this.writer.write(`, "grids": ${JSON.stringify(grids)}`);
+    this.mode = 'WROTE_GRIDS';
+}
+async openLevels() {
+    if (this.mode !== 'WROTE_GRIDS') {
+        throw new Error(`Wrong order of commands. Levels have to come after grids info. mode is ${this.mode}.`);
+    }
    await this.writer.write(`, "levels": [`);

@@ -39,0 +53,0 @@ this.mode = 'OPENED_LEVELS';
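
The `mode` checks enforce a strict call sequence on `SaveStreamWriter`; with the two new methods the required order becomes:

```ts
// Required call order after this change; each step advances this.mode
// and any out-of-order call throws.
await writer.writeCompressionInfo(save.compressionInfo); // -> 'WROTE_COMPRESSION_INFO'
await writer.writeGridHash(save.gridHash);               // -> 'WROTE_GRID_HASH'
await writer.writeGrids(save.grids);                     // -> 'WROTE_GRIDS'
await writer.openLevels();                               // -> 'OPENED_LEVELS'
```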

{
"name": "@etothepii/satisfactory-file-parser",
"author": "etothepii",
"version": "0.1.17",
"version": "0.1.18",
"description": "A file parser for satisfactory files. Includes save files and blueprint files.",

@@ -6,0 +6,0 @@ "types": "./build/index.d.ts",

@@ -16,9 +16,7 @@ # Satisfactory File Parser

-U8 has read and write support for blueprints, but only read support for saves. More coming soon.
| Game Version | Package |
|:--------------:|:-----------------------------|
| <= U5 | ❌ |
-| U6 + U7 | ✅ 0.0.1 - 0.0.34 |
-| U8 | ⚠️ >= 0.1.17 (readig for both saves and blueprints, writing only for blueprints) |
+| U6 + U7 | ✅ 0.0.1 - 0.0.34 |
+| U8 | ✅ >= 0.1.18 |

@@ -100,3 +98,3 @@

-Copyright (c) 2023 etotheepii
+Copyright (c) 2023 etothepii

@@ -103,0 +101,0 @@ Permission is hereby granted, free of charge, to any person obtaining a copy
