Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

@etothepii/satisfactory-file-parser

Package Overview
Dependencies
Maintainers
1
Versions
89
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@etothepii/satisfactory-file-parser - npm Package Compare versions

Comparing version 0.0.34 to 0.1.1

3

build/parser/parser.js

@@ -26,4 +26,3 @@ "use strict";

onDecompressedSaveBody(reader.getBuffer());
const levelParseResult = reader.readLevels();
save.levels = reader.levels;
save.levels = reader.readLevelsU8();
save.compressionInfo = reader.compressionInfo;

@@ -30,0 +29,0 @@ save.trailingCollectedObjects = reader.trailingCollectedObjects;

@@ -24,3 +24,3 @@ "use strict";

const unk = reader.readBytes(3 * 4);
const positionThingOrWhat = (0, util_types_1.ParseVec3)(reader);
const positionThingOrWhat = (0, util_types_1.ParseVec3f)(reader);
let itemTypeCount = reader.readInt32();

@@ -102,3 +102,3 @@ const itemCosts = new Array(itemTypeCount).fill(['', 0]);

level_class_1.Level.ReadObjectHeaders(reader, objects, () => { });
level_class_1.Level.ReadObjectContents(reader, objects, 0, () => { });
level_class_1.Level.ReadObjectContents('', reader, objects, 0, () => { });
return objects;

@@ -105,0 +105,0 @@ }

@@ -29,6 +29,13 @@ "use strict";

}
fields.specialProperties = DataFields.ParseAdditionalSpecialProperties(reader, typePath, start + length - reader.getBufferPosition() - 4);
const remainingLen = length - (reader.getBufferPosition() - start);
const oldRemainingLen = start + length - reader.getBufferPosition() - 4;
fields.specialProperties = DataFields.ParseAdditionalSpecialProperties(reader, typePath, remainingLen);
const end = reader.getBufferPosition();
let remainingBytes = start + length - end;
fields.trailingData = Array.from(reader.readBytes(remainingBytes));
const newWayOfCalculatingReminingSize = length - (end - start);
if (newWayOfCalculatingReminingSize > 0) {
reader.readBytes(newWayOfCalculatingReminingSize);
}
else if (newWayOfCalculatingReminingSize < 0) {
console.warn(`Unexpected. Did i read too much or is save corrupt !? bytes left to read is ${newWayOfCalculatingReminingSize}`);
}
return fields;

@@ -38,2 +45,3 @@ }

let property;
const start = reader.getBufferPosition();
switch (typePath) {

@@ -58,5 +66,5 @@ case '/Game/FactoryGame/Buildable/Factory/ConveyorBeltMk1/Build_ConveyorBeltMk1.Build_ConveyorBeltMk1_C':

};
if (remainingLen >= 24) {
property.sourceTranslation = (0, util_types_1.ParseVec3)(reader);
property.targetTranslation = (0, util_types_1.ParseVec3)(reader);
if (remainingLen - (reader.getBufferPosition() - start) >= 24) {
property.sourceTranslation = (0, util_types_1.ParseVec3f)(reader);
property.targetTranslation = (0, util_types_1.ParseVec3f)(reader);
}

@@ -173,2 +181,3 @@ break;

if (readBytes !== binarySize) {
console.warn(`possibly corrupt. Read ${readBytes} for ${propertyType} ${propertyName}, but ${binarySize} were indicated.`);
throw new Error(`possibly corrupt. Read ${readBytes} for ${propertyType} ${propertyName}, but ${binarySize} were indicated.`);

@@ -175,0 +184,0 @@ }

@@ -205,3 +205,3 @@ "use strict";

static CalcOverhead(property) {
throw new Error('unimplemented');
return 1;
}

@@ -497,3 +497,3 @@ static Serialize(writer, property) {

const unk3 = reader.readInt32();
if (unk2 !== 0) {
if (unk3 !== 0) {
struct.unk3 = unk3;

@@ -523,15 +523,23 @@ }

case 'Vector':
value = (0, util_types_1.ParseVec3)(reader);
break;
case 'Rotator':
case 'Vector2D':
value = (0, util_types_1.ParseVec3)(reader);
value = (0, util_types_1.ParseVec3f)(reader);
break;
case 'Quat':
value = (0, util_types_1.ParseVec4)(reader);
break;
case 'Vector4':
case 'Vector4D':
value = (0, util_types_1.ParseVec4)(reader);
value = (0, util_types_1.ParseVec4f)(reader);
break;
case 'Box':
const otherBoxLike = {
other1: (0, util_types_1.ParseVec3f)(reader),
other2: (0, util_types_1.ParseVec3f)(reader)
};
value = {
min: (0, util_types_1.ParseVec3)(reader),
max: (0, util_types_1.ParseVec3)(reader),
min: (0, util_types_1.ParseVec3f)(reader),
max: (0, util_types_1.ParseVec3f)(reader),
isValid: reader.readByte() >= 1

@@ -619,3 +627,3 @@ };

value = value;
(0, util_types_1.SerializeVec3)(writer, value);
(0, util_types_1.SerializeVec3f)(writer, value);
break;

@@ -626,8 +634,8 @@ case 'Quat':

value = value;
(0, util_types_1.SerializeVec4)(writer, value);
(0, util_types_1.SerializeVec4f)(writer, value);
break;
case 'Box':
value = value;
(0, util_types_1.SerializeVec3)(writer, value.min);
(0, util_types_1.SerializeVec3)(writer, value.max);
(0, util_types_1.SerializeVec3f)(writer, value.min);
(0, util_types_1.SerializeVec3f)(writer, value.max);
writer.writeByte(value.isValid ? 1 : 0);

@@ -763,4 +771,4 @@ break;

});
const readBytees = reader.getBufferPosition() - before;
if (readBytees !== binarySize) {
const readBytes = reader.getBufferPosition() - before;
if (readBytes !== binarySize) {
throw new Error('possibly corrupt in array of struct.');

@@ -832,4 +840,3 @@ }

default:
console.log(property.type, property.ueType);
throw new Error();
throw new Error(`Unknown array property ${property.ueType}, ${property.type}`);
}

@@ -863,3 +870,3 @@ }

if (propertyName === 'mRemovalLocations') {
property = new SetProperty(subtype, new Array(elementCount).fill(0).map(() => (0, util_types_1.ParseVec3)(reader)), ueType, index);
property = new SetProperty(subtype, new Array(elementCount).fill(0).map(() => (0, util_types_1.ParseVec3f)(reader)), ueType, index);
}

@@ -891,3 +898,3 @@ break;

case "StructProperty":
property.values.forEach(v => (0, util_types_1.SerializeVec3)(writer, v));
property.values.forEach(v => (0, util_types_1.SerializeVec3f)(writer, v));
break;

@@ -894,0 +901,0 @@ default:

@@ -37,2 +37,5 @@ "use strict";

static ParseData(entity, length, reader, buildVersion, typePath) {
const typeOrWhat = reader.readInt32();
const something = reader.readInt32();
const afterSizeIndicator = reader.getBufferPosition();
var newLen = length - 12;

@@ -51,3 +54,4 @@ entity.parentObjectRoot = reader.readString();

}
return SaveObject_1.SaveObject.ParseData(entity, newLen, reader, buildVersion, typePath);
const remainingSize = something - (reader.getBufferPosition() - afterSizeIndicator);
return SaveObject_1.SaveObject.ParseData(entity, remainingSize, reader, buildVersion, typePath);
}

@@ -54,0 +58,0 @@ static SerializeHeader(writer, entity) {

@@ -17,3 +17,3 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";

static ReadObjectHeaders(reader: BinaryReadable, objectsList: SaveObject[], onProgressCallback: (progress: number, msg?: string) => void): void;
static ReadObjectContents(reader: BinaryReadable, objectsList: SaveObject[], buildVersion: number, onProgressCallback: (progress: number, msg?: string) => void): Promise<void>;
static ReadObjectContents(levelName: string, reader: BinaryReadable, objectsList: SaveObject[], buildVersion: number, onProgressCallback: (progress: number, msg?: string) => void): Promise<void>;
static ReadLevel(reader: SaveReader, levelName: string, buildVersion: number): Level;

@@ -20,0 +20,0 @@ static SerializeCollectablesList(writer: ByteWriter, collectables: ObjectReference[]): void;

@@ -80,5 +80,5 @@ "use strict";

}
const unk = reader.readInt32();
}
static async ReadObjectContents(reader, objectsList, buildVersion, onProgressCallback) {
const binarySize = reader.readInt32();
static async ReadObjectContents(levelName, reader, objectsList, buildVersion, onProgressCallback) {
const posBefore = reader.getBufferPosition();

@@ -94,2 +94,5 @@ const countEntities = reader.readInt32();

const len = reader.readInt32();
if (len === 0) {
throw new Error(`check number is a wrong value (${len}). This normally indicates a corrupt entity or save.`);
}
const before = reader.getBufferPosition();

@@ -101,21 +104,40 @@ const obj = objectsList[i];

else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
SaveComponent_1.SaveComponent.ParseData(obj, len, reader, buildVersion, obj.typePath);
const wtf1 = reader.readInt32();
const wtf2 = reader.readInt32();
SaveComponent_1.SaveComponent.ParseData(obj, wtf2, reader, buildVersion, obj.typePath);
}
const after = reader.getBufferPosition();
if (after - before !== len) {
console.warn('entity may be corrupt', this.name, i);
}
}
const posAfter = reader.getBufferPosition();
if (posAfter - posBefore !== binarySize) {
console.warn('save seems corrupt.', this.name);
}
}
static ReadLevel(reader, levelName, buildVersion) {
const level = new Level(levelName);
reader.readInt32();
const levelStartPos = reader.getBufferPosition();
const placeholderHead = reader.readInt32();
if (placeholderHead === 8) {
const morePlaceholder = reader.readBytes(28);
}
else {
reader.skipBytes(-4);
}
const lookahead1 = reader.readInt32();
const lookahead2 = reader.readInt32();
if (lookahead1 !== 0 && lookahead2 !== 0) {
reader.skipBytes(-8);
return level;
}
else if (lookahead1 !== 0 && lookahead2 === 0) {
reader.skipBytes(-8);
}
else {
throw new Error('Unexpected. what now? TODO FIXME');
}
const binLen = reader.readInt32();
const unk = reader.readInt32();
Level.ReadObjectHeaders(reader, level.objects, reader.onProgressCallback);
reader.onProgressCallback(reader.getBufferProgress());
level.collectables = Level.ReadCollectablesList(reader);
Level.ReadObjectContents(reader, level.objects, buildVersion, reader.onProgressCallback);
Level.ReadObjectContents(levelName, reader, level.objects, buildVersion, reader.onProgressCallback);
reader.onProgressCallback(reader.getBufferProgress());

@@ -132,7 +154,14 @@ Level.ReadCollectablesList(reader);

static ReadCollectablesList(reader) {
let countCollected = reader.readInt32();
const collected = [];
for (let i = 0; i < countCollected; i++) {
const collectable = ObjectReference_1.ObjectReference.Parse(reader);
collected.push(collectable);
let countSmthing = reader.readInt32();
if (countSmthing > 0) {
const lookahead = reader.readInt32();
if (lookahead === 0) {
return collected;
}
reader.skipBytes(-4);
for (let i = 0; i < countSmthing; i++) {
const collectable = ObjectReference_1.ObjectReference.Parse(reader);
collected.push(collectable);
}
}

@@ -139,0 +168,0 @@ return collected;

@@ -19,3 +19,3 @@ import { ByteReader } from "../../byte/byte-reader.class";

export type ReadMode = 'stream' | 'whole';
export declare const DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 48;
export declare const DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49;
export declare class SaveReader extends ByteReader {

@@ -34,4 +34,4 @@ onProgressCallback: (progress: number, msg?: string) => void;

};
readLevels(): Level[];
readLevelsU8(): Level[];
readLevelsAsynchronously(writer: SaveStreamWriter): Promise<Level[]>;
}

@@ -33,3 +33,3 @@ "use strict";

exports.projectionFilterApplies = projectionFilterApplies;
exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 48;
exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49;
class SaveReader extends byte_reader_class_1.ByteReader {

@@ -85,2 +85,8 @@ constructor(fileBuffer, onProgressCallback = () => { }) {

}
if (this.header.saveHeaderType >= 13) {
const unk1 = this.readInt32();
const unk2 = this.readInt32();
const unk3 = this.readBytes(16);
const unk4 = this.readInt32();
}
if (this.header.saveVersion >= 21) {

@@ -110,4 +116,4 @@ }

}
const chunkCompressedLength = chunkHeader.getInt32(32, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = chunkHeader.getInt32(40, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = chunkHeader.getInt32(25, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
totalUncompressedBodySize += chunkUncompressedLength;

@@ -140,4 +146,4 @@ const currentChunkSize = chunkCompressedLength;

const dataLength = this.readInt32();
if (totalUncompressedBodySize !== dataLength + 4) {
throw new parser_error_1.CorruptSaveError(`Possibly corrupt. Indicated size of total save body (${dataLength}) does not match the uncompressed real size of ${totalUncompressedBodySize}.`);
if (totalUncompressedBodySize !== dataLength + 8) {
throw new parser_error_1.CorruptSaveError(`Possibly corrupt. Indicated size of total save body (${dataLength + 8}) does not match the uncompressed real size of ${totalUncompressedBodySize}.`);
}

@@ -149,3 +155,3 @@ return {

}
readLevels() {
readLevelsU8() {
if (!this.header) {

@@ -155,15 +161,57 @@ throw new parser_error_1.ParserError('ParserError', 'Header must be set before objects can be read.');

if (this.header.saveVersion < 29) {
throw new parser_error_1.UnsupportedVersionError('Support for < U6 is not yet implemented.');
throw new parser_error_1.UnsupportedVersionError('Game Version < U6 is not supported.');
}
const numSubLevels = this.readInt32();
this.levels = new Array(numSubLevels + 1);
for (let j = 0; j <= numSubLevels; j++) {
let levelName = (j === numSubLevels) ? '' + this.header.mapName : this.readString();
this.onProgressCallback(this.getBufferProgress(), `reading level [${(j + 1)}/${(numSubLevels + 1)}] ${levelName}`);
this.levels[j] = level_class_1.Level.ReadLevel(this, levelName, this.header.buildVersion);
if (this.header.saveHeaderType < 13) {
throw new parser_error_1.UnsupportedVersionError('Game Version < U8 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.0.34');
}
if (this.getBufferPosition() < this.bufferView.byteLength) {
this.trailingCollectedObjects = level_class_1.Level.ReadCollectablesList(this);
const unk = this.readInt32();
const cannotBeLevelCountWhatIsIt = this.readInt32();
const noneString = this.readString();
const unk2 = this.readInt32();
const hm1 = this.readInt32();
const unk3 = this.readInt32();
const noneString2 = this.readString();
const grids = {};
const readLevelsList = (childrenCount) => {
const hmm = this.readInt32();
const parentName = this.readString();
const whatever = this.readBytes(8);
grids[parentName] = {};
for (let i = 0; i < childrenCount; i++) {
const binaryLenIGuess = this.readInt32();
const levelInstanceName = this.readString();
const lod = /\_L([0-9]+)\_/.exec(levelInstanceName)[1];
const arr = grids[parentName][Number(lod)];
if (!arr) {
grids[parentName][Number(lod)] = [];
}
grids[parentName][Number(lod)].push(levelInstanceName);
}
};
readLevelsList(1379);
readLevelsList(0);
readLevelsList(972);
readLevelsList(0);
readLevelsList(0);
const unk5 = this.readInt32();
const levels = [];
const levelCount = this.readInt32();
this.onProgressCallback(this.getBufferProgress(), `reading pack of ${levelCount} levels.`);
for (let i = 0; i < levelCount; i++) {
let levelSingleName = this.readString();
levels.push(level_class_1.Level.ReadLevel(this, levelSingleName, this.header.buildVersion));
}
return this.levels;
level_class_1.Level.ReadCollectablesList(this);
this.onProgressCallback(this.getBufferProgress(), `finished reading levels pack of ${levelCount}.`);
if (this.getBufferPosition() < this.getBufferLength()) {
const appendedLevel = new level_class_1.Level(this.header.mapName);
levels.push(appendedLevel);
level_class_1.Level.ReadObjectHeaders(this, appendedLevel.objects, this.onProgressCallback);
appendedLevel.collectables = level_class_1.Level.ReadCollectablesList(this);
level_class_1.Level.ReadObjectContents(appendedLevel.name, this, appendedLevel.objects, this.header.buildVersion, this.onProgressCallback);
level_class_1.Level.ReadCollectablesList(this);
level_class_1.Level.ReadCollectablesList(this);
}
this.onProgressCallback(this.getBufferProgress(), 'finished parsing.');
return levels;
}

@@ -170,0 +218,0 @@ async readLevelsAsynchronously(writer) {

@@ -81,3 +81,3 @@ "use strict";

view.setInt32(0, compressionInfo.packageFileTag, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(4, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(4, 572662306, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(8, compressionInfo.maxChunkContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);

@@ -84,0 +84,0 @@ view.setInt32(12, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);

@@ -18,2 +18,4 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";

export declare const SerializeVec4: (writer: ByteWriter, vec: vec4) => void;
export declare const ParseVec4f: (reader: BinaryReadable) => vec4;
export declare const SerializeVec4f: (writer: ByteWriter, vec: vec4) => void;
export declare const sub: (other: vec3, vec: vec3) => vec3;

@@ -29,2 +31,4 @@ export declare const add: (vec: vec3, other: vec3) => vec3;

export declare const SerializeVec3: (writer: ByteWriter, vec: vec3) => void;
export declare const ParseVec3f: (reader: BinaryReadable) => vec3;
export declare const SerializeVec3f: (writer: ByteWriter, vec: vec3) => void;
export type vec2 = {

@@ -36,2 +40,4 @@ x: number;

export declare const SerializeVec2: (writer: ByteWriter, vec: vec2) => void;
export declare const ParseVec2f: (reader: BinaryReadable) => vec2;
export declare const SerializeVec2f: (writer: ByteWriter, vec: vec2) => void;
export type Transform = {

@@ -38,0 +44,0 @@ rotation: vec4;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SerializeTransform = exports.ParseTransform = exports.SerializeVec2 = exports.ParseVec2 = exports.SerializeVec3 = exports.ParseVec3 = exports.norm = exports.mult = exports.length = exports.add = exports.sub = exports.SerializeVec4 = exports.ParseVec4 = exports.ParseCol4BGRA = exports.SerializeCol4BGRA = exports.ParseCol4RGBA = exports.SerializeCol4RGBA = void 0;
exports.SerializeTransform = exports.ParseTransform = exports.SerializeVec2f = exports.ParseVec2f = exports.SerializeVec2 = exports.ParseVec2 = exports.SerializeVec3f = exports.ParseVec3f = exports.SerializeVec3 = exports.ParseVec3 = exports.norm = exports.mult = exports.length = exports.add = exports.sub = exports.SerializeVec4f = exports.ParseVec4f = exports.SerializeVec4 = exports.ParseVec4 = exports.ParseCol4BGRA = exports.SerializeCol4BGRA = exports.ParseCol4RGBA = exports.SerializeCol4RGBA = void 0;
const SerializeCol4RGBA = (writer, value) => {

@@ -39,3 +39,3 @@ writer.writeFloat(value.r);

...((0, exports.ParseVec3)(reader)),
w: reader.readFloat32()
w: reader.readDouble()
};

@@ -46,5 +46,17 @@ };

(0, exports.SerializeVec3)(writer, vec);
writer.writeDouble(vec.w);
};
exports.SerializeVec4 = SerializeVec4;
/**
 * Reads a vec4 stored as four 32-bit floats (x, y, z, w in order).
 * Float counterpart of ParseVec4, which reads 64-bit doubles for w.
 * @param reader binary reader positioned at the start of the vector data
 * @returns an object with numeric x, y, z and w components
 */
const ParseVec4f = (reader) => {
    const xyz = (0, exports.ParseVec3f)(reader);
    const w = reader.readFloat32();
    return { ...xyz, w };
};
exports.ParseVec4f = ParseVec4f;
/**
 * Writes a vec4 as four 32-bit floats (x, y, z, w in order).
 * Float counterpart of SerializeVec4, which writes w as a 64-bit double.
 * @param writer binary writer receiving the vector data
 * @param vec vec4 with numeric x, y, z and w components
 */
const SerializeVec4f = (writer, vec) => {
    const { w } = vec;
    (0, exports.SerializeVec3f)(writer, vec);
    writer.writeFloat(w);
};
exports.SerializeVec4 = SerializeVec4;
exports.SerializeVec4f = SerializeVec4f;
const sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z });

@@ -63,3 +75,3 @@ exports.sub = sub;

...((0, exports.ParseVec2)(reader)),
z: reader.readFloat32()
z: reader.readDouble()
};

@@ -70,7 +82,31 @@ };

(0, exports.SerializeVec2)(writer, vec);
writer.writeDouble(vec.z);
};
exports.SerializeVec3 = SerializeVec3;
/**
 * Reads a vec3 stored as three 32-bit floats (x, y, z in order).
 * Float counterpart of ParseVec3, which reads z as a 64-bit double.
 * @param reader binary reader positioned at the start of the vector data
 * @returns an object with numeric x, y and z components
 */
const ParseVec3f = (reader) => {
    const xy = (0, exports.ParseVec2f)(reader);
    const z = reader.readFloat32();
    return { ...xy, z };
};
exports.ParseVec3f = ParseVec3f;
/**
 * Writes a vec3 as three 32-bit floats (x, y, z in order).
 * Float counterpart of SerializeVec3, which writes z as a 64-bit double.
 * @param writer binary writer receiving the vector data
 * @param vec vec3 with numeric x, y and z components
 */
const SerializeVec3f = (writer, vec) => {
    const { z } = vec;
    (0, exports.SerializeVec2f)(writer, vec);
    writer.writeFloat(z);
};
exports.SerializeVec3 = SerializeVec3;
exports.SerializeVec3f = SerializeVec3f;
/**
 * Reads a vec2 stored as two 64-bit doubles (x, then y).
 * @param reader binary reader positioned at the start of the vector data
 * @returns an object with numeric x and y components
 */
const ParseVec2 = (reader) => ({
    x: reader.readDouble(),
    y: reader.readDouble(),
});
exports.ParseVec2 = ParseVec2;
/**
 * Writes a vec2 as two 64-bit doubles (x, then y).
 * @param writer binary writer receiving the vector data
 * @param vec vec2 with numeric x and y components
 */
const SerializeVec2 = (writer, vec) => {
    const { x, y } = vec;
    writer.writeDouble(x);
    writer.writeDouble(y);
};
exports.SerializeVec2 = SerializeVec2;
const ParseVec2f = (reader) => {
return {
x: reader.readFloat32(),

@@ -80,13 +116,13 @@ y: reader.readFloat32(),

};
exports.ParseVec2 = ParseVec2;
const SerializeVec2 = (writer, vec) => {
exports.ParseVec2f = ParseVec2f;
const SerializeVec2f = (writer, vec) => {
writer.writeFloat(vec.x);
writer.writeFloat(vec.y);
};
exports.SerializeVec2 = SerializeVec2;
exports.SerializeVec2f = SerializeVec2f;
const ParseTransform = (reader) => {
return {
rotation: (0, exports.ParseVec4)(reader),
translation: (0, exports.ParseVec3)(reader),
scale3d: (0, exports.ParseVec3)(reader),
rotation: (0, exports.ParseVec4f)(reader),
translation: (0, exports.ParseVec3f)(reader),
scale3d: (0, exports.ParseVec3f)(reader),
};

@@ -96,6 +132,6 @@ };

const SerializeTransform = (writer, transform) => {
(0, exports.SerializeVec4)(writer, transform.rotation);
(0, exports.SerializeVec3)(writer, transform.translation);
(0, exports.SerializeVec3)(writer, transform.scale3d);
(0, exports.SerializeVec4f)(writer, transform.rotation);
(0, exports.SerializeVec3f)(writer, transform.translation);
(0, exports.SerializeVec3f)(writer, transform.scale3d);
};
exports.SerializeTransform = SerializeTransform;
{
"name": "@etothepii/satisfactory-file-parser",
"author": "etothepii",
"version": "0.0.34",
"version": "0.1.1",
"description": "A file parser for satisfactory files. Includes save files and blueprint files.",

@@ -17,3 +17,3 @@ "types": "./build/index.d.ts",

"scripts": {
"test": "jest --config=jest.config.json",
"test": "jest --config=jest.config.json --max_old_space_size=30720",
"build": "tsc && npm run bundle",

@@ -20,0 +20,0 @@ "bundle": "npx webpack && dts-bundle-generator -o dist/index.d.ts src/index.ts",

@@ -12,11 +12,15 @@ # Satisfactory File Parser

## Supported Versions
Game Version Files of U5 and below are NOT supported. However, U6 and above are perfectly fine and parsable.
- ✅ U7
- ✅ U6
- ❌ <= U5
The version support of the packages is indicated below.
### U8 is on the Horizon!
We cannot promise that U8 saves will work out of the box!
If changes are needed, they will follow soon after U8 is out on Experimental.
Game Version Files of U5 and below are NOT supported.
U8 has only read support so far and only for save files, not for blueprint files. More coming soon.
| Game Version | Package |
|:--------------:|:-----------------------------|
| <= U5 | ❌ |
| U6 + U7 | ✅ 0.0.1 - 0.0.34 |
| U8 | ⚠️ >= 0.1.0 ( .sav file Reading only so far) |
## Installation via npm

@@ -23,0 +27,0 @@ `npm install @etothepii/satisfactory-file-parser`

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc