Huge News! Announcing our $40M Series B, led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@etothepii/satisfactory-file-parser

Package Overview
Dependencies
Maintainers
1
Versions
89
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@etothepii/satisfactory-file-parser - npm Package Compare versions

Comparing version 0.1.24 to 0.1.25

build/parser/stream/reworked/readable-stream-parser.d.ts

1

build/index.d.ts

@@ -27,1 +27,2 @@ export * from './parser/satisfactory/blueprint/blueprint.types';

export { Parser } from './parser/parser';
export { ReadableStreamParser } from './parser/stream/reworked/readable-stream-parser';

4

build/index.js

@@ -17,3 +17,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.Parser = exports.SaveStreamWriter = exports.SaveStreamJsonStringifier = exports.SaveWriter = exports.SaveReader = exports.BlueprintWriter = exports.BlueprintConfigWriter = exports.BlueprintReader = exports.BlueprintConfigReader = exports.ByteWriter = exports.ByteReader = exports.SatisfactorySave = exports.Level = exports.SaveEntity = exports.SaveComponent = exports.ObjectReference = exports.DataFields = void 0;
exports.ReadableStreamParser = exports.Parser = exports.SaveStreamWriter = exports.SaveStreamJsonStringifier = exports.SaveWriter = exports.SaveReader = exports.BlueprintWriter = exports.BlueprintConfigWriter = exports.BlueprintReader = exports.BlueprintConfigReader = exports.ByteWriter = exports.ByteReader = exports.SatisfactorySave = exports.Level = exports.SaveEntity = exports.SaveComponent = exports.ObjectReference = exports.DataFields = void 0;
__exportStar(require("./parser/satisfactory/blueprint/blueprint.types"), exports);

@@ -60,1 +60,3 @@ var DataFields_1 = require("./parser/satisfactory/objects/DataFields");

Object.defineProperty(exports, "Parser", { enumerable: true, get: function () { return parser_1.Parser; } });
var readable_stream_parser_1 = require("./parser/stream/reworked/readable-stream-parser");
Object.defineProperty(exports, "ReadableStreamParser", { enumerable: true, get: function () { return readable_stream_parser_1.ReadableStreamParser; } });

@@ -7,3 +7,3 @@ /// <reference types="node" />

static WriteSave(save: SatisfactorySave, onBinaryBeforeCompressing: (buffer: ArrayBuffer) => void, onHeader: (header: Uint8Array) => void, onChunk: (chunk: Uint8Array) => void): ChunkSummary[];
static ParseSaveFile(file: Buffer, onDecompressedSaveBody?: (buffer: ArrayBuffer) => void, onProgress?: (progress: number, message?: string) => void): SatisfactorySave;
static ParseSaveFile(name: string, file: Buffer, onDecompressedSaveBody?: (buffer: ArrayBuffer) => void, onProgress?: (progress: number, message?: string) => void): SatisfactorySave;
static WriteBlueprintFiles(blueprint: Blueprint, onMainFileBinaryBeforeCompressing?: (binary: ArrayBuffer) => void, onMainFileHeader?: (header: Uint8Array) => void, onMainFileChunk?: (chunk: Uint8Array) => void): {

@@ -10,0 +10,0 @@ mainFileChunkSummary: ChunkSummary[];

@@ -21,6 +21,6 @@ "use strict";

}
static ParseSaveFile(file, onDecompressedSaveBody = () => { }, onProgress = () => { }) {
static ParseSaveFile(name, file, onDecompressedSaveBody = () => { }, onProgress = () => { }) {
const reader = new save_reader_1.SaveReader(new Uint8Array(file).buffer, onProgress);
const header = reader.readHeader();
const save = new satisfactory_save_1.SatisfactorySave(header);
const save = new satisfactory_save_1.SatisfactorySave(name, header);
const inflateResult = reader.inflateChunks();

@@ -27,0 +27,0 @@ onDecompressedSaveBody(reader.getBuffer());

@@ -92,4 +92,4 @@ "use strict";

for (let i = 0; i < countEntities; i++) {
if (i % 1000 === 0) {
onProgressCallback(reader.getBufferProgress());
if (countEntities >= 10000 && i % 10000 === 0) {
onProgressCallback(reader.getBufferProgress(), `read object count [${(i + 1)}/${(countEntities + 1)}] in level ${levelName}`);
}

@@ -96,0 +96,0 @@ const obj = objectsList[i];

@@ -7,2 +7,3 @@ import { ChunkCompressionInfo } from "../../file.types";

export declare class SatisfactorySave {
name: string;
header: SatisfactorySaveHeader;

@@ -14,3 +15,3 @@ gridHash: ByteArray4;

compressionInfo?: ChunkCompressionInfo;
constructor(header: SatisfactorySaveHeader);
constructor(name: string, header: SatisfactorySaveHeader);
}

@@ -5,3 +5,3 @@ "use strict";

class SatisfactorySave {
constructor(header) {
constructor(name, header) {
this.gridHash = [0, 0, 0, 0];

@@ -11,2 +11,3 @@ this.grids = {};

this.trailingCollectedObjects = [];
this.name = name;
this.header = header;

@@ -13,0 +14,0 @@ }

@@ -182,2 +182,5 @@ "use strict";

let levelSingleName = this.readString();
if (i % 500 === 0) {
this.onProgressCallback(this.getBufferProgress(), `reading level [${(i + 1)}/${(levelCount + 1)}] ${levelSingleName}`);
}
levels.push(level_class_1.Level.ReadLevel(this, levelSingleName, this.header.buildVersion));

@@ -184,0 +187,0 @@ }

/// <reference types="node" />
import { WritableStream } from 'stream/web';
/**
 * Static entry points for streaming a Satisfactory save file into a JSON
 * output stream, parsing levels and objects in batches.
 *
 * NOTE(review): both `ParseSaveFileAsynchronousToOutput` (pre-0.1.25) and
 * `ParseSaveFileToJsonStream` (0.1.25, adds a `name` parameter) appear here;
 * presumably only the latter exists in 0.1.25 — confirm against the built d.ts.
 */
export declare class StreamParser {
static ParseSaveFileAsynchronousToOutput(bytes: Uint8Array, outputJson: WritableStream<string>, onDecompressedSaveBody?: (buffer: ArrayBuffer) => void, onProgress?: (progress: number, message?: string) => void): Promise<void>;
static ParseSaveFileToJsonStream(name: string, bytes: Uint8Array, outputJson: WritableStream<string>, onDecompressedSaveBody?: (buffer: ArrayBuffer) => void, onProgress?: (progress: number, message?: string) => void): Promise<void>;
private static readLevelsAsync;
private static ReadNObjectHeaders;
private static ReadNObjects;
}

@@ -5,8 +5,10 @@ "use strict";

const __1 = require("../../..");
const SaveComponent_1 = require("../../satisfactory/objects/SaveComponent");
const SaveEntity_1 = require("../../satisfactory/objects/SaveEntity");
class StreamParser {
static async ParseSaveFileAsynchronousToOutput(bytes, outputJson, onDecompressedSaveBody = () => { }, onProgress = () => { }) {
static async ParseSaveFileToJsonStream(name, bytes, outputJson, onDecompressedSaveBody = () => { }, onProgress = () => { }) {
const reader = new __1.SaveReader(bytes.buffer, onProgress);
const writer = outputJson.getWriter();
const header = reader.readHeader();
const save = new __1.SatisfactorySave(header);
const save = new __1.SatisfactorySave(name, header);
const inflateResult = reader.inflateChunks();

@@ -16,5 +18,6 @@ onDecompressedSaveBody(reader.getBuffer());

const grids = reader.readGrids();
await writer.write(`{"header:" ${JSON.stringify(header)}, "compressionInfo": {}, "gridHash": ${JSON.stringify(gridHash)}, "grids": ${JSON.stringify(grids)}, levels: [`);
await writer.write(`{"header": ${JSON.stringify(header)}, "compressionInfo": ${JSON.stringify(reader.compressionInfo)}, "gridHash": ${JSON.stringify(gridHash)}, "grids": ${JSON.stringify(grids)}, "levels": [`);
await StreamParser.readLevelsAsync(reader, writer, save.header.mapName, save.header.buildVersion);
await writer.write(`]}`);
await writer.ready;
await writer.close();

@@ -24,11 +27,86 @@ }

const levelCount = reader.readInt32();
reader.onProgressCallback(reader.getBufferProgress(), `reading pack of ${levelCount} levels.`);
reader.onProgressCallback(reader.getBufferProgress(), `reading pack of ${levelCount + 1} levels.`);
const batchingSizeOfObjects = 1000;
for (let j = 0; j <= levelCount; j++) {
let levelName = (j === levelCount) ? '' + mapName : reader.readString();
if (j % 300 === 0) {
if (j % 500 === 0) {
reader.onProgressCallback(reader.getBufferProgress(), `reading level [${(j + 1)}/${(levelCount + 1)}] ${levelName}`);
}
await writer.write(`${j > 0 ? ', ' : ''}{"name": "${levelName}", "objects": [`);
const headersBinLen = reader.readInt32();
const unk = reader.readInt32();
const posBeforeHeaders = reader.getBufferPosition();
const afterAllHeaders = posBeforeHeaders + headersBinLen;
let countObjectHeaders = reader.readInt32();
let totalReadObjects = 0;
let afterHeadersOfBatch = reader.getBufferPosition();
let afterObjectsOfBatch = -1;
do {
reader.skipBytes(afterHeadersOfBatch - reader.getBufferPosition());
const objectCountToRead = Math.min(countObjectHeaders - totalReadObjects, batchingSizeOfObjects);
const objects = StreamParser.ReadNObjectHeaders(reader, objectCountToRead);
afterHeadersOfBatch = reader.getBufferPosition();
if (totalReadObjects === 0) {
reader.skipBytes(afterAllHeaders - reader.getBufferPosition());
const objectContentsBinLen = reader.readInt32();
const unk2 = reader.readInt32();
const posBeforeContents = reader.getBufferPosition();
const countEntities = reader.readInt32();
afterObjectsOfBatch = reader.getBufferPosition();
}
else {
reader.skipBytes(afterObjectsOfBatch - reader.getBufferPosition());
}
StreamParser.ReadNObjects(reader, objectCountToRead, objects, buildVersion);
afterObjectsOfBatch = reader.getBufferPosition();
totalReadObjects += objectCountToRead;
if (countObjectHeaders > 10000 && totalReadObjects % 10000 === 0) {
reader.onProgressCallback(reader.getBufferProgress(), `read object count [${(totalReadObjects + 1)}/${(countObjectHeaders + 1)}] in level ${levelName}`);
}
await writer.write(`${objects.map(obj => JSON.stringify(obj)).join(', ')}`);
} while (totalReadObjects < countObjectHeaders);
await writer.write('], "collectables": [');
const collectables = __1.Level.ReadCollectablesList(reader);
await writer.write(`${collectables.map(obj => JSON.stringify(obj)).join(', ')}`);
await writer.write(']');
await writer.write('}');
}
}
}
// Reads `count` object headers from the reader and returns them as a list.
// Each header starts with an int32 type discriminator selecting between
// SaveEntity and SaveComponent; an unrecognized discriminator throws.
StreamParser.ReadNObjectHeaders = (reader, count) => {
    const objects = [];
    for (let objectsRead = 0; objectsRead < count; objectsRead++) {
        let obj;
        const objectType = reader.readInt32();
        switch (objectType) {
            case __1.SaveEntity.TypeID:
                // Placeholder constructor args; ParseHeader fills the real values.
                obj = new __1.SaveEntity('', '', '', '');
                __1.SaveEntity.ParseHeader(reader, obj);
                break;
            case __1.SaveComponent.TypeID:
                obj = new __1.SaveComponent('', '', '', '');
                __1.SaveComponent.ParseHeader(reader, obj);
                break;
            default:
                // Fixed: message previously lacked a space before the type id
                // ("Unknown object type8").
                throw new Error('Unknown object type ' + objectType);
        }
        objects.push(obj);
    }
    return objects;
};
// Reads the body/content section for the first `count` entries of `objects`,
// dispatching to SaveEntity.ParseData or SaveComponent.ParseData per entry.
StreamParser.ReadNObjects = (reader, count, objects, buildVersion) => {
    for (let idx = 0; idx < count; idx++) {
        const current = objects[idx];
        current.saveOrBlueprintIndicator = reader.readInt32();
        current.unknownType2 = reader.readInt32();
        const binarySize = reader.readInt32();
        // Position snapshot kept from the original implementation; it is not
        // read afterwards — presumably a leftover for size validation. TODO confirm.
        const posBeforeData = reader.getBufferPosition();
        if ((0, SaveEntity_1.isSaveEntity)(current)) {
            __1.SaveEntity.ParseData(current, binarySize, reader, buildVersion, current.typePath);
        }
        else if ((0, SaveComponent_1.isSaveComponent)(current)) {
            __1.SaveComponent.ParseData(current, binarySize, reader, buildVersion, current.typePath);
        }
    }
};
exports.StreamParser = StreamParser;

@@ -9,18 +9,20 @@ /// <reference types="node" />

private mode;
private tracker;
private formatTracker;
constructor(writer: WritableStreamDefaultWriter<string>);
beginSave(): Promise<void>;
writeHeader(header: SatisfactorySaveHeader): Promise<void>;
writeCompressionInfo(compressionInfo: ChunkCompressionInfo): Promise<void>;
writeGridHash(gridHash: ByteArray4): Promise<void>;
writeGrids(grids: Grids): Promise<void>;
openLevels(): Promise<void>;
openLevel(levelName: string): Promise<void>;
writeObjects(...objects: SaveObject[]): Promise<void>;
switchInLevelToCollectables(): Promise<void>;
writeCollectables(...collectables: ObjectReference[]): Promise<void>;
endLevel(): Promise<void>;
endLevels(): Promise<void>;
endSave(): Promise<void>;
private createExecutionFunction;
beginSave: () => Promise<void>;
writeHeader: (header: SatisfactorySaveHeader) => Promise<void>;
writeCompressionInfo: (compressionInfo: ChunkCompressionInfo) => Promise<void>;
writeGridHash: (gridHash: ByteArray4) => Promise<void>;
writeGrids: (grids: Grids) => Promise<void>;
openLevels: () => Promise<void>;
openLevel: (levelName: string) => Promise<void>;
writeObjects: (...objects: SaveObject[]) => Promise<void>;
switchInLevelToCollectables: (...objects: SaveObject[]) => Promise<void>;
writeCollectables: (...collectables: ObjectReference[]) => Promise<void>;
endLevel: () => Promise<void>;
endLevels: () => Promise<void>;
endSave: () => Promise<void>;
close(): Promise<void>;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveStreamWriter = void 0;
/**
 * Tracks the writer's current state-machine mode and validates that each
 * command is issued from an allowed predecessor state.
 */
class ModeStateTracker {
    mode;
    constructor(mode) {
        this.mode = mode;
    }
    /** Throws unless the current mode is one of the given predecessors. */
    checkIsComingFrom(...allowedPredecessors) {
        const permitted = allowedPredecessors.includes(this.mode);
        if (!permitted) {
            throw new Error(`Wrong order of commands. mode is ${this.mode}. but only ${allowedPredecessors.join(', ')} is/are allowed.`);
        }
    }
    /** Unconditionally moves the tracker into the given mode. */
    advance(newMode) {
        this.mode = newMode;
    }
}
class SaveStreamWriter {
constructor(writer) {
this.writer = writer;
this.createExecutionFunction = async (allowedInputModes, fn, targetMode) => {
this.tracker.checkIsComingFrom(...allowedInputModes);
await fn();
this.tracker.advance(targetMode);
};
this.beginSave = () => this.createExecutionFunction(['BEFORE_START'], async () => {
await this.writer.write('{');
}, 'OPENED_SAVE');
this.writeHeader = (header) => this.createExecutionFunction(['OPENED_SAVE'], async () => {
await this.writer.write(`"header": ${JSON.stringify(header)}`);
}, 'FINISHED_HEADER');
this.writeCompressionInfo = (compressionInfo) => this.createExecutionFunction(['FINISHED_HEADER'], async () => {
await this.writer.write(`, "compressionInfo": ${JSON.stringify(compressionInfo)}`);
}, 'WROTE_COMPRESSION_INFO');
this.writeGridHash = (gridHash) => this.createExecutionFunction(['WROTE_COMPRESSION_INFO'], async () => {
await this.writer.write(`, "gridHash": ${JSON.stringify(gridHash)}`);
}, 'WROTE_GRID_HASH');
this.writeGrids = (grids) => this.createExecutionFunction(['WROTE_GRID_HASH'], async () => {
await this.writer.write(`, "grids": ${JSON.stringify(grids)}`);
}, 'WROTE_GRIDS');
this.openLevels = () => this.createExecutionFunction(['WROTE_GRIDS'], async () => {
await this.writer.write(`, "levels": [`);
}, 'OPENED_LEVELS');
this.openLevel = (levelName) => this.createExecutionFunction(['OPENED_LEVELS', 'FINISHED_LEVEL'], async () => {
this.formatTracker.levels.push({
objectCount: 0, collectablesCount: 0
});
const prefix = this.formatTracker.levels.length > 1 ? ', ' : '';
await this.writer.write(`${prefix}{"name": "${levelName}", "objects": [`);
}, 'OPENED_LEVEL');
this.writeObjects = (...objects) => this.createExecutionFunction(['OPENED_LEVEL', 'WROTE_OBJECT'], async () => {
const stringified = objects.map(saveObj => JSON.stringify(saveObj));
for (const obj of stringified) {
await this.writer.write(`${this.formatTracker.levels.at(-1).objectCount >= 1 ? ', ' : ''}${obj}`);
this.formatTracker.levels.at(-1).objectCount++;
}
}, 'WROTE_OBJECT');
this.switchInLevelToCollectables = (...objects) => this.createExecutionFunction(['OPENED_LEVEL', 'WROTE_OBJECT'], async () => {
await this.writer.write(`], "collectables": [`);
}, 'SWITCH_TO_COLLECTABLES');
this.writeCollectables = (...collectables) => this.createExecutionFunction(['SWITCH_TO_COLLECTABLES', 'WROTE_COLLECTABLE'], async () => {
const stringified = collectables.map(coll => JSON.stringify(coll));
for (const obj of stringified) {
await this.writer.write(`${this.formatTracker.levels.at(-1).collectablesCount >= 1 ? ', ' : ''}${obj}`);
this.formatTracker.levels.at(-1).collectablesCount++;
}
}, 'WROTE_COLLECTABLE');
this.endLevel = () => this.createExecutionFunction(['SWITCH_TO_COLLECTABLES', 'WROTE_COLLECTABLE'], async () => {
await this.writer.write(`]}`);
}, 'FINISHED_LEVEL');
this.endLevels = () => this.createExecutionFunction(['OPENED_LEVELS', 'FINISHED_LEVEL'], async () => {
await this.writer.write(`]`);
}, 'FINISHED_LEVELS');
this.endSave = () => this.createExecutionFunction(['FINISHED_LEVELS'], async () => {
await this.writer.write('}');
}, 'FINISHED_SAVE');
this.mode = 'BEFORE_START';
this.tracker = new ModeStateTracker(this.mode);
this.formatTracker = {

@@ -12,105 +82,2 @@ levels: []

}
// Emits the opening '{' of the save JSON document; only valid as the first command.
async beginSave() {
if (this.mode !== 'BEFORE_START') {
throw new Error(`Wrong order of commands. Already opened save. mode is ${this.mode}.`);
}
await this.writer.write('{');
this.mode = 'OPENED_SAVE';
}
// Writes the "header" property of the save JSON; must directly follow beginSave.
async writeHeader(header) {
if (this.mode !== 'OPENED_SAVE') {
throw new Error(`Wrong order of commands. Header has to come after save open. mode is ${this.mode}.`);
}
await this.writer.write(`"header": ${JSON.stringify(header)}`);
this.mode = 'FINISHED_HEADER';
}
// Writes the "compressionInfo" property; must directly follow writeHeader.
async writeCompressionInfo(compressionInfo) {
if (this.mode !== 'FINISHED_HEADER') {
throw new Error(`Wrong order of commands. Compression info has to come after header. mode is ${this.mode}.`);
}
await this.writer.write(`, "compressionInfo": ${JSON.stringify(compressionInfo)}`);
this.mode = 'WROTE_COMPRESSION_INFO';
}
async writeGridHash(gridHash) {
if (this.mode !== 'WROTE_COMPRESSION_INFO') {
throw new Error(`Wrong order of commands. Save hbody hash has to come after compression info. mode is ${this.mode}.`);
}
await this.writer.write(`, "gridHash": ${JSON.stringify(gridHash)}`);
this.mode = 'WROTE_GRID_HASH';
}
// Writes the "grids" property; must directly follow writeGridHash.
async writeGrids(grids) {
if (this.mode !== 'WROTE_GRID_HASH') {
throw new Error(`Wrong order of commands. Grids have to come after save body hash info. mode is ${this.mode}.`);
}
await this.writer.write(`, "grids": ${JSON.stringify(grids)}`);
this.mode = 'WROTE_GRIDS';
}
// Opens the "levels" JSON array; must directly follow writeGrids.
async openLevels() {
if (this.mode !== 'WROTE_GRIDS') {
throw new Error(`Wrong order of commands. Levels have to come after grids info. mode is ${this.mode}.`);
}
await this.writer.write(`, "levels": [`);
this.mode = 'OPENED_LEVELS';
}
// Opens a single level object inside the "levels" array and starts its
// "objects" array. Valid right after openLevels or after a finished level.
async openLevel(levelName) {
if (this.mode !== 'OPENED_LEVELS' && this.mode !== 'FINISHED_LEVEL') {
throw new Error(`Wrong order of commands. Single level can only come after opening levels array or after finishing single level. mode is ${this.mode}.`);
}
// Per-level counters drive comma placement for objects/collectables.
this.formatTracker.levels.push({
objectCount: 0, collectablesCount: 0
});
// Levels after the first need a separating comma.
const prefix = this.formatTracker.levels.length > 1 ? ', ' : '';
await this.writer.write(`${prefix}{"name": "${levelName}", "objects": [`);
this.mode = 'OPENED_LEVEL';
}
// Appends serialized objects to the current level's "objects" array,
// comma-separating all but the first via the level's objectCount.
async writeObjects(...objects) {
if (this.mode !== 'OPENED_LEVEL' && this.mode !== 'WROTE_OBJECT') {
throw new Error(`Wrong order of commands. An object can not be written into the level if it was not opened or other objects were not written. mode is ${this.mode}.`);
}
const stringified = objects.map(saveObj => JSON.stringify(saveObj));
for (const obj of stringified) {
await this.writer.write(`${this.formatTracker.levels.at(-1).objectCount >= 1 ? ', ' : ''}${obj}`);
this.formatTracker.levels.at(-1).objectCount++;
}
this.mode = 'WROTE_OBJECT';
}
// Closes the current level's "objects" array and opens its "collectables" array.
async switchInLevelToCollectables() {
if (this.mode !== 'OPENED_LEVEL' && this.mode !== 'WROTE_OBJECT') {
throw new Error(`Wrong order of commands. The level structure can not be switched to collectables if the level was not opened recently or has not written objects. mode is ${this.mode}.`);
}
await this.writer.write(`], "collectables": [`);
this.mode = 'SWITCH_TO_COLLECTABLES';
}
// Appends serialized collectables to the current level's "collectables" array,
// comma-separating all but the first via the level's collectablesCount.
async writeCollectables(...collectables) {
if (this.mode !== 'SWITCH_TO_COLLECTABLES' && this.mode !== 'WROTE_COLLECTABLE') {
throw new Error(`Wrong order of commands. A collectable can not be written into the level if we did not switch to collectables or other collectables were not written. mode is ${this.mode}.`);
}
const stringified = collectables.map(coll => JSON.stringify(coll));
for (const obj of stringified) {
await this.writer.write(`${this.formatTracker.levels.at(-1).collectablesCount >= 1 ? ', ' : ''}${obj}`);
this.formatTracker.levels.at(-1).collectablesCount++;
}
this.mode = 'WROTE_COLLECTABLE';
}
// Closes the current level's "collectables" array and the level object itself.
async endLevel() {
if (this.mode !== 'SWITCH_TO_COLLECTABLES' && this.mode !== 'WROTE_COLLECTABLE') {
throw new Error(`Wrong order of commands. Single level can not be closed if a switch to collectibles or writing of collectible did not happen. mode is ${this.mode}.`);
}
await this.writer.write(`]}`);
this.mode = 'FINISHED_LEVEL';
}
// Closes the "levels" array; valid right after openLevels (empty array) or a finished level.
async endLevels() {
if (this.mode !== 'OPENED_LEVELS' && this.mode !== 'FINISHED_LEVEL') {
throw new Error(`Wrong order of commands. Levels can only be closed after opening levels array or after finishing single level. mode is ${this.mode}.`);
}
await this.writer.write(`]`);
this.mode = 'FINISHED_LEVELS';
}
// Emits the closing '}' of the save JSON document; must follow endLevels.
async endSave() {
if (this.mode !== 'FINISHED_LEVELS') {
throw new Error(`Wrong order of commands. Save has to end after levels. mode is ${this.mode}.`);
}
await this.writer.write('}');
this.mode = 'FINISHED_SAVE';
}
async close() {

@@ -117,0 +84,0 @@ return this.writer.close();

{
"name": "@etothepii/satisfactory-file-parser",
"author": "etothepii",
"version": "0.1.24",
"version": "0.1.25",
"description": "A file parser for satisfactory files. Includes save files and blueprint files.",

@@ -6,0 +6,0 @@ "types": "./build/index.d.ts",

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc