Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

@etothepii/satisfactory-file-parser

Package Overview
Dependencies
Maintainers
0
Versions
89
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@etothepii/satisfactory-file-parser - npm Package Compare versions

Comparing version 0.4.13 to 0.4.14

build/parser/byte/binary-writable.interface.d.ts

3

build/parser/byte/binary-readable.interface.d.ts
import { BinaryOperable } from "./binary-operable.interface";
export interface BinaryReadable extends BinaryOperable {
debug: boolean;
skipBytes: (count?: number) => void;

@@ -20,4 +19,2 @@ readBytes: (count: number) => Uint8Array;

getBufferProgress: () => number;
allocate: (count: number) => PromiseLike<void>;
getAmountAllocatedLeft: () => number;
}

@@ -7,3 +7,2 @@ import { Alignment } from "./alignment.enum";

alignment: Alignment;
debug: boolean;
protected currentByte: number;

@@ -31,4 +30,2 @@ protected handledByte: number;

readString(): string;
allocate(count: number): Promise<void>;
getAmountAllocatedLeft: () => number;
getBufferPosition: () => number;

@@ -35,0 +32,0 @@ getBufferSlice: (begin: number, end: number | undefined) => ArrayBuffer;

@@ -7,7 +7,5 @@ "use strict";

constructor(fileBuffer, alignment) {
this.debug = false;
this.currentByte = 0;
this.handledByte = 0;
this.maxByte = 0;
this.getAmountAllocatedLeft = () => this.bufferView.byteLength - this.currentByte;
this.getBufferPosition = () => this.currentByte;

@@ -121,9 +119,3 @@ this.getBufferSlice = (begin, end) => this.bufferView.buffer.slice(begin, end);

}
allocate(count) {
if (this.currentByte + count < this.bufferView.byteLength) {
console.warn('tried to allocate but there is not enough data left.');
}
return Promise.resolve();
}
}
exports.ByteReader = ByteReader;
import { Alignment } from "./alignment.enum";
export declare abstract class ByteWriter {
protected alignment: Alignment;
import { BinaryWritable } from './binary-writable.interface';
export declare abstract class ByteWriter implements BinaryWritable {
alignment: Alignment;
protected bufferArray: ArrayBuffer;

@@ -21,3 +22,3 @@ protected bufferView: DataView;

writeUint64(value: bigint): void;
writeFloat(value: number): void;
writeFloat32(value: number): void;
writeDouble(value: number): void;

@@ -28,2 +29,4 @@ writeString(value: string): void;

getBufferSlice: (start: number, end?: number) => ArrayBuffer;
getBufferLength: () => number;
getBufferProgress: () => number;
writeBinarySizeFromPosition(lenIndicatorPos: number, start: number): void;

@@ -30,0 +33,0 @@ protected extendBufferIfNeeded(countNeededBytes: number, factor?: number): void;

@@ -9,2 +9,4 @@ "use strict";

this.getBufferSlice = (start, end) => this.bufferArray.slice(start, end);
this.getBufferLength = () => this.bufferArray.byteLength;
this.getBufferProgress = () => this.currentByte / this.bufferArray.byteLength;
this.alignment = alignment;

@@ -73,3 +75,3 @@ this.bufferArray = new ArrayBuffer(bufferSize);

}
writeFloat(value) {
writeFloat32(value) {
this.extendBufferIfNeeded(4);

@@ -76,0 +78,0 @@ this.bufferView.setFloat32(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);

@@ -32,3 +32,3 @@ "use strict";

for (let i = 0; i < itemTypeCount; i++) {
let indexOrWhat = reader.readInt32();
reader.readInt32();
let itemPathName = reader.readString();

@@ -41,3 +41,3 @@ let itemCount = reader.readInt32();

for (let i = 0; i < recipeCount; i++) {
let indexOrWhat = reader.readInt32();
reader.readInt32();
const recipeName = reader.readString();

@@ -110,3 +110,3 @@ recipeRefs[i] = recipeName;

BlueprintReader.ReadBlueprintObjectContents(reader, objects, 0);
const pos = reader.getBufferPosition();
reader.getBufferPosition();
return objects;

@@ -139,10 +139,10 @@ }

static ParseConfig(reader) {
const unk = reader.readInt32();
const alwaysTwo = reader.readInt32();
const description = reader.readString();
const unk3 = reader.readInt32();
const colorMaybe = util_types_1.col4.ParseRGBA(reader);
const iconID = reader.readInt32();
const color = util_types_1.col4.ParseRGBA(reader);
return {
description,
color: colorMaybe,
iconID: unk3
color,
iconID,
};

@@ -149,0 +149,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DataFields = void 0;
const parser_error_1 = require("../../error/parser.error");
const util_types_1 = require("../structs/util.types");

@@ -32,2 +33,3 @@ const ArrayProperty_1 = require("./property/generic/ArrayProperty");

if (length === 0) {
console.warn(`properties length for object ${obj.instanceName} was indicated as 0. Which is suspicious. Skipping object properties.`);
return;

@@ -49,14 +51,12 @@ }

}
let padding = reader.readInt32();
if (padding !== 0) {
}
reader.readInt32();
const remainingLen = length - (reader.getBufferPosition() - start);
obj.specialProperties = DataFields.ParseAdditionalSpecialProperties(reader, typePath, remainingLen);
const end = reader.getBufferPosition();
const newWayOfCalculatingReminingSize = length - (end - start);
if (newWayOfCalculatingReminingSize > 0) {
obj.trailingData = Array.from(reader.readBytes(newWayOfCalculatingReminingSize));
const remainingSize = length - (end - start);
if (remainingSize > 0) {
obj.trailingData = Array.from(reader.readBytes(remainingSize));
}
else if (newWayOfCalculatingReminingSize < 0) {
throw new Error(`Unexpected. Read more bytes than are indicated for this etity. bytes left to read is ${newWayOfCalculatingReminingSize}`);
else if (remainingSize < 0) {
throw new parser_error_1.ParserError('ParserError', `Unexpected. Read more bytes than are indicated for entity ${obj.instanceName}. bytes left to read is ${remainingSize}`);
}

@@ -295,3 +295,3 @@ }

console.warn(`possibly corrupt. Read ${readBytes} for ${propertyType} ${propertyName}, but ${binarySize} were indicated.`);
throw new Error(`possibly corrupt. Read ${readBytes} for ${propertyType} ${propertyName}, but ${binarySize} were indicated.`);
throw new parser_error_1.ParserError('ParserError', `possibly corrupt. Read ${readBytes} bytes for ${propertyType} ${propertyName}, but ${binarySize} bytes were indicated.`);
}

@@ -298,0 +298,0 @@ return currentProperty;

@@ -27,5 +27,5 @@ "use strict";

static SerializeValue(writer, value) {
writer.writeFloat(value);
writer.writeFloat32(value);
}
}
exports.FloatProperty = FloatProperty;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SerializeFINLuaProcessorStateStorage = exports.ReadFINLuaProcessorStateStorage = exports.SerializeFINNetworkTrace = exports.ReadFINNetworkTrace = exports.SerializeDynamicStructData = exports.ParseDynamicStructData = exports.StructProperty = void 0;
const parser_error_1 = require("../../../../error/parser.error");
const util_types_1 = require("../../../structs/util.types");

@@ -99,2 +100,3 @@ const DataFields_1 = require("../../DataFields");

case 'InventoryItem':
const before = reader.getBufferPosition();
value = {

@@ -117,2 +119,8 @@ unk1: reader.readInt32(),

}
const bytesLeft = size - (reader.getBufferPosition() - before);
if (bytesLeft === 0 || (bytesLeft === 4 && reader.readInt32() === 0)) {
}
else {
throw new parser_error_1.CorruptSaveError(`save may be corrupt. InventoryItem has weird format that was not seen so far and therefore not implemented. Could be that the save is ported from way before U8.`);
}
break;

@@ -193,4 +201,4 @@ case 'FluidBox':

writer.writeString(value.instanceName);
writer.writeFloat(value.offset);
writer.writeFloat(value.forward);
writer.writeFloat32(value.offset);
writer.writeFloat32(value.forward);
break;

@@ -225,6 +233,5 @@ case 'TimerHandle':

break;
break;
case 'FluidBox':
value = value;
writer.writeFloat(value.value);
writer.writeFloat32(value.value);
break;

@@ -231,0 +238,0 @@ case 'SlateBrush':

@@ -17,3 +17,3 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";

trailingData: number[];
saveOrBlueprintIndicator: number;
objectVersion: number;
unknownType2: number;

@@ -20,0 +20,0 @@ constructor(typePath: string, rootObject: string, instanceName: string);

@@ -13,3 +13,3 @@ "use strict";

this.trailingData = [];
this.saveOrBlueprintIndicator = 0;
this.objectVersion = 0;
this.unknownType2 = 0;

@@ -16,0 +16,0 @@ }

@@ -12,3 +12,2 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";

collectables: ObjectReference[];
remainingStuffAfterHeaders: Uint8Array;
constructor(name: string);

@@ -22,3 +21,3 @@ static SerializeObjectHeaders(writer: ByteWriter, objects: (SaveEntity | SaveComponent)[]): void;

static SerializeCollectablesList(writer: ByteWriter, collectables: ObjectReference[]): void;
static ReadCollectablesList(reader: BinaryReadable, printSmthWhenItsCollectables?: string | undefined): ObjectReference[];
static ReadCollectablesList(reader: BinaryReadable): ObjectReference[];
}

@@ -12,3 +12,2 @@ "use strict";

this.collectables = [];
this.remainingStuffAfterHeaders = new Uint8Array();
}

@@ -34,8 +33,7 @@ static SerializeObjectHeaders(writer, objects) {

static WriteLevel(writer, level, buildVersion) {
const lenIndicatorHeaderAndCollectableSize = writer.getBufferPosition();
const lenIndicatorHeaderAndDestroyedEntitiesSize = writer.getBufferPosition();
writer.writeInt32(0);
writer.writeInt32(0);
Level.SerializeObjectHeaders(writer, level.objects);
Level.SerializeCollectablesList(writer, level.collectables);
writer.writeBinarySizeFromPosition(lenIndicatorHeaderAndCollectableSize, lenIndicatorHeaderAndCollectableSize + 8);
writer.writeBinarySizeFromPosition(lenIndicatorHeaderAndDestroyedEntitiesSize, lenIndicatorHeaderAndDestroyedEntitiesSize + 8);
Level.SerializeObjectContents(writer, level.objects, buildVersion, level.name);

@@ -50,3 +48,3 @@ Level.SerializeCollectablesList(writer, level.collectables);

for (const obj of objects) {
writer.writeInt32(obj.saveOrBlueprintIndicator);
writer.writeInt32(obj.objectVersion);
writer.writeInt32(obj.unknownType2);

@@ -99,3 +97,3 @@ const lenReplacementPosition = writer.getBufferPosition();

const obj = objectsList[i];
obj.saveOrBlueprintIndicator = reader.readInt32();
obj.objectVersion = reader.readInt32();
obj.unknownType2 = reader.readInt32();

@@ -124,3 +122,3 @@ const binarySize = reader.readInt32();

if (remainingSize > 0) {
const doubledCollectablesIgnored = Level.ReadCollectablesList(reader, 'collectables at the header section! ' + levelName);
const doubledCollectablesIgnored = Level.ReadCollectablesList(reader);
}

@@ -143,3 +141,3 @@ else {

reader.onProgressCallback(reader.getBufferProgress());
level.collectables = Level.ReadCollectablesList(reader, 'collectables 2! ' + levelName);
level.collectables = Level.ReadCollectablesList(reader);
return level;

@@ -153,10 +151,7 @@ }

}
static ReadCollectablesList(reader, printSmthWhenItsCollectables) {
static ReadCollectablesList(reader) {
const collected = [];
let countSmthing = reader.readInt32();
if (countSmthing > 0) {
for (let i = 0; i < countSmthing; i++) {
const collectable = ObjectReference_1.ObjectReference.read(reader);
collected.push(collectable);
}
let count = reader.readInt32();
for (let i = 0; i < count; i++) {
collected.push(ObjectReference_1.ObjectReference.read(reader));
}

@@ -163,0 +158,0 @@ return collected;

@@ -170,8 +170,12 @@ "use strict";

}
if (this.header.saveVersion < 29) {
const roughSaveVersion = SaveReader.getRoughSaveVersion(this.header.saveVersion, this.header.saveHeaderType);
if (roughSaveVersion === '<U6') {
throw new parser_error_1.UnsupportedVersionError('Game Version < U6 is not supported.');
}
if (this.header.saveHeaderType < 13) {
throw new parser_error_1.UnsupportedVersionError('Game Version < U8 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.0.34');
else if (roughSaveVersion === 'U6/U7') {
throw new parser_error_1.UnsupportedVersionError('Game Version U6/U7 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.0.34');
}
else if (roughSaveVersion === 'U8') {
throw new parser_error_1.UnsupportedVersionError('Game Version U8 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.3.7');
}
const levels = [];

@@ -188,3 +192,3 @@ const levelCount = this.readInt32();

levels.push(level_class_1.Level.ReadLevel(this, this.header.mapName, this.header.buildVersion));
const trailingStuffToIgnore = level_class_1.Level.ReadCollectablesList(this, 'collectables 6!');
const trailingStuffToIgnore = level_class_1.Level.ReadCollectablesList(this);
this.onProgressCallback(this.getBufferProgress(), 'finished parsing.');

@@ -191,0 +195,0 @@ return levels;

@@ -32,26 +32,1 @@ import { MD5Hash } from '../objects/ue/MD5Hash';

export type RoughSaveVersion = '<U6' | 'U6/U7' | 'U8' | 'U1.0+';
export type ResourceNodeEntry = {
purity: string;
pathName: string;
position: {
x: number;
y: number;
z: number;
};
type: string;
};
export type CollectibleEntry = {
pathName: string;
position: {
x: number;
y: number;
z: number;
};
type: string;
};
export type StaticData = {
collectibles: CollectibleEntry[];
artifacts: CollectibleEntry[];
slugs: CollectibleEntry[];
resourceNodes: ResourceNodeEntry[];
};

@@ -7,6 +7,6 @@ "use strict";

col4.SerializeRGBA = (writer, value) => {
writer.writeFloat(value.r);
writer.writeFloat(value.g);
writer.writeFloat(value.b);
writer.writeFloat(value.a);
writer.writeFloat32(value.r);
writer.writeFloat32(value.g);
writer.writeFloat32(value.b);
writer.writeFloat32(value.a);
};

@@ -56,3 +56,3 @@ col4.ParseRGBA = (reader) => {

vec3.SerializeF(writer, vec);
writer.writeFloat(vec.w);
writer.writeFloat32(vec.w);
};

@@ -92,3 +92,3 @@ })(vec4 || (exports.vec4 = vec4 = {}));

vec2.SerializeF(writer, vec);
writer.writeFloat(vec.z);
writer.writeFloat32(vec.z);
};

@@ -120,4 +120,4 @@ vec3.sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z });

vec2.SerializeF = (writer, vec) => {
writer.writeFloat(vec.x);
writer.writeFloat(vec.y);
writer.writeFloat32(vec.x);
writer.writeFloat32(vec.y);
};

@@ -124,0 +124,0 @@ })(vec2 || (exports.vec2 = vec2 = {}));

@@ -10,4 +10,5 @@ import { ReadableStream } from "stream/web";

private static WriteLevels;
private static ReadDestroyedEntityReferences;
private static ReadNObjectHeaders;
private static ReadNObjects;
}

@@ -6,4 +6,6 @@ "use strict";

const web_1 = require("stream/web");
const parser_error_1 = require("../../error/parser.error");
const SaveComponent_1 = require("../../satisfactory/objects/SaveComponent");
const SaveEntity_1 = require("../../satisfactory/objects/SaveEntity");
const ObjectReference_1 = require("../../satisfactory/objects/values/ObjectReference");
const level_class_1 = require("../../satisfactory/save/level.class");

@@ -61,3 +63,3 @@ const satisfactory_save_1 = require("../../satisfactory/save/satisfactory-save");

const headersBinLen = reader.readInt32();
const unk = reader.readInt32();
reader.readInt32();
const posBeforeHeaders = reader.getBufferPosition();

@@ -157,2 +159,12 @@ const afterAllHeaders = posBeforeHeaders + headersBinLen;

const save = new satisfactory_save_1.SatisfactorySave(name, header);
const roughSaveVersion = save_reader_1.SaveReader.getRoughSaveVersion(header.saveVersion, header.saveHeaderType);
if (roughSaveVersion === '<U6') {
throw new parser_error_1.UnsupportedVersionError('Game Version < U6 is not supported.');
}
else if (roughSaveVersion === 'U6/U7') {
throw new parser_error_1.UnsupportedVersionError('Game Version U6/U7 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.0.34');
}
else if (roughSaveVersion === 'U8') {
throw new parser_error_1.UnsupportedVersionError('Game Version U8 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.3.7');
}
const inflateResult = reader.inflateChunks();

@@ -172,2 +184,11 @@ onDecompressedSaveBody(reader.getBuffer());

};
ReadableStreamParser.ReadDestroyedEntityReferences = (reader) => {
    // Reads a count-prefixed list of object references for destroyed entities.
    const total = reader.readInt32();
    const refs = [];
    for (let i = 0; i < total; i += 1) {
        refs.push(ObjectReference_1.ObjectReference.read(reader));
    }
    return refs;
};
ReadableStreamParser.ReadNObjectHeaders = (reader, count) => {

@@ -189,3 +210,3 @@ let objects = [];

default:
throw new Error('Unknown object type' + objectType);
throw new parser_error_1.CorruptSaveError('Unknown object type' + objectType);
}

@@ -198,3 +219,3 @@ objects.push(obj);

for (let i = 0; i < count; i++) {
objects[i].saveOrBlueprintIndicator = reader.readInt32();
objects[i].objectVersion = reader.readInt32();
objects[i].unknownType2 = reader.readInt32();

@@ -209,3 +230,7 @@ const binarySize = reader.readInt32();

}
const after = reader.getBufferPosition();
if (after - before !== binarySize) {
throw new parser_error_1.CorruptSaveError(`Could not read entity ${objects[i].instanceName}, as ${after - before} bytes were read, but ${binarySize} bytes were indicated.`);
}
}
};
{
"name": "@etothepii/satisfactory-file-parser",
"author": "etothepii",
"version": "0.4.13",
"version": "0.4.14",
"description": "A file parser for satisfactory files. Includes save files and blueprint files.",

@@ -6,0 +6,0 @@ "types": "./build/index.d.ts",

@@ -12,3 +12,3 @@ # Satisfactory File Parser

## Supported Versions
The version support of the packages is indicated below.
The version support of the packages is indicated below. Some bugs might still be present, see Bug Reporting further down.

@@ -22,4 +22,6 @@ Game Version Files of U5 and below are NOT supported.

| U8 | ✅ 0.1.20 - 0.3.7 |
| U1.0 | ✅ >= 0.4.13 |
| U1.0 | ✅ >= 0.4.14 |
## Changelog
The Changelog is alongside the readme [here](./CHANGELOG.md)

@@ -31,19 +33,28 @@ ## Installation via npm

I recommend parsing via stream, to save RAM. The binary data of the whole save will still be in memory, but the converted JSON won't.
The returned `stream` is a readable WHATWG stream of type string.
I recommend parsing via stream, to save RAM. The binary data of the whole save will still be in memory, but the converted JSON can be streamed. (You can of course keep reading the stream in memory).
The returned `stream` is a readable WHATWG stream of type string and represents a `SatisfactorySave` object. This object can be serialized again.
WHATWG is used by default by browsers. Node js can use them using `Writable.toWeb()` and `Writable.fromWeb()` for example.
```js
const jsonFileStream = fs.createWriteStream(outJsonPath, { highWaterMark: 1024 * 1024 * 200 }); // your outgoing JSON stream. In this case directly to file.
const whatwgWriteStream = Writable.toWeb(outJsonStream) as WritableStream<string>; // convert the file stream to WHATWG-compliant stream
import * as fs from 'fs';
import * as path from 'path';
import { Writable } from 'stream';
import { WritableStream } from 'stream/web';
import { ReadableStreamParser } from '@etothepii/satisfactory-file-parser';
const { stream, startStreaming } = ReadableStreamParser.CreateReadableStreamFromSaveToJson(savename, file, decompressedBody => {
const filepath = path.join(__dirname, 'MySave.sav');
const file = fs.readFileSync(filepath);
const outJsonPath = path.join(__dirname, 'MySave.json');
const jsonFileStream = fs.createWriteStream(outJsonPath, { highWaterMark: 1024 * 1024 * 200 }); // your outgoing JSON stream. In this case directly to file.
const whatwgWriteStream = Writable.toWeb(jsonFileStream) as WritableStream<string>; // convert the file stream to WHATWG-compliant stream
const { stream, startStreaming } = ReadableStreamParser.CreateReadableStreamFromSaveToJson('MySave', file, decompressedBody => {
console.log('on binary body data.');
}, (progress: number, msg: string | undefined) => {
// a callback for reporting progress as number [0,1]. Sometimes has a message.
console.log(`${new Date().toString()}: progress`, progress, msg);
console.log(`progress`, progress, msg);
});
stream.pipeTo(whatwgWriteStream);
whatwgWriteStream.on('close', () => {
jsonFileStream.on('close', () => {
// write stream finished

@@ -61,10 +72,15 @@ });

import * as fs from 'fs';
import * as path from 'path';
import { Parser } from "@etothepii/satisfactory-file-parser";
let header: Uint8Array, bodyChunks: Uint8Array[] = [];
Parser.WriteSave(save, binaryBeforeCompressed => {
// save is your SatisfactorySave object to serialize. In this example I read it back in from a JSON file.
const save = JSON.parse(fs.readFileSync(path.join(__dirname, 'MySave.json'), {encoding: 'utf-8'}));
let fileHeader: Uint8Array;
const bodyChunks: Uint8Array[] = [];
Parser.WriteSave(save, binaryBeforeCompressed => {
console.log('on binary data before being compressed.');
}, header => {
console.log('on save header.');
header = header;
fileHeader = header;
}, chunk => {

@@ -76,7 +92,7 @@ console.log('on save body chunk.');

// write complete sav file back to disk
fs.writeFileSync('./MyModifiedSave.sav', Buffer.concat([header!, ...bodyChunks]));
fs.writeFileSync('./MyModifiedSave.sav', Buffer.concat([fileHeader!, ...bodyChunks]));
```
### Old Usage of Save Parsing. Deprecated.
### Old Save Parsing. Deprecated.
For reading a save file (`.sav`) and parse In-Memory, just pass a Buffer to the parser with the file content.

@@ -88,3 +104,3 @@ ```js

const file = fs.readFileSync('./MySave.sav') as Buffer;
const parsedSave = Parser.ParseSaveFile(file);
const parsedSave = Parser.ParseSaveFile('MySave', file);
```

@@ -94,3 +110,3 @@

## Usage of Blueprint Parsing
Blueprint parsing works very similarly. Note that blueprints consist of 2 files: the `.sbp` main file and the config file `.sbpcfg`.
Note, that blueprints consist of 2 files. The `.sbp` main file and the config file `.sbpcfg`.

@@ -101,4 +117,4 @@ ```js

const mainFile = fs.readFileSync('./MyBlueprint.sbp') as Buffer;
const configFile = fs.readFileSync('./MyBlueprint.sbpcfg') as Buffer;
const mainFile = fs.readFileSync('./MyBlueprint.sbp');
const configFile = fs.readFileSync('./MyBlueprint.sbpcfg');
const parsedBlueprint = Parser.ParseBlueprintFiles('MyBlueprint', mainFile, configFile);

@@ -112,3 +128,4 @@ ```

let mainFileHeader: Uint8Array, mainFileBodyChunks: Uint8Array[] = [];
let mainFileHeader: Uint8Array;
const mainFileBodyChunks: Uint8Array[] = [];
const summary = Parser.WriteBlueprintFiles(blueprint, mainFileBinaryBeforeCompressed => {

@@ -128,9 +145,13 @@ console.log('on main file binary data before being compressed.');

// write .sbpcfg file back to disk, we get that data from the result of WriteBlueprintFiles
fs.writeFileSync('./MyModifiedSave.sbpcfg', Buffer.from(summary.configFileBinary));
fs.writeFileSync('./MyModifiedBlueprint.sbpcfg', Buffer.from(summary.configFileBinary));
```
### Bug Reports or Feedback
So far this was just a private hobby project, but I figure some people actually use it.
If you find a bug or have feedback about the parser, you can just hit me up on the Satisfactory Discord: `etothepii`.
## License
MIT License
Copyright (c) 2023 etothepii
Copyright (c) 2024 etothepii

@@ -137,0 +158,0 @@ Permission is hereby granted, free of charge, to any person obtaining a copy

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc