Huge News! Announcing our $40M Series B, led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@etothepii/satisfactory-file-parser

Package Overview
Dependencies
Maintainers
1
Versions
89
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@etothepii/satisfactory-file-parser - npm Package Compare versions

Comparing version 0.0.32 to 0.0.33

9

build/parser/parser.d.ts
/// <reference types="node" />
/// <reference types="node" />
import { TransformStream, WritableStream } from "stream/web";
import { ChunkSummary } from "./file.types";

@@ -8,12 +6,5 @@ import { Blueprint } from "./satisfactory/blueprint/blueprint.types";

import { SaveProjectionConfig } from "./satisfactory/save/save-reader";
/**
 * Identity transform over byte chunks: every `Uint8Array` written to the
 * writable side is forwarded unchanged to the readable side. Used to bridge
 * the decompression stage and the content-parsing stage of stream parsing.
 */
export declare class PassthroughWebTransform extends TransformStream<Uint8Array, Uint8Array> {
constructor();
}
export declare class Parser {
static WriteSave(save: SatisfactorySave, onBinaryBeforeCompressing: (buffer: ArrayBuffer) => void, onHeader: (header: Uint8Array) => void, onChunk: (chunk: Uint8Array) => void): ChunkSummary[];
private static ParseSaveFileStream;
private static ParseStreamSave;
private static DecompressStreamSave;
static ParseSaveFile(file: Buffer, onDecompressedSaveBody?: (buffer: ArrayBuffer) => void, onProgress?: (progress: number, message?: string) => void): SatisfactorySave;
static ParseSaveFileAsynchronousToOutput(file: Buffer, outputJson: WritableStream<string>, onDecompressedSaveBody?: (buffer: ArrayBuffer) => void, onProgress?: (progress: number, message?: string) => void): Promise<SatisfactorySave>;
static WriteBlueprintFiles(blueprint: Blueprint, onMainFileBinaryBeforeCompressing?: (binary: ArrayBuffer) => void, onMainFileHeader?: (header: Uint8Array) => void, onMainFileChunk?: (chunk: Uint8Array) => void): {

@@ -20,0 +11,0 @@ mainFileChunkSummary: ChunkSummary[];

102

build/parser/parser.js

@@ -7,3 +7,3 @@ (function (factory) {

else if (typeof define === "function" && define.amd) {
define(["require", "exports", "stream/web", "./satisfactory/blueprint/blueprint-reader", "./satisfactory/blueprint/blueprint-writer", "./satisfactory/save/level.class", "./satisfactory/save/satisfactory-save", "./satisfactory/save/save-reader", "./satisfactory/save/save-writer", "./stream/save-stream-reader.class", "./stream/save-stream-writer.class"], factory);
define(["require", "exports", "./satisfactory/blueprint/blueprint-reader", "./satisfactory/blueprint/blueprint-writer", "./satisfactory/save/level.class", "./satisfactory/save/satisfactory-save", "./satisfactory/save/save-reader", "./satisfactory/save/save-writer"], factory);
}

@@ -13,4 +13,3 @@ })(function (require, exports) {

Object.defineProperty(exports, "__esModule", { value: true });
exports.Parser = exports.PassthroughWebTransform = void 0;
const web_1 = require("stream/web");
exports.Parser = void 0;
const blueprint_reader_1 = require("./satisfactory/blueprint/blueprint-reader");

@@ -22,20 +21,2 @@ const blueprint_writer_1 = require("./satisfactory/blueprint/blueprint-writer");

const save_writer_1 = require("./satisfactory/save/save-writer");
const save_stream_reader_class_1 = require("./stream/save-stream-reader.class");
const save_stream_writer_class_1 = require("./stream/save-stream-writer.class");
class PassthroughWebTransform extends web_1.TransformStream {
    /**
     * Builds an identity transform: each chunk written to `writable` is
     * re-enqueued unchanged on `readable`. A log line marks stream disposal.
     */
    constructor() {
        const forward = async (chunk, controller) => {
            const resolved = await chunk;
            controller.enqueue(resolved);
        };
        super({
            start: (controller) => {
            },
            transform: forward,
            flush: (controller) => {
                console.log('on passthrough dispose.');
            }
        });
    }
}
exports.PassthroughWebTransform = PassthroughWebTransform;
class Parser {

@@ -51,64 +32,2 @@ static WriteSave(save, onBinaryBeforeCompressing, onHeader, onChunk) {

}
static async ParseSaveFileStream(input, output, binaryOutput) {
return new Promise(async (resolve, reject) => {
if (input.locked || output.locked || (binaryOutput !== undefined ? binaryOutput.locked : false)) {
throw new Error('One of the input/output streams is locked. Aborting save parsing.');
}
const binaryOutWriter = binaryOutput ? binaryOutput.getWriter() : undefined;
const writer = new save_stream_writer_class_1.SaveStreamWriter(output.getWriter());
const reader = new save_stream_reader_class_1.SaveStreamReader(input.getReader(), 500 * 1000 * 1000);
const passthrough = new PassthroughWebTransform();
const passthroughWriter = passthrough.writable.getWriter();
const passthroughReader = new save_stream_reader_class_1.SaveStreamReader(passthrough.readable.getReader(), 3000 * 1000 * 1000);
await writer.beginSave();
const header = await reader.readHeader();
await writer.writeHeader(header);
let contentParsingPromise;
const decompressionPromise = Parser.DecompressStreamSave(reader, writer, passthroughWriter, async () => {
console.log('start streaming content');
passthroughReader.debug = true;
contentParsingPromise = await Parser.ParseStreamSave(passthroughReader, writer, header);
await writer.close();
return resolve();
}, binaryOutWriter);
});
}
static async ParseStreamSave(reader, jsonWriter, header) {
return new Promise(async (resolve, reject) => {
reader.onCloseCallback = async () => {
console.log('content reader closes.');
await jsonWriter.endLevels();
await jsonWriter.endSave();
return resolve();
};
console.log('before open levels');
await jsonWriter.openLevels();
const dataLength = await reader.readInt32();
await reader.allocate(Math.min(1000, dataLength));
console.log(`incoming body size should be ${dataLength} bytes big.`);
await reader.streamLevelsToOutput(jsonWriter, header);
await reader.close();
});
}
static async DecompressStreamSave(reader, jsonWriter, outputWriter, onDecompressionInfoWritten, debugAdditionalBinaryOutputWriter) {
return new Promise(async (resolve, reject) => {
reader.onCloseCallback = async () => {
console.log('decompress reader closes.');
return resolve();
};
let first = true;
while (reader.hasInput()) {
const decompressedChunk = await reader.readBodyChunk();
if (first && reader.compressionInfo) {
await jsonWriter.writeCompressionInfo(reader.compressionInfo);
onDecompressionInfoWritten();
first = false;
}
if (debugAdditionalBinaryOutputWriter) {
debugAdditionalBinaryOutputWriter.write(decompressedChunk);
}
outputWriter.write(decompressedChunk);
}
});
}
static ParseSaveFile(file, onDecompressedSaveBody = () => { }, onProgress = () => { }) {

@@ -126,19 +45,2 @@ const reader = new save_reader_1.SaveReader(new Uint8Array(file).buffer, onProgress);

}
static async ParseSaveFileAsynchronousToOutput(file, outputJson, onDecompressedSaveBody = () => { }, onProgress = () => { }) {
const reader = new save_reader_1.SaveReader(new Uint8Array(file).buffer, onProgress);
const writer = new save_stream_writer_class_1.SaveStreamWriter(outputJson.getWriter());
await writer.beginSave();
const header = reader.readHeader();
const save = new satisfactory_save_1.SatisfactorySave(header);
await writer.writeHeader(save.header);
await writer.writeCompressionInfo({});
const inflateResult = reader.inflateChunks();
onDecompressedSaveBody(reader.getBuffer());
const levelParseResult = await reader.readLevelsAsynchronously(writer);
save.levels = reader.levels;
save.compressionInfo = reader.compressionInfo;
save.trailingCollectedObjects = reader.trailingCollectedObjects;
await writer.endSave();
return save;
}
static WriteBlueprintFiles(blueprint, onMainFileBinaryBeforeCompressing = () => { }, onMainFileHeader = () => { }, onMainFileChunk = () => { }) {

@@ -145,0 +47,0 @@ const blueprintWriter = new blueprint_writer_1.BlueprintWriter();

@@ -126,3 +126,2 @@ (function (factory) {

level.collectables = Level.ReadCollectablesList(reader);
console.log(`before object contents buffer pos ${reader.getBufferPosition()}`);
Level.ReadObjectContents(reader, level.objects, buildVersion, reader.onProgressCallback);

@@ -129,0 +128,0 @@ reader.onProgressCallback(reader.getBufferProgress());

2

package.json
{
"name": "@etothepii/satisfactory-file-parser",
"author": "etothepii",
"version": "0.0.32",
"version": "0.0.33",
"description": "A file parser for satisfactory files. Includes save files and blueprint files.",

@@ -6,0 +6,0 @@ "types": "./build/index.d.ts",

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc