Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

@etothepii/satisfactory-file-parser

Package Overview
Dependencies
Maintainers
1
Versions
89
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@etothepii/satisfactory-file-parser - npm Package Compare versions

Comparing version 0.1.25 to 0.1.26

85

build/index.js

@@ -1,60 +0,25 @@

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadableStreamParser = exports.Parser = exports.SaveStreamWriter = exports.SaveStreamJsonStringifier = exports.SaveWriter = exports.SaveReader = exports.BlueprintWriter = exports.BlueprintConfigWriter = exports.BlueprintReader = exports.BlueprintConfigReader = exports.ByteWriter = exports.ByteReader = exports.SatisfactorySave = exports.Level = exports.SaveEntity = exports.SaveComponent = exports.ObjectReference = exports.DataFields = void 0;
__exportStar(require("./parser/satisfactory/blueprint/blueprint.types"), exports);
var DataFields_1 = require("./parser/satisfactory/objects/DataFields");
Object.defineProperty(exports, "DataFields", { enumerable: true, get: function () { return DataFields_1.DataFields; } });
__exportStar(require("./parser/satisfactory/objects/GUIDInfo"), exports);
var ObjectReference_1 = require("./parser/satisfactory/objects/ObjectReference");
Object.defineProperty(exports, "ObjectReference", { enumerable: true, get: function () { return ObjectReference_1.ObjectReference; } });
__exportStar(require("./parser/satisfactory/objects/Property"), exports);
var SaveComponent_1 = require("./parser/satisfactory/objects/SaveComponent");
Object.defineProperty(exports, "SaveComponent", { enumerable: true, get: function () { return SaveComponent_1.SaveComponent; } });
var SaveEntity_1 = require("./parser/satisfactory/objects/SaveEntity");
Object.defineProperty(exports, "SaveEntity", { enumerable: true, get: function () { return SaveEntity_1.SaveEntity; } });
__exportStar(require("./parser/satisfactory/objects/ue/GUID"), exports);
__exportStar(require("./parser/satisfactory/objects/ue/MD5Hash"), exports);
var level_class_1 = require("./parser/satisfactory/save/level.class");
Object.defineProperty(exports, "Level", { enumerable: true, get: function () { return level_class_1.Level; } });
var satisfactory_save_1 = require("./parser/satisfactory/save/satisfactory-save");
Object.defineProperty(exports, "SatisfactorySave", { enumerable: true, get: function () { return satisfactory_save_1.SatisfactorySave; } });
__exportStar(require("./parser/satisfactory/save/save.types"), exports);
var byte_reader_class_1 = require("./parser/byte/byte-reader.class");
Object.defineProperty(exports, "ByteReader", { enumerable: true, get: function () { return byte_reader_class_1.ByteReader; } });
var byte_writer_class_1 = require("./parser/byte/byte-writer.class");
Object.defineProperty(exports, "ByteWriter", { enumerable: true, get: function () { return byte_writer_class_1.ByteWriter; } });
var blueprint_reader_1 = require("./parser/satisfactory/blueprint/blueprint-reader");
Object.defineProperty(exports, "BlueprintConfigReader", { enumerable: true, get: function () { return blueprint_reader_1.BlueprintConfigReader; } });
Object.defineProperty(exports, "BlueprintReader", { enumerable: true, get: function () { return blueprint_reader_1.BlueprintReader; } });
var blueprint_writer_1 = require("./parser/satisfactory/blueprint/blueprint-writer");
Object.defineProperty(exports, "BlueprintConfigWriter", { enumerable: true, get: function () { return blueprint_writer_1.BlueprintConfigWriter; } });
Object.defineProperty(exports, "BlueprintWriter", { enumerable: true, get: function () { return blueprint_writer_1.BlueprintWriter; } });
var save_reader_1 = require("./parser/satisfactory/save/save-reader");
Object.defineProperty(exports, "SaveReader", { enumerable: true, get: function () { return save_reader_1.SaveReader; } });
var save_writer_1 = require("./parser/satisfactory/save/save-writer");
Object.defineProperty(exports, "SaveWriter", { enumerable: true, get: function () { return save_writer_1.SaveWriter; } });
__exportStar(require("./parser/satisfactory/structs/util.types"), exports);
var save_stream_json_stringifier_1 = require("./parser/stream/save-stream-json-stringifier");
Object.defineProperty(exports, "SaveStreamJsonStringifier", { enumerable: true, get: function () { return save_stream_json_stringifier_1.SaveStreamJsonStringifier; } });
var save_stream_writer_class_1 = require("./parser/stream/save-stream-writer.class");
Object.defineProperty(exports, "SaveStreamWriter", { enumerable: true, get: function () { return save_stream_writer_class_1.SaveStreamWriter; } });
__exportStar(require("./parser/error/parser.error"), exports);
__exportStar(require("./parser/file.types"), exports);
var parser_1 = require("./parser/parser");
Object.defineProperty(exports, "Parser", { enumerable: true, get: function () { return parser_1.Parser; } });
var readable_stream_parser_1 = require("./parser/stream/reworked/readable-stream-parser");
Object.defineProperty(exports, "ReadableStreamParser", { enumerable: true, get: function () { return readable_stream_parser_1.ReadableStreamParser; } });
export * from './parser/satisfactory/blueprint/blueprint.types';
export { DataFields } from './parser/satisfactory/objects/DataFields';
export * from './parser/satisfactory/objects/GUIDInfo';
export { ObjectReference } from './parser/satisfactory/objects/ObjectReference';
export * from './parser/satisfactory/objects/Property';
export { SaveComponent } from './parser/satisfactory/objects/SaveComponent';
export { SaveEntity } from './parser/satisfactory/objects/SaveEntity';
export * from './parser/satisfactory/objects/ue/GUID';
export * from './parser/satisfactory/objects/ue/MD5Hash';
export { Level } from './parser/satisfactory/save/level.class';
export { SatisfactorySave } from './parser/satisfactory/save/satisfactory-save';
export * from './parser/satisfactory/save/save.types';
export { ByteReader } from './parser/byte/byte-reader.class';
export { ByteWriter } from './parser/byte/byte-writer.class';
export { BlueprintConfigReader, BlueprintReader } from './parser/satisfactory/blueprint/blueprint-reader';
export { BlueprintConfigWriter, BlueprintWriter } from './parser/satisfactory/blueprint/blueprint-writer';
export { SaveReader } from './parser/satisfactory/save/save-reader';
export { SaveWriter } from './parser/satisfactory/save/save-writer';
export * from './parser/satisfactory/structs/util.types';
export { SaveStreamJsonStringifier } from './parser/stream/save-stream-json-stringifier';
export { SaveStreamWriter } from './parser/stream/save-stream-writer.class';
export * from './parser/error/parser.error';
export * from './parser/file.types';
export { Parser } from './parser/parser';
export { ReadableStreamParser } from './parser/stream/reworked/readable-stream-parser';

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Alignment = void 0;
var Alignment;
export var Alignment;
(function (Alignment) {
Alignment[Alignment["BIG_ENDIAN"] = 0] = "BIG_ENDIAN";
Alignment[Alignment["LITTLE_ENDIAN"] = 1] = "LITTLE_ENDIAN";
})(Alignment = exports.Alignment || (exports.Alignment = {}));
})(Alignment || (Alignment = {}));

@@ -1,2 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
export {};

@@ -1,2 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
export {};

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ByteReader = void 0;
const alignment_enum_1 = require("./alignment.enum");
class ByteReader {
import { Alignment } from "./alignment.enum";
export class ByteReader {
constructor(fileBuffer, alignment) {

@@ -52,3 +49,3 @@ this.debug = false;

readInt16() {
let data = this.bufferView.getInt16(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getInt16(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -58,3 +55,3 @@ return data;

readUint16() {
let data = this.bufferView.getUint16(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getUint16(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -64,3 +61,3 @@ return data;

readInt32() {
let data = this.bufferView.getInt32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getInt32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -70,3 +67,3 @@ return data;

readUint32() {
let data = this.bufferView.getUint32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getUint32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -76,3 +73,3 @@ return data;

readLong() {
let data = this.bufferView.getBigInt64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getBigInt64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -85,3 +82,3 @@ return data;

readUint64() {
let data = this.bufferView.getBigUint64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getBigUint64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -91,3 +88,3 @@ return data;

readFloat32() {
let data = this.bufferView.getFloat32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getFloat32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -97,3 +94,3 @@ return data;

readDouble() {
let data = this.bufferView.getFloat64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.bufferView.getFloat64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -135,2 +132,1 @@ return data;

}
exports.ByteReader = ByteReader;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ByteWriter = void 0;
const alignment_enum_1 = require("./alignment.enum");
import { Alignment } from "./alignment.enum";
class ByteWriter {

@@ -44,3 +41,3 @@ constructor(alignment, bufferSize = 500) {

this.extendBufferIfNeeded(2);
this.bufferView.setInt16(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setInt16(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -50,3 +47,3 @@ }

this.extendBufferIfNeeded(2);
this.bufferView.setUint16(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setUint16(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -56,3 +53,3 @@ }

this.extendBufferIfNeeded(4);
this.bufferView.setInt32(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setInt32(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -62,3 +59,3 @@ }

this.extendBufferIfNeeded(4);
this.bufferView.setUint32(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setUint32(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -68,3 +65,3 @@ }

this.extendBufferIfNeeded(8);
this.bufferView.setBigInt64(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setBigInt64(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -74,3 +71,3 @@ }

this.extendBufferIfNeeded(8);
this.bufferView.setBigUint64(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setBigUint64(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -80,3 +77,3 @@ }

this.extendBufferIfNeeded(4);
this.bufferView.setFloat32(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setFloat32(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -86,3 +83,3 @@ }

this.extendBufferIfNeeded(8);
this.bufferView.setFloat64(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.bufferView.setFloat64(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -147,2 +144,2 @@ }

ByteWriter.IsASCIICompatible = (value) => /^[\x00-\x7F]*$/.test(value);
exports.ByteWriter = ByteWriter;
export { ByteWriter };

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.UnimplementedError = exports.TimeoutError = exports.CompressionLibraryError = exports.CorruptSaveError = exports.UnsupportedVersionError = exports.ParserError = void 0;
class ParserError extends Error {
export class ParserError extends Error {
constructor(name, message) {

@@ -10,4 +7,3 @@ super(message);

}
exports.ParserError = ParserError;
class UnsupportedVersionError extends ParserError {
export class UnsupportedVersionError extends ParserError {
constructor(message) {

@@ -17,4 +13,3 @@ super('UnsupportedVersionError', message ?? 'This save version is not supported.');

}
exports.UnsupportedVersionError = UnsupportedVersionError;
class CorruptSaveError extends ParserError {
export class CorruptSaveError extends ParserError {
constructor(message) {

@@ -24,4 +19,3 @@ super('CorruptSaveError', message ?? 'This save data is most likely corrupt.');

}
exports.CorruptSaveError = CorruptSaveError;
class CompressionLibraryError extends ParserError {
export class CompressionLibraryError extends ParserError {
constructor(message) {

@@ -31,4 +25,3 @@ super('CompressionLibraryError', message ?? 'Failed to compress/decompress save data.');

}
exports.CompressionLibraryError = CompressionLibraryError;
class TimeoutError extends ParserError {
export class TimeoutError extends ParserError {
constructor(message) {

@@ -38,4 +31,3 @@ super('TimeoutError', message ?? 'Operation timed out.');

}
exports.TimeoutError = TimeoutError;
class UnimplementedError extends ParserError {
export class UnimplementedError extends ParserError {
constructor(message) {

@@ -45,2 +37,1 @@ super('UnimplementedError', message ?? 'Unimplemented Operation.');

}
exports.UnimplementedError = UnimplementedError;

@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CompressionAlgorithmCode = void 0;
var CompressionAlgorithmCode;
export var CompressionAlgorithmCode;
(function (CompressionAlgorithmCode) {
CompressionAlgorithmCode[CompressionAlgorithmCode["ZLIB"] = 3] = "ZLIB";
})(CompressionAlgorithmCode = exports.CompressionAlgorithmCode || (exports.CompressionAlgorithmCode = {}));
})(CompressionAlgorithmCode || (CompressionAlgorithmCode = {}));

@@ -1,17 +0,14 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Parser = void 0;
const blueprint_reader_1 = require("./satisfactory/blueprint/blueprint-reader");
const blueprint_writer_1 = require("./satisfactory/blueprint/blueprint-writer");
const satisfactory_save_1 = require("./satisfactory/save/satisfactory-save");
const save_reader_1 = require("./satisfactory/save/save-reader");
const save_writer_1 = require("./satisfactory/save/save-writer");
import { BlueprintConfigReader, BlueprintReader } from "./satisfactory/blueprint/blueprint-reader";
import { BlueprintConfigWriter, BlueprintWriter } from "./satisfactory/blueprint/blueprint-writer";
import { SatisfactorySave } from "./satisfactory/save/satisfactory-save";
import { SaveReader } from "./satisfactory/save/save-reader";
import { SaveWriter } from "./satisfactory/save/save-writer";
class Parser {
static WriteSave(save, onBinaryBeforeCompressing, onHeader, onChunk) {
const writer = new save_writer_1.SaveWriter();
save_writer_1.SaveWriter.WriteHeader(writer, save.header);
const writer = new SaveWriter();
SaveWriter.WriteHeader(writer, save.header);
const posAfterHeader = writer.getBufferPosition();
save_writer_1.SaveWriter.WriteSaveBodyHash(writer, save.gridHash);
save_writer_1.SaveWriter.WriteGrids(writer, save.grids);
save_writer_1.SaveWriter.WriteLevels(writer, save, save.header.buildVersion);
SaveWriter.WriteSaveBodyHash(writer, save.gridHash);
SaveWriter.WriteGrids(writer, save.grids);
SaveWriter.WriteLevels(writer, save, save.header.buildVersion);
writer.endWriting();

@@ -22,5 +19,5 @@ const chunkSummary = writer.generateChunks(save.compressionInfo, posAfterHeader, onBinaryBeforeCompressing, onHeader, onChunk);

static ParseSaveFile(name, file, onDecompressedSaveBody = () => { }, onProgress = () => { }) {
const reader = new save_reader_1.SaveReader(new Uint8Array(file).buffer, onProgress);
const reader = new SaveReader(new Uint8Array(file).buffer, onProgress);
const header = reader.readHeader();
const save = new satisfactory_save_1.SatisfactorySave(name, header);
const save = new SatisfactorySave(name, header);
const inflateResult = reader.inflateChunks();

@@ -38,6 +35,6 @@ onDecompressedSaveBody(reader.getBuffer());

static WriteBlueprintFiles(blueprint, onMainFileBinaryBeforeCompressing = () => { }, onMainFileHeader = () => { }, onMainFileChunk = () => { }) {
const blueprintWriter = new blueprint_writer_1.BlueprintWriter();
blueprint_writer_1.BlueprintWriter.SerializeHeader(blueprintWriter, blueprint.header);
const blueprintWriter = new BlueprintWriter();
BlueprintWriter.SerializeHeader(blueprintWriter, blueprint.header);
const saveBodyPos = blueprintWriter.getBufferPosition();
blueprint_writer_1.BlueprintWriter.SerializeObjects(blueprintWriter, blueprint.objects);
BlueprintWriter.SerializeObjects(blueprintWriter, blueprint.objects);
blueprintWriter.endWriting();

@@ -47,4 +44,4 @@ let binaryChunks = [];

const mainFileChunkSummary = blueprintWriter.generateChunks(blueprint.compressionInfo, saveBodyPos, onMainFileBinaryBeforeCompressing, onMainFileHeader, onMainFileChunk);
const configWriter = new blueprint_writer_1.BlueprintConfigWriter();
blueprint_writer_1.BlueprintConfigWriter.SerializeConfig(configWriter, blueprint.config);
const configWriter = new BlueprintConfigWriter();
BlueprintConfigWriter.SerializeConfig(configWriter, blueprint.config);
const configFileBinary = configWriter.endWriting();

@@ -57,9 +54,9 @@ return {

static ParseBlueprintFiles(name, blueprintFile, blueprintConfigFile, onDecompressedBlueprintBody = () => { }) {
const blueprintConfigReader = new blueprint_reader_1.BlueprintConfigReader(new Uint8Array(blueprintConfigFile).buffer);
const config = blueprint_reader_1.BlueprintConfigReader.ParseConfig(blueprintConfigReader);
const blueprintReader = new blueprint_reader_1.BlueprintReader(new Uint8Array(blueprintFile).buffer);
const header = blueprint_reader_1.BlueprintReader.ReadHeader(blueprintReader);
const blueprintConfigReader = new BlueprintConfigReader(new Uint8Array(blueprintConfigFile).buffer);
const config = BlueprintConfigReader.ParseConfig(blueprintConfigReader);
const blueprintReader = new BlueprintReader(new Uint8Array(blueprintFile).buffer);
const header = BlueprintReader.ReadHeader(blueprintReader);
const inflateResult = blueprintReader.inflateChunks();
onDecompressedBlueprintBody(inflateResult.inflatedData);
const blueprintObjects = blueprint_reader_1.BlueprintReader.ParseObjects(blueprintReader);
const blueprintObjects = BlueprintReader.ParseObjects(blueprintReader);
const blueprint = {

@@ -84,2 +81,2 @@ name,

}, indent);
exports.Parser = Parser;
export { Parser };

@@ -1,23 +0,17 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BlueprintConfigReader = exports.BlueprintReader = void 0;
const pako_1 = __importDefault(require("pako"));
const alignment_enum_1 = require("../../byte/alignment.enum");
const byte_reader_class_1 = require("../../byte/byte-reader.class");
const parser_error_1 = require("../../error/parser.error");
const SaveComponent_1 = require("../objects/SaveComponent");
const SaveEntity_1 = require("../objects/SaveEntity");
const level_class_1 = require("../save/level.class");
const save_reader_1 = require("../save/save-reader");
const util_types_1 = require("../structs/util.types");
class BlueprintReader extends byte_reader_class_1.ByteReader {
import Pako from "pako";
import { Alignment } from "../../byte/alignment.enum";
import { ByteReader } from "../../byte/byte-reader.class";
import { CorruptSaveError, ParserError } from "../../error/parser.error";
import { SaveComponent, isSaveComponent } from "../objects/SaveComponent";
import { SaveEntity, isSaveEntity } from "../objects/SaveEntity";
import { Level } from "../save/level.class";
import { DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE } from "../save/save-reader";
import { ParseCol4RGBA, ParseVec3f } from "../structs/util.types";
export class BlueprintReader extends ByteReader {
constructor(bluePrintBuffer) {
super(bluePrintBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN);
super(bluePrintBuffer, Alignment.LITTLE_ENDIAN);
this.compressionInfo = {
packageFileTag: 0,
maxUncompressedChunkContentSize: 0,
chunkHeaderSize: save_reader_1.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE
chunkHeaderSize: DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE
};

@@ -28,3 +22,3 @@ }

const versionThing = reader.readBytes(2 * 4);
const dimensions = (0, util_types_1.ParseVec3f)(reader);
const dimensions = ParseVec3f(reader);
let itemTypeCount = reader.readInt32();

@@ -63,9 +57,9 @@ const itemCosts = new Array(itemTypeCount).fill(['', 0]);

if (this.compressionInfo.packageFileTag <= 0) {
this.compressionInfo.packageFileTag = chunkHeader.getUint32(0, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.compressionInfo.packageFileTag = chunkHeader.getUint32(0, this.alignment === Alignment.LITTLE_ENDIAN);
}
if (this.compressionInfo.maxUncompressedChunkContentSize <= 0) {
this.compressionInfo.maxUncompressedChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.compressionInfo.maxUncompressedChunkContentSize = chunkHeader.getInt32(8, this.alignment === Alignment.LITTLE_ENDIAN);
}
const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = chunkHeader.getInt32(25, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = chunkHeader.getInt32(25, this.alignment === Alignment.LITTLE_ENDIAN);
totalUncompressedBodySize += chunkUncompressedLength;

@@ -80,7 +74,7 @@ const currentChunkSize = chunkCompressedLength;

let currentInflatedChunk = null;
currentInflatedChunk = pako_1.default.inflate(currentChunk);
currentInflatedChunk = Pako.inflate(currentChunk);
currentChunks.push(currentInflatedChunk);
}
catch (err) {
throw new parser_error_1.ParserError('ParserError', 'An error occurred while calling pako inflate.' + err);
throw new ParserError('ParserError', 'An error occurred while calling pako inflate.' + err);
}

@@ -108,3 +102,3 @@ }

let objects = [];
level_class_1.Level.ReadObjectHeaders(reader, objects, () => { });
Level.ReadObjectHeaders(reader, objects, () => { });
const someChecksumThing = reader.readInt32();

@@ -120,10 +114,10 @@ BlueprintReader.ReadBlueprintObjectContents(reader, objects, 0);

if (len === 0) {
throw new parser_error_1.CorruptSaveError(`check number is a wrong value (${len}). This normally indicates a corrupt entity or blueprint.`);
throw new CorruptSaveError(`check number is a wrong value (${len}). This normally indicates a corrupt entity or blueprint.`);
}
const obj = objectsList[i];
if ((0, SaveEntity_1.isSaveEntity)(obj)) {
SaveEntity_1.SaveEntity.ParseData(obj, len, reader, buildVersion, obj.typePath);
if (isSaveEntity(obj)) {
SaveEntity.ParseData(obj, len, reader, buildVersion, obj.typePath);
}
else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
SaveComponent_1.SaveComponent.ParseData(obj, len, reader, buildVersion, obj.typePath);
else if (isSaveComponent(obj)) {
SaveComponent.ParseData(obj, len, reader, buildVersion, obj.typePath);
}

@@ -133,6 +127,5 @@ }

}
exports.BlueprintReader = BlueprintReader;
class BlueprintConfigReader extends byte_reader_class_1.ByteReader {
export class BlueprintConfigReader extends ByteReader {
constructor(bluePrintConfigBuffer) {
super(bluePrintConfigBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN);
super(bluePrintConfigBuffer, Alignment.LITTLE_ENDIAN);
this.bluePrintConfigBuffer = bluePrintConfigBuffer;

@@ -145,3 +138,3 @@ this.parse = () => BlueprintConfigReader.ParseConfig(this);

const unk3 = reader.readInt32();
const colorMaybe = (0, util_types_1.ParseCol4RGBA)(reader);
const colorMaybe = ParseCol4RGBA(reader);
return {

@@ -154,2 +147,1 @@ description,

}
exports.BlueprintConfigReader = BlueprintConfigReader;

@@ -1,15 +0,12 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BlueprintConfigWriter = exports.BlueprintWriter = void 0;
const alignment_enum_1 = require("../../byte/alignment.enum");
const byte_writer_class_1 = require("../../byte/byte-writer.class");
const parser_error_1 = require("../../error/parser.error");
const SaveComponent_1 = require("../objects/SaveComponent");
const SaveEntity_1 = require("../objects/SaveEntity");
const level_class_1 = require("../save/level.class");
const save_writer_1 = require("../save/save-writer");
const util_types_1 = require("../structs/util.types");
class BlueprintWriter extends byte_writer_class_1.ByteWriter {
import { Alignment } from "../../byte/alignment.enum";
import { ByteWriter } from "../../byte/byte-writer.class";
import { ParserError } from "../../error/parser.error";
import { SaveComponent, isSaveComponent } from "../objects/SaveComponent";
import { SaveEntity, isSaveEntity } from "../objects/SaveEntity";
import { Level } from "../save/level.class";
import { SaveWriter } from "../save/save-writer";
import { SerializeCol4RGBA } from "../structs/util.types";
export class BlueprintWriter extends ByteWriter {
constructor() {
super(alignment_enum_1.Alignment.LITTLE_ENDIAN);
super(Alignment.LITTLE_ENDIAN);
}

@@ -42,3 +39,3 @@ static SerializeHeader(writer, header) {

if (posAfterHeader <= 0) {
throw new parser_error_1.ParserError('ParserError', 'seems like this buffer has no header. Please write the header first before you can generate chunks.');
throw new ParserError('ParserError', 'seems like this buffer has no header. Please write the header first before you can generate chunks.');
}

@@ -48,3 +45,3 @@ const header = new Uint8Array(this.bufferArray.slice(0, posAfterHeader));

this.bufferArray = this.bufferArray.slice(posAfterHeader);
const chunkSummary = save_writer_1.SaveWriter.GenerateCompressedChunksFromData(this.bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, this.alignment);
const chunkSummary = SaveWriter.GenerateCompressedChunksFromData(this.bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, this.alignment);
return chunkSummary;

@@ -55,3 +52,3 @@ }

writer.writeInt32(0);
level_class_1.Level.SerializeObjectHeaders(writer, objects);
Level.SerializeObjectHeaders(writer, objects);
writer.writeBinarySizeFromPosition(headersLenIndicator, headersLenIndicator + 4);

@@ -67,7 +64,7 @@ BlueprintWriter.SerializeObjectContents(writer, objects, 0, '');

writer.writeInt32(0);
if ((0, SaveEntity_1.isSaveEntity)(obj)) {
SaveEntity_1.SaveEntity.SerializeData(writer, obj, buildVersion);
if (isSaveEntity(obj)) {
SaveEntity.SerializeData(writer, obj, buildVersion);
}
else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
SaveComponent_1.SaveComponent.SerializeData(writer, obj, buildVersion);
else if (isSaveComponent(obj)) {
SaveComponent.SerializeData(writer, obj, buildVersion);
}

@@ -79,6 +76,5 @@ writer.writeBinarySizeFromPosition(lenReplacementPosition, lenReplacementPosition + 4);

}
exports.BlueprintWriter = BlueprintWriter;
class BlueprintConfigWriter extends byte_writer_class_1.ByteWriter {
export class BlueprintConfigWriter extends ByteWriter {
constructor() {
super(alignment_enum_1.Alignment.LITTLE_ENDIAN);
super(Alignment.LITTLE_ENDIAN);
}

@@ -89,5 +85,4 @@ static SerializeConfig(writer, config) {

writer.writeInt32(config.iconID);
(0, util_types_1.SerializeCol4RGBA)(writer, config.color);
SerializeCol4RGBA(writer, config.color);
}
}
exports.BlueprintConfigWriter = BlueprintConfigWriter;

@@ -1,2 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
export {};

@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DataFields = void 0;
const util_types_1 = require("../structs/util.types");
const Property_1 = require("./Property");
class DataFields {
import { ParseVec3f } from "../structs/util.types";
import { ArrayProperty, BoolProperty, ByteProperty, DoubleProperty, EnumProperty, FloatProperty, Int32Property, Int64Property, Int8Property, MapProperty, ObjectProperty, SetProperty, StrProperty, StructProperty, TextProperty, Uint32Property, Uint8Property } from "./Property";
export class DataFields {
constructor() {

@@ -62,8 +59,8 @@ }

property = {
source: Property_1.ObjectProperty.ReadValue(reader),
target: Property_1.ObjectProperty.ReadValue(reader)
source: ObjectProperty.ReadValue(reader),
target: ObjectProperty.ReadValue(reader)
};
if (remainingLen - (reader.getBufferPosition() - start) >= 24) {
property.sourceTranslation = (0, util_types_1.ParseVec3f)(reader);
property.targetTranslation = (0, util_types_1.ParseVec3f)(reader);
property.sourceTranslation = ParseVec3f(reader);
property.targetTranslation = ParseVec3f(reader);
}

@@ -100,72 +97,72 @@ break;

case 'BoolProperty':
currentProperty = Property_1.BoolProperty.Parse(reader, propertyType, index);
overhead = Property_1.BoolProperty.CalcOverhead(currentProperty);
currentProperty = BoolProperty.Parse(reader, propertyType, index);
overhead = BoolProperty.CalcOverhead(currentProperty);
break;
case 'ByteProperty':
currentProperty = Property_1.ByteProperty.Parse(reader, propertyType, index);
overhead = Property_1.ByteProperty.CalcOverhead(currentProperty);
currentProperty = ByteProperty.Parse(reader, propertyType, index);
overhead = ByteProperty.CalcOverhead(currentProperty);
break;
case 'Int8Property':
currentProperty = Property_1.Int8Property.Parse(reader, propertyType, index);
overhead = Property_1.Int8Property.CalcOverhead(currentProperty);
currentProperty = Int8Property.Parse(reader, propertyType, index);
overhead = Int8Property.CalcOverhead(currentProperty);
break;
case 'UInt8Property':
currentProperty = Property_1.Uint8Property.Parse(reader, propertyType, index);
overhead = Property_1.Uint8Property.CalcOverhead(currentProperty);
currentProperty = Uint8Property.Parse(reader, propertyType, index);
overhead = Uint8Property.CalcOverhead(currentProperty);
break;
case 'IntProperty':
case 'Int32Property':
currentProperty = Property_1.Int32Property.Parse(reader, propertyType, index);
overhead = Property_1.Int32Property.CalcOverhead(currentProperty);
currentProperty = Int32Property.Parse(reader, propertyType, index);
overhead = Int32Property.CalcOverhead(currentProperty);
break;
case 'UInt32Property':
currentProperty = Property_1.Uint32Property.Parse(reader, propertyType, index);
overhead = Property_1.Uint32Property.CalcOverhead(currentProperty);
currentProperty = Uint32Property.Parse(reader, propertyType, index);
overhead = Uint32Property.CalcOverhead(currentProperty);
break;
case 'Int64Property':
currentProperty = Property_1.Int64Property.Parse(reader, propertyType, index);
overhead = Property_1.Int64Property.CalcOverhead(currentProperty);
currentProperty = Int64Property.Parse(reader, propertyType, index);
overhead = Int64Property.CalcOverhead(currentProperty);
break;
case 'SingleProperty':
case 'FloatProperty':
currentProperty = Property_1.FloatProperty.Parse(reader, propertyType, index);
overhead = Property_1.FloatProperty.CalcOverhead(currentProperty);
currentProperty = FloatProperty.Parse(reader, propertyType, index);
overhead = FloatProperty.CalcOverhead(currentProperty);
break;
case 'DoubleProperty':
currentProperty = Property_1.DoubleProperty.Parse(reader, propertyType, index);
overhead = Property_1.DoubleProperty.CalcOverhead(currentProperty);
currentProperty = DoubleProperty.Parse(reader, propertyType, index);
overhead = DoubleProperty.CalcOverhead(currentProperty);
break;
case 'StrProperty':
case 'NameProperty':
currentProperty = Property_1.StrProperty.Parse(reader, propertyType, index);
overhead = Property_1.StrProperty.CalcOverhead(currentProperty);
currentProperty = StrProperty.Parse(reader, propertyType, index);
overhead = StrProperty.CalcOverhead(currentProperty);
break;
case 'ObjectProperty':
case 'InterfaceProperty':
currentProperty = Property_1.ObjectProperty.Parse(reader, propertyType, index);
overhead = Property_1.ObjectProperty.CalcOverhead(currentProperty);
currentProperty = ObjectProperty.Parse(reader, propertyType, index);
overhead = ObjectProperty.CalcOverhead(currentProperty);
break;
case 'EnumProperty':
currentProperty = Property_1.EnumProperty.Parse(reader, propertyType, index);
overhead = Property_1.EnumProperty.CalcOverhead(currentProperty);
currentProperty = EnumProperty.Parse(reader, propertyType, index);
overhead = EnumProperty.CalcOverhead(currentProperty);
break;
case 'StructProperty':
currentProperty = Property_1.StructProperty.Parse(reader, propertyType, index, binarySize);
overhead = Property_1.StructProperty.CalcOverhead(currentProperty);
currentProperty = StructProperty.Parse(reader, propertyType, index, binarySize);
overhead = StructProperty.CalcOverhead(currentProperty);
break;
case 'ArrayProperty':
currentProperty = Property_1.ArrayProperty.Parse(reader, propertyType, index, propertyName);
overhead = Property_1.ArrayProperty.CalcOverhead(currentProperty);
currentProperty = ArrayProperty.Parse(reader, propertyType, index, propertyName);
overhead = ArrayProperty.CalcOverhead(currentProperty);
break;
case 'MapProperty':
currentProperty = Property_1.MapProperty.Parse(reader, propertyName, buildVersion, binarySize);
overhead = Property_1.MapProperty.CalcOverhead(currentProperty);
currentProperty = MapProperty.Parse(reader, propertyName, buildVersion, binarySize);
overhead = MapProperty.CalcOverhead(currentProperty);
break;
case 'TextProperty':
currentProperty = Property_1.TextProperty.Parse(reader, propertyType, index);
overhead = Property_1.TextProperty.CalcOverhead(currentProperty);
currentProperty = TextProperty.Parse(reader, propertyType, index);
overhead = TextProperty.CalcOverhead(currentProperty);
break;
case 'SetProperty':
currentProperty = Property_1.SetProperty.Parse(reader, propertyType, index, propertyName);
overhead = Property_1.SetProperty.CalcOverhead(currentProperty);
currentProperty = SetProperty.Parse(reader, propertyType, index, propertyName);
overhead = SetProperty.CalcOverhead(currentProperty);
break;

@@ -208,4 +205,4 @@ default:

case '/Game/FactoryGame/Events/Christmas/Buildings/PowerLineLights/Build_XmassLightsLine.Build_XmassLightsLine_C':
Property_1.ObjectProperty.SerializeValue(writer, property.source);
Property_1.ObjectProperty.SerializeValue(writer, property.target);
ObjectProperty.SerializeValue(writer, property.source);
ObjectProperty.SerializeValue(writer, property.target);
break;

@@ -236,76 +233,76 @@ case '/Game/FactoryGame/Character/Player/BP_PlayerState.BP_PlayerState_C':

case 'BoolProperty':
overhead = Property_1.BoolProperty.CalcOverhead(property);
Property_1.BoolProperty.Serialize(writer, property);
overhead = BoolProperty.CalcOverhead(property);
BoolProperty.Serialize(writer, property);
break;
case 'ByteProperty':
overhead = Property_1.ByteProperty.CalcOverhead(property);
Property_1.ByteProperty.Serialize(writer, property);
overhead = ByteProperty.CalcOverhead(property);
ByteProperty.Serialize(writer, property);
break;
case 'Int8Property':
overhead = Property_1.Int8Property.CalcOverhead(property);
Property_1.Int8Property.Serialize(writer, property);
overhead = Int8Property.CalcOverhead(property);
Int8Property.Serialize(writer, property);
break;
case 'UInt8Property':
overhead = Property_1.Uint8Property.CalcOverhead(property);
Property_1.Uint8Property.Serialize(writer, property);
overhead = Uint8Property.CalcOverhead(property);
Uint8Property.Serialize(writer, property);
break;
case 'IntProperty':
case 'Int32Property':
overhead = Property_1.Int32Property.CalcOverhead(property);
Property_1.Int32Property.Serialize(writer, property);
overhead = Int32Property.CalcOverhead(property);
Int32Property.Serialize(writer, property);
break;
case 'UInt32Property':
overhead = Property_1.Uint32Property.CalcOverhead(property);
Property_1.Uint32Property.Serialize(writer, property);
overhead = Uint32Property.CalcOverhead(property);
Uint32Property.Serialize(writer, property);
break;
case 'Int64Property':
overhead = Property_1.Int64Property.CalcOverhead(property);
Property_1.Int64Property.Serialize(writer, property);
overhead = Int64Property.CalcOverhead(property);
Int64Property.Serialize(writer, property);
break;
case 'SingleProperty':
case 'FloatProperty':
overhead = Property_1.FloatProperty.CalcOverhead(property);
Property_1.FloatProperty.Serialize(writer, property);
overhead = FloatProperty.CalcOverhead(property);
FloatProperty.Serialize(writer, property);
break;
case 'DoubleProperty':
overhead = Property_1.DoubleProperty.CalcOverhead(property);
Property_1.DoubleProperty.Serialize(writer, property);
overhead = DoubleProperty.CalcOverhead(property);
DoubleProperty.Serialize(writer, property);
break;
case 'StrProperty':
case 'NameProperty':
overhead = Property_1.StrProperty.CalcOverhead(property);
Property_1.StrProperty.Serialize(writer, property);
overhead = StrProperty.CalcOverhead(property);
StrProperty.Serialize(writer, property);
break;
case 'ObjectProperty':
case 'InterfaceProperty':
overhead = Property_1.ObjectProperty.CalcOverhead(property);
Property_1.ObjectProperty.Serialize(writer, property);
overhead = ObjectProperty.CalcOverhead(property);
ObjectProperty.Serialize(writer, property);
break;
case 'EnumProperty':
overhead = Property_1.EnumProperty.CalcOverhead(property);
Property_1.EnumProperty.Serialize(writer, property);
overhead = EnumProperty.CalcOverhead(property);
EnumProperty.Serialize(writer, property);
break;
case 'ByteProperty':
overhead = Property_1.ByteProperty.CalcOverhead(property);
Property_1.ByteProperty.Serialize(writer, property);
overhead = ByteProperty.CalcOverhead(property);
ByteProperty.Serialize(writer, property);
break;
case 'StructProperty':
overhead = Property_1.StructProperty.CalcOverhead(property);
Property_1.StructProperty.Serialize(writer, property);
overhead = StructProperty.CalcOverhead(property);
StructProperty.Serialize(writer, property);
break;
case 'ArrayProperty':
overhead = Property_1.ArrayProperty.CalcOverhead(property);
Property_1.ArrayProperty.Serialize(writer, property, propertyName);
overhead = ArrayProperty.CalcOverhead(property);
ArrayProperty.Serialize(writer, property, propertyName);
break;
case 'MapProperty':
overhead = Property_1.MapProperty.CalcOverhead(property);
Property_1.MapProperty.Serialize(writer, property);
overhead = MapProperty.CalcOverhead(property);
MapProperty.Serialize(writer, property);
break;
case 'TextProperty':
overhead = Property_1.TextProperty.CalcOverhead(property);
Property_1.TextProperty.Serialize(writer, property);
overhead = TextProperty.CalcOverhead(property);
TextProperty.Serialize(writer, property);
break;
case 'SetProperty':
overhead = Property_1.SetProperty.CalcOverhead(property);
Property_1.SetProperty.Serialize(writer, property);
overhead = SetProperty.CalcOverhead(property);
SetProperty.Serialize(writer, property);
break;

@@ -318,2 +315,1 @@ default:

}
exports.DataFields = DataFields;

@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GUIDInfo = void 0;
const GUID_1 = require("./ue/GUID");
var GUIDInfo;
import { GUID } from './ue/GUID';
export var GUIDInfo;
(function (GUIDInfo) {
GUIDInfo.read = (reader) => {
if (reader.readByte() === 1) {
return GUID_1.GUID.read(reader);
return GUID.read(reader);
}

@@ -22,6 +19,6 @@ else {

writer.writeByte(1);
GUID_1.GUID.write(writer, guid);
GUID.write(writer, guid);
}
};
})(GUIDInfo = exports.GUIDInfo || (exports.GUIDInfo = {}));
})(GUIDInfo || (GUIDInfo = {}));
;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ObjectReference = void 0;
var ObjectReference;
export var ObjectReference;
(function (ObjectReference) {

@@ -16,3 +13,3 @@ ObjectReference.read = (reader) => {

};
})(ObjectReference = exports.ObjectReference || (exports.ObjectReference = {}));
})(ObjectReference || (ObjectReference = {}));
;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SerializeFINLuaProcessorStateStorage = exports.ReadFINLuaProcessorStateStorage = exports.SerializeFINNetworkTrace = exports.ReadFINNetworkTrace = exports.SerializeDynamicStructData = exports.ParseDynamicStructData = exports.MapProperty = exports.SetProperty = exports.ArrayProperty = exports.StructProperty = exports.TextProperty = exports.EnumProperty = exports.ObjectProperty = exports.StrProperty = exports.DoubleProperty = exports.FloatProperty = exports.Int64Property = exports.Uint32Property = exports.Int32Property = exports.Uint8Property = exports.Int8Property = exports.ByteProperty = exports.BoolProperty = exports.BasicProperty = exports.AbstractBaseProperty = exports.AbstractProperty = void 0;
const util_types_1 = require("../structs/util.types");
const DataFields_1 = require("./DataFields");
const GUIDInfo_1 = require("./GUIDInfo");
const ObjectReference_1 = require("./ObjectReference");
class AbstractProperty {
import { ParseCol4BGRA, ParseCol4RGBA, ParseVec3, ParseVec3f, ParseVec4, ParseVec4f, SerializeCol4BGRA, SerializeCol4RGBA, SerializeVec3, SerializeVec3f, SerializeVec4 } from "../structs/util.types";
import { DataFields } from "./DataFields";
import { GUIDInfo } from './GUIDInfo';
import { ObjectReference } from "./ObjectReference";
export class AbstractProperty {
constructor(type, index) {

@@ -14,4 +11,3 @@ this.type = type;

}
exports.AbstractProperty = AbstractProperty;
class AbstractBaseProperty extends AbstractProperty {
export class AbstractBaseProperty extends AbstractProperty {
constructor(type, ueType, index) {

@@ -23,4 +19,3 @@ super(type, index && index !== 0 ? index : undefined);

}
exports.AbstractBaseProperty = AbstractBaseProperty;
class BasicProperty extends AbstractBaseProperty {
export class BasicProperty extends AbstractBaseProperty {
constructor(type, ueType, guidInfo, index = 0) {

@@ -31,4 +26,3 @@ super(type, ueType, index);

}
exports.BasicProperty = BasicProperty;
class BoolProperty extends BasicProperty {
export class BoolProperty extends BasicProperty {
constructor(value, ueType = 'BoolProperty', guidInfo = undefined, index = 0) {

@@ -40,3 +34,3 @@ super('BoolProperty', ueType, guidInfo, index);

const value = BoolProperty.ReadValue(reader);
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
return new BoolProperty(value, ueType, guidInfo, index);

@@ -52,3 +46,3 @@ }

BoolProperty.SerializeValue(writer, property.value);
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
}

@@ -59,4 +53,3 @@ static SerializeValue(writer, value) {

}
exports.BoolProperty = BoolProperty;
class ByteProperty extends BasicProperty {
export class ByteProperty extends BasicProperty {
constructor(value, ueType = 'ByteProperty', guidInfo = undefined, index = 0) {

@@ -68,3 +61,3 @@ super('ByteProperty', ueType, guidInfo, index);

const type = reader.readString();
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
let value;

@@ -94,3 +87,3 @@ if (type === 'None') {

writer.writeString(property.value.type);
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
if (property.value.type === 'None') {

@@ -107,4 +100,3 @@ ByteProperty.SerializeValue(writer, property.value.value);

}
exports.ByteProperty = ByteProperty;
class Int8Property extends BasicProperty {
export class Int8Property extends BasicProperty {
constructor(value, ueType = 'Int8Property', guidInfo = undefined, index = 0) {

@@ -115,3 +107,3 @@ super('Int8Property', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = Int8Property.ReadValue(reader);

@@ -127,3 +119,3 @@ return new Int8Property(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
Int8Property.SerializeValue(writer, property.value);

@@ -135,4 +127,3 @@ }

}
exports.Int8Property = Int8Property;
class Uint8Property extends BasicProperty {
export class Uint8Property extends BasicProperty {
constructor(value, ueType = 'UInt8Property', guidInfo = undefined, index = 0) {

@@ -143,3 +134,3 @@ super('UInt8Property', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = Uint8Property.ReadValue(reader);

@@ -155,3 +146,3 @@ return new Uint8Property(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
Uint8Property.SerializeValue(writer, property.value);

@@ -163,4 +154,3 @@ }

}
exports.Uint8Property = Uint8Property;
class Int32Property extends BasicProperty {
export class Int32Property extends BasicProperty {
constructor(value, ueType = 'IntProperty', guidInfo = undefined, index = 0) {

@@ -171,3 +161,3 @@ super('Int32Property', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = Int32Property.ReadValue(reader);

@@ -183,3 +173,3 @@ return new Int32Property(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
Int32Property.SerializeValue(writer, property.value);

@@ -191,4 +181,3 @@ }

}
exports.Int32Property = Int32Property;
class Uint32Property extends BasicProperty {
export class Uint32Property extends BasicProperty {
constructor(value, ueType = 'UInt32Property', guidInfo = undefined, index = 0) {

@@ -199,3 +188,3 @@ super('UInt32Property', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = Uint32Property.ReadValue(reader);

@@ -211,3 +200,3 @@ return new Uint32Property(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
Uint32Property.SerializeValue(writer, property.value);

@@ -219,4 +208,3 @@ }

}
exports.Uint32Property = Uint32Property;
class Int64Property extends BasicProperty {
export class Int64Property extends BasicProperty {
constructor(value, ueType = 'Int64Property', guidInfo = undefined, index = 0) {

@@ -227,3 +215,3 @@ super('Int64Property', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = Int64Property.ReadValue(reader);

@@ -239,3 +227,3 @@ return new Int64Property(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
Int64Property.SerializeValue(writer, property.value);

@@ -247,4 +235,3 @@ }

}
exports.Int64Property = Int64Property;
class FloatProperty extends BasicProperty {
export class FloatProperty extends BasicProperty {
constructor(value, ueType = 'FloatProperty', guidInfo = undefined, index = 0) {

@@ -255,3 +242,3 @@ super('FloatProperty', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = FloatProperty.ReadValue(reader);

@@ -267,3 +254,3 @@ return new FloatProperty(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
FloatProperty.SerializeValue(writer, property.value);

@@ -275,4 +262,3 @@ }

}
exports.FloatProperty = FloatProperty;
class DoubleProperty extends BasicProperty {
export class DoubleProperty extends BasicProperty {
constructor(value, ueType = 'DoubleProperty', guidInfo = undefined, index = 0) {

@@ -283,3 +269,3 @@ super('DoubleProperty', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = DoubleProperty.ReadValue(reader);

@@ -295,3 +281,3 @@ return new DoubleProperty(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
DoubleProperty.SerializeValue(writer, property.value);

@@ -303,4 +289,3 @@ }

}
exports.DoubleProperty = DoubleProperty;
class StrProperty extends BasicProperty {
export class StrProperty extends BasicProperty {
constructor(value, ueType = 'StrProperty', guidInfo = undefined, index = 0) {

@@ -311,3 +296,3 @@ super('StrProperty', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = StrProperty.ReadValue(reader);

@@ -323,3 +308,3 @@ return new StrProperty(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
StrProperty.SerializeValue(writer, property.value);

@@ -331,4 +316,3 @@ }

}
exports.StrProperty = StrProperty;
class ObjectProperty extends BasicProperty {
export class ObjectProperty extends BasicProperty {
constructor(value, ueType = 'ObjectProperty', guidInfo = undefined, index = 0) {

@@ -339,3 +323,3 @@ super('ObjectProperty', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = ObjectProperty.ReadValue(reader);

@@ -355,3 +339,3 @@ return new ObjectProperty(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
ObjectProperty.SerializeValue(writer, property.value);

@@ -364,4 +348,3 @@ }

}
exports.ObjectProperty = ObjectProperty;
class EnumProperty extends BasicProperty {
export class EnumProperty extends BasicProperty {
constructor(value, ueType = 'EnumProperty', guidInfo = undefined, index = 0) {

@@ -373,3 +356,3 @@ super('EnumProperty', ueType, guidInfo, index);

let name = reader.readString();
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = EnumProperty.ReadValue(reader);

@@ -387,3 +370,3 @@ const property = new EnumProperty({ name, value }, ueType, guidInfo, index);

writer.writeString(property.value.name);
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
EnumProperty.SerializeValue(writer, property.value.value);

@@ -395,4 +378,3 @@ }

}
exports.EnumProperty = EnumProperty;
class TextProperty extends BasicProperty {
export class TextProperty extends BasicProperty {
constructor(value, ueType = 'TextProperty', guidInfo = undefined, index = 0) {

@@ -403,3 +385,3 @@ super('TextProperty', ueType, guidInfo, index);

static Parse(reader, ueType, index = 0) {
const guidInfo = GUIDInfo_1.GUIDInfo.read(reader);
const guidInfo = GUIDInfo.read(reader);
const value = TextProperty.ParseValue(reader);

@@ -457,3 +439,3 @@ return new TextProperty(value, ueType, guidInfo, index);

static Serialize(writer, property) {
GUIDInfo_1.GUIDInfo.write(writer, property.guidInfo);
GUIDInfo.write(writer, property.guidInfo);
TextProperty.SerializeValue(writer, property.value);

@@ -502,4 +484,3 @@ }

}
exports.TextProperty = TextProperty;
class StructProperty extends AbstractBaseProperty {
export class StructProperty extends AbstractBaseProperty {
constructor(subtype, ueType = 'StructProperty', index = 0, guid = 0) {

@@ -541,6 +522,6 @@ super('StructProperty', ueType, index);

case 'Color':
value = (0, util_types_1.ParseCol4BGRA)(reader);
value = ParseCol4BGRA(reader);
break;
case 'LinearColor':
value = (0, util_types_1.ParseCol4RGBA)(reader);
value = ParseCol4RGBA(reader);
break;

@@ -550,3 +531,3 @@ case 'Vector':

case 'Vector2D':
value = (size === 12) ? (0, util_types_1.ParseVec3f)(reader) : (0, util_types_1.ParseVec3)(reader);
value = (size === 12) ? ParseVec3f(reader) : ParseVec3(reader);
break;

@@ -556,12 +537,12 @@ case 'Quat':

case 'Vector4D':
value = (size === 16) ? (0, util_types_1.ParseVec4f)(reader) : (0, util_types_1.ParseVec4)(reader);
value = (size === 16) ? ParseVec4f(reader) : ParseVec4(reader);
break;
case 'Box':
value = (size === 25) ? {
min: (0, util_types_1.ParseVec3f)(reader),
max: (0, util_types_1.ParseVec3f)(reader),
min: ParseVec3f(reader),
max: ParseVec3f(reader),
isValid: reader.readByte() >= 1
} : {
min: (0, util_types_1.ParseVec3)(reader),
max: (0, util_types_1.ParseVec3)(reader),
min: ParseVec3(reader),
max: ParseVec3(reader),
isValid: reader.readByte() >= 1

@@ -605,7 +586,7 @@ };

case 'FINNetworkTrace':
value = (0, exports.ReadFINNetworkTrace)(reader);
value = ReadFINNetworkTrace(reader);
break;
case 'FINLuaProcessorStateStorage':
value = {
values: (0, exports.ReadFINLuaProcessorStateStorage)(reader, size)
values: ReadFINLuaProcessorStateStorage(reader, size)
};

@@ -620,3 +601,3 @@ break;

default:
value = (0, exports.ParseDynamicStructData)(reader, 0, subtype);
value = ParseDynamicStructData(reader, 0, subtype);
}

@@ -641,7 +622,7 @@ return value;

value = value;
(0, util_types_1.SerializeCol4BGRA)(writer, value);
SerializeCol4BGRA(writer, value);
break;
case 'LinearColor':
value = value;
(0, util_types_1.SerializeCol4RGBA)(writer, value);
SerializeCol4RGBA(writer, value);
break;

@@ -652,3 +633,3 @@ case 'Vector':

value = value;
(0, util_types_1.SerializeVec3)(writer, value);
SerializeVec3(writer, value);
break;

@@ -659,8 +640,8 @@ case 'Quat':

value = value;
(0, util_types_1.SerializeVec4)(writer, value);
SerializeVec4(writer, value);
break;
case 'Box':
value = value;
(0, util_types_1.SerializeVec3)(writer, value.min);
(0, util_types_1.SerializeVec3)(writer, value.max);
SerializeVec3(writer, value.min);
SerializeVec3(writer, value.max);
writer.writeByte(value.isValid ? 1 : 0);

@@ -704,7 +685,7 @@ break;

value = value;
(0, exports.SerializeFINNetworkTrace)(writer, value);
SerializeFINNetworkTrace(writer, value);
break;
case 'FINLuaProcessorStateStorage':
value = value;
(0, exports.SerializeFINLuaProcessorStateStorage)(writer, value.values);
SerializeFINLuaProcessorStateStorage(writer, value.values);
break;

@@ -718,8 +699,7 @@ case 'FICFrameRange':

value = value;
(0, exports.SerializeDynamicStructData)(writer, 0, value);
SerializeDynamicStructData(writer, 0, value);
}
}
}
exports.StructProperty = StructProperty;
class ArrayProperty extends BasicProperty {
export class ArrayProperty extends BasicProperty {
constructor(subtype, values, ueType = 'ArrayProperty', index = 0, structValueFields) {

@@ -870,4 +850,3 @@ super('ArrayProperty', ueType, undefined, index);

}
exports.ArrayProperty = ArrayProperty;
class SetProperty extends BasicProperty {
export class SetProperty extends BasicProperty {
constructor(subtype, values, ueType, index) {

@@ -926,3 +905,3 @@ super('SetProperty', ueType, undefined, index);

case "StructProperty":
property.values.forEach(v => (0, util_types_1.SerializeVec3f)(writer, v));
property.values.forEach(v => SerializeVec3f(writer, v));
break;

@@ -934,4 +913,3 @@ default:

}
exports.SetProperty = SetProperty;
class MapProperty extends BasicProperty {
export class MapProperty extends BasicProperty {
constructor(keyType, valueType, ueType, index) {

@@ -963,3 +941,3 @@ super('MapProperty', ueType, undefined, index);

else {
key = (0, exports.ParseDynamicStructData)(reader, 0, property.keyType);
key = ParseDynamicStructData(reader, 0, property.keyType);
}

@@ -989,3 +967,3 @@ break;

case 'StructProperty':
value = (0, exports.ParseDynamicStructData)(reader, 0, property.valueType);
value = ParseDynamicStructData(reader, 0, property.valueType);
break;

@@ -1032,3 +1010,3 @@ case 'ObjectProperty':

else {
(0, exports.SerializeDynamicStructData)(writer, 0, entry[0]);
SerializeDynamicStructData(writer, 0, entry[0]);
}

@@ -1058,3 +1036,3 @@ break;

case 'StructProperty':
(0, exports.SerializeDynamicStructData)(writer, 0, entry[1]);
SerializeDynamicStructData(writer, 0, entry[1]);
break;

@@ -1084,4 +1062,3 @@ case 'ObjectProperty':

}
exports.MapProperty = MapProperty;
const ParseDynamicStructData = (reader, buildVersion, type) => {
export const ParseDynamicStructData = (reader, buildVersion, type) => {
const data = {

@@ -1093,3 +1070,3 @@ type, properties: {}

while (propertyName !== 'None') {
const parsedProperty = DataFields_1.DataFields.ParseProperty(reader, buildVersion, propertyName);
const parsedProperty = DataFields.ParseProperty(reader, buildVersion, propertyName);
if (data.properties[propertyName]) {

@@ -1108,8 +1085,7 @@ if (!Array.isArray(data.properties[propertyName])) {

};
exports.ParseDynamicStructData = ParseDynamicStructData;
const SerializeDynamicStructData = (writer, buildVersion, data) => {
export const SerializeDynamicStructData = (writer, buildVersion, data) => {
for (const key in data.properties) {
for (const prop of (Array.isArray(data.properties[key]) ? data.properties[key] : [data.properties[key]])) {
writer.writeString(key);
DataFields_1.DataFields.SerializeProperty(writer, prop, key, buildVersion);
DataFields.SerializeProperty(writer, prop, key, buildVersion);
}

@@ -1119,9 +1095,8 @@ }

};
exports.SerializeDynamicStructData = SerializeDynamicStructData;
const ReadFINNetworkTrace = (reader) => {
export const ReadFINNetworkTrace = (reader) => {
const networkTrace = {};
networkTrace.ref = ObjectReference_1.ObjectReference.read(reader);
networkTrace.ref = ObjectReference.read(reader);
networkTrace.hasPrev = reader.readInt32();
if (networkTrace.hasPrev) {
networkTrace.prev = (0, exports.ReadFINNetworkTrace)(reader);
networkTrace.prev = ReadFINNetworkTrace(reader);
}

@@ -1134,9 +1109,8 @@ networkTrace.hasStep = reader.readInt32();

};
exports.ReadFINNetworkTrace = ReadFINNetworkTrace;
const SerializeFINNetworkTrace = (writer, obj) => {
export const SerializeFINNetworkTrace = (writer, obj) => {
const networkTrace = {};
ObjectReference_1.ObjectReference.write(writer, obj.ref);
ObjectReference.write(writer, obj.ref);
writer.writeInt32(obj.hasPrev);
if (obj.hasPrev) {
(0, exports.SerializeFINNetworkTrace)(writer, obj.prev);
SerializeFINNetworkTrace(writer, obj.prev);
}

@@ -1148,4 +1122,3 @@ writer.writeInt32(obj.hasStep);

};
exports.SerializeFINNetworkTrace = SerializeFINNetworkTrace;
const ReadFINLuaProcessorStateStorage = (reader, size) => {
export const ReadFINLuaProcessorStateStorage = (reader, size) => {
const stateStorage = { traces: [], references: [], thread: '', globals: '', remainingStructData: {} };

@@ -1155,7 +1128,7 @@ const start = reader.getBufferPosition();

for (let i = 0; i < traceCount; i++) {
stateStorage.traces.push((0, exports.ReadFINNetworkTrace)(reader));
stateStorage.traces.push(ReadFINNetworkTrace(reader));
}
const refCount = reader.readInt32();
for (let i = 0; i < refCount; i++) {
stateStorage.references.push(ObjectReference_1.ObjectReference.read(reader));
stateStorage.references.push(ObjectReference.read(reader));
}

@@ -1168,11 +1141,10 @@ stateStorage.thread = reader.readString();

};
exports.ReadFINLuaProcessorStateStorage = ReadFINLuaProcessorStateStorage;
const SerializeFINLuaProcessorStateStorage = (writer, stateStorage) => {
export const SerializeFINLuaProcessorStateStorage = (writer, stateStorage) => {
writer.writeInt32(stateStorage.traces.length);
for (const trace of stateStorage.traces) {
(0, exports.SerializeFINNetworkTrace)(writer, trace);
SerializeFINNetworkTrace(writer, trace);
}
writer.writeInt32(stateStorage.references.length);
for (const ref of stateStorage.references) {
ObjectReference_1.ObjectReference.write(writer, ref);
ObjectReference.write(writer, ref);
}

@@ -1183,2 +1155,1 @@ writer.writeString(stateStorage.thread);

};
exports.SerializeFINLuaProcessorStateStorage = SerializeFINLuaProcessorStateStorage;

@@ -1,10 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveComponent = exports.isSaveComponent = void 0;
const SaveObject_1 = require("./SaveObject");
const isSaveComponent = (obj) => {
import { SaveObject } from "./SaveObject";
export const isSaveComponent = (obj) => {
return obj.type === 'SaveComponent';
};
exports.isSaveComponent = isSaveComponent;
class SaveComponent extends SaveObject_1.SaveObject {
class SaveComponent extends SaveObject {
constructor(typePath, rootObject, instanceName, parentEntityName = '') {

@@ -19,14 +15,14 @@ super(typePath, rootObject, instanceName);

static ParseHeader(reader, obj) {
SaveObject_1.SaveObject.ParseHeader(reader, obj);
SaveObject.ParseHeader(reader, obj);
obj.parentEntityName = reader.readString();
}
static SerializeHeader(writer, component) {
SaveObject_1.SaveObject.SerializeHeader(writer, component);
SaveObject.SerializeHeader(writer, component);
writer.writeString(component.parentEntityName);
}
static ParseData(component, length, reader, buildVersion, typePath) {
SaveObject_1.SaveObject.ParseData(component, length, reader, buildVersion, typePath);
SaveObject.ParseData(component, length, reader, buildVersion, typePath);
}
}
SaveComponent.TypeID = 0;
exports.SaveComponent = SaveComponent;
export { SaveComponent };

@@ -1,12 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveEntity = exports.isSaveEntity = void 0;
const util_types_1 = require("../structs/util.types");
const ObjectReference_1 = require("./ObjectReference");
const SaveObject_1 = require("./SaveObject");
const isSaveEntity = (obj) => {
import { ParseTransform, SerializeTransform } from "../structs/util.types";
import { ObjectReference } from "./ObjectReference";
import { SaveObject } from "./SaveObject";
export const isSaveEntity = (obj) => {
return obj.type === 'SaveEntity';
};
exports.isSaveEntity = isSaveEntity;
class SaveEntity extends SaveObject_1.SaveObject {
class SaveEntity extends SaveObject {
constructor(typePath, rootObject, instanceName, parentEntityName = '', needsTransform = false) {

@@ -31,5 +27,5 @@ super(typePath, rootObject, instanceName);

static ParseHeader(reader, obj) {
SaveObject_1.SaveObject.ParseHeader(reader, obj);
SaveObject.ParseHeader(reader, obj);
obj.needTransform = reader.readInt32() == 1;
obj.transform = (0, util_types_1.ParseTransform)(reader);
obj.transform = ParseTransform(reader);
obj.wasPlacedInLevel = reader.readInt32() == 1;

@@ -43,12 +39,12 @@ }

for (let i = 0; i < componentCount; i++) {
var componentRef = ObjectReference_1.ObjectReference.read(reader);
var componentRef = ObjectReference.read(reader);
entity.components.push(componentRef);
}
const remainingSize = length - (reader.getBufferPosition() - afterSizeIndicator);
return SaveObject_1.SaveObject.ParseData(entity, remainingSize, reader, buildVersion, typePath);
return SaveObject.ParseData(entity, remainingSize, reader, buildVersion, typePath);
}
static SerializeHeader(writer, entity) {
SaveObject_1.SaveObject.SerializeHeader(writer, entity);
SaveObject.SerializeHeader(writer, entity);
writer.writeInt32(entity.needTransform ? 1 : 0);
(0, util_types_1.SerializeTransform)(writer, entity.transform);
SerializeTransform(writer, entity.transform);
writer.writeInt32(entity.wasPlacedInLevel ? 1 : 0);

@@ -64,6 +60,6 @@ }

}
SaveObject_1.SaveObject.SerializeData(writer, entity, buildVersion);
SaveObject.SerializeData(writer, entity, buildVersion);
}
}
SaveEntity.TypeID = 1;
exports.SaveEntity = SaveEntity;
export { SaveEntity };

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveObject = void 0;
const DataFields_1 = require("./DataFields");
class SaveObject {
import { DataFields } from "./DataFields";
export class SaveObject {
constructor(typePath, rootObject, instanceName) {

@@ -27,8 +24,7 @@ this.typePath = typePath;

static ParseData(obj, length, reader, buildVersion, typePath) {
DataFields_1.DataFields.ParseProperties(obj, length, reader, buildVersion, typePath);
DataFields.ParseProperties(obj, length, reader, buildVersion, typePath);
}
static SerializeData(writer, obj, buildVersion) {
DataFields_1.DataFields.Serialize(obj, writer, buildVersion, obj.typePath);
DataFields.Serialize(obj, writer, buildVersion, obj.typePath);
}
}
exports.SaveObject = SaveObject;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GUID = void 0;
var GUID;
export var GUID;
(function (GUID) {

@@ -20,3 +17,3 @@ GUID.read = (reader) => {

};
})(GUID = exports.GUID || (exports.GUID = {}));
})(GUID || (GUID = {}));
;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MD5Hash = void 0;
var MD5Hash;
export var MD5Hash;
(function (MD5Hash) {

@@ -20,3 +17,3 @@ MD5Hash.read = (reader) => {

};
})(MD5Hash = exports.MD5Hash || (exports.MD5Hash = {}));
})(MD5Hash || (MD5Hash = {}));
;

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AsynchronousLevel = void 0;
const SaveComponent_1 = require("../objects/SaveComponent");
const SaveEntity_1 = require("../objects/SaveEntity");
const level_class_1 = require("./level.class");
class AsynchronousLevel {
import { isSaveComponent, SaveComponent } from "../objects/SaveComponent";
import { isSaveEntity, SaveEntity } from "../objects/SaveEntity";
import { Level } from "./level.class";
export class AsynchronousLevel {
static async StreamReadObjectContents(reader, objectsList, buildVersion, writer, onProgressCallback) {

@@ -30,7 +27,7 @@ const binarySize = reader.readInt32();

const obj = objectsList[i];
if ((0, SaveEntity_1.isSaveEntity)(obj)) {
SaveEntity_1.SaveEntity.ParseData(obj, len, reader, buildVersion, obj.typePath);
if (isSaveEntity(obj)) {
SaveEntity.ParseData(obj, len, reader, buildVersion, obj.typePath);
}
else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
SaveComponent_1.SaveComponent.ParseData(obj, len, reader, buildVersion, obj.typePath);
else if (isSaveComponent(obj)) {
SaveComponent.ParseData(obj, len, reader, buildVersion, obj.typePath);
}

@@ -50,9 +47,9 @@ dynamicObjectBatch.push(obj);

static async StreamReadLevel(reader, levelName, buildVersion, writer) {
const level = new level_class_1.Level(levelName);
const level = new Level(levelName);
const binaryLength = reader.readInt32();
await reader.allocate(binaryLength);
await writer.openLevel(levelName);
level_class_1.Level.ReadObjectHeaders(reader, level.objects, reader.onProgressCallback);
Level.ReadObjectHeaders(reader, level.objects, reader.onProgressCallback);
reader.onProgressCallback(reader.getBufferProgress());
level.collectables = level_class_1.Level.ReadCollectablesList(reader, undefined);
level.collectables = Level.ReadCollectablesList(reader, undefined);
console.log(`before object contents buffer pos ${reader.getBufferPosition()}`);

@@ -63,3 +60,3 @@ await AsynchronousLevel.StreamReadObjectContents(reader, level.objects, buildVersion, writer, reader.onProgressCallback);

reader.onProgressCallback(reader.getBufferProgress());
level_class_1.Level.ReadCollectablesList(reader, undefined);
Level.ReadCollectablesList(reader, undefined);
await writer.endLevel();

@@ -69,2 +66,1 @@ return level;

}
exports.AsynchronousLevel = AsynchronousLevel;

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Level = void 0;
const ObjectReference_1 = require("../objects/ObjectReference");
const SaveComponent_1 = require("../objects/SaveComponent");
const SaveEntity_1 = require("../objects/SaveEntity");
class Level {
import { ObjectReference } from "../objects/ObjectReference";
import { SaveComponent, isSaveComponent } from "../objects/SaveComponent";
import { SaveEntity, isSaveEntity } from "../objects/SaveEntity";
export class Level {
constructor(name) {

@@ -19,8 +16,8 @@ this.name = name;

case 'SaveEntity':
writer.writeInt32(SaveEntity_1.SaveEntity.TypeID);
SaveEntity_1.SaveEntity.SerializeHeader(writer, obj);
writer.writeInt32(SaveEntity.TypeID);
SaveEntity.SerializeHeader(writer, obj);
break;
case 'SaveComponent':
writer.writeInt32(SaveComponent_1.SaveComponent.TypeID);
SaveComponent_1.SaveComponent.SerializeHeader(writer, obj);
writer.writeInt32(SaveComponent.TypeID);
SaveComponent.SerializeHeader(writer, obj);
break;

@@ -53,7 +50,7 @@ default:

writer.writeInt32(0);
if ((0, SaveEntity_1.isSaveEntity)(obj)) {
SaveEntity_1.SaveEntity.SerializeData(writer, obj, buildVersion);
if (isSaveEntity(obj)) {
SaveEntity.SerializeData(writer, obj, buildVersion);
}
else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
SaveComponent_1.SaveComponent.SerializeData(writer, obj, buildVersion);
else if (isSaveComponent(obj)) {
SaveComponent.SerializeData(writer, obj, buildVersion);
}

@@ -72,10 +69,10 @@ writer.writeBinarySizeFromPosition(lenReplacementPosition, lenReplacementPosition + 4);

switch (objectType) {
case SaveEntity_1.SaveEntity.TypeID:
let object = new SaveEntity_1.SaveEntity('', '', '', '');
SaveEntity_1.SaveEntity.ParseHeader(reader, object);
case SaveEntity.TypeID:
let object = new SaveEntity('', '', '', '');
SaveEntity.ParseHeader(reader, object);
objectsList.push(object);
break;
case SaveComponent_1.SaveComponent.TypeID:
let component = new SaveComponent_1.SaveComponent('', '', '', '');
SaveComponent_1.SaveComponent.ParseHeader(reader, component);
case SaveComponent.TypeID:
let component = new SaveComponent('', '', '', '');
SaveComponent.ParseHeader(reader, component);
objectsList.push(component);

@@ -103,7 +100,7 @@ break;

const before = reader.getBufferPosition();
if ((0, SaveEntity_1.isSaveEntity)(obj)) {
SaveEntity_1.SaveEntity.ParseData(obj, binarySize, reader, buildVersion, obj.typePath);
if (isSaveEntity(obj)) {
SaveEntity.ParseData(obj, binarySize, reader, buildVersion, obj.typePath);
}
else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
SaveComponent_1.SaveComponent.ParseData(obj, binarySize, reader, buildVersion, obj.typePath);
else if (isSaveComponent(obj)) {
SaveComponent.ParseData(obj, binarySize, reader, buildVersion, obj.typePath);
}

@@ -148,3 +145,3 @@ const after = reader.getBufferPosition();

for (const collectable of collectables) {
ObjectReference_1.ObjectReference.write(writer, collectable);
ObjectReference.write(writer, collectable);
}

@@ -157,3 +154,3 @@ }

for (let i = 0; i < countSmthing; i++) {
const collectable = ObjectReference_1.ObjectReference.read(reader);
const collectable = ObjectReference.read(reader);
collected.push(collectable);

@@ -168,2 +165,1 @@ }

}
exports.Level = Level;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SatisfactorySave = void 0;
class SatisfactorySave {
export class SatisfactorySave {
constructor(name, header) {

@@ -14,2 +11,1 @@ this.gridHash = [0, 0, 0, 0];

}
exports.SatisfactorySave = SatisfactorySave;

@@ -1,18 +0,12 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveReader = exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = void 0;
const pako_1 = __importDefault(require("pako"));
const alignment_enum_1 = require("../../byte/alignment.enum");
const byte_reader_class_1 = require("../../byte/byte-reader.class");
const parser_error_1 = require("../../error/parser.error");
const MD5Hash_1 = require("../objects/ue/MD5Hash");
const asynchronous_level_class_1 = require("./asynchronous-level.class");
const level_class_1 = require("./level.class");
exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49;
class SaveReader extends byte_reader_class_1.ByteReader {
import Pako from "pako";
import { Alignment } from "../../byte/alignment.enum";
import { ByteReader } from "../../byte/byte-reader.class";
import { CompressionLibraryError, CorruptSaveError, ParserError, UnsupportedVersionError } from "../../error/parser.error";
import { MD5Hash } from '../objects/ue/MD5Hash';
import { AsynchronousLevel } from "./asynchronous-level.class";
import { Level } from "./level.class";
export const DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49;
class SaveReader extends ByteReader {
constructor(fileBuffer, onProgressCallback = () => { }) {
super(fileBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN);
super(fileBuffer, Alignment.LITTLE_ENDIAN);
this.onProgressCallback = onProgressCallback;

@@ -24,3 +18,3 @@ this.levels = [];

maxUncompressedChunkContentSize: 0,
chunkHeaderSize: exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE
chunkHeaderSize: DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE
};

@@ -102,3 +96,3 @@ this.readSaveBodyHash = () => {

if (this.header.saveHeaderType >= 12) {
this.header.consistencyHashBytes = MD5Hash_1.MD5Hash.read(this);
this.header.consistencyHashBytes = MD5Hash.read(this);
}

@@ -111,3 +105,3 @@ if (this.header.saveHeaderType >= 13) {

else {
throw new parser_error_1.UnsupportedVersionError("The save version is too old to support encoding currently. Save in newer game version.");
throw new UnsupportedVersionError("The save version is too old to support encoding currently. Save in newer game version.");
}

@@ -128,9 +122,9 @@ return this.header;

if (this.compressionInfo.packageFileTag <= 0) {
this.compressionInfo.packageFileTag = chunkHeader.getUint32(0, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.compressionInfo.packageFileTag = chunkHeader.getUint32(0, this.alignment === Alignment.LITTLE_ENDIAN);
}
if (this.compressionInfo.maxUncompressedChunkContentSize <= 0) {
this.compressionInfo.maxUncompressedChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.compressionInfo.maxUncompressedChunkContentSize = chunkHeader.getInt32(8, this.alignment === Alignment.LITTLE_ENDIAN);
}
const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = chunkHeader.getInt32(25, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = chunkHeader.getInt32(25, this.alignment === Alignment.LITTLE_ENDIAN);
totalUncompressedBodySize += chunkUncompressedLength;

@@ -145,7 +139,7 @@ const currentChunkSize = chunkCompressedLength;

let currentInflatedChunk = null;
currentInflatedChunk = pako_1.default.inflate(currentChunk);
currentInflatedChunk = Pako.inflate(currentChunk);
currentChunks.push(currentInflatedChunk);
}
catch (err) {
throw new parser_error_1.CompressionLibraryError("Failed to inflate compressed save data. " + err);
throw new CompressionLibraryError("Failed to inflate compressed save data. " + err);
}

@@ -165,3 +159,3 @@ }

if (totalUncompressedBodySize !== dataLength + 8) {
throw new parser_error_1.CorruptSaveError(`Possibly corrupt. Indicated size of total save body (${dataLength + 8}) does not match the uncompressed real size of ${totalUncompressedBodySize}.`);
throw new CorruptSaveError(`Possibly corrupt. Indicated size of total save body (${dataLength + 8}) does not match the uncompressed real size of ${totalUncompressedBodySize}.`);
}

@@ -175,9 +169,9 @@ return {

if (!this.header) {
throw new parser_error_1.ParserError('ParserError', 'Header must be set before objects can be read.');
throw new ParserError('ParserError', 'Header must be set before objects can be read.');
}
if (this.header.saveVersion < 29) {
throw new parser_error_1.UnsupportedVersionError('Game Version < U6 is not supported.');
throw new UnsupportedVersionError('Game Version < U6 is not supported.');
}
if (this.header.saveHeaderType < 13) {
throw new parser_error_1.UnsupportedVersionError('Game Version < U8 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.0.34');
throw new UnsupportedVersionError('Game Version < U8 is not supported in this package version. Consider downgrading to the latest package version supporting it, which is 0.0.34');
}

@@ -192,6 +186,6 @@ const levels = [];

}
levels.push(level_class_1.Level.ReadLevel(this, levelSingleName, this.header.buildVersion));
levels.push(Level.ReadLevel(this, levelSingleName, this.header.buildVersion));
}
levels.push(level_class_1.Level.ReadLevel(this, this.header.mapName, this.header.buildVersion));
const trailingStuffToIgnore = level_class_1.Level.ReadCollectablesList(this, 'collectables 6!');
levels.push(Level.ReadLevel(this, this.header.mapName, this.header.buildVersion));
const trailingStuffToIgnore = Level.ReadCollectablesList(this, 'collectables 6!');
this.onProgressCallback(this.getBufferProgress(), 'finished parsing.');

@@ -207,3 +201,3 @@ return levels;

this.onProgressCallback(this.getBufferProgress(), `reading level [${(j + 1)}/${(numSubLevels + 1)}] ${levelName}`);
this.levels[j] = await asynchronous_level_class_1.AsynchronousLevel.StreamReadLevel(this, levelName, this.header.buildVersion, writer);
this.levels[j] = await AsynchronousLevel.StreamReadLevel(this, levelName, this.header.buildVersion, writer);
}

@@ -226,2 +220,2 @@ await writer.endLevels();

};
exports.SaveReader = SaveReader;
export { SaveReader };

@@ -1,18 +0,12 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveWriter = void 0;
const pako_1 = __importDefault(require("pako"));
const alignment_enum_1 = require("../../byte/alignment.enum");
const byte_writer_class_1 = require("../../byte/byte-writer.class");
const parser_error_1 = require("../../error/parser.error");
const file_types_1 = require("../../file.types");
const MD5Hash_1 = require("../objects/ue/MD5Hash");
const level_class_1 = require("./level.class");
const save_reader_1 = require("./save-reader");
class SaveWriter extends byte_writer_class_1.ByteWriter {
import Pako from "pako";
import { Alignment } from "../../byte/alignment.enum";
import { ByteWriter } from "../../byte/byte-writer.class";
import { CompressionLibraryError, ParserError, UnsupportedVersionError } from "../../error/parser.error";
import { CompressionAlgorithmCode } from "../../file.types";
import { MD5Hash } from '../objects/ue/MD5Hash';
import { Level } from "./level.class";
import { SaveReader } from "./save-reader";
class SaveWriter extends ByteWriter {
constructor() {
super(alignment_enum_1.Alignment.LITTLE_ENDIAN);
super(Alignment.LITTLE_ENDIAN);
}

@@ -27,3 +21,3 @@ static WriteHeader(writer, header) {

writer.writeInt32(header.playDurationSeconds);
writer.writeInt64(BigInt(header.saveDateTime) * 10000n + save_reader_1.SaveReader.EPOCH_TICKS);
writer.writeInt64(BigInt(header.saveDateTime) * 10000n + SaveReader.EPOCH_TICKS);
writer.writeByte(header.sessionVisibility);

@@ -49,3 +43,3 @@ if (header.saveHeaderType >= 7) {

if (header.saveHeaderType >= 12) {
MD5Hash_1.MD5Hash.write(writer, header.consistencyHashBytes);
MD5Hash.write(writer, header.consistencyHashBytes);
}

@@ -58,3 +52,3 @@ if (header.saveHeaderType >= 13) {

else {
throw new parser_error_1.UnsupportedVersionError("The save version is too old to be supported currently.");
throw new UnsupportedVersionError("The save version is too old to be supported currently.");
}

@@ -68,7 +62,7 @@ }

}
level_class_1.Level.WriteLevel(writer, level, buildVersion);
Level.WriteLevel(writer, level, buildVersion);
}
level_class_1.Level.SerializeCollectablesList(writer, save.trailingCollectedObjects ?? []);
Level.SerializeCollectablesList(writer, save.trailingCollectedObjects ?? []);
}
static GenerateCompressedChunksFromData(bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, alignment = alignment_enum_1.Alignment.LITTLE_ENDIAN) {
static GenerateCompressedChunksFromData(bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, alignment = Alignment.LITTLE_ENDIAN) {
const errors = [];

@@ -79,3 +73,3 @@ const totalUncompressedSize = bufferArray.byteLength;

const miniView = new DataView(saveBody.buffer);
miniView.setInt32(0, totalUncompressedSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
miniView.setInt32(0, totalUncompressedSize, alignment === Alignment.LITTLE_ENDIAN);
onBinaryBeforeCompressing(saveBody.buffer);

@@ -89,6 +83,6 @@ let handledByte = 0;

try {
compressedChunk = pako_1.default.deflate(uncompressedChunk);
compressedChunk = Pako.deflate(uncompressedChunk);
}
catch (err) {
throw new parser_error_1.CompressionLibraryError("Could not compress save data. " + err);
throw new CompressionLibraryError("Could not compress save data. " + err);
}

@@ -98,15 +92,15 @@ const chunk = new Uint8Array(compressionInfo.chunkHeaderSize + compressedChunk.byteLength);

const view = new DataView(chunk.buffer);
view.setInt32(0, compressionInfo.packageFileTag, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(4, 0x22222222, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(8, compressionInfo.maxUncompressedChunkContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(12, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setUint8(16, file_types_1.CompressionAlgorithmCode.ZLIB);
view.setInt32(17, compressedChunk.byteLength, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(21, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(25, uncompressedContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(29, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(33, compressedChunk.byteLength, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(37, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(41, uncompressedContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(45, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
view.setInt32(0, compressionInfo.packageFileTag, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(4, 0x22222222, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(8, compressionInfo.maxUncompressedChunkContentSize, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(12, 0, alignment === Alignment.LITTLE_ENDIAN);
view.setUint8(16, CompressionAlgorithmCode.ZLIB);
view.setInt32(17, compressedChunk.byteLength, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(21, 0, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(25, uncompressedContentSize, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(29, 0, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(33, compressedChunk.byteLength, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(37, 0, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(41, uncompressedContentSize, alignment === Alignment.LITTLE_ENDIAN);
view.setInt32(45, 0, alignment === Alignment.LITTLE_ENDIAN);
onChunk(chunk);

@@ -123,3 +117,3 @@ chunkSummary.push({

if (posAfterHeader <= 0) {
throw new parser_error_1.ParserError('ParserError', 'Seems like this buffer has no header. Please write the header first before you can generate chunks.');
throw new ParserError('ParserError', 'Seems like this buffer has no header. Please write the header first before you can generate chunks.');
}

@@ -155,2 +149,2 @@ const header = new Uint8Array(this.bufferArray.slice(0, posAfterHeader));

};
exports.SaveWriter = SaveWriter;
export { SaveWriter };

@@ -1,2 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
export {};

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SerializeTransform = exports.ParseTransform = exports.SerializeVec2f = exports.ParseVec2f = exports.SerializeVec2 = exports.ParseVec2 = exports.SerializeVec3f = exports.ParseVec3f = exports.SerializeVec3 = exports.ParseVec3 = exports.norm = exports.mult = exports.length = exports.add = exports.sub = exports.SerializeVec4f = exports.ParseVec4f = exports.SerializeVec4 = exports.ParseVec4 = exports.ParseCol4BGRA = exports.SerializeCol4BGRA = exports.ParseCol4RGBA = exports.SerializeCol4RGBA = void 0;
const SerializeCol4RGBA = (writer, value) => {
export const SerializeCol4RGBA = (writer, value) => {
writer.writeFloat(value.r);

@@ -10,4 +7,3 @@ writer.writeFloat(value.g);

};
exports.SerializeCol4RGBA = SerializeCol4RGBA;
const ParseCol4RGBA = (reader) => {
export const ParseCol4RGBA = (reader) => {
return {

@@ -20,4 +16,3 @@ r: reader.readFloat32(),

};
exports.ParseCol4RGBA = ParseCol4RGBA;
const SerializeCol4BGRA = (writer, value) => {
export const SerializeCol4BGRA = (writer, value) => {
writer.writeByte(value.b);

@@ -28,4 +23,3 @@ writer.writeByte(value.g);

};
exports.SerializeCol4BGRA = SerializeCol4BGRA;
const ParseCol4BGRA = (reader) => {
export const ParseCol4BGRA = (reader) => {
return {

@@ -38,62 +32,48 @@ b: reader.readByte(),

};
exports.ParseCol4BGRA = ParseCol4BGRA;
const ParseVec4 = (reader) => {
export const ParseVec4 = (reader) => {
return {
...((0, exports.ParseVec3)(reader)),
...(ParseVec3(reader)),
w: reader.readDouble()
};
};
exports.ParseVec4 = ParseVec4;
const SerializeVec4 = (writer, vec) => {
(0, exports.SerializeVec3)(writer, vec);
export const SerializeVec4 = (writer, vec) => {
SerializeVec3(writer, vec);
writer.writeDouble(vec.w);
};
exports.SerializeVec4 = SerializeVec4;
const ParseVec4f = (reader) => {
export const ParseVec4f = (reader) => {
return {
...((0, exports.ParseVec3f)(reader)),
...(ParseVec3f(reader)),
w: reader.readFloat32()
};
};
exports.ParseVec4f = ParseVec4f;
const SerializeVec4f = (writer, vec) => {
(0, exports.SerializeVec3f)(writer, vec);
export const SerializeVec4f = (writer, vec) => {
SerializeVec3f(writer, vec);
writer.writeFloat(vec.w);
};
exports.SerializeVec4f = SerializeVec4f;
const sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z });
exports.sub = sub;
const add = (vec, other) => ({ x: vec.x + other.x, y: vec.y + other.y, z: vec.z + other.z });
exports.add = add;
const length = (vec) => Math.sqrt(vec.x ** 2 + vec.y ** 2 + vec.z ** 2);
exports.length = length;
const mult = (vec, scale) => ({ x: vec.x * scale, y: vec.y * scale, z: vec.z * scale });
exports.mult = mult;
const norm = (vec) => (0, exports.mult)(vec, 1. / (0, exports.length)(vec));
exports.norm = norm;
const ParseVec3 = (reader) => {
export const sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z });
export const add = (vec, other) => ({ x: vec.x + other.x, y: vec.y + other.y, z: vec.z + other.z });
export const length = (vec) => Math.sqrt(vec.x ** 2 + vec.y ** 2 + vec.z ** 2);
export const mult = (vec, scale) => ({ x: vec.x * scale, y: vec.y * scale, z: vec.z * scale });
export const norm = (vec) => mult(vec, 1. / length(vec));
export const ParseVec3 = (reader) => {
return {
...((0, exports.ParseVec2)(reader)),
...(ParseVec2(reader)),
z: reader.readDouble()
};
};
exports.ParseVec3 = ParseVec3;
const SerializeVec3 = (writer, vec) => {
(0, exports.SerializeVec2)(writer, vec);
export const SerializeVec3 = (writer, vec) => {
SerializeVec2(writer, vec);
writer.writeDouble(vec.z);
};
exports.SerializeVec3 = SerializeVec3;
const ParseVec3f = (reader) => {
export const ParseVec3f = (reader) => {
return {
...((0, exports.ParseVec2f)(reader)),
...(ParseVec2f(reader)),
z: reader.readFloat32()
};
};
exports.ParseVec3f = ParseVec3f;
const SerializeVec3f = (writer, vec) => {
(0, exports.SerializeVec2f)(writer, vec);
export const SerializeVec3f = (writer, vec) => {
SerializeVec2f(writer, vec);
writer.writeFloat(vec.z);
};
exports.SerializeVec3f = SerializeVec3f;
const ParseVec2 = (reader) => {
export const ParseVec2 = (reader) => {
return {

@@ -104,9 +84,7 @@ x: reader.readDouble(),

};
exports.ParseVec2 = ParseVec2;
const SerializeVec2 = (writer, vec) => {
export const SerializeVec2 = (writer, vec) => {
writer.writeDouble(vec.x);
writer.writeDouble(vec.y);
};
exports.SerializeVec2 = SerializeVec2;
const ParseVec2f = (reader) => {
export const ParseVec2f = (reader) => {
return {

@@ -117,21 +95,17 @@ x: reader.readFloat32(),

};
exports.ParseVec2f = ParseVec2f;
const SerializeVec2f = (writer, vec) => {
export const SerializeVec2f = (writer, vec) => {
writer.writeFloat(vec.x);
writer.writeFloat(vec.y);
};
exports.SerializeVec2f = SerializeVec2f;
const ParseTransform = (reader) => {
export const ParseTransform = (reader) => {
return {
rotation: (0, exports.ParseVec4f)(reader),
translation: (0, exports.ParseVec3f)(reader),
scale3d: (0, exports.ParseVec3f)(reader),
rotation: ParseVec4f(reader),
translation: ParseVec3f(reader),
scale3d: ParseVec3f(reader),
};
};
exports.ParseTransform = ParseTransform;
const SerializeTransform = (writer, transform) => {
(0, exports.SerializeVec4f)(writer, transform.rotation);
(0, exports.SerializeVec3f)(writer, transform.translation);
(0, exports.SerializeVec3f)(writer, transform.scale3d);
export const SerializeTransform = (writer, transform) => {
SerializeVec4f(writer, transform.rotation);
SerializeVec3f(writer, transform.translation);
SerializeVec3f(writer, transform.scale3d);
};
exports.SerializeTransform = SerializeTransform;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ByteStreamReader = void 0;
const alignment_enum_1 = require("../byte/alignment.enum");
const byte_writer_class_1 = require("../byte/byte-writer.class");
const parser_error_1 = require("../error/parser.error");
class ByteStreamReader {
constructor(reader, onCloseCallback, timeout = 30000, maxBufferThreshold = 100 * 1000 * 1000, alignment = alignment_enum_1.Alignment.LITTLE_ENDIAN) {
import { Alignment } from "../byte/alignment.enum";
import { ByteWriter } from "../byte/byte-writer.class";
import { CorruptSaveError, TimeoutError, UnimplementedError } from "../error/parser.error";
export class ByteStreamReader {
constructor(reader, onCloseCallback, timeout = 30000, maxBufferThreshold = 100 * 1000 * 1000, alignment = Alignment.LITTLE_ENDIAN) {
this.reader = reader;

@@ -24,3 +21,3 @@ this.onCloseCallback = onCloseCallback;

const trailingPos = Math.max(0, this.currentByte - this.trailingBufferSize);
this.operatingStreamBuffer = new Uint8Array(byte_writer_class_1.ByteWriter.AppendBuffer(this.operatingStreamBuffer.slice(trailingPos), value));
this.operatingStreamBuffer = new Uint8Array(ByteWriter.AppendBuffer(this.operatingStreamBuffer.slice(trailingPos), value));
this.operatingDataView = new DataView(this.operatingStreamBuffer.buffer);

@@ -83,3 +80,3 @@ this.currentByte = Math.min(this.currentByte, this.trailingBufferSize);

if (this.inputStreamIsDone) {
throw new parser_error_1.CorruptSaveError('Input Stream has finished before needed data was received.');
throw new CorruptSaveError('Input Stream has finished before needed data was received.');
}

@@ -114,3 +111,3 @@ if (this.paused) {

if (!this.inputStreamIsDone) {
reject(new parser_error_1.TimeoutError(`Timed out before ${this.neededNextAmountOfBytes} bytes were available to read next.`));
reject(new TimeoutError(`Timed out before ${this.neededNextAmountOfBytes} bytes were available to read next.`));
}

@@ -155,3 +152,3 @@ return resolve();

this.totalNumberOfBytesRead += 2;
let data = this.operatingDataView.getInt16(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getInt16(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -162,3 +159,3 @@ return data;

this.totalNumberOfBytesRead += 2;
let data = this.operatingDataView.getUint16(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getUint16(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -169,3 +166,3 @@ return data;

this.totalNumberOfBytesRead += 4;
let data = this.operatingDataView.getInt32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getInt32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -176,3 +173,3 @@ return data;

this.totalNumberOfBytesRead += 4;
let data = this.operatingDataView.getUint32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getUint32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -183,3 +180,3 @@ return data;

this.totalNumberOfBytesRead += 8;
let data = this.operatingDataView.getBigInt64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getBigInt64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -193,3 +190,3 @@ return data;

this.totalNumberOfBytesRead += 8;
let data = this.operatingDataView.getBigUint64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getBigUint64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -200,3 +197,3 @@ return data;

this.totalNumberOfBytesRead += 4;
let data = this.operatingDataView.getFloat32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getFloat32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -207,3 +204,3 @@ return data;

this.totalNumberOfBytesRead += 8;
let data = this.operatingDataView.getFloat64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.operatingDataView.getFloat64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -243,3 +240,3 @@ return data;

getBufferProgress() {
throw new parser_error_1.UnimplementedError();
throw new UnimplementedError();
}

@@ -253,2 +250,1 @@ getBufferLength() {

}
exports.ByteStreamReader = ByteStreamReader;

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsonStreamStateWriter = void 0;
const json_stream_writable_1 = require("./json-stream-writable");
class JsonStreamStateWriter extends json_stream_writable_1.JsonStreamWritable {
import { JsonStreamWritable } from "./json-stream-writable";
export class JsonStreamStateWriter extends JsonStreamWritable {
constructor(pushWritable, startState) {

@@ -18,2 +15,1 @@ super(pushWritable);

}
exports.JsonStreamStateWriter = JsonStreamStateWriter;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsonStreamWritable = void 0;
class JsonStreamWritable {
export class JsonStreamWritable {
constructor(pushWritable) {

@@ -83,2 +80,1 @@ this.pushWritable = pushWritable;

}
exports.JsonStreamWritable = JsonStreamWritable;

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsonStreamWriter = void 0;
const json_stream_state_writer_1 = require("./json-stream-state-writer");
class JsonStreamWriter extends json_stream_state_writer_1.JsonStreamStateWriter {
import { JsonStreamStateWriter } from "./json-stream-state-writer";
export class JsonStreamWriter extends JsonStreamStateWriter {
constructor(writer) {

@@ -127,2 +124,1 @@ super(async (value) => await writer.write(value), 'BEFORE_START');

}
exports.JsonStreamWriter = JsonStreamWriter;

@@ -1,2 +0,2 @@

import { ReadableStream } from "stream/web";
import { ReadableStream } from 'web-streams-polyfill';
export declare class ReadableStreamParser {

@@ -3,0 +3,0 @@ private static CreateReadableStreamForParsingSave;

@@ -1,11 +0,8 @@

"use strict";
var _a;
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadableStreamParser = void 0;
const web_1 = require("stream/web");
const satisfactory_save_1 = require("../../satisfactory/save/satisfactory-save");
const save_reader_1 = require("../../satisfactory/save/save-reader");
const SaveComponent_1 = require("../../satisfactory/objects/SaveComponent");
const SaveEntity_1 = require("../../satisfactory/objects/SaveEntity");
const level_class_1 = require("../../satisfactory/save/level.class");
import { SatisfactorySave } from "../../satisfactory/save/satisfactory-save";
import { SaveReader } from "../../satisfactory/save/save-reader";
import { ReadableStream } from 'web-streams-polyfill';
import { SaveComponent, isSaveComponent } from "../../satisfactory/objects/SaveComponent";
import { SaveEntity, isSaveEntity } from "../../satisfactory/objects/SaveEntity";
import { Level } from "../../satisfactory/save/level.class";
const DEFAULT_BYTE_HIGHWATERMARK = 1024 * 1024 * 200;

@@ -100,3 +97,3 @@ const createStringLengthQueuingStrategy = (highWaterMark = DEFAULT_BYTE_HIGHWATERMARK / 4) => ({

await write('], "collectables": [', false);
const collectables = level_class_1.Level.ReadCollectablesList(reader);
const collectables = Level.ReadCollectablesList(reader);
await write(`${collectables.map(obj => JSON.stringify(obj)).join(', ')}`, true);

@@ -111,3 +108,3 @@ await write(']', false);

let ourController = null;
const stream = new web_1.ReadableStream({
const stream = new ReadableStream({
start: (controller) => {

@@ -153,5 +150,5 @@ ourController = controller;

const startStreaming = async () => {
const reader = new save_reader_1.SaveReader(bytes.buffer, onProgress);
const reader = new SaveReader(bytes.buffer, onProgress);
const header = reader.readHeader();
const save = new satisfactory_save_1.SatisfactorySave(name, header);
const save = new SatisfactorySave(name, header);
const inflateResult = reader.inflateChunks();

@@ -178,9 +175,9 @@ onDecompressedSaveBody(reader.getBuffer());

switch (objectType) {
case SaveEntity_1.SaveEntity.TypeID:
obj = new SaveEntity_1.SaveEntity('', '', '', '');
SaveEntity_1.SaveEntity.ParseHeader(reader, obj);
case SaveEntity.TypeID:
obj = new SaveEntity('', '', '', '');
SaveEntity.ParseHeader(reader, obj);
break;
case SaveComponent_1.SaveComponent.TypeID:
obj = new SaveComponent_1.SaveComponent('', '', '', '');
SaveComponent_1.SaveComponent.ParseHeader(reader, obj);
case SaveComponent.TypeID:
obj = new SaveComponent('', '', '', '');
SaveComponent.ParseHeader(reader, obj);
break;

@@ -200,10 +197,10 @@ default:

const before = reader.getBufferPosition();
if ((0, SaveEntity_1.isSaveEntity)(objects[i])) {
SaveEntity_1.SaveEntity.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
if (isSaveEntity(objects[i])) {
SaveEntity.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
}
else if ((0, SaveComponent_1.isSaveComponent)(objects[i])) {
SaveComponent_1.SaveComponent.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
else if (isSaveComponent(objects[i])) {
SaveComponent.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
}
}
};
exports.ReadableStreamParser = ReadableStreamParser;
export { ReadableStreamParser };

@@ -1,13 +0,10 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StreamParser = void 0;
const __1 = require("../../..");
const SaveComponent_1 = require("../../satisfactory/objects/SaveComponent");
const SaveEntity_1 = require("../../satisfactory/objects/SaveEntity");
import { Level, SatisfactorySave, SaveComponent, SaveEntity, SaveReader } from '../../..';
import { isSaveComponent } from '../../satisfactory/objects/SaveComponent';
import { isSaveEntity } from '../../satisfactory/objects/SaveEntity';
class StreamParser {
static async ParseSaveFileToJsonStream(name, bytes, outputJson, onDecompressedSaveBody = () => { }, onProgress = () => { }) {
const reader = new __1.SaveReader(bytes.buffer, onProgress);
const reader = new SaveReader(bytes.buffer, onProgress);
const writer = outputJson.getWriter();
const header = reader.readHeader();
const save = new __1.SatisfactorySave(name, header);
const save = new SatisfactorySave(name, header);
const inflateResult = reader.inflateChunks();

@@ -66,3 +63,3 @@ onDecompressedSaveBody(reader.getBuffer());

await writer.write('], "collectables": [');
const collectables = __1.Level.ReadCollectablesList(reader);
const collectables = Level.ReadCollectablesList(reader);
await writer.write(`${collectables.map(obj => JSON.stringify(obj)).join(', ')}`);

@@ -81,9 +78,9 @@ await writer.write(']');

switch (objectType) {
case __1.SaveEntity.TypeID:
obj = new __1.SaveEntity('', '', '', '');
__1.SaveEntity.ParseHeader(reader, obj);
case SaveEntity.TypeID:
obj = new SaveEntity('', '', '', '');
SaveEntity.ParseHeader(reader, obj);
break;
case __1.SaveComponent.TypeID:
obj = new __1.SaveComponent('', '', '', '');
__1.SaveComponent.ParseHeader(reader, obj);
case SaveComponent.TypeID:
obj = new SaveComponent('', '', '', '');
SaveComponent.ParseHeader(reader, obj);
break;

@@ -103,10 +100,10 @@ default:

const before = reader.getBufferPosition();
if ((0, SaveEntity_1.isSaveEntity)(objects[i])) {
__1.SaveEntity.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
if (isSaveEntity(objects[i])) {
SaveEntity.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
}
else if ((0, SaveComponent_1.isSaveComponent)(objects[i])) {
__1.SaveComponent.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
else if (isSaveComponent(objects[i])) {
SaveComponent.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
}
}
};
exports.StreamParser = StreamParser;
export { StreamParser };

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveStreamJsonStringifier = void 0;
const save_stream_writer_class_1 = require("./save-stream-writer.class");
class SaveStreamJsonStringifier {
import { SaveStreamWriter } from "./save-stream-writer.class";
export class SaveStreamJsonStringifier {
static async StreamStringifySave(save, output) {
const writer = new save_stream_writer_class_1.SaveStreamWriter(output.getWriter());
const writer = new SaveStreamWriter(output.getWriter());
await writer.beginSave();

@@ -35,2 +32,1 @@ await writer.writeHeader(save.header);

}
exports.SaveStreamJsonStringifier = SaveStreamJsonStringifier;

@@ -1,15 +0,9 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveStreamReader = void 0;
const pako_1 = __importDefault(require("pako"));
const __1 = require("../..");
const alignment_enum_1 = require("../byte/alignment.enum");
const byte_stream_reader_class_1 = require("./byte-stream-reader.class");
const stream_level_class_1 = require("./stream-level.class");
class SaveStreamReader extends byte_stream_reader_class_1.ByteStreamReader {
import Pako from "pako";
import { CompressionLibraryError, CorruptSaveError, UnsupportedVersionError } from "../..";
import { Alignment } from "../byte/alignment.enum";
import { ByteStreamReader } from "./byte-stream-reader.class";
import { StreamLevel } from "./stream-level.class";
class SaveStreamReader extends ByteStreamReader {
constructor(reader, maxBufferThreshold, onCloseCallback = async () => { }) {
super(reader, onCloseCallback, 30000, maxBufferThreshold, alignment_enum_1.Alignment.LITTLE_ENDIAN);
super(reader, onCloseCallback, 30000, maxBufferThreshold, Alignment.LITTLE_ENDIAN);
this.compressionInfo = {

@@ -64,3 +58,3 @@ packageFileTag: 0,

else {
throw new __1.UnsupportedVersionError("The save version is too old to support encoding currently. Save in newer game version.");
throw new UnsupportedVersionError("The save version is too old to support encoding currently. Save in newer game version.");
}

@@ -75,9 +69,9 @@ return header;

if (this.compressionInfo.packageFileTag <= 0) {
this.compressionInfo.packageFileTag = this.operatingDataView.getInt32(0, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.compressionInfo.packageFileTag = this.operatingDataView.getInt32(0, this.alignment === Alignment.LITTLE_ENDIAN);
}
if (this.compressionInfo.maxUncompressedChunkContentSize <= 0) {
this.compressionInfo.maxUncompressedChunkContentSize = this.operatingDataView.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
this.compressionInfo.maxUncompressedChunkContentSize = this.operatingDataView.getInt32(8, this.alignment === Alignment.LITTLE_ENDIAN);
}
const chunkCompressedLength = this.operatingDataView.getInt32(32, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = this.operatingDataView.getInt32(40, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
const chunkCompressedLength = this.operatingDataView.getInt32(32, this.alignment === Alignment.LITTLE_ENDIAN);
const chunkUncompressedLength = this.operatingDataView.getInt32(40, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte = this.compressionInfo.chunkHeaderSize;

@@ -89,5 +83,5 @@ await this.allocate(chunkCompressedLength);

try {
const uncompressedChunk = pako_1.default.inflate(currentChunk);
const uncompressedChunk = Pako.inflate(currentChunk);
if (uncompressedChunk.byteLength !== chunkUncompressedLength) {
throw new __1.CorruptSaveError('indicated save body chunk size does not match the inflated result. Save is possibly corrupt.');
throw new CorruptSaveError('indicated save body chunk size does not match the inflated result. Save is possibly corrupt.');
}

@@ -100,3 +94,3 @@ if (this.debug) {

catch (err) {
throw new __1.CompressionLibraryError("Failed to inflate compressed save data. " + err);
throw new CompressionLibraryError("Failed to inflate compressed save data. " + err);
}

@@ -109,3 +103,3 @@ }

if (header.saveVersion < 29) {
throw new __1.UnsupportedVersionError('Support for < U6 is not yet implemented.');
throw new UnsupportedVersionError('Support for < U6 is not yet implemented.');
}

@@ -119,3 +113,3 @@ await this.allocate(100);

}
await stream_level_class_1.StreamLevel.ReadLevelAsync(this, writer, levelName, header.buildVersion);
await StreamLevel.ReadLevelAsync(this, writer, levelName, header.buildVersion);
}

@@ -127,2 +121,2 @@ return;

SaveStreamReader.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 48;
exports.SaveStreamReader = SaveStreamReader;
export { SaveStreamReader };

@@ -1,4 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveStreamWriter = void 0;
class ModeStateTracker {

@@ -17,3 +14,3 @@ constructor(mode) {

}
class SaveStreamWriter {
export class SaveStreamWriter {
constructor(writer) {

@@ -87,2 +84,1 @@ this.writer = writer;

}
exports.SaveStreamWriter = SaveStreamWriter;

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StreamLevel = void 0;
const __1 = require("../..");
const SaveComponent_1 = require("../satisfactory/objects/SaveComponent");
const SaveEntity_1 = require("../satisfactory/objects/SaveEntity");
class StreamLevel {
import { Level, ParseTransform } from "../..";
import { SaveComponent } from "../satisfactory/objects/SaveComponent";
import { SaveEntity } from "../satisfactory/objects/SaveEntity";
export class StreamLevel {
constructor(name) {

@@ -15,3 +12,3 @@ this.name = name;

const objectHeaders = StreamLevel.ReadObjectHeaders(reader);
const collectables = __1.Level.ReadCollectablesList(reader);
const collectables = Level.ReadCollectablesList(reader);
await writer.openLevel(levelName);

@@ -22,3 +19,3 @@ await StreamLevel.StreamObjectContentsAsync(reader, writer, objectHeaders, buildVersion);

await writer.endLevel();
__1.Level.ReadCollectablesList(reader);
Level.ReadCollectablesList(reader);
return;

@@ -38,3 +35,3 @@ }

needTransform: reader.readInt32() == 1,
transform: (0, __1.ParseTransform)(reader),
transform: ParseTransform(reader),
wasPlacedInLevel: reader.readInt32() == 1

@@ -84,9 +81,9 @@ });

if (!isComponent) {
const entity = new SaveEntity_1.SaveEntity(obj.typePath, obj.rootObject, obj.instanceName, '', obj.needTransform);
SaveEntity_1.SaveEntity.ParseData(entity, len, reader, buildVersion, obj.typePath);
const entity = new SaveEntity(obj.typePath, obj.rootObject, obj.instanceName, '', obj.needTransform);
SaveEntity.ParseData(entity, len, reader, buildVersion, obj.typePath);
bufferedObjects.push(entity);
}
else if (isComponent) {
const component = new SaveComponent_1.SaveComponent(obj.typePath, obj.rootObject, obj.instanceName, obj.parentEntityName);
SaveComponent_1.SaveComponent.ParseData(component, len, reader, buildVersion, obj.typePath);
const component = new SaveComponent(obj.typePath, obj.rootObject, obj.instanceName, obj.parentEntityName);
SaveComponent.ParseData(component, len, reader, buildVersion, obj.typePath);
bufferedObjects.push(component);

@@ -107,2 +104,1 @@ }

}
exports.StreamLevel = StreamLevel;

@@ -1,9 +0,6 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StreamParserReader = void 0;
const alignment_enum_1 = require("../byte/alignment.enum");
class StreamParserReader {
import { Alignment } from "../byte/alignment.enum";
export class StreamParserReader {
constructor(minBufferSize) {
this.minBufferSize = minBufferSize;
this.alignment = alignment_enum_1.Alignment.LITTLE_ENDIAN;
this.alignment = Alignment.LITTLE_ENDIAN;
this.getAmountLeftToRead = () => {

@@ -157,3 +154,3 @@ return this.view.byteLength - this.currentByte;

readInt16() {
let data = this.view.getInt16(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getInt16(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -163,3 +160,3 @@ return data;

readUint16() {
let data = this.view.getUint16(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getUint16(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 2;

@@ -169,3 +166,3 @@ return data;

readInt32() {
let data = this.view.getInt32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getInt32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -175,3 +172,3 @@ return data;

readUint32() {
let data = this.view.getUint32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getUint32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -181,3 +178,3 @@ return data;

readLong() {
let data = this.view.getBigInt64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getBigInt64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -190,3 +187,3 @@ return data;

readUint64() {
let data = this.view.getBigUint64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getBigUint64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -196,3 +193,3 @@ return data;

readFloat32() {
let data = this.view.getFloat32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getFloat32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 4;

@@ -202,3 +199,3 @@ return data;

readDouble() {
let data = this.view.getFloat64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
let data = this.view.getFloat64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN);
this.currentByte += 8;

@@ -233,2 +230,1 @@ return data;

}
exports.StreamParserReader = StreamParserReader;
{
"name": "@etothepii/satisfactory-file-parser",
"author": "etothepii",
"version": "0.1.25",
"version": "0.1.26",
"description": "A file parser for satisfactory files. Includes save files and blueprint files.",

@@ -48,4 +48,6 @@ "types": "./build/index.d.ts",

"dependencies": {
"pako": "^2.1.0"
"pako": "^2.1.0",
"stream-browserify": "^3.0.0",
"web-streams-polyfill": "^4.0.0"
}
}
Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc