@etothepii/satisfactory-file-parser
Comparing version 0.0.3 to 0.0.4
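The hunks below come from the package's compiled dist files. Every change follows the same pattern: the CommonJS output of 0.0.3 (require(...) calls plus exports.X = X assignments) is replaced in 0.0.4 by native ES module syntax (import/export). As an illustration only, here is a condensed before/after sketch of BlueprintReader, paraphrased from the hunks below with the class body elided:

```js
// 0.0.3 dist output (CommonJS): dependencies are required and the class
// is attached to `exports`.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BlueprintReader = void 0;
const byte_reader_class_1 = require("./byte/byte-reader.class");
class BlueprintReader extends byte_reader_class_1.ByteReader {
    // ... unchanged parsing logic ...
}
exports.BlueprintReader = BlueprintReader;

// 0.0.4 dist output (native ES modules): the same class now uses
// import/export syntax, and internal references drop the *_1 prefixes.
import { ByteReader } from "./byte/byte-reader.class";
export class BlueprintReader extends ByteReader {
    // ... unchanged parsing logic ...
}
```

The remaining hunks apply the same transformation to the writer, byte reader/writer, error, level, and property classes.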
import { ByteReader } from "./byte/byte-reader.class"; | ||
import { BlueprintConfig, BlueprintHeader } from "./satisfactory/blueprint/blueprint"; | ||
import { SaveComponent } from "./satisfactory/objects/SaveComponent"; | ||
import { SaveEntity } from "./satisfactory/objects/SaveEntity"; | ||
import { SaveComponent } from "./satisfactory/objects/SaveComponent"; | ||
import { ChunkCompressionInfo } from "./satisfactory/satisfactory-save.class"; | ||
import { BlueprintConfig, BlueprintHeader } from "./satisfactory/blueprint/blueprint.interface"; | ||
export declare class BlueprintReader extends ByteReader { | ||
@@ -7,0 +7,0 @@ compressionInfo: ChunkCompressionInfo; |
@@ -1,16 +0,10 @@ | ||
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.BlueprintConfigReader = exports.BlueprintReader = void 0; | ||
const alignment_enum_1 = require("./byte/alignment.enum"); | ||
const byte_reader_class_1 = require("./byte/byte-reader.class"); | ||
const util_types_1 = require("./satisfactory/structs/util.types"); | ||
const pako_1 = __importDefault(require("pako")); | ||
const level_class_1 = require("./satisfactory/level.class"); | ||
const parser_error_1 = require("./error/parser.error"); | ||
class BlueprintReader extends byte_reader_class_1.ByteReader { | ||
import Pako from "pako"; | ||
import { Alignment } from "./byte/alignment.enum"; | ||
import { ByteReader } from "./byte/byte-reader.class"; | ||
import { ParserError } from "./error/parser.error"; | ||
import { Level } from "./satisfactory/level.class"; | ||
import { ParseCol4RGBA, ParseVec3 } from "./satisfactory/structs/util.types"; | ||
export class BlueprintReader extends ByteReader { | ||
constructor(bluePrintBuffer) { | ||
super(bluePrintBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
super(bluePrintBuffer, Alignment.LITTLE_ENDIAN); | ||
this.compressionInfo = { | ||
@@ -24,3 +18,3 @@ packageFileTag: 0, | ||
const unk = reader.readBytes(3 * 4); // 02 00 00 00, 24 00 00 00, 7F 3B 03 00 - always | ||
const positionThingOrWhat = (0, util_types_1.ParseVec3)(reader); // 04 00 00 00, 04 00 00 00, 04 00 00 00 - always | ||
const positionThingOrWhat = ParseVec3(reader); // 04 00 00 00, 04 00 00 00, 04 00 00 00 - always | ||
// list of item costs. | ||
@@ -63,9 +57,9 @@ let itemTypeCount = reader.readInt32(); | ||
if (this.compressionInfo.packageFileTag <= 0) { | ||
this.compressionInfo.packageFileTag = chunkHeader.getUint32(0, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.compressionInfo.packageFileTag = chunkHeader.getUint32(0, this.alignment === Alignment.LITTLE_ENDIAN); | ||
} | ||
if (this.compressionInfo.maxChunkContentSize <= 0) { | ||
this.compressionInfo.maxChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); //00 00 02 00 = 131072 | ||
this.compressionInfo.maxChunkContentSize = chunkHeader.getInt32(8, this.alignment === Alignment.LITTLE_ENDIAN); //00 00 02 00 = 131072 | ||
} | ||
const chunkCompressedLength = chunkHeader.getInt32(32, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
const chunkUncompressedLength = chunkHeader.getInt32(40, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
const chunkCompressedLength = chunkHeader.getInt32(32, this.alignment === Alignment.LITTLE_ENDIAN); | ||
const chunkUncompressedLength = chunkHeader.getInt32(40, this.alignment === Alignment.LITTLE_ENDIAN); | ||
totalUncompressedBodySize += chunkUncompressedLength; | ||
@@ -82,7 +76,7 @@ const currentChunkSize = chunkCompressedLength; | ||
let currentInflatedChunk = null; | ||
currentInflatedChunk = pako_1.default.inflate(currentChunk); | ||
currentInflatedChunk = Pako.inflate(currentChunk); | ||
currentChunks.push(currentInflatedChunk); | ||
} | ||
catch (err) { | ||
throw new parser_error_1.ParserError('ParserError', 'An error occurred while calling pako inflate.' + err); | ||
throw new ParserError('ParserError', 'An error occurred while calling pako inflate.' + err); | ||
} | ||
@@ -114,12 +108,11 @@ } | ||
let objects = []; | ||
level_class_1.Level.ReadObjectHeaders(reader, objects, () => { }); | ||
Level.ReadObjectHeaders(reader, objects, () => { }); | ||
// objects contents | ||
level_class_1.Level.ReadObjectContents(reader, objects, 0, () => { }); | ||
Level.ReadObjectContents(reader, objects, 0, () => { }); | ||
return objects; | ||
} | ||
} | ||
exports.BlueprintReader = BlueprintReader; | ||
class BlueprintConfigReader extends byte_reader_class_1.ByteReader { | ||
export class BlueprintConfigReader extends ByteReader { | ||
constructor(bluePrintConfigBuffer) { | ||
super(bluePrintConfigBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
super(bluePrintConfigBuffer, Alignment.LITTLE_ENDIAN); | ||
this.bluePrintConfigBuffer = bluePrintConfigBuffer; | ||
@@ -132,3 +125,3 @@ this.parse = () => BlueprintConfigReader.ParseConfig(this); | ||
const unk3 = reader.readInt32(); // iconID. default is 0x0E03 = 782 | ||
const colorMaybe = (0, util_types_1.ParseCol4RGBA)(reader); // LinearColor, which is 4x float32 as RGBA | ||
const colorMaybe = ParseCol4RGBA(reader); // LinearColor, which is 4x float32 as RGBA | ||
return { | ||
@@ -141,3 +134,1 @@ description, | ||
} | ||
exports.BlueprintConfigReader = BlueprintConfigReader; | ||
//# sourceMappingURL=blueprint-reader.js.map |
@@ -1,6 +0,6 @@ | ||
import { ByteWriter } from "./byte/byte-writer.class"; | ||
import { BlueprintHeader, BlueprintConfig } from "../parser/satisfactory/blueprint/blueprint.interface"; | ||
import { SaveComponent } from "../parser/satisfactory/objects/SaveComponent"; | ||
import { SaveEntity } from "../parser/satisfactory/objects/SaveEntity"; | ||
import { ChunkCompressionInfo } from "../parser/satisfactory/satisfactory-save.class"; | ||
import { ByteWriter } from "./byte/byte-writer.class"; | ||
import { BlueprintConfig, BlueprintHeader } from "./satisfactory/blueprint/blueprint"; | ||
export declare class BlueprintWriter extends ByteWriter { | ||
@@ -7,0 +7,0 @@ constructor(); |
@@ -1,13 +0,10 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.BlueprintConfigWriter = exports.BlueprintWriter = void 0; | ||
const alignment_enum_1 = require("./byte/alignment.enum"); | ||
const byte_writer_class_1 = require("./byte/byte-writer.class"); | ||
const level_class_1 = require("../parser/satisfactory/level.class"); | ||
const util_types_1 = require("../parser/satisfactory/structs/util.types"); | ||
const save_writer_class_1 = require("../parser/save-writer.class"); | ||
const parser_error_1 = require("./error/parser.error"); | ||
class BlueprintWriter extends byte_writer_class_1.ByteWriter { | ||
import { Level } from "../parser/satisfactory/level.class"; | ||
import { SerializeCol4RGBA } from "../parser/satisfactory/structs/util.types"; | ||
import { Alignment } from "./byte/alignment.enum"; | ||
import { ByteWriter } from "./byte/byte-writer.class"; | ||
import { ParserError } from "./error/parser.error"; | ||
import { SaveWriter } from "./save-writer"; | ||
export class BlueprintWriter extends ByteWriter { | ||
constructor() { | ||
super(alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
super(Alignment.LITTLE_ENDIAN); | ||
} | ||
@@ -38,3 +35,3 @@ static SerializeHeader(writer, header) { | ||
if (posAfterHeader <= 0) { | ||
throw new parser_error_1.ParserError('ParserError', 'seems like this buffer has no header. Please write the header first before you can generate chunks.'); | ||
throw new ParserError('ParserError', 'seems like this buffer has no header. Please write the header first before you can generate chunks.'); | ||
} | ||
@@ -46,3 +43,3 @@ // send plain header first. | ||
this.bufferArray = this.bufferArray.slice(posAfterHeader); | ||
save_writer_class_1.SaveWriter.GenerateCompressedChunksFromData(this.bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, onFinish, this.alignment); | ||
SaveWriter.GenerateCompressedChunksFromData(this.bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, onFinish, this.alignment); | ||
} | ||
@@ -53,12 +50,11 @@ static SerializeObjects(writer, objects) { | ||
writer.writeInt32(0); | ||
level_class_1.Level.SerializeObjectHeaders(writer, objects); | ||
Level.SerializeObjectHeaders(writer, objects); | ||
writer.writeBinarySizeFromPosition(headersLenIndicator, headersLenIndicator + 4); | ||
// objects contents | ||
level_class_1.Level.SerializeObjectContents(writer, objects, 0); | ||
Level.SerializeObjectContents(writer, objects, 0); | ||
} | ||
} | ||
exports.BlueprintWriter = BlueprintWriter; | ||
class BlueprintConfigWriter extends byte_writer_class_1.ByteWriter { | ||
export class BlueprintConfigWriter extends ByteWriter { | ||
constructor() { | ||
super(alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
super(Alignment.LITTLE_ENDIAN); | ||
} | ||
@@ -69,6 +65,4 @@ static SerializeConfig(writer, config) { | ||
writer.writeInt32(config.iconID); | ||
(0, util_types_1.SerializeCol4RGBA)(writer, config.color); | ||
SerializeCol4RGBA(writer, config.color); | ||
} | ||
} | ||
exports.BlueprintConfigWriter = BlueprintConfigWriter; | ||
//# sourceMappingURL=blueprint-writer.js.map |
@@ -1,9 +0,5 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.Alignment = void 0; | ||
var Alignment; | ||
export var Alignment; | ||
(function (Alignment) { | ||
Alignment[Alignment["BIG_ENDIAN"] = 0] = "BIG_ENDIAN"; | ||
Alignment[Alignment["LITTLE_ENDIAN"] = 1] = "LITTLE_ENDIAN"; | ||
})(Alignment = exports.Alignment || (exports.Alignment = {})); | ||
//# sourceMappingURL=alignment.enum.js.map | ||
})(Alignment || (Alignment = {})); |
@@ -1,6 +0,3 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.ByteReader = void 0; | ||
const alignment_enum_1 = require("./alignment.enum"); | ||
class ByteReader { | ||
import { Alignment } from "./alignment.enum"; | ||
export class ByteReader { | ||
constructor(fileBuffer, alignment) { | ||
@@ -60,3 +57,3 @@ this.currentByte = 0; | ||
readUint16() { | ||
let data = this.bufferView.getUint16(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
let data = this.bufferView.getUint16(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 2; | ||
@@ -66,3 +63,3 @@ return data; | ||
readInt32() { | ||
let data = this.bufferView.getInt32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
let data = this.bufferView.getInt32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 4; | ||
@@ -72,3 +69,3 @@ return data; | ||
readUint32() { | ||
let data = this.bufferView.getUint32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
let data = this.bufferView.getUint32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 4; | ||
@@ -78,3 +75,3 @@ return data; | ||
readLong() { | ||
let data = this.bufferView.getBigInt64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
let data = this.bufferView.getBigInt64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 8; | ||
@@ -84,3 +81,3 @@ return data; | ||
readFloat() { | ||
let data = this.bufferView.getFloat32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
let data = this.bufferView.getFloat32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 4; | ||
@@ -90,3 +87,3 @@ return data; | ||
readDouble() { | ||
let data = this.bufferView.getFloat64(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
let data = this.bufferView.getFloat64(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 8; | ||
@@ -99,3 +96,3 @@ return data; | ||
try { | ||
let strLength = this.bufferView.getInt32(this.currentByte, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
let strLength = this.bufferView.getInt32(this.currentByte, this.alignment === Alignment.LITTLE_ENDIAN); | ||
counter += 4; | ||
@@ -139,3 +136,1 @@ payload = new Array(strLength - 1).fill('').map(c => String.fromCharCode(this.bufferView.getUint8(counter++))).join(''); | ||
} | ||
exports.ByteReader = ByteReader; | ||
//# sourceMappingURL=byte-reader.class.js.map |
@@ -1,5 +0,2 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.ByteWriter = void 0; | ||
const alignment_enum_1 = require("./alignment.enum"); | ||
import { Alignment } from "./alignment.enum"; | ||
class ByteWriter { | ||
@@ -47,3 +44,3 @@ constructor(alignment, bufferSize = 500) { | ||
this.extendBufferIfNeeded(2); | ||
this.bufferView.setInt16(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setInt16(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 2; | ||
@@ -53,3 +50,3 @@ } | ||
this.extendBufferIfNeeded(2); | ||
this.bufferView.setUint16(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setUint16(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 2; | ||
@@ -59,3 +56,3 @@ } | ||
this.extendBufferIfNeeded(4); | ||
this.bufferView.setInt32(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setInt32(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 4; | ||
@@ -65,3 +62,3 @@ } | ||
this.extendBufferIfNeeded(4); | ||
this.bufferView.setUint32(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setUint32(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 4; | ||
@@ -71,3 +68,3 @@ } | ||
this.extendBufferIfNeeded(8); | ||
this.bufferView.setBigInt64(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setBigInt64(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 8; | ||
@@ -77,3 +74,3 @@ } | ||
this.extendBufferIfNeeded(8); | ||
this.bufferView.setBigUint64(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setBigUint64(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 8; | ||
@@ -83,3 +80,3 @@ } | ||
this.extendBufferIfNeeded(4); | ||
this.bufferView.setFloat32(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setFloat32(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 4; | ||
@@ -89,3 +86,3 @@ } | ||
this.extendBufferIfNeeded(8); | ||
this.bufferView.setFloat64(this.currentByte, value, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.bufferView.setFloat64(this.currentByte, value, this.alignment === Alignment.LITTLE_ENDIAN); | ||
this.currentByte += 8; | ||
@@ -157,3 +154,2 @@ } | ||
ByteWriter.IsASCIICompatible = (value) => /^[\x00-\x7F]*$/.test(value); | ||
exports.ByteWriter = ByteWriter; | ||
//# sourceMappingURL=byte-writer.class.js.map | ||
export { ByteWriter }; |
@@ -1,5 +0,2 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.CompressionLibraryError = exports.CorruptSaveError = exports.UnsupportedVersionError = exports.ParserError = void 0; | ||
class ParserError extends Error { | ||
export class ParserError extends Error { | ||
constructor(name, message) { | ||
@@ -10,4 +7,3 @@ super(message); | ||
} | ||
exports.ParserError = ParserError; | ||
class UnsupportedVersionError extends ParserError { | ||
export class UnsupportedVersionError extends ParserError { | ||
constructor(message) { | ||
@@ -17,4 +13,3 @@ super('UnsupportedVersionError', message ?? 'This save version is not supported.'); | ||
} | ||
exports.UnsupportedVersionError = UnsupportedVersionError; | ||
class CorruptSaveError extends ParserError { | ||
export class CorruptSaveError extends ParserError { | ||
constructor(message) { | ||
@@ -24,4 +19,3 @@ super('CorruptSaveError', message ?? 'This save data is most likely corrupt.'); | ||
} | ||
exports.CorruptSaveError = CorruptSaveError; | ||
class CompressionLibraryError extends ParserError { | ||
export class CompressionLibraryError extends ParserError { | ||
constructor(message) { | ||
@@ -31,3 +25,1 @@ super('CompressionLibraryError', message ?? 'Failed to compress/decompress save data.'); | ||
} | ||
exports.CompressionLibraryError = CompressionLibraryError; | ||
//# sourceMappingURL=parser.error.js.map |
@@ -1,8 +0,5 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.Level = void 0; | ||
const ObjectReference_1 = require("./objects/ObjectReference"); | ||
const SaveComponent_1 = require("./objects/SaveComponent"); | ||
const SaveEntity_1 = require("./objects/SaveEntity"); | ||
class Level { | ||
import { ObjectReference } from "./objects/ObjectReference"; | ||
import { isSaveComponent, SaveComponent } from "./objects/SaveComponent"; | ||
import { isSaveEntity, SaveEntity } from "./objects/SaveEntity"; | ||
export class Level { | ||
constructor(name) { | ||
@@ -18,8 +15,8 @@ this.name = name; | ||
case 'SaveEntity': | ||
writer.writeInt32(SaveEntity_1.SaveEntity.TypeID); | ||
SaveEntity_1.SaveEntity.SerializeHeader(writer, obj); | ||
writer.writeInt32(SaveEntity.TypeID); | ||
SaveEntity.SerializeHeader(writer, obj); | ||
break; | ||
case 'SaveComponent': | ||
writer.writeInt32(SaveComponent_1.SaveComponent.TypeID); | ||
SaveComponent_1.SaveComponent.SerializeHeader(writer, obj); | ||
writer.writeInt32(SaveComponent.TypeID); | ||
SaveComponent.SerializeHeader(writer, obj); | ||
break; | ||
@@ -50,7 +47,7 @@ default: | ||
writer.writeInt32(0); | ||
if ((0, SaveEntity_1.isSaveEntity)(obj)) { | ||
SaveEntity_1.SaveEntity.SerializeData(writer, obj, buildVersion); | ||
if (isSaveEntity(obj)) { | ||
SaveEntity.SerializeData(writer, obj, buildVersion); | ||
} | ||
else if ((0, SaveComponent_1.isSaveComponent)(obj)) { | ||
SaveComponent_1.SaveComponent.SerializeData(writer, obj, buildVersion); | ||
else if (isSaveComponent(obj)) { | ||
SaveComponent.SerializeData(writer, obj, buildVersion); | ||
} | ||
@@ -69,10 +66,10 @@ writer.writeBinarySizeFromPosition(lenReplacementPosition, lenReplacementPosition + 4); | ||
switch (objectType) { | ||
case SaveEntity_1.SaveEntity.TypeID: | ||
let object = new SaveEntity_1.SaveEntity('', '', '', ''); | ||
SaveEntity_1.SaveEntity.ParseHeader(reader, object); | ||
case SaveEntity.TypeID: | ||
let object = new SaveEntity('', '', '', ''); | ||
SaveEntity.ParseHeader(reader, object); | ||
objectsList.push(object); | ||
break; | ||
case SaveComponent_1.SaveComponent.TypeID: | ||
let component = new SaveComponent_1.SaveComponent('', '', '', ''); | ||
SaveComponent_1.SaveComponent.ParseHeader(reader, component); | ||
case SaveComponent.TypeID: | ||
let component = new SaveComponent('', '', '', ''); | ||
SaveComponent.ParseHeader(reader, component); | ||
objectsList.push(component); | ||
@@ -101,7 +98,7 @@ break; | ||
const obj = objectsList[i]; | ||
if ((0, SaveEntity_1.isSaveEntity)(obj)) { | ||
SaveEntity_1.SaveEntity.ParseData(obj, len, reader, buildVersion); | ||
if (isSaveEntity(obj)) { | ||
SaveEntity.ParseData(obj, len, reader, buildVersion); | ||
} | ||
else if ((0, SaveComponent_1.isSaveComponent)(obj)) { | ||
SaveComponent_1.SaveComponent.ParseData(obj, len, reader, buildVersion); | ||
else if (isSaveComponent(obj)) { | ||
SaveComponent.ParseData(obj, len, reader, buildVersion); | ||
} | ||
@@ -138,3 +135,3 @@ const after = reader.getBufferPosition(); | ||
for (const collectable of collectables) { | ||
ObjectReference_1.ObjectReference.Serialize(writer, collectable); | ||
ObjectReference.Serialize(writer, collectable); | ||
} | ||
@@ -147,3 +144,3 @@ } | ||
for (let i = 0; i < countCollected; i++) { | ||
const collectable = ObjectReference_1.ObjectReference.Parse(reader); | ||
const collectable = ObjectReference.Parse(reader); | ||
collected.push(collectable); | ||
@@ -154,3 +151,1 @@ } | ||
} | ||
exports.Level = Level; | ||
//# sourceMappingURL=level.class.js.map |
import { ByteReader } from "../../byte/byte-reader.class"; | ||
import { SaveWriter } from "../../save-writer.class"; | ||
import { SaveWriter } from "../../save-writer"; | ||
import { AbstractBaseProperty } from "./Property"; | ||
@@ -4,0 +4,0 @@ export declare class DataFields { |
@@ -1,6 +0,3 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.DataFields = void 0; | ||
const Property_1 = require("./Property"); | ||
class DataFields { | ||
import { ArrayProperty, BoolProperty, ByteProperty, DoubleProperty, EnumProperty, FloatProperty, Int32Property, Int64Property, Int8Property, MapProperty, ObjectProperty, SetProperty, StrProperty, StructProperty, TextProperty, Uint32Property, Uint8Property } from "./Property"; | ||
export class DataFields { | ||
constructor() { | ||
@@ -58,29 +55,29 @@ this.properties = []; | ||
case 'BoolProperty': | ||
currentProperty = Property_1.BoolProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.BoolProperty.CalcOverhead(currentProperty); | ||
currentProperty = BoolProperty.Parse(reader, propertyType, index); | ||
overhead = BoolProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'ByteProperty': | ||
currentProperty = Property_1.ByteProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.ByteProperty.CalcOverhead(currentProperty); | ||
currentProperty = ByteProperty.Parse(reader, propertyType, index); | ||
overhead = ByteProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'Int8Property': | ||
currentProperty = Property_1.Int8Property.Parse(reader, propertyType, index); | ||
overhead = Property_1.Int8Property.CalcOverhead(currentProperty); | ||
currentProperty = Int8Property.Parse(reader, propertyType, index); | ||
overhead = Int8Property.CalcOverhead(currentProperty); | ||
break; | ||
case 'UInt8Property': | ||
currentProperty = Property_1.Uint8Property.Parse(reader, propertyType, index); | ||
overhead = Property_1.Uint8Property.CalcOverhead(currentProperty); | ||
currentProperty = Uint8Property.Parse(reader, propertyType, index); | ||
overhead = Uint8Property.CalcOverhead(currentProperty); | ||
break; | ||
case 'IntProperty': | ||
case 'Int32Property': | ||
currentProperty = Property_1.Int32Property.Parse(reader, propertyType, index); | ||
overhead = Property_1.Int32Property.CalcOverhead(currentProperty); | ||
currentProperty = Int32Property.Parse(reader, propertyType, index); | ||
overhead = Int32Property.CalcOverhead(currentProperty); | ||
break; | ||
case 'UInt32Property': | ||
currentProperty = Property_1.Uint32Property.Parse(reader, propertyType, index); | ||
overhead = Property_1.Uint32Property.CalcOverhead(currentProperty); | ||
currentProperty = Uint32Property.Parse(reader, propertyType, index); | ||
overhead = Uint32Property.CalcOverhead(currentProperty); | ||
break; | ||
case 'Int64Property': | ||
currentProperty = Property_1.Int64Property.Parse(reader, propertyType, index); | ||
overhead = Property_1.Int64Property.CalcOverhead(currentProperty); | ||
currentProperty = Int64Property.Parse(reader, propertyType, index); | ||
overhead = Int64Property.CalcOverhead(currentProperty); | ||
break; | ||
@@ -95,43 +92,43 @@ // TODO | ||
case 'FloatProperty': | ||
currentProperty = Property_1.FloatProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.FloatProperty.CalcOverhead(currentProperty); | ||
currentProperty = FloatProperty.Parse(reader, propertyType, index); | ||
overhead = FloatProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'DoubleProperty': | ||
currentProperty = Property_1.DoubleProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.DoubleProperty.CalcOverhead(currentProperty); | ||
currentProperty = DoubleProperty.Parse(reader, propertyType, index); | ||
overhead = DoubleProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'StrProperty': | ||
case 'NameProperty': | ||
currentProperty = Property_1.StrProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.StrProperty.CalcOverhead(currentProperty); | ||
currentProperty = StrProperty.Parse(reader, propertyType, index); | ||
overhead = StrProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'ObjectProperty': | ||
case 'InterfaceProperty': | ||
currentProperty = Property_1.ObjectProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.ObjectProperty.CalcOverhead(currentProperty); | ||
currentProperty = ObjectProperty.Parse(reader, propertyType, index); | ||
overhead = ObjectProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'EnumProperty': | ||
currentProperty = Property_1.EnumProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.EnumProperty.CalcOverhead(currentProperty); | ||
currentProperty = EnumProperty.Parse(reader, propertyType, index); | ||
overhead = EnumProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'StructProperty': | ||
currentProperty = Property_1.StructProperty.Parse(reader, propertyType, index, binarySize); | ||
overhead = Property_1.StructProperty.CalcOverhead(currentProperty); | ||
currentProperty = StructProperty.Parse(reader, propertyType, index, binarySize); | ||
overhead = StructProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'ArrayProperty': | ||
currentProperty = Property_1.ArrayProperty.Parse(reader, propertyType, index, propertyName); | ||
overhead = Property_1.ArrayProperty.CalcOverhead(currentProperty); | ||
currentProperty = ArrayProperty.Parse(reader, propertyType, index, propertyName); | ||
overhead = ArrayProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'MapProperty': | ||
//currentProperty = reader.readMapProperty(currentProperty, ''); | ||
currentProperty = Property_1.MapProperty.Parse(reader, propertyName, buildVersion, binarySize); | ||
overhead = Property_1.MapProperty.CalcOverhead(currentProperty); | ||
currentProperty = MapProperty.Parse(reader, propertyName, buildVersion, binarySize); | ||
overhead = MapProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'TextProperty': | ||
currentProperty = Property_1.TextProperty.Parse(reader, propertyType, index); | ||
overhead = Property_1.TextProperty.CalcOverhead(currentProperty); | ||
currentProperty = TextProperty.Parse(reader, propertyType, index); | ||
overhead = TextProperty.CalcOverhead(currentProperty); | ||
break; | ||
case 'SetProperty': | ||
currentProperty = Property_1.SetProperty.Parse(reader, propertyType, index, propertyName); | ||
overhead = Property_1.SetProperty.CalcOverhead(currentProperty); | ||
currentProperty = SetProperty.Parse(reader, propertyType, index, propertyName); | ||
overhead = SetProperty.CalcOverhead(currentProperty); | ||
break; | ||
@@ -168,29 +165,29 @@ default: | ||
case 'BoolProperty': | ||
overhead = Property_1.BoolProperty.CalcOverhead(property); | ||
Property_1.BoolProperty.Serialize(writer, property); | ||
overhead = BoolProperty.CalcOverhead(property); | ||
BoolProperty.Serialize(writer, property); | ||
break; | ||
case 'ByteProperty': | ||
overhead = Property_1.ByteProperty.CalcOverhead(property); | ||
Property_1.ByteProperty.Serialize(writer, property); | ||
overhead = ByteProperty.CalcOverhead(property); | ||
ByteProperty.Serialize(writer, property); | ||
break; | ||
case 'Int8Property': | ||
overhead = Property_1.Int8Property.CalcOverhead(property); | ||
Property_1.Int8Property.Serialize(writer, property); | ||
overhead = Int8Property.CalcOverhead(property); | ||
Int8Property.Serialize(writer, property); | ||
break; | ||
case 'UInt8Property': | ||
overhead = Property_1.Uint8Property.CalcOverhead(property); | ||
Property_1.Uint8Property.Serialize(writer, property); | ||
overhead = Uint8Property.CalcOverhead(property); | ||
Uint8Property.Serialize(writer, property); | ||
break; | ||
case 'IntProperty': | ||
case 'Int32Property': | ||
overhead = Property_1.Int32Property.CalcOverhead(property); | ||
Property_1.Int32Property.Serialize(writer, property); | ||
overhead = Int32Property.CalcOverhead(property); | ||
Int32Property.Serialize(writer, property); | ||
break; | ||
case 'UInt32Property': | ||
overhead = Property_1.Uint32Property.CalcOverhead(property); | ||
Property_1.Uint32Property.Serialize(writer, property); | ||
overhead = Uint32Property.CalcOverhead(property); | ||
Uint32Property.Serialize(writer, property); | ||
break; | ||
case 'Int64Property': | ||
overhead = Property_1.Int64Property.CalcOverhead(property); | ||
Property_1.Int64Property.Serialize(writer, property); | ||
overhead = Int64Property.CalcOverhead(property); | ||
Int64Property.Serialize(writer, property); | ||
break; | ||
@@ -200,46 +197,46 @@ // TODO: uint64Property | ||
case 'FloatProperty': | ||
overhead = Property_1.FloatProperty.CalcOverhead(property); | ||
Property_1.FloatProperty.Serialize(writer, property); | ||
overhead = FloatProperty.CalcOverhead(property); | ||
FloatProperty.Serialize(writer, property); | ||
break; | ||
case 'DoubleProperty': | ||
overhead = Property_1.DoubleProperty.CalcOverhead(property); | ||
Property_1.DoubleProperty.Serialize(writer, property); | ||
overhead = DoubleProperty.CalcOverhead(property); | ||
DoubleProperty.Serialize(writer, property); | ||
break; | ||
case 'StrProperty': | ||
case 'NameProperty': | ||
overhead = Property_1.StrProperty.CalcOverhead(property); | ||
Property_1.StrProperty.Serialize(writer, property); | ||
overhead = StrProperty.CalcOverhead(property); | ||
StrProperty.Serialize(writer, property); | ||
break; | ||
case 'ObjectProperty': | ||
case 'InterfaceProperty': | ||
overhead = Property_1.ObjectProperty.CalcOverhead(property); | ||
Property_1.ObjectProperty.Serialize(writer, property); | ||
overhead = ObjectProperty.CalcOverhead(property); | ||
ObjectProperty.Serialize(writer, property); | ||
break; | ||
case 'EnumProperty': | ||
overhead = Property_1.EnumProperty.CalcOverhead(property); | ||
Property_1.EnumProperty.Serialize(writer, property); | ||
overhead = EnumProperty.CalcOverhead(property); | ||
EnumProperty.Serialize(writer, property); | ||
break; | ||
case 'ByteProperty': | ||
overhead = Property_1.ByteProperty.CalcOverhead(property); | ||
Property_1.ByteProperty.Serialize(writer, property); | ||
overhead = ByteProperty.CalcOverhead(property); | ||
ByteProperty.Serialize(writer, property); | ||
break; | ||
case 'StructProperty': | ||
overhead = Property_1.StructProperty.CalcOverhead(property); | ||
Property_1.StructProperty.Serialize(writer, property); | ||
overhead = StructProperty.CalcOverhead(property); | ||
StructProperty.Serialize(writer, property); | ||
break; | ||
case 'ArrayProperty': | ||
overhead = Property_1.ArrayProperty.CalcOverhead(property); | ||
Property_1.ArrayProperty.Serialize(writer, property, propertyName); | ||
overhead = ArrayProperty.CalcOverhead(property); | ||
ArrayProperty.Serialize(writer, property, propertyName); | ||
break; | ||
case 'MapProperty': | ||
overhead = Property_1.MapProperty.CalcOverhead(property); | ||
Property_1.MapProperty.Serialize(writer, property); | ||
overhead = MapProperty.CalcOverhead(property); | ||
MapProperty.Serialize(writer, property); | ||
break; | ||
case 'TextProperty': | ||
overhead = Property_1.TextProperty.CalcOverhead(property); | ||
Property_1.TextProperty.Serialize(writer, property); | ||
overhead = TextProperty.CalcOverhead(property); | ||
TextProperty.Serialize(writer, property); | ||
break; | ||
case 'SetProperty': | ||
overhead = Property_1.SetProperty.CalcOverhead(property); | ||
Property_1.SetProperty.Serialize(writer, property); | ||
overhead = SetProperty.CalcOverhead(property); | ||
SetProperty.Serialize(writer, property); | ||
break; | ||
@@ -253,3 +250,1 @@ default: | ||
} | ||
exports.DataFields = DataFields; | ||
//# sourceMappingURL=DataFields.js.map |
@@ -1,5 +0,2 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.ObjectReference = void 0; | ||
class ObjectReference { | ||
export class ObjectReference { | ||
constructor(levelName, pathName) { | ||
@@ -17,3 +14,1 @@ this.levelName = levelName; | ||
} | ||
exports.ObjectReference = ObjectReference; | ||
//# sourceMappingURL=ObjectReference.js.map |
import { ByteReader } from "../../byte/byte-reader.class"; | ||
import { SaveWriter } from "../../save-writer.class"; | ||
import { SaveWriter } from "../../save-writer"; | ||
import { col4, vec3, vec4 } from "../structs/util.types"; | ||
@@ -4,0 +4,0 @@ import { ObjectReference } from "./ObjectReference"; |
@@ -1,8 +0,5 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.SerializeFINLuaProcessorStateStorage = exports.ReadFINLuaProcessorStateStorage = exports.SerializeFINNetworkTrace = exports.ReadFINNetworkTrace = exports.SerializeDynamicStructData = exports.ParseDynamicStructData = exports.MapProperty = exports.SetProperty = exports.ArrayProperty = exports.StructProperty = exports.TextProperty = exports.EnumProperty = exports.ObjectProperty = exports.StrProperty = exports.DoubleProperty = exports.FloatProperty = exports.Int64Property = exports.Uint32Property = exports.Int32Property = exports.Uint8Property = exports.Int8Property = exports.ByteProperty = exports.BoolProperty = exports.SerializeGUID = exports.ParseGUID = exports.BasicProperty = exports.AbstractBaseProperty = exports.AbstractProperty = void 0; | ||
const util_types_1 = require("../structs/util.types"); | ||
const DataFields_1 = require("./DataFields"); | ||
const ObjectReference_1 = require("./ObjectReference"); | ||
class AbstractProperty { | ||
import { ParseCol4BGRA, ParseCol4RGBA, ParseVec3, ParseVec4, SerializeCol4BGRA, SerializeCol4RGBA, SerializeVec3, SerializeVec4 } from "../structs/util.types"; | ||
import { DataFields } from "./DataFields"; | ||
import { ObjectReference } from "./ObjectReference"; | ||
export class AbstractProperty { | ||
constructor(type, index) { | ||
@@ -13,4 +10,3 @@ this.type = type; | ||
} | ||
exports.AbstractProperty = AbstractProperty; | ||
class AbstractBaseProperty extends AbstractProperty { | ||
export class AbstractBaseProperty extends AbstractProperty { | ||
// overhead like Guid is not calculated into property size | ||
@@ -23,4 +19,3 @@ constructor(type, ueType, index) { | ||
} | ||
exports.AbstractBaseProperty = AbstractBaseProperty; | ||
class BasicProperty extends AbstractBaseProperty { | ||
export class BasicProperty extends AbstractBaseProperty { | ||
constructor(type, ueType, guidInfo, index = 0) { | ||
@@ -31,4 +26,3 @@ super(type, ueType, index); | ||
} | ||
exports.BasicProperty = BasicProperty; | ||
const ParseGUID = (reader) => { | ||
export const ParseGUID = (reader) => { | ||
const hasGuid = reader.readByte() === 1; | ||
@@ -42,4 +36,3 @@ let guid; | ||
}; | ||
exports.ParseGUID = ParseGUID; | ||
const SerializeGUID = (writer, guid) => { | ||
export const SerializeGUID = (writer, guid) => { | ||
writer.writeByte(guid ? 1 : 0); | ||
@@ -50,4 +43,3 @@ if (guid) { | ||
}; | ||
exports.SerializeGUID = SerializeGUID; | ||
class BoolProperty extends BasicProperty { | ||
export class BoolProperty extends BasicProperty { | ||
constructor(value, ueType = 'BoolProperty', guidInfo = undefined, index = 0) { | ||
@@ -59,3 +51,3 @@ super('BoolProperty', ueType, guidInfo, index); | ||
const value = BoolProperty.ReadValue(reader); | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
return new BoolProperty(value, ueType, guidInfo, index); | ||
@@ -71,3 +63,3 @@ } | ||
BoolProperty.SerializeValue(writer, property.value); | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
} | ||
@@ -78,4 +70,3 @@ static SerializeValue(writer, value) { | ||
} | ||
exports.BoolProperty = BoolProperty; | ||
class ByteProperty extends BasicProperty { | ||
export class ByteProperty extends BasicProperty { | ||
constructor(value, ueType = 'ByteProperty', guidInfo = undefined, index = 0) { | ||
@@ -87,3 +78,3 @@ super('ByteProperty', ueType, guidInfo, index); | ||
const type = reader.readString(); | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
let value; | ||
@@ -113,3 +104,3 @@ if (type === 'None') { | ||
writer.writeString(property.value.type); | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
if (property.value.type === 'None') { | ||
@@ -126,4 +117,3 @@ ByteProperty.SerializeValue(writer, property.value.value); | ||
} | ||
exports.ByteProperty = ByteProperty; | ||
class Int8Property extends BasicProperty { | ||
export class Int8Property extends BasicProperty { | ||
constructor(value, ueType = 'Int8Property', guidInfo = undefined, index = 0) { | ||
@@ -134,3 +124,3 @@ super('Int8Property', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = Int8Property.ReadValue(reader); | ||
@@ -146,3 +136,3 @@ return new Int8Property(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
Int8Property.SerializeValue(writer, property.value); | ||
@@ -154,4 +144,3 @@ } | ||
} | ||
exports.Int8Property = Int8Property; | ||
class Uint8Property extends BasicProperty { | ||
export class Uint8Property extends BasicProperty { | ||
constructor(value, ueType = 'UInt8Property', guidInfo = undefined, index = 0) { | ||
@@ -162,3 +151,3 @@ super('UInt8Property', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = Uint8Property.ReadValue(reader); | ||
@@ -174,3 +163,3 @@ return new Uint8Property(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
Uint8Property.SerializeValue(writer, property.value); | ||
@@ -182,4 +171,3 @@ } | ||
} | ||
exports.Uint8Property = Uint8Property; | ||
class Int32Property extends BasicProperty { | ||
export class Int32Property extends BasicProperty { | ||
constructor(value, ueType = 'IntProperty', guidInfo = undefined, index = 0) { | ||
@@ -190,3 +178,3 @@ super('Int32Property', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = Int32Property.ReadValue(reader); | ||
@@ -202,3 +190,3 @@ return new Int32Property(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
Int32Property.SerializeValue(writer, property.value); | ||
@@ -210,4 +198,3 @@ } | ||
} | ||
exports.Int32Property = Int32Property; | ||
class Uint32Property extends BasicProperty { | ||
export class Uint32Property extends BasicProperty { | ||
constructor(value, ueType = 'UInt32Property', guidInfo = undefined, index = 0) { | ||
@@ -218,3 +205,3 @@ super('UInt32Property', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = Uint32Property.ReadValue(reader); | ||
@@ -230,3 +217,3 @@ return new Uint32Property(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
Uint32Property.SerializeValue(writer, property.value); | ||
@@ -238,4 +225,3 @@ } | ||
} | ||
exports.Uint32Property = Uint32Property; | ||
class Int64Property extends BasicProperty { | ||
export class Int64Property extends BasicProperty { | ||
constructor(value, ueType = 'Int64Property', guidInfo = undefined, index = 0) { | ||
@@ -246,3 +232,3 @@ super('Int64Property', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = Int64Property.ReadValue(reader); | ||
@@ -258,3 +244,3 @@ return new Int64Property(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
Int64Property.SerializeValue(writer, property.value); | ||
@@ -266,4 +252,3 @@ } | ||
} | ||
exports.Int64Property = Int64Property; | ||
class FloatProperty extends BasicProperty { | ||
export class FloatProperty extends BasicProperty { | ||
constructor(value, ueType = 'FloatProperty', guidInfo = undefined, index = 0) { | ||
@@ -274,3 +259,3 @@ super('FloatProperty', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = FloatProperty.ReadValue(reader); | ||
@@ -286,3 +271,3 @@ return new FloatProperty(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
FloatProperty.SerializeValue(writer, property.value); | ||
@@ -294,4 +279,3 @@ } | ||
} | ||
exports.FloatProperty = FloatProperty; | ||
class DoubleProperty extends BasicProperty { | ||
export class DoubleProperty extends BasicProperty { | ||
constructor(value, ueType = 'DoubleProperty', guidInfo = undefined, index = 0) { | ||
@@ -302,3 +286,3 @@ super('DoubleProperty', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = DoubleProperty.ReadValue(reader); | ||
@@ -314,3 +298,3 @@ return new DoubleProperty(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
DoubleProperty.SerializeValue(writer, property.value); | ||
@@ -322,4 +306,3 @@ } | ||
} | ||
exports.DoubleProperty = DoubleProperty; | ||
class StrProperty extends BasicProperty { | ||
export class StrProperty extends BasicProperty { | ||
constructor(value, ueType = 'StrProperty', guidInfo = undefined, index = 0) { | ||
@@ -330,3 +313,3 @@ super('StrProperty', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = StrProperty.ReadValue(reader); | ||
@@ -342,3 +325,3 @@ return new StrProperty(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
StrProperty.SerializeValue(writer, property.value); | ||
@@ -350,4 +333,3 @@ } | ||
} | ||
exports.StrProperty = StrProperty; | ||
class ObjectProperty extends BasicProperty { | ||
export class ObjectProperty extends BasicProperty { | ||
constructor(value, ueType = 'ObjectProperty', guidInfo = undefined, index = 0) { | ||
@@ -358,3 +340,3 @@ super('ObjectProperty', ueType, guidInfo, index); | ||
static Parse(reader, ueType, index = 0) { | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = ObjectProperty.ReadValue(reader); | ||
@@ -374,3 +356,3 @@ return new ObjectProperty(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
ObjectProperty.SerializeValue(writer, property.value); | ||
@@ -383,4 +365,3 @@ } | ||
} | ||
exports.ObjectProperty = ObjectProperty; | ||
class EnumProperty extends BasicProperty { | ||
export class EnumProperty extends BasicProperty { | ||
constructor(value, ueType = 'EnumProperty', guidInfo = undefined, index = 0) { | ||
@@ -392,3 +373,3 @@ super('EnumProperty', ueType, guidInfo, index); | ||
let name = reader.readString(); | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = EnumProperty.ReadValue(reader); | ||
@@ -406,3 +387,3 @@ const property = new EnumProperty({ name, value }, ueType, guidInfo, index); | ||
writer.writeString(property.value.name); | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
EnumProperty.SerializeValue(writer, property.value.value); | ||
@@ -414,4 +395,3 @@ } | ||
} | ||
exports.EnumProperty = EnumProperty; | ||
class TextProperty extends BasicProperty { | ||
export class TextProperty extends BasicProperty { | ||
constructor(value, ueType = 'TextProperty', guidInfo = undefined, index = 0) { | ||
@@ -423,3 +403,3 @@ super('TextProperty', ueType, guidInfo, index); | ||
//let name = reader.readString(); | ||
const guidInfo = (0, exports.ParseGUID)(reader); | ||
const guidInfo = ParseGUID(reader); | ||
const value = TextProperty.ParseValue(reader); | ||
@@ -487,3 +467,3 @@ return new TextProperty(value, ueType, guidInfo, index); | ||
static Serialize(writer, property) { | ||
(0, exports.SerializeGUID)(writer, property.guidInfo); | ||
SerializeGUID(writer, property.guidInfo); | ||
TextProperty.SerializeValue(writer, property.value); | ||
@@ -542,4 +522,3 @@ } | ||
} | ||
exports.TextProperty = TextProperty; | ||
class StructProperty extends AbstractBaseProperty { | ||
export class StructProperty extends AbstractBaseProperty { | ||
constructor(subtype, ueType = 'StructProperty', index = 0, guid = 0) { | ||
@@ -581,6 +560,6 @@ super('StructProperty', ueType, index); | ||
case 'Color': | ||
value = (0, util_types_1.ParseCol4BGRA)(reader); | ||
value = ParseCol4BGRA(reader); | ||
break; | ||
case 'LinearColor': | ||
value = (0, util_types_1.ParseCol4RGBA)(reader); | ||
value = ParseCol4RGBA(reader); | ||
break; | ||
@@ -590,3 +569,3 @@ case 'Vector': | ||
case 'Vector2D': | ||
value = (0, util_types_1.ParseVec3)(reader); | ||
value = ParseVec3(reader); | ||
break; | ||
@@ -596,8 +575,8 @@ case 'Quat': | ||
case 'Vector4D': | ||
value = (0, util_types_1.ParseVec4)(reader); | ||
value = ParseVec4(reader); | ||
break; | ||
case 'Box': | ||
value = { | ||
min: (0, util_types_1.ParseVec3)(reader), | ||
max: (0, util_types_1.ParseVec3)(reader), | ||
min: ParseVec3(reader), | ||
max: ParseVec3(reader), | ||
isValid: reader.readByte() >= 1 | ||
@@ -643,7 +622,7 @@ }; | ||
case 'FINNetworkTrace': | ||
value = (0, exports.ReadFINNetworkTrace)(reader); | ||
value = ReadFINNetworkTrace(reader); | ||
break; | ||
case 'FINLuaProcessorStateStorage': | ||
value = { | ||
values: (0, exports.ReadFINLuaProcessorStateStorage)(reader, size) | ||
values: ReadFINLuaProcessorStateStorage(reader, size) | ||
}; | ||
@@ -659,3 +638,3 @@ break; | ||
//TODO: use buildversion | ||
value = (0, exports.ParseDynamicStructData)(reader, 0, subtype); | ||
value = ParseDynamicStructData(reader, 0, subtype); | ||
} | ||
@@ -680,7 +659,7 @@ return value; | ||
value = value; | ||
(0, util_types_1.SerializeCol4BGRA)(writer, value); | ||
SerializeCol4BGRA(writer, value); | ||
break; | ||
case 'LinearColor': | ||
value = value; | ||
(0, util_types_1.SerializeCol4RGBA)(writer, value); | ||
SerializeCol4RGBA(writer, value); | ||
break; | ||
@@ -691,3 +670,3 @@ case 'Vector': | ||
value = value; | ||
(0, util_types_1.SerializeVec3)(writer, value); | ||
SerializeVec3(writer, value); | ||
break; | ||
@@ -698,8 +677,8 @@ case 'Quat': | ||
value = value; | ||
(0, util_types_1.SerializeVec4)(writer, value); | ||
SerializeVec4(writer, value); | ||
break; | ||
case 'Box': | ||
value = value; | ||
(0, util_types_1.SerializeVec3)(writer, value.min); | ||
(0, util_types_1.SerializeVec3)(writer, value.max); | ||
SerializeVec3(writer, value.min); | ||
SerializeVec3(writer, value.max); | ||
writer.writeByte(value.isValid ? 1 : 0); | ||
@@ -745,7 +724,7 @@ break; | ||
value = value; | ||
(0, exports.SerializeFINNetworkTrace)(writer, value); | ||
SerializeFINNetworkTrace(writer, value); | ||
break; | ||
case 'FINLuaProcessorStateStorage': | ||
value = value; | ||
(0, exports.SerializeFINLuaProcessorStateStorage)(writer, value.values); | ||
SerializeFINLuaProcessorStateStorage(writer, value.values); | ||
break; | ||
@@ -760,8 +739,7 @@ case 'FICFrameRange': // https://github.com/Panakotta00/FicsIt-Cam/blob/c55e254a84722c56e1badabcfaef1159cd7d2ef1/Source/FicsItCam/Public/Data/FICTypes.h#L34 | ||
value = value; | ||
(0, exports.SerializeDynamicStructData)(writer, 0, value); | ||
SerializeDynamicStructData(writer, 0, value); | ||
} | ||
} | ||
} | ||
exports.StructProperty = StructProperty; | ||
class ArrayProperty extends BasicProperty { | ||
export class ArrayProperty extends BasicProperty { | ||
constructor(subtype, values, ueType = 'ArrayProperty', index = 0, structValueFields) { | ||
@@ -919,4 +897,3 @@ super('ArrayProperty', ueType, undefined, index); | ||
} | ||
exports.ArrayProperty = ArrayProperty; | ||
class SetProperty extends BasicProperty { | ||
export class SetProperty extends BasicProperty { | ||
constructor(subtype, values, ueType, index) { | ||
@@ -945,3 +922,3 @@ super('SetProperty', ueType, undefined, index); | ||
if (propertyName === 'mRemovalLocations') { | ||
property = new SetProperty(subtype, new Array(elementCount).fill(0).map(e => (0, util_types_1.ParseVec3)(reader)), ueType, index); | ||
property = new SetProperty(subtype, new Array(elementCount).fill(0).map(e => ParseVec3(reader)), ueType, index); | ||
} | ||
@@ -975,3 +952,3 @@ break; | ||
//TODO: this would only work for mRemovalLocations. Get a way to check for propertyname at least. | ||
property.values.forEach(v => (0, util_types_1.SerializeVec3)(writer, v)); | ||
property.values.forEach(v => SerializeVec3(writer, v)); | ||
break; | ||
@@ -983,4 +960,3 @@ default: | ||
} | ||
exports.SetProperty = SetProperty; | ||
class MapProperty extends BasicProperty { | ||
export class MapProperty extends BasicProperty { | ||
constructor(keyType, valueType, ueType, index) { | ||
@@ -1128,4 +1104,3 @@ super('MapProperty', ueType, undefined, index); | ||
} | ||
exports.MapProperty = MapProperty; | ||
const ParseDynamicStructData = (reader, buildVersion, type) => { | ||
export const ParseDynamicStructData = (reader, buildVersion, type) => { | ||
const data = { | ||
@@ -1136,3 +1111,3 @@ type, properties: {} | ||
while (propertyName !== 'None') { | ||
const property = DataFields_1.DataFields.ParseProperty(reader, buildVersion, propertyName); | ||
const property = DataFields.ParseProperty(reader, buildVersion, propertyName); | ||
data.properties[propertyName] = property; | ||
@@ -1143,17 +1118,15 @@ propertyName = reader.readString(); | ||
}; | ||
exports.ParseDynamicStructData = ParseDynamicStructData; | ||
const SerializeDynamicStructData = (writer, buildVersion, data) => { | ||
export const SerializeDynamicStructData = (writer, buildVersion, data) => { | ||
for (const key in data.properties) { | ||
writer.writeString(key); | ||
DataFields_1.DataFields.SerializeProperty(writer, data.properties[key], key, buildVersion); | ||
DataFields.SerializeProperty(writer, data.properties[key], key, buildVersion); | ||
} | ||
writer.writeString('None'); | ||
}; | ||
exports.SerializeDynamicStructData = SerializeDynamicStructData; | ||
const ReadFINNetworkTrace = (reader) => { | ||
export const ReadFINNetworkTrace = (reader) => { | ||
const networkTrace = {}; | ||
networkTrace.ref = ObjectReference_1.ObjectReference.Parse(reader); | ||
networkTrace.ref = ObjectReference.Parse(reader); | ||
networkTrace.hasPrev = reader.readInt32(); | ||
if (networkTrace.hasPrev) { | ||
networkTrace.prev = (0, exports.ReadFINNetworkTrace)(reader); | ||
networkTrace.prev = ReadFINNetworkTrace(reader); | ||
} | ||
@@ -1166,9 +1139,8 @@ networkTrace.hasStep = reader.readInt32(); | ||
}; | ||
exports.ReadFINNetworkTrace = ReadFINNetworkTrace; | ||
const SerializeFINNetworkTrace = (writer, obj) => { | ||
export const SerializeFINNetworkTrace = (writer, obj) => { | ||
const networkTrace = {}; | ||
ObjectReference_1.ObjectReference.Serialize(writer, obj.ref); | ||
ObjectReference.Serialize(writer, obj.ref); | ||
writer.writeInt32(obj.hasPrev); | ||
if (obj.hasPrev) { | ||
(0, exports.SerializeFINNetworkTrace)(writer, obj.prev); | ||
SerializeFINNetworkTrace(writer, obj.prev); | ||
} | ||
@@ -1180,4 +1152,3 @@ writer.writeInt32(obj.hasStep); | ||
}; | ||
exports.SerializeFINNetworkTrace = SerializeFINNetworkTrace; | ||
const ReadFINLuaProcessorStateStorage = (reader, size) => { | ||
export const ReadFINLuaProcessorStateStorage = (reader, size) => { | ||
const stateStorage = { traces: [], references: [], thread: '', globals: '', remainingStructData: {} }; | ||
@@ -1187,7 +1158,7 @@ const start = reader.getBufferPosition(); | ||
for (let i = 0; i < traceCount; i++) { | ||
stateStorage.traces.push((0, exports.ReadFINNetworkTrace)(reader)); | ||
stateStorage.traces.push(ReadFINNetworkTrace(reader)); | ||
} | ||
const refCount = reader.readInt32(); | ||
for (let i = 0; i < refCount; i++) { | ||
stateStorage.references.push(ObjectReference_1.ObjectReference.Parse(reader)); | ||
stateStorage.references.push(ObjectReference.Parse(reader)); | ||
} | ||
@@ -1200,11 +1171,10 @@ stateStorage.thread = reader.readString(); | ||
}; | ||
exports.ReadFINLuaProcessorStateStorage = ReadFINLuaProcessorStateStorage; | ||
const SerializeFINLuaProcessorStateStorage = (writer, stateStorage) => { | ||
export const SerializeFINLuaProcessorStateStorage = (writer, stateStorage) => { | ||
writer.writeInt32(stateStorage.traces.length); | ||
for (const trace of stateStorage.traces) { | ||
(0, exports.SerializeFINNetworkTrace)(writer, trace); | ||
SerializeFINNetworkTrace(writer, trace); | ||
} | ||
writer.writeInt32(stateStorage.references.length); | ||
for (const ref of stateStorage.references) { | ||
ObjectReference_1.ObjectReference.Serialize(writer, ref); | ||
ObjectReference.Serialize(writer, ref); | ||
} | ||
@@ -1215,3 +1185,1 @@ writer.writeString(stateStorage.thread); | ||
}; | ||
exports.SerializeFINLuaProcessorStateStorage = SerializeFINLuaProcessorStateStorage; | ||
//# sourceMappingURL=Property.js.map |
@@ -1,10 +0,6 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.SaveComponent = exports.isSaveComponent = void 0; | ||
const SaveObject_1 = require("./SaveObject"); | ||
const isSaveComponent = (obj) => { | ||
import { SaveObject } from "./SaveObject"; | ||
export const isSaveComponent = (obj) => { | ||
return obj.type === 'SaveComponent'; | ||
}; | ||
exports.isSaveComponent = isSaveComponent; | ||
class SaveComponent extends SaveObject_1.SaveObject { | ||
class SaveComponent extends SaveObject { | ||
constructor(typePath, rootObject, instanceName, parentEntityName = '') { | ||
@@ -19,15 +15,14 @@ super(typePath, rootObject, instanceName); | ||
static ParseHeader(reader, obj) { | ||
SaveObject_1.SaveObject.ParseHeader(reader, obj); | ||
SaveObject.ParseHeader(reader, obj); | ||
obj.parentEntityName = reader.readString(); | ||
} | ||
static SerializeHeader(writer, component) { | ||
SaveObject_1.SaveObject.SerializeHeader(writer, component); | ||
SaveObject.SerializeHeader(writer, component); | ||
writer.writeString(component.parentEntityName); | ||
} | ||
static ParseData(component, length, reader, buildVersion) { | ||
SaveObject_1.SaveObject.ParseData(component, length, reader, buildVersion); | ||
SaveObject.ParseData(component, length, reader, buildVersion); | ||
} | ||
} | ||
SaveComponent.TypeID = 0; | ||
exports.SaveComponent = SaveComponent; | ||
//# sourceMappingURL=SaveComponent.js.map | ||
export { SaveComponent }; |
@@ -1,12 +0,8 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.SaveEntity = exports.isSaveEntity = void 0; | ||
const util_types_1 = require("../structs/util.types"); | ||
const ObjectReference_1 = require("./ObjectReference"); | ||
const SaveObject_1 = require("./SaveObject"); | ||
const isSaveEntity = (obj) => { | ||
import { ParseTransform, SerializeTransform } from "../structs/util.types"; | ||
import { ObjectReference } from "./ObjectReference"; | ||
import { SaveObject } from "./SaveObject"; | ||
export const isSaveEntity = (obj) => { | ||
return obj.type === 'SaveEntity'; | ||
}; | ||
exports.isSaveEntity = isSaveEntity; | ||
class SaveEntity extends SaveObject_1.SaveObject { | ||
class SaveEntity extends SaveObject { | ||
constructor(typePath, rootObject, instanceName, parentEntityName = '', needsTransform = false) { | ||
@@ -31,5 +27,5 @@ super(typePath, rootObject, instanceName); | ||
static ParseHeader(reader, obj) { | ||
SaveObject_1.SaveObject.ParseHeader(reader, obj); | ||
SaveObject.ParseHeader(reader, obj); | ||
obj.needTransform = reader.readInt32() == 1; | ||
obj.transform = (0, util_types_1.ParseTransform)(reader); | ||
obj.transform = ParseTransform(reader); | ||
obj.wasPlacedInLevel = reader.readInt32() == 1; | ||
@@ -47,12 +43,12 @@ } | ||
for (let i = 0; i < componentCount; i++) { | ||
var componentRef = ObjectReference_1.ObjectReference.Parse(reader); | ||
var componentRef = ObjectReference.Parse(reader); | ||
entity.components.push(componentRef); | ||
newLen -= 10 + componentRef.levelName.length + componentRef.pathName.length; | ||
} | ||
SaveObject_1.SaveObject.ParseData(entity, newLen, reader, buildVersion); | ||
SaveObject.ParseData(entity, newLen, reader, buildVersion); | ||
} | ||
static SerializeHeader(writer, entity) { | ||
SaveObject_1.SaveObject.SerializeHeader(writer, entity); | ||
SaveObject.SerializeHeader(writer, entity); | ||
writer.writeInt32(entity.needTransform ? 1 : 0); | ||
(0, util_types_1.SerializeTransform)(writer, entity.transform); | ||
SerializeTransform(writer, entity.transform); | ||
writer.writeInt32(entity.wasPlacedInLevel ? 1 : 0); | ||
@@ -68,7 +64,6 @@ } | ||
} | ||
SaveObject_1.SaveObject.SerializeData(writer, entity, buildVersion); | ||
SaveObject.SerializeData(writer, entity, buildVersion); | ||
} | ||
} | ||
SaveEntity.TypeID = 1; | ||
exports.SaveEntity = SaveEntity; | ||
//# sourceMappingURL=SaveEntity.js.map | ||
export { SaveEntity }; |
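With isSaveEntity and isSaveComponent now plain named exports (above), consumers can branch on parsed objects directly. A minimal sketch, assuming both guards and classes are re-exported from the package entry point (otherwise import them from the build/parser/satisfactory/objects/ files shown in this diff):

// Minimal sketch: per the code above, the guards check obj.type === 'SaveEntity' / 'SaveComponent'.
import { SaveComponent, isSaveComponent } from "@etothepii/satisfactory-file-parser";
import { SaveEntity, isSaveEntity } from "@etothepii/satisfactory-file-parser";

const describeObject = (obj: SaveEntity | SaveComponent): string => {
    if (isSaveEntity(obj)) return "entity (carries a transform and component refs)";
    if (isSaveComponent(obj)) return "component (carries a parentEntityName)";
    return "unknown save object";
};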
@@ -1,6 +0,3 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.SaveObject = void 0; | ||
const DataFields_1 = require("./DataFields"); | ||
class SaveObject { | ||
import { DataFields } from "./DataFields"; | ||
export class SaveObject { | ||
constructor(typePath, rootObject, instanceName) { | ||
@@ -10,3 +7,3 @@ this.typePath = typePath; | ||
this.instanceName = instanceName; | ||
this.dataFields = new DataFields_1.DataFields(); | ||
this.dataFields = new DataFields(); | ||
} | ||
@@ -24,9 +21,7 @@ static ParseHeader(reader, obj) { | ||
static ParseData(obj, length, reader, buildVersion) { | ||
obj.dataFields = DataFields_1.DataFields.Parse(length, reader, buildVersion); | ||
obj.dataFields = DataFields.Parse(length, reader, buildVersion); | ||
} | ||
static SerializeData(writer, obj, buildVersion) { | ||
DataFields_1.DataFields.Serialize(writer, obj.dataFields, buildVersion); | ||
DataFields.Serialize(writer, obj.dataFields, buildVersion); | ||
} | ||
} | ||
exports.SaveObject = SaveObject; | ||
//# sourceMappingURL=SaveObject.js.map |
@@ -1,5 +0,2 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.SatisfactorySave = void 0; | ||
class SatisfactorySave { | ||
export class SatisfactorySave { | ||
constructor(header) { | ||
@@ -11,3 +8,1 @@ this.levels = []; | ||
} | ||
exports.SatisfactorySave = SatisfactorySave; | ||
//# sourceMappingURL=satisfactory-save.class.js.map |
@@ -1,5 +0,2 @@ | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.SerializeTransform = exports.ParseTransform = exports.SerializeVec2 = exports.ParseVec2 = exports.SerializeVec3 = exports.ParseVec3 = exports.norm = exports.mult = exports.length = exports.add = exports.sub = exports.SerializeVec4 = exports.ParseVec4 = exports.ParseCol4BGRA = exports.SerializeCol4BGRA = exports.ParseCol4RGBA = exports.SerializeCol4RGBA = void 0; | ||
const SerializeCol4RGBA = (writer, value) => { | ||
export const SerializeCol4RGBA = (writer, value) => { | ||
writer.writeFloat(value.r); | ||
@@ -10,4 +7,3 @@ writer.writeFloat(value.g); | ||
}; | ||
exports.SerializeCol4RGBA = SerializeCol4RGBA; | ||
const ParseCol4RGBA = (reader) => { | ||
export const ParseCol4RGBA = (reader) => { | ||
return { | ||
@@ -20,4 +16,3 @@ r: reader.readFloat(), | ||
}; | ||
exports.ParseCol4RGBA = ParseCol4RGBA; | ||
const SerializeCol4BGRA = (writer, value) => { | ||
export const SerializeCol4BGRA = (writer, value) => { | ||
writer.writeByte(value.b); | ||
@@ -28,4 +23,3 @@ writer.writeByte(value.g); | ||
}; | ||
exports.SerializeCol4BGRA = SerializeCol4BGRA; | ||
const ParseCol4BGRA = (reader) => { | ||
export const ParseCol4BGRA = (reader) => { | ||
return { | ||
@@ -38,38 +32,28 @@ b: reader.readByte(), | ||
}; | ||
exports.ParseCol4BGRA = ParseCol4BGRA; | ||
const ParseVec4 = (reader) => { | ||
export const ParseVec4 = (reader) => { | ||
return { | ||
...(0, exports.ParseVec3)(reader), | ||
...ParseVec3(reader), | ||
w: reader.readFloat() | ||
}; | ||
}; | ||
exports.ParseVec4 = ParseVec4; | ||
const SerializeVec4 = (writer, vec) => { | ||
(0, exports.SerializeVec3)(writer, vec); | ||
export const SerializeVec4 = (writer, vec) => { | ||
SerializeVec3(writer, vec); | ||
writer.writeFloat(vec.w); | ||
}; | ||
exports.SerializeVec4 = SerializeVec4; | ||
const sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z }); | ||
exports.sub = sub; | ||
const add = (vec, other) => ({ x: vec.x + other.x, y: vec.y + other.y, z: vec.z + other.z }); | ||
exports.add = add; | ||
const length = (vec) => Math.sqrt(vec.x ** 2 + vec.y ** 2 + vec.z ** 2); | ||
exports.length = length; | ||
const mult = (vec, scale) => ({ x: vec.x * scale, y: vec.y * scale, z: vec.z * scale }); | ||
exports.mult = mult; | ||
const norm = (vec) => (0, exports.mult)(vec, 1. / (0, exports.length)(vec)); | ||
exports.norm = norm; | ||
const ParseVec3 = (reader) => { | ||
export const sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z }); | ||
export const add = (vec, other) => ({ x: vec.x + other.x, y: vec.y + other.y, z: vec.z + other.z }); | ||
export const length = (vec) => Math.sqrt(vec.x ** 2 + vec.y ** 2 + vec.z ** 2); | ||
export const mult = (vec, scale) => ({ x: vec.x * scale, y: vec.y * scale, z: vec.z * scale }); | ||
export const norm = (vec) => mult(vec, 1. / length(vec)); | ||
export const ParseVec3 = (reader) => { | ||
return { | ||
...(0, exports.ParseVec2)(reader), | ||
...ParseVec2(reader), | ||
z: reader.readFloat() | ||
}; | ||
}; | ||
exports.ParseVec3 = ParseVec3; | ||
const SerializeVec3 = (writer, vec) => { | ||
(0, exports.SerializeVec2)(writer, vec); | ||
export const SerializeVec3 = (writer, vec) => { | ||
SerializeVec2(writer, vec); | ||
writer.writeFloat(vec.z); | ||
}; | ||
exports.SerializeVec3 = SerializeVec3; | ||
const ParseVec2 = (reader) => { | ||
export const ParseVec2 = (reader) => { | ||
return { | ||
@@ -80,22 +64,17 @@ x: reader.readFloat(), | ||
}; | ||
exports.ParseVec2 = ParseVec2; | ||
const SerializeVec2 = (writer, vec) => { | ||
export const SerializeVec2 = (writer, vec) => { | ||
writer.writeFloat(vec.x); | ||
writer.writeFloat(vec.y); | ||
}; | ||
exports.SerializeVec2 = SerializeVec2; | ||
const ParseTransform = (reader) => { | ||
export const ParseTransform = (reader) => { | ||
return { | ||
rotation: (0, exports.ParseVec4)(reader), | ||
translation: (0, exports.ParseVec3)(reader), | ||
scale3d: (0, exports.ParseVec3)(reader), | ||
rotation: ParseVec4(reader), | ||
translation: ParseVec3(reader), | ||
scale3d: ParseVec3(reader), | ||
}; | ||
}; | ||
exports.ParseTransform = ParseTransform; | ||
const SerializeTransform = (writer, transform) => { | ||
(0, exports.SerializeVec4)(writer, transform.rotation); | ||
(0, exports.SerializeVec3)(writer, transform.translation); | ||
(0, exports.SerializeVec3)(writer, transform.scale3d); | ||
export const SerializeTransform = (writer, transform) => { | ||
SerializeVec4(writer, transform.rotation); | ||
SerializeVec3(writer, transform.translation); | ||
SerializeVec3(writer, transform.scale3d); | ||
}; | ||
exports.SerializeTransform = SerializeTransform; | ||
//# sourceMappingURL=util.types.js.map |
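The vector helpers above are plain functions over { x, y, z } objects, so they can be used standalone. A minimal usage sketch; the import path assumes the compiled build layout shown in this diff:

// Values chosen so the results are easy to verify by hand.
import { add, length, mult, norm, sub } from "./satisfactory/structs/util.types";

const a = { x: 3, y: 4, z: 0 };
const b = { x: 1, y: 1, z: 1 };

sub(a, b);   // { x: 2, y: 3, z: -1 }
add(a, b);   // { x: 4, y: 5, z: 1 }
length(a);   // 5
mult(b, 2);  // { x: 2, y: 2, z: 2 }
norm(a);     // mult(a, 1 / length(a)) -> { x: 0.6, y: 0.8, z: 0 }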
@@ -7,3 +7,3 @@ import { ByteReader } from "./byte/byte-reader.class"; | ||
import { SaveObject } from "./satisfactory/objects/SaveObject"; | ||
import { SatisfactorySaveHeader, ChunkCompressionInfo } from "./satisfactory/satisfactory-save.class"; | ||
import { ChunkCompressionInfo, SatisfactorySaveHeader } from "./satisfactory/satisfactory-save.class"; | ||
export type EntityPathFilter = { | ||
@@ -10,0 +10,0 @@ behavior: 'whitelist' | 'blacklist'; |
@@ -1,21 +0,15 @@ | ||
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.Conveyor = exports.SaveReader = exports.projectionFilterApplies = void 0; | ||
const pako_1 = __importDefault(require("pako")); | ||
const alignment_enum_1 = require("./byte/alignment.enum"); | ||
const byte_reader_class_1 = require("./byte/byte-reader.class"); | ||
const level_class_1 = require("./satisfactory/level.class"); | ||
const SaveComponent_1 = require("./satisfactory/objects/SaveComponent"); | ||
const SaveEntity_1 = require("./satisfactory/objects/SaveEntity"); | ||
const parser_error_1 = require("./error/parser.error"); | ||
const projectionFilterApplies = (config, object) => { | ||
import Pako from "pako"; | ||
import { Alignment } from "./byte/alignment.enum"; | ||
import { ByteReader } from "./byte/byte-reader.class"; | ||
import { CompressionLibraryError, CorruptSaveError, ParserError, UnsupportedVersionError } from "./error/parser.error"; | ||
import { Level } from "./satisfactory/level.class"; | ||
import { isSaveComponent } from "./satisfactory/objects/SaveComponent"; | ||
import { isSaveEntity } from "./satisfactory/objects/SaveEntity"; | ||
export const projectionFilterApplies = (config, object) => { | ||
let cond1 = false; | ||
if ((0, SaveEntity_1.isSaveEntity)(object) || (config.includeComponents && (0, SaveComponent_1.isSaveComponent)(object))) { | ||
if (isSaveEntity(object) || (config.includeComponents && isSaveComponent(object))) { | ||
cond1 = true; | ||
} | ||
let cond2 = true; | ||
if ((0, SaveEntity_1.isSaveEntity)(object)) { | ||
if (isSaveEntity(object)) { | ||
cond2 = false; | ||
@@ -31,6 +25,5 @@ if (config.entityPathFilter.behavior === 'whitelist') { | ||
}; | ||
exports.projectionFilterApplies = projectionFilterApplies; | ||
class SaveReader extends byte_reader_class_1.ByteReader { | ||
class SaveReader extends ByteReader { | ||
constructor(fileBuffer, onProgressCallback = () => { }) { | ||
super(fileBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
super(fileBuffer, Alignment.LITTLE_ENDIAN); | ||
this.onProgressCallback = onProgressCallback; | ||
@@ -88,3 +81,3 @@ this.levels = []; | ||
else { | ||
throw new parser_error_1.UnsupportedVersionError("The save version is too old to support encoding currently. Save in newer game version."); | ||
throw new UnsupportedVersionError("The save version is too old to support encoding currently. Save in newer game version."); | ||
} | ||
@@ -108,9 +101,9 @@ return this.header; | ||
if (this.compressionInfo.packageFileTag <= 0) { | ||
this.compressionInfo.packageFileTag = chunkHeader.getInt32(0, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.compressionInfo.packageFileTag = chunkHeader.getInt32(0, this.alignment === Alignment.LITTLE_ENDIAN); | ||
} | ||
if (this.compressionInfo.maxChunkContentSize <= 0) { | ||
this.compressionInfo.maxChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
this.compressionInfo.maxChunkContentSize = chunkHeader.getInt32(8, this.alignment === Alignment.LITTLE_ENDIAN); | ||
} | ||
const chunkCompressedLength = chunkHeader.getInt32(32, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
const chunkUncompressedLength = chunkHeader.getInt32(40, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); | ||
const chunkCompressedLength = chunkHeader.getInt32(32, this.alignment === Alignment.LITTLE_ENDIAN); | ||
const chunkUncompressedLength = chunkHeader.getInt32(40, this.alignment === Alignment.LITTLE_ENDIAN); | ||
totalUncompressedBodySize += chunkUncompressedLength; | ||
@@ -128,7 +121,7 @@ const currentChunkSize = chunkCompressedLength; | ||
let currentInflatedChunk = null; | ||
currentInflatedChunk = pako_1.default.inflate(currentChunk); | ||
currentInflatedChunk = Pako.inflate(currentChunk); | ||
currentChunks.push(currentInflatedChunk); | ||
} | ||
catch (err) { | ||
throw new parser_error_1.CompressionLibraryError("Failed to inflate compressed save data. " + err); | ||
throw new CompressionLibraryError("Failed to inflate compressed save data. " + err); | ||
} | ||
@@ -150,3 +143,3 @@ } | ||
if (totalUncompressedBodySize !== dataLength + 4) { | ||
throw new parser_error_1.CorruptSaveError(`Possibly corrupt. Indicated size of total save body (${dataLength}) does not match the uncompressed real size of ${totalUncompressedBodySize}.`); | ||
throw new CorruptSaveError(`Possibly corrupt. Indicated size of total save body (${dataLength}) does not match the uncompressed real size of ${totalUncompressedBodySize}.`); | ||
} | ||
@@ -160,6 +153,6 @@ return { | ||
if (!this.header) { | ||
throw new parser_error_1.ParserError('ParserError', 'Header must be set before objects can be read.'); | ||
throw new ParserError('ParserError', 'Header must be set before objects can be read.'); | ||
} | ||
if (this.header.saveVersion < 29) { | ||
throw new parser_error_1.UnsupportedVersionError('Support for < U6 is not yet implemented.'); | ||
throw new UnsupportedVersionError('Support for < U6 is not yet implemented.'); | ||
} | ||
@@ -172,7 +165,7 @@ const numSubLevels = this.readInt32(); | ||
this.onProgressCallback(this.getBufferProgress(), `reading level [${(j + 1)}/${(numSubLevels + 1)}] ${levelName}`); | ||
this.levels[j] = level_class_1.Level.ReadLevel(this, levelName, this.header.buildVersion); | ||
this.levels[j] = Level.ReadLevel(this, levelName, this.header.buildVersion); | ||
} | ||
// in U6/U7 there were more collectibles added that do not belong to a particular level | ||
if (this.getBufferPosition() < this.bufferView.byteLength) { | ||
this.trailingCollectedObjects = level_class_1.Level.ReadCollectablesList(this); | ||
this.trailingCollectedObjects = Level.ReadCollectablesList(this); | ||
} | ||
@@ -184,4 +177,4 @@ return this.levels; | ||
SaveReader.EPOCH_TICKS = 621355968000000000n; | ||
exports.SaveReader = SaveReader; | ||
class Conveyor { | ||
export { SaveReader }; | ||
export class Conveyor { | ||
static getConveyorBeltRegex() { | ||
@@ -259,3 +252,1 @@ return /\/Game\/FactoryGame\/Buildable\/Factory\/(ConveyorBeltMk[0-9]+)\/Build_(\1)\.Build_(\1)_C/g; | ||
} | ||
exports.Conveyor = Conveyor; | ||
//# sourceMappingURL=save-reader.js.map |
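The Conveyor helper above exposes a regex over conveyor belt build paths. A minimal sketch of using it to detect belts by type path; the path string is an illustrative example that matches the pattern shown in the diff, and the import assumes Conveyor is re-exported from the package entry point (otherwise import it from build/parser/save-reader.js):

// getConveyorBeltRegex() returns the ConveyorBeltMk<N> pattern shown above.
import { Conveyor } from "@etothepii/satisfactory-file-parser";

const typePath = "/Game/FactoryGame/Buildable/Factory/ConveyorBeltMk1/Build_ConveyorBeltMk1.Build_ConveyorBeltMk1_C";
const isBelt = Conveyor.getConveyorBeltRegex().test(typePath); // true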
{ | ||
"name": "@etothepii/satisfactory-file-parser", | ||
"author": "etothepii", | ||
"version": "0.0.3", | ||
"version": "0.0.4", | ||
"description": "A file parser for satisfactory files. Includes save files and blueprint files.", | ||
"main": "build/parser/parser.class.js", | ||
"main": "build/index.js", | ||
"keywords": [ | ||
@@ -17,3 +17,4 @@ "satisfactory", | ||
"test": "jest --config=jest.config.json", | ||
"build": "npm version patch && tsc" | ||
"build": "tsc && rollup -c", | ||
"prepublish": "npm version patch" | ||
}, | ||
@@ -34,2 +35,4 @@ "repository": { | ||
"jest": "^29.5.0", | ||
"rollup": "^3.20.2", | ||
"rollup-plugin-dts": "^5.3.0", | ||
"ts-jest": "^29.0.5", | ||
@@ -36,0 +39,0 @@ "typescript": "^5.0.2" |
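Because 0.0.4 switches the build output from CommonJS to ES modules and points main at build/index.js, consumption changes accordingly. A minimal sketch; whether the entry point re-exports SaveReader and SatisfactorySave is an assumption, though both names appear in this diff's compiled output:

// 0.0.3 compiled to CommonJS, so consumers reached into the build output with require:
// const { SaveReader } = require("@etothepii/satisfactory-file-parser/build/parser/save-reader");

// 0.0.4 compiles to ES modules with main: build/index.js; named ESM imports apply instead.
import { SatisfactorySave, SaveReader } from "@etothepii/satisfactory-file-parser";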
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package