@etothepii/satisfactory-file-parser
Comparing version 0.1.2 to 0.1.4
@@ -5,3 +5,2 @@ /// <reference types="node" />
import { SatisfactorySave } from "./satisfactory/save/satisfactory-save";
import { SaveProjectionConfig } from "./satisfactory/save/save-reader";
export declare class Parser {
@@ -15,4 +14,3 @@ static WriteSave(save: SatisfactorySave, onBinaryBeforeCompressing: (buffer: ArrayBuffer) => void, onHeader: (header: Uint8Array) => void, onChunk: (chunk: Uint8Array) => void): ChunkSummary[];
    static ParseBlueprintFiles(name: string, blueprintFile: Buffer, blueprintConfigFile: Buffer, onDecompressedBlueprintBody?: (buffer: ArrayBuffer) => void): Blueprint;
    static ProjectSave(save: SatisfactorySave, config: SaveProjectionConfig): SatisfactorySave;
    static JSONStringifyModified: (obj: any, indent?: number) => string;
}
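The `ParseBlueprintFiles` signature above is unchanged between these versions. A minimal usage sketch against that declaration; the file names and extensions are assumptions for illustration, only the signature is taken from the diff:

```ts
import { readFileSync } from "node:fs";
import { Parser } from "@etothepii/satisfactory-file-parser";

// Hypothetical blueprint file paths; adjust to your own files.
const blueprintFile = readFileSync("./MyBlueprint.sbp");
const blueprintConfigFile = readFileSync("./MyBlueprint.sbpcfg");

const blueprint = Parser.ParseBlueprintFiles(
    "MyBlueprint",
    blueprintFile,
    blueprintConfigFile,
    decompressedBody => console.log("decompressed body bytes:", decompressedBody.byteLength)
);
console.log(blueprint);
```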
@@ -6,3 +6,2 @@ "use strict";
const blueprint_writer_1 = require("./satisfactory/blueprint/blueprint-writer");
const level_class_1 = require("./satisfactory/save/level.class");
const satisfactory_save_1 = require("./satisfactory/save/satisfactory-save");
@@ -27,3 +26,4 @@ const save_reader_1 = require("./satisfactory/save/save-reader");
        onDecompressedSaveBody(reader.getBuffer());
        save.levels = reader.readLevelsU8();
        reader.readGrids();
        save.levels = reader.readLevels();
        save.compressionInfo = reader.compressionInfo;
@@ -67,14 +67,2 @@ save.trailingCollectedObjects = reader.trailingCollectedObjects;
    }
    static ProjectSave(save, config) {
        return {
            header: save.header,
            trailingCollectedObjects: save.trailingCollectedObjects,
            levels: save.levels.map(level => {
                const lvl = new level_class_1.Level(level.name);
                lvl.collectables = level.collectables;
                lvl.objects = level.objects.filter(obj => (0, save_reader_1.projectionFilterApplies)(config, obj));
                return lvl;
            })
        };
    }
}
@@ -81,0 +69,0 @@ Parser.JSONStringifyModified = (obj, indent = 0) => JSON.stringify(obj, (key, value) => {
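Since `Parser.ProjectSave`, `SaveProjectionConfig`, and `projectionFilterApplies` are removed in 0.1.4, callers that relied on save projection have to filter themselves. A rough sketch modeled on the removed whitelist behavior above; the root-level exports of `Level` and `isSaveEntity` are assumptions, and unlike the removed `ProjectSave` this version filters the levels in place:

```ts
import { SatisfactorySave, Level, isSaveEntity } from "@etothepii/satisfactory-file-parser";

// Keep only entities whose typePath starts with one of the given prefixes,
// mirroring the whitelist branch of the removed projectionFilterApplies.
function projectSave(save: SatisfactorySave, whitelist: string[]): SatisfactorySave {
    save.levels = save.levels.map(level => {
        const lvl = new Level(level.name);
        lvl.collectables = level.collectables;
        lvl.objects = level.objects.filter(obj =>
            isSaveEntity(obj) && whitelist.some(prefix => obj.typePath.startsWith(prefix))
        );
        return lvl;
    });
    return save;
}
```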
@@ -13,2 +13,3 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";
    static ParseObjects(reader: ByteReader): (SaveEntity | SaveComponent)[];
    private static ReadBlueprintObjectContents;
}
@@ -15,0 +16,0 @@ export declare class BlueprintConfigReader extends ByteReader {
@@ -11,3 +11,6 @@ "use strict";
const parser_error_1 = require("../../error/parser.error");
const SaveComponent_1 = require("../objects/SaveComponent");
const SaveEntity_1 = require("../objects/SaveEntity");
const level_class_1 = require("../save/level.class");
const save_reader_1 = require("../save/save-reader");
const util_types_1 = require("../structs/util.types");
@@ -20,3 +23,3 @@ class BlueprintReader extends byte_reader_class_1.ByteReader {
            maxChunkContentSize: 0,
            chunkHeaderSize: 48
            chunkHeaderSize: save_reader_1.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE
        };
@@ -64,4 +67,4 @@ }
        }
        const chunkCompressedLength = chunkHeader.getInt32(32, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
        const chunkUncompressedLength = chunkHeader.getInt32(40, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
        const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
        const chunkUncompressedLength = chunkHeader.getInt32(25, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
        totalUncompressedBodySize += chunkUncompressedLength;
@@ -104,5 +107,24 @@ const currentChunkSize = chunkCompressedLength;
        level_class_1.Level.ReadObjectHeaders(reader, objects, () => { });
        level_class_1.Level.ReadObjectContents('', reader, objects, 0, () => { });
        const someChecksumThing = reader.readInt32();
        BlueprintReader.ReadBlueprintObjectContents('', reader, objects, 0);
        const pos = reader.getBufferPosition();
        console.log(pos, reader.getBufferLength(), reader.getBufferProgress());
        return objects;
    }
    static ReadBlueprintObjectContents(levelName, reader, objectsList, buildVersion) {
        const countEntities = reader.readInt32();
        for (let i = 0; i < countEntities; i++) {
            const len = reader.readInt32();
            if (len === 0) {
                throw new parser_error_1.CorruptSaveError(`check number is a wrong value (${len}). This normally indicates a corrupt entity or save.`);
            }
            const obj = objectsList[i];
            if ((0, SaveEntity_1.isSaveEntity)(obj)) {
                SaveEntity_1.SaveEntity.ParseData(obj, len, reader, buildVersion, obj.typePath);
            }
            else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
                SaveComponent_1.SaveComponent.ParseData(obj, len, reader, buildVersion, obj.typePath);
            }
        }
    }
}
@@ -109,0 +131,0 @@ exports.BlueprintReader = BlueprintReader;
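The chunk header offsets changed along with the move to the exported 49-byte `DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE`: the uncompressed size is now read at byte 25 and the compressed size at byte 33 instead of 40 and 32. A standalone sketch of that read, assuming `chunkHeaderBytes` holds the raw 49-byte chunk header and a little-endian layout:

```ts
const DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49; // matches the exported constant shown in save-reader below

// chunkHeaderBytes: the raw chunk header sliced from the file body (assumed Uint8Array).
function readChunkSizes(chunkHeaderBytes: Uint8Array): { compressed: number; uncompressed: number } {
    const view = new DataView(chunkHeaderBytes.buffer, chunkHeaderBytes.byteOffset, DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE);
    const littleEndian = true;
    return {
        uncompressed: view.getInt32(25, littleEndian), // was offset 40 with the old 48-byte header
        compressed: view.getInt32(33, littleEndian),   // was offset 32 with the old 48-byte header
    };
}
```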
@@ -7,2 +7,3 @@ import { ByteWriter } from "../../..";
import { AbstractBaseProperty } from "./Property";
import { SaveObject } from "./SaveObject";
export type SpecialAnyProperty = {} | PowerLineSpecialProperty;
@@ -31,13 +32,9 @@ export type PowerLineSpecialProperty = {
export declare class DataFields {
    properties: AbstractBaseProperty[];
    specialProperties: SpecialAnyProperty;
    trailingData: number[];
    shouldBeNulled: boolean;
    constructor();
    static Parse(length: number, reader: BinaryReadable, buildVersion: number, typePath: string): DataFields;
    static ParseProperties(obj: SaveObject, length: number, reader: BinaryReadable, buildVersion: number, typePath: string): void;
    static ParseAdditionalSpecialProperties(reader: BinaryReadable, typePath: string, remainingLen: number): SpecialAnyProperty;
    static ParseProperty(reader: BinaryReadable, buildVersion: number, propertyName: string): AbstractBaseProperty | null;
    static Serialize(writer: SaveWriter, fields: DataFields, buildVersion: number, typePath: string): void;
    static Serialize(obj: SaveObject, writer: SaveWriter, buildVersion: number, typePath: string): void;
    static SerializeAdditionalSpecialProperties(writer: ByteWriter, typePath: string, property: SpecialAnyProperty): void;
    static SerializeProperty(writer: ByteWriter, property: AbstractBaseProperty, propertyName: string, buildVersion: number): void;
}
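The `DataFields` instance model is dropped: `Parse`, which returned a `DataFields`, is replaced by `ParseProperties`, which writes parsed data directly onto the passed `SaveObject`, and `Serialize` now takes the object first. A before/after sketch against the declarations above; whether these types are re-exported from the package root is an assumption:

```ts
import { BinaryReadable, DataFields, SaveObject, SaveWriter } from "@etothepii/satisfactory-file-parser";

// 0.1.2: const fields = DataFields.Parse(length, reader, buildVersion, obj.typePath); obj.dataFields = fields;
// 0.1.4: properties, specialProperties and trailingData land on the SaveObject itself.
function parseObjectData(obj: SaveObject, length: number, reader: BinaryReadable, buildVersion: number): void {
    DataFields.ParseProperties(obj, length, reader, buildVersion, obj.typePath);
}

function serializeObjectData(obj: SaveObject, writer: SaveWriter, buildVersion: number): void {
    // Note the flipped argument order versus 0.1.2's Serialize(writer, fields, ...).
    DataFields.Serialize(obj, writer, buildVersion, obj.typePath);
}
```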
@@ -8,13 +8,8 @@ "use strict";
    constructor() {
        this.properties = [];
        this.specialProperties = {};
        this.trailingData = [];
        this.shouldBeNulled = false;
    }
    static Parse(length, reader, buildVersion, typePath) {
    static ParseProperties(obj, length, reader, buildVersion, typePath) {
        const start = reader.getBufferPosition();
        const fields = new DataFields();
        obj.properties = {};
        if (length === 0) {
            fields.shouldBeNulled = true;
            return fields;
            return;
        }
@@ -24,20 +19,18 @@ let propertyName = reader.readString();
            const property = DataFields.ParseProperty(reader, buildVersion, propertyName);
            fields.properties.push(property);
            obj.properties[property.name] = property;
            propertyName = reader.readString();
        }
        let int1 = reader.readInt32();
        if (int1 !== 0) {
        let padding = reader.readInt32();
        if (padding !== 0) {
        }
        const remainingLen = length - (reader.getBufferPosition() - start);
        const oldRemainingLen = start + length - reader.getBufferPosition() - 4;
        fields.specialProperties = DataFields.ParseAdditionalSpecialProperties(reader, typePath, remainingLen);
        obj.specialProperties = DataFields.ParseAdditionalSpecialProperties(reader, typePath, remainingLen);
        const end = reader.getBufferPosition();
        const newWayOfCalculatingRemainingSize = length - (end - start);
        if (newWayOfCalculatingRemainingSize > 0) {
            reader.readBytes(newWayOfCalculatingRemainingSize);
            obj.trailingData = Array.from(reader.readBytes(newWayOfCalculatingRemainingSize));
        }
        else if (newWayOfCalculatingRemainingSize < 0) {
            console.warn(`Unexpected. Did I read too much or is save corrupt !? bytes left to read is ${newWayOfCalculatingRemainingSize}`);
            throw new Error(`Unexpected. Read more bytes than are indicated for this entity. bytes left to read is ${newWayOfCalculatingRemainingSize}`);
        }
        return fields;
    }
@@ -185,4 +178,4 @@ static ParseAdditionalSpecialProperties(reader, typePath, remainingLen) {
    }
    static Serialize(writer, fields, buildVersion, typePath) {
        for (const property of fields.properties) {
    static Serialize(obj, writer, buildVersion, typePath) {
        for (const property of Object.values(obj.properties)) {
            writer.writeString(property.name);
@@ -193,4 +186,4 @@ DataFields.SerializeProperty(writer, property, property.name, buildVersion);
        writer.writeInt32(0);
        DataFields.SerializeAdditionalSpecialProperties(writer, typePath, fields.specialProperties);
        writer.writeBytesArray(fields.trailingData);
        DataFields.SerializeAdditionalSpecialProperties(writer, typePath, obj.specialProperties);
        writer.writeBytesArray(obj.trailingData);
    }
@@ -197,0 +190,0 @@ static SerializeAdditionalSpecialProperties(writer, typePath, property) {
@@ -37,12 +37,5 @@ "use strict";
    static ParseData(entity, length, reader, buildVersion, typePath) {
        const typeOrWhat = reader.readInt32();
        const something = reader.readInt32();
        const afterSizeIndicator = reader.getBufferPosition();
        var newLen = length - 12;
        entity.parentObjectRoot = reader.readString();
        if (entity.parentObjectRoot.length > 0)
            newLen -= entity.parentObjectRoot.length + 1;
        entity.parentObjectName = reader.readString();
        if (entity.parentObjectName.length > 0)
            newLen -= entity.parentObjectName.length + 1;
        var componentCount = reader.readInt32();
@@ -52,5 +45,4 @@ for (let i = 0; i < componentCount; i++) {
            entity.components.push(componentRef);
            newLen -= 10 + componentRef.levelName.length + componentRef.pathName.length;
        }
        const remainingSize = something - (reader.getBufferPosition() - afterSizeIndicator);
        const remainingSize = length - (reader.getBufferPosition() - afterSizeIndicator);
        return SaveObject_1.SaveObject.ParseData(entity, remainingSize, reader, buildVersion, typePath);
@@ -57,0 +49,0 @@ }
import { BinaryReadable } from "../../byte/binary-readable.interface";
import { ByteWriter } from "../../byte/byte-writer.class";
import { DataFields } from "./DataFields";
import { SpecialAnyProperty } from "./DataFields";
import { AbstractBaseProperty } from "./Property";
export interface SaveObjectHeader {
@@ -13,3 +14,9 @@ typePath: string;
    instanceName: string;
    dataFields: DataFields;
    properties: {
        [name: string]: AbstractBaseProperty;
    };
    specialProperties: SpecialAnyProperty;
    trailingData: number[];
    unknownType1: number;
    unknownType2: number;
    constructor(typePath: string, rootObject: string, instanceName: string);
@@ -16,0 +23,0 @@ protected static ParseHeader(reader: BinaryReadable, obj: SaveObject): void;
@@ -10,3 +10,7 @@ "use strict";
        this.instanceName = instanceName;
        this.dataFields = new DataFields_1.DataFields();
        this.properties = {};
        this.specialProperties = {};
        this.trailingData = [];
        this.unknownType1 = 0;
        this.unknownType2 = 0;
    }
@@ -24,8 +28,8 @@ static ParseHeader(reader, obj) {
    static ParseData(obj, length, reader, buildVersion, typePath) {
        obj.dataFields = DataFields_1.DataFields.Parse(length, reader, buildVersion, typePath);
        DataFields_1.DataFields.ParseProperties(obj, length, reader, buildVersion, typePath);
    }
    static SerializeData(writer, obj, buildVersion) {
        DataFields_1.DataFields.Serialize(writer, obj.dataFields, buildVersion, obj.typePath);
        DataFields_1.DataFields.Serialize(obj, writer, buildVersion, obj.typePath);
    }
}
exports.SaveObject = SaveObject;
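For consumers, the practical change is where parsed properties live. In 0.1.2 they were an array on `obj.dataFields.properties`; in 0.1.4 they are a name-keyed map on `obj.properties`, next to the new `specialProperties`, `trailingData`, `unknownType1`, and `unknownType2` fields. A migration sketch; the property name `mSomeProperty` is a hypothetical placeholder, and the root imports are assumptions:

```ts
import { SaveEntity, SaveComponent, AbstractBaseProperty } from "@etothepii/satisfactory-file-parser";

// 'mSomeProperty' is an illustrative property name, not one guaranteed to exist.
function readProperty(obj: SaveEntity | SaveComponent): AbstractBaseProperty | undefined {
    // 0.1.2: obj.dataFields.properties.find(p => p.name === 'mSomeProperty')
    // 0.1.4: direct lookup in the name-keyed map:
    return obj.properties['mSomeProperty'];
}
```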
@@ -80,6 +80,4 @@ "use strict";
        }
        const unk = reader.readInt32();
    }
    static async ReadObjectContents(levelName, reader, objectsList, buildVersion, onProgressCallback) {
        const posBefore = reader.getBufferPosition();
        const countEntities = reader.readInt32();
@@ -93,21 +91,18 @@ if (countEntities !== objectsList.length) {
        }
            const len = reader.readInt32();
            if (len === 0) {
                throw new Error(`check number is a wrong value (${len}). This normally indicates a corrupt entity or save.`);
            }
            const obj = objectsList[i];
            obj.unknownType1 = reader.readInt32();
            obj.unknownType2 = reader.readInt32();
            const binarySize = reader.readInt32();
            const before = reader.getBufferPosition();
            const obj = objectsList[i];
            if ((0, SaveEntity_1.isSaveEntity)(obj)) {
                SaveEntity_1.SaveEntity.ParseData(obj, len, reader, buildVersion, obj.typePath);
                SaveEntity_1.SaveEntity.ParseData(obj, binarySize, reader, buildVersion, obj.typePath);
            }
            else if ((0, SaveComponent_1.isSaveComponent)(obj)) {
                const wtf1 = reader.readInt32();
                const wtf2 = reader.readInt32();
                SaveComponent_1.SaveComponent.ParseData(obj, wtf2, reader, buildVersion, obj.typePath);
                SaveComponent_1.SaveComponent.ParseData(obj, binarySize, reader, buildVersion, obj.typePath);
            }
            const after = reader.getBufferPosition();
            if (after - before !== len) {
            if (after - before !== binarySize) {
                console.warn('entity may be corrupt', obj.instanceName);
            }
        }
        const posAfter = reader.getBufferPosition();
    }
@@ -117,29 +112,21 @@ static ReadLevel(reader, levelName, buildVersion) {
        const levelStartPos = reader.getBufferPosition();
        const placeholderHead = reader.readInt32();
        if (placeholderHead === 8) {
            const morePlaceholder = reader.readBytes(28);
        }
        else {
            reader.skipBytes(-4);
        }
        const lookahead1 = reader.readInt32();
        const lookahead2 = reader.readInt32();
        if (lookahead1 !== 0 && lookahead2 !== 0) {
            reader.skipBytes(-8);
            return level;
        }
        else if (lookahead1 !== 0 && lookahead2 === 0) {
            reader.skipBytes(-8);
        }
        else {
            throw new Error('Unexpected. what now? TODO FIXME');
        }
        const binLen = reader.readInt32();
        const headersBinLen = reader.readInt32();
        const unk = reader.readInt32();
        const posBeforeHeaders = reader.getBufferPosition();
        Level.ReadObjectHeaders(reader, level.objects, reader.onProgressCallback);
        const remainingSize = headersBinLen - (reader.getBufferPosition() - posBeforeHeaders);
        if (remainingSize > 0) {
            reader.readBytes(remainingSize);
        }
        const objectContentsBinLen = reader.readInt32();
        reader.onProgressCallback(reader.getBufferProgress());
        level.collectables = Level.ReadCollectablesList(reader);
        const posBeforeContents = reader.getBufferPosition();
        Level.ReadObjectContents(levelName, reader, level.objects, buildVersion, reader.onProgressCallback);
        const posAfterContents = reader.getBufferPosition();
        if (posAfterContents - posBeforeContents !== objectContentsBinLen) {
            console.warn('save seems corrupt.', this.name);
        }
        reader.onProgressCallback(reader.getBufferProgress());
        Level.ReadCollectablesList(reader);
        level.collectables = Level.ReadCollectablesList(reader);
        return level;
@@ -146,0 +133,0 @@ }
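The per-object framing in `ReadObjectContents` now validates consumption against the freshly read `binarySize` rather than the older `len` value. A minimal sketch of that size-check pattern; `reader`, `parse`, and `instanceName` here are placeholders, not the library's actual call sites:

```ts
// Record the position before parsing one object's content, parse it,
// then verify exactly `binarySize` bytes were consumed (as the code above does).
function parseWithSizeCheck(
    reader: { getBufferPosition(): number },
    binarySize: number,
    instanceName: string,
    parse: () => void
): void {
    const before = reader.getBufferPosition();
    parse();
    const consumed = reader.getBufferPosition() - before;
    if (consumed !== binarySize) {
        console.warn('entity may be corrupt', instanceName, `expected ${binarySize} bytes, consumed ${consumed}`);
    }
}
```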
import { ChunkCompressionInfo } from "../../file.types";
import { ObjectReference } from "../objects/ObjectReference";
import { Level } from "./level.class";
import { Grids } from "./save-reader";
import { SatisfactorySaveHeader } from "./save.types";
export declare class SatisfactorySave {
    header: SatisfactorySaveHeader;
    grids: Grids;
    levels: Level[];
@@ -8,0 +10,0 @@ trailingCollectedObjects: ObjectReference[];
@@ -6,2 +6,3 @@ "use strict";
    constructor(header) {
        this.grids = {};
        this.levels = [];
@@ -8,0 +9,0 @@ this.trailingCollectedObjects = [];
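`SatisfactorySave` gains a `grids` field typed as `Grids` (declared in `save-reader` below): a map from parent grid name to LOD index to level instance names. A small sketch of walking that structure; how the `save` instance is obtained is out of scope and assumed:

```ts
import { SatisfactorySave } from "@etothepii/satisfactory-file-parser";

// Walk the new grids structure: parent grid name -> LOD index -> level instance names.
function printGrids(save: SatisfactorySave): void {
    for (const [parentName, lodLevels] of Object.entries(save.grids)) {
        for (const [lod, levelInstanceNames] of Object.entries(lodLevels)) {
            console.log(`${parentName} L${lod}: ${levelInstanceNames.length} level instances`);
        }
    }
}
```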
@@ -5,17 +5,11 @@ import { ByteReader } from "../../byte/byte-reader.class";
import { ObjectReference } from "../objects/ObjectReference";
import { SaveComponent } from "../objects/SaveComponent";
import { SaveEntity } from "../objects/SaveEntity";
import { Level } from "./level.class";
import { RoughSaveVersion, SatisfactorySaveHeader } from "./save.types";
export type EntityPathFilter = {
    behavior: 'whitelist' | 'blacklist';
    list: string[];
};
export type SaveProjectionConfig = {
    entityPathFilter: EntityPathFilter;
    includeComponents: boolean;
};
export declare const projectionFilterApplies: (config: SaveProjectionConfig, object: SaveComponent | SaveEntity) => boolean;
export type ReadMode = 'stream' | 'whole';
export declare const DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49;
export type Grids = {
    [parentName: string]: {
        [level: number]: string[];
    };
};
export declare class SaveReader extends ByteReader {
@@ -35,4 +29,5 @@ onProgressCallback: (progress: number, msg?: string) => void;
    };
    readLevelsU8(): Level[];
    readGrids: () => Grids;
    readLevels(): Level[];
    readLevelsAsynchronously(writer: SaveStreamWriter): Promise<Level[]>;
}
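`readLevelsU8()` is replaced by the pair `readGrids()` and `readLevels()`, and the Parser diff above calls them in exactly that order after decompressing the save body. A usage sketch against the declaration; obtaining a fully prepared `SaveReader` (header read, body inflated) is assumed here, as is the root-level export of these types:

```ts
import { Grids, Level, SaveReader } from "@etothepii/satisfactory-file-parser";

// `reader` is assumed to have already read the header and inflated the chunks,
// mirroring what Parser does internally before this point.
function readBody(reader: SaveReader): { grids: Grids; levels: Level[] } {
    const grids = reader.readGrids();   // grid/partition data comes before the levels
    const levels = reader.readLevels(); // replaces the former readLevelsU8()
    return { grids, levels };
}
```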
@@ -6,3 +6,3 @@ "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveReader = exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = exports.projectionFilterApplies = void 0;
exports.SaveReader = exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = void 0;
const pako_1 = __importDefault(require("pako"));
@@ -12,24 +12,4 @@ const alignment_enum_1 = require("../../byte/alignment.enum");
const parser_error_1 = require("../../error/parser.error");
const SaveComponent_1 = require("../objects/SaveComponent");
const SaveEntity_1 = require("../objects/SaveEntity");
const asynchronous_level_class_1 = require("./asynchronous-level.class");
const level_class_1 = require("./level.class");
const projectionFilterApplies = (config, object) => {
    let cond1 = false;
    if ((0, SaveEntity_1.isSaveEntity)(object) || (config.includeComponents && (0, SaveComponent_1.isSaveComponent)(object))) {
        cond1 = true;
    }
    let cond2 = true;
    if ((0, SaveEntity_1.isSaveEntity)(object)) {
        cond2 = false;
        if (config.entityPathFilter.behavior === 'whitelist') {
            cond2 = config.entityPathFilter.list.find(en => object.typePath.startsWith(en)) !== undefined;
        }
        else if (config.entityPathFilter.behavior === 'blacklist') {
            cond2 = config.entityPathFilter.list.find(en => object.typePath.startsWith(en)) === undefined;
        }
    }
    return cond1 && cond2;
};
exports.projectionFilterApplies = projectionFilterApplies;
exports.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE = 49;
@@ -47,2 +27,36 @@ class SaveReader extends byte_reader_class_1.ByteReader {
        };
        this.readGrids = () => {
            const unk = this.readInt32();
            const cannotBeLevelCountWhatIsIt = this.readInt32();
            const noneString = this.readString();
            const unk2 = this.readInt32();
            const hm1 = this.readInt32();
            const unk3 = this.readInt32();
            const noneString2 = this.readString();
            const grids = {};
            const readLevelsList = (childrenCount) => {
                const hmm = this.readInt32();
                const parentName = this.readString();
                const whatever1 = this.readInt32();
                const whatever2 = this.readInt32();
                grids[parentName] = {};
                for (let i = 0; i < childrenCount; i++) {
                    const binaryLenIGuess = this.readInt32();
                    const levelInstanceName = this.readString();
                    const lod = /\_L([0-9]+)\_/.exec(levelInstanceName)[1];
                    const arr = grids[parentName][Number(lod)];
                    if (!arr) {
                        grids[parentName][Number(lod)] = [];
                    }
                    grids[parentName][Number(lod)].push(levelInstanceName);
                }
            };
            readLevelsList(1379);
            readLevelsList(0);
            readLevelsList(972);
            readLevelsList(0);
            readLevelsList(0);
            const unk5 = this.readInt32();
            return grids;
        };
    }
@@ -88,6 +102,3 @@ readHeader() {
        if (this.header.saveHeaderType >= 13) {
            const unk1 = this.readInt32();
            const unk2 = this.readInt32();
            const unk3 = this.readBytes(16);
            const unk4 = this.readInt32();
            this.header.unknownStuff = Array.from(this.readBytes(28));
        }
@@ -155,3 +166,3 @@ if (this.header.saveVersion >= 21) {
    }
    readLevelsU8() {
    readLevels() {
        if (!this.header) {
@@ -166,32 +177,2 @@ throw new parser_error_1.ParserError('ParserError', 'Header must be set before objects can be read.');
        }
        const unk = this.readInt32();
        const cannotBeLevelCountWhatIsIt = this.readInt32();
        const noneString = this.readString();
        const unk2 = this.readInt32();
        const hm1 = this.readInt32();
        const unk3 = this.readInt32();
        const noneString2 = this.readString();
        const grids = {};
        const readLevelsList = (childrenCount) => {
            const hmm = this.readInt32();
            const parentName = this.readString();
            const whatever = this.readBytes(8);
            grids[parentName] = {};
            for (let i = 0; i < childrenCount; i++) {
                const binaryLenIGuess = this.readInt32();
                const levelInstanceName = this.readString();
                const lod = /\_L([0-9]+)\_/.exec(levelInstanceName)[1];
                const arr = grids[parentName][Number(lod)];
                if (!arr) {
                    grids[parentName][Number(lod)] = [];
                }
                grids[parentName][Number(lod)].push(levelInstanceName);
            }
        };
        readLevelsList(1379);
        readLevelsList(0);
        readLevelsList(972);
        readLevelsList(0);
        readLevelsList(0);
        const unk5 = this.readInt32();
        const levels = [];
@@ -210,2 +191,3 @@ const levelCount = this.readInt32();
        level_class_1.Level.ReadObjectHeaders(this, appendedLevel.objects, this.onProgressCallback);
        const unk = this.readInt32();
        appendedLevel.collectables = level_class_1.Level.ReadCollectablesList(this);
@@ -212,0 +194,0 @@ level_class_1.Level.ReadObjectContents(appendedLevel.name, this, appendedLevel.objects, this.header.buildVersion, this.onProgressCallback);
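Both the removed inline grid parsing and the new `readGrids` derive a level instance's LOD bucket from its name with the regex `/\_L([0-9]+)\_/`. A standalone illustration; the level instance name below is a made-up example, not taken from a real save:

```ts
// Hypothetical level instance name, used only to show what the regex captures.
const levelInstanceName = "Persistent_Level:PersistentLevel.LS_MainGrid_L1_X3_Y5";

const match = /_L([0-9]+)_/.exec(levelInstanceName);
const lod = match ? Number(match[1]) : undefined; // -> 1, used as the key in grids[parentName][lod]
console.log(lod);
```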
@@ -41,2 +41,5 @@ "use strict";
        }
        if (header.saveHeaderType >= 13) {
            writer.writeBytesArray(header.unknownStuff);
        }
        if (header.saveVersion >= 21) {
@@ -43,0 +46,0 @@ }
@@ -24,2 +24,3 @@ export interface ModData {
    fEditorObjectVersion?: number;
    unknownStuff?: number[];
}
@@ -26,0 +27,0 @@ export type RoughSaveVersion = '<U6' | 'U6/U7' | 'U8+';
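The 28 bytes that were previously read and discarded after a `saveHeaderType >= 13` header are now kept on the header as `unknownStuff?: number[]` and written back by the save writer, so header round-trips preserve them. A rough sketch of that idea using plain arrays in place of the library's reader/writer, which are assumptions here:

```ts
// Round-trip sketch for the new unknownStuff field. Offsets and surrounding
// header fields are omitted; only the 28 opaque bytes are modeled.
interface HeaderLike { saveHeaderType: number; unknownStuff?: number[]; }

function readUnknownStuff(header: HeaderLike, bytes: Uint8Array): void {
    if (header.saveHeaderType >= 13) {
        header.unknownStuff = Array.from(bytes.subarray(0, 28)); // kept verbatim instead of discarded
    }
}

function writeUnknownStuff(header: HeaderLike, out: number[]): void {
    if (header.saveHeaderType >= 13 && header.unknownStuff) {
        out.push(...header.unknownStuff); // written back unchanged, as the save writer now does
    }
}
```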
{
    "name": "@etothepii/satisfactory-file-parser",
    "author": "etothepii",
    "version": "0.1.2",
    "version": "0.1.4",
    "description": "A file parser for satisfactory files. Includes save files and blueprint files.",
@@ -6,0 +6,0 @@ "types": "./build/index.d.ts",
@@ -22,3 +22,3 @@ # Satisfactory File Parser
| U6 + U7 | ✅ 0.0.1 - 0.0.34 |
| U8 | ⚠️ >= 0.1.0 (.sav file reading only so far) |
| U8 | ⚠️ >= 0.1.3 (Reading only) |
@@ -25,0 +25,0 @@
License Policy Violation
This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package