@etothepii/satisfactory-file-parser - npm package version comparison

Comparing version 0.3.7 to 0.4.3
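The headline changes in this range: the struct helpers in util.types move from free functions into col4, vec4, vec3, vec2 and Transform namespaces (vec3.ParseF instead of ParseVec3f, col4.SerializeRGBA instead of SerializeCol4RGBA); ObjectReference and GUID move under objects/values/; struct properties gain ClientIdentityInfo and inventory item state support; array, set and map properties learn SoftObjectProperty, GUID-keyed StructProperty and Int64Property handling; and the save reader recognizes Satisfactory 1.0 saves ('U1.0+'). Root-level imports appear unaffected (the index still re-exports ObjectReference and GUID); only code that deep-imports build/ paths needs to follow the moves. A hedged sketch of that, assuming a hypothetical consumer that deep-imported ObjectReference in 0.3.7:

// 0.3.7 (hypothetical deep import into the build output; only the path changed):
// import { ObjectReference } from '@etothepii/satisfactory-file-parser/build/parser/satisfactory/objects/ObjectReference';

// 0.4.3: the module now lives under objects/values/, and the root re-export is unchanged:
import { ObjectReference } from '@etothepii/satisfactory-file-parser';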

build/parser/satisfactory/objects/values/GUID.d.ts


build/index.d.ts
export * from './parser/satisfactory/blueprint/blueprint.types';
export { DataFields } from './parser/satisfactory/objects/DataFields';
export * from './parser/satisfactory/objects/GUIDInfo';
export { ObjectReference } from './parser/satisfactory/objects/ObjectReference';
export * from './parser/satisfactory/objects/Property';
export { SaveComponent } from './parser/satisfactory/objects/SaveComponent';
export { SaveEntity } from './parser/satisfactory/objects/SaveEntity';
export * from './parser/satisfactory/objects/ue/GUID';
export * from './parser/satisfactory/objects/ue/MD5Hash';
export * from './parser/satisfactory/objects/values/GUID';
export { ObjectReference } from './parser/satisfactory/objects/values/ObjectReference';
export { Level } from './parser/satisfactory/save/level.class';

@@ -11,0 +11,0 @@ export { SatisfactorySave } from './parser/satisfactory/save/satisfactory-save';

@@ -17,3 +17,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadableStreamParser = exports.Parser = exports.SaveStreamWriter = exports.SaveStreamJsonStringifier = exports.SaveWriter = exports.SaveReader = exports.BlueprintWriter = exports.BlueprintConfigWriter = exports.BlueprintReader = exports.BlueprintConfigReader = exports.ByteWriter = exports.ByteReader = exports.SatisfactorySave = exports.Level = exports.SaveEntity = exports.SaveComponent = exports.ObjectReference = exports.DataFields = void 0;
exports.ReadableStreamParser = exports.Parser = exports.SaveStreamWriter = exports.SaveStreamJsonStringifier = exports.SaveWriter = exports.SaveReader = exports.BlueprintWriter = exports.BlueprintConfigWriter = exports.BlueprintReader = exports.BlueprintConfigReader = exports.ByteWriter = exports.ByteReader = exports.SatisfactorySave = exports.Level = exports.ObjectReference = exports.SaveEntity = exports.SaveComponent = exports.DataFields = void 0;
__exportStar(require("./parser/satisfactory/blueprint/blueprint.types"), exports);

@@ -23,4 +23,2 @@ var DataFields_1 = require("./parser/satisfactory/objects/DataFields");

__exportStar(require("./parser/satisfactory/objects/GUIDInfo"), exports);
var ObjectReference_1 = require("./parser/satisfactory/objects/ObjectReference");
Object.defineProperty(exports, "ObjectReference", { enumerable: true, get: function () { return ObjectReference_1.ObjectReference; } });
__exportStar(require("./parser/satisfactory/objects/Property"), exports);

@@ -31,4 +29,6 @@ var SaveComponent_1 = require("./parser/satisfactory/objects/SaveComponent");

Object.defineProperty(exports, "SaveEntity", { enumerable: true, get: function () { return SaveEntity_1.SaveEntity; } });
__exportStar(require("./parser/satisfactory/objects/ue/GUID"), exports);
__exportStar(require("./parser/satisfactory/objects/ue/MD5Hash"), exports);
__exportStar(require("./parser/satisfactory/objects/values/GUID"), exports);
var ObjectReference_1 = require("./parser/satisfactory/objects/values/ObjectReference");
Object.defineProperty(exports, "ObjectReference", { enumerable: true, get: function () { return ObjectReference_1.ObjectReference; } });
var level_class_1 = require("./parser/satisfactory/save/level.class");

@@ -35,0 +35,0 @@ Object.defineProperty(exports, "Level", { enumerable: true, get: function () { return level_class_1.Level; } });

@@ -8,2 +8,2 @@ "use strict";

Alignment[Alignment["LITTLE_ENDIAN"] = 1] = "LITTLE_ENDIAN";
})(Alignment = exports.Alignment || (exports.Alignment = {}));
})(Alignment || (exports.Alignment = Alignment = {}));

@@ -138,3 +138,3 @@ "use strict";

}
exports.ByteWriter = ByteWriter;
ByteWriter.IsASCIICompatible = (value) => /^[\x00-\x7F]*$/.test(value);
exports.ByteWriter = ByteWriter;

@@ -7,2 +7,2 @@ "use strict";

CompressionAlgorithmCode[CompressionAlgorithmCode["ZLIB"] = 3] = "ZLIB";
})(CompressionAlgorithmCode = exports.CompressionAlgorithmCode || (exports.CompressionAlgorithmCode = {}));
})(CompressionAlgorithmCode || (exports.CompressionAlgorithmCode = CompressionAlgorithmCode = {}));

@@ -1,2 +0,1 @@

/// <reference types="node" />
import { ChunkSummary } from "./file.types";

@@ -3,0 +2,0 @@ import { Blueprint } from "./satisfactory/blueprint/blueprint.types";

@@ -71,2 +71,3 @@ "use strict";

}
exports.Parser = Parser;
Parser.JSONStringifyModified = (obj, indent = 0) => JSON.stringify(obj, (key, value) => {

@@ -81,2 +82,1 @@ if (typeof value === 'bigint') {

}, indent);
exports.Parser = Parser;
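This hunk only reorders the exports.Parser assignment relative to the static JSONStringifyModified definition (a compiler emit change); the helper itself is unchanged. For orientation, a hedged usage sketch; the replacer body is truncated in this diff, so the exact bigint output format is an assumption:

import { Parser } from '@etothepii/satisfactory-file-parser';

// JSON.stringify with the bigint-aware replacer seen above; indent works like
// JSON.stringify's third argument. (Bigint output format assumed, not shown here.)
const json = Parser.JSONStringifyModified({ ticks: 621355968000000000n }, 2);
console.log(json);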

@@ -28,3 +28,3 @@ "use strict";

const versionThing = reader.readBytes(2 * 4);
const dimensions = (0, util_types_1.ParseVec3Int)(reader);
const dimensions = util_types_1.vec3.ParseInt(reader);
let itemTypeCount = reader.readInt32();

@@ -140,3 +140,3 @@ const itemCosts = new Array(itemTypeCount).fill(['', 0]);

const unk3 = reader.readInt32();
const colorMaybe = (0, util_types_1.ParseCol4RGBA)(reader);
const colorMaybe = util_types_1.col4.ParseRGBA(reader);
return {

@@ -143,0 +143,0 @@ description,

@@ -84,5 +84,5 @@ "use strict";

writer.writeInt32(config.iconID);
(0, util_types_1.SerializeCol4RGBA)(writer, config.color);
util_types_1.col4.SerializeRGBA(writer, config.color);
}
}
exports.BlueprintConfigWriter = BlueprintConfigWriter;

@@ -1,8 +0,8 @@

import { ByteWriter } from "../../..";
import { BinaryReadable } from "../../byte/binary-readable.interface";
import { ByteWriter } from '../../byte/byte-writer.class';
import { SaveWriter } from "../save/save-writer";
import { vec3 } from "../structs/util.types";
import { ObjectReference } from "./ObjectReference";
import { AbstractBaseProperty } from "./Property";
import { SaveObject } from "./SaveObject";
import { ObjectReference } from "./values/ObjectReference";
export type SpecialAnyProperty = {} | PowerLineSpecialProperty | PlayerSpecialProperty;

@@ -9,0 +9,0 @@ export type PowerLineSpecialProperty = {

@@ -66,4 +66,4 @@ "use strict";

if (remainingLen - (reader.getBufferPosition() - start) >= 24) {
property.sourceTranslation = (0, util_types_1.ParseVec3f)(reader);
property.targetTranslation = (0, util_types_1.ParseVec3f)(reader);
property.sourceTranslation = util_types_1.vec3.ParseF(reader);
property.targetTranslation = util_types_1.vec3.ParseF(reader);
}

@@ -70,0 +70,0 @@ break;

@@ -1,3 +0,4 @@

import { BinaryReadable, ByteWriter } from '../../../';
import { GUID } from './ue/GUID';
import { BinaryReadable } from '../../byte/binary-readable.interface';
import { ByteWriter } from '../../byte/byte-writer.class';
import { GUID } from './values/GUID';
export type GUIDInfo = undefined | GUID;

@@ -4,0 +5,0 @@ export declare namespace GUIDInfo {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GUIDInfo = void 0;
const GUID_1 = require("./ue/GUID");
const GUID_1 = require("./values/GUID");
var GUIDInfo;

@@ -25,3 +25,3 @@ (function (GUIDInfo) {

};
})(GUIDInfo = exports.GUIDInfo || (exports.GUIDInfo = {}));
})(GUIDInfo || (exports.GUIDInfo = GUIDInfo = {}));
;

@@ -1,6 +0,6 @@

import { ByteWriter } from "../../..";
import { BinaryReadable } from "../../byte/binary-readable.interface";
import { ByteWriter } from '../../byte/byte-writer.class';
import { col4, vec3, vec4 } from "../structs/util.types";
import { GUIDInfo } from './GUIDInfo';
import { ObjectReference } from "./ObjectReference";
import { ObjectReference } from "./values/ObjectReference";
export type PropertiesMap = {

@@ -189,5 +189,9 @@ [name: string]: AbstractBaseProperty | AbstractBaseProperty[];

itemName: string;
unk2: string;
unk3: string;
properties: any[];
hasItemState: number;
itemState?: {
unk: number;
pathName: string;
binarySize: number;
itemStateRaw: number[];
};
};

@@ -202,3 +206,10 @@ export type FICFrameRangeStructPropertyValue = {

};
export type GENERIC_STRUCT_PROPERTY_VALUE = BasicMultipleStructPropertyValue | BasicStructPropertyValue | BoxStructPropertyValue | RailroadTrackPositionStructPropertyValue | InventoryItemStructPropertyValue | FICFrameRangeStructPropertyValue | DynamicStructPropertyValue | col4 | vec3 | vec4 | string;
export type ClientIdentityInfo = {
id: string;
clientUnk1: number;
clientUnk2: number;
clientHashSize: number;
someClientHash: number[];
};
export type GENERIC_STRUCT_PROPERTY_VALUE = BasicMultipleStructPropertyValue | BasicStructPropertyValue | BoxStructPropertyValue | RailroadTrackPositionStructPropertyValue | InventoryItemStructPropertyValue | FICFrameRangeStructPropertyValue | ClientIdentityInfo | DynamicStructPropertyValue | col4 | vec3 | vec4 | string;
export declare class StructProperty extends AbstractBaseProperty {

@@ -251,3 +262,2 @@ subtype: string;

valueType: string;
structKeyProxy: MAP_STRUCT_KEY_PROXY;
modeType: number;

@@ -254,0 +264,0 @@ modeUnk1: string | undefined;
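ClientIdentityInfo is the new struct value declared above (and added to the GENERIC_STRUCT_PROPERTY_VALUE union). A minimal sketch of its shape with placeholder data; the unk-prefixed fields are unidentified in the parser itself, and the root re-export of the type via the Property module is assumed:

import type { ClientIdentityInfo } from '@etothepii/satisfactory-file-parser'; // assumed re-export

const identity: ClientIdentityInfo = {
    id: 'client-id-string',       // placeholder
    clientUnk1: 0,                // read as int32; meaning unknown
    clientUnk2: 1,                // read as a single byte; meaning unknown
    clientHashSize: 4,
    someClientHash: [1, 2, 3, 4], // raw bytes, length = clientHashSize
};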

@@ -7,3 +7,5 @@ "use strict";

const GUIDInfo_1 = require("./GUIDInfo");
const ObjectReference_1 = require("./ObjectReference");
const GUID_1 = require("./values/GUID");
const ObjectReference_1 = require("./values/ObjectReference");
const SoftObjectReference_1 = require("./values/SoftObjectReference");
class AbstractProperty {

@@ -500,6 +502,6 @@ constructor(type, index) {

case 'Color':
value = (0, util_types_1.ParseCol4BGRA)(reader);
value = util_types_1.col4.ParseBGRA(reader);
break;
case 'LinearColor':
value = (0, util_types_1.ParseCol4RGBA)(reader);
value = util_types_1.col4.ParseRGBA(reader);
break;

@@ -509,3 +511,3 @@ case 'Vector':

case 'Vector2D':
value = (size === 12) ? (0, util_types_1.ParseVec3f)(reader) : (0, util_types_1.ParseVec3)(reader);
value = (size === 12) ? util_types_1.vec3.ParseF(reader) : util_types_1.vec3.Parse(reader);
break;

@@ -515,12 +517,12 @@ case 'Quat':

case 'Vector4D':
value = (size === 16) ? (0, util_types_1.ParseVec4f)(reader) : (0, util_types_1.ParseVec4)(reader);
value = (size === 16) ? util_types_1.vec4.ParseF(reader) : util_types_1.vec4.Parse(reader);
break;
case 'Box':
value = (size === 25) ? {
min: (0, util_types_1.ParseVec3f)(reader),
max: (0, util_types_1.ParseVec3f)(reader),
min: util_types_1.vec3.ParseF(reader),
max: util_types_1.vec3.ParseF(reader),
isValid: reader.readByte() >= 1
} : {
min: (0, util_types_1.ParseVec3)(reader),
max: (0, util_types_1.ParseVec3)(reader),
min: util_types_1.vec3.Parse(reader),
max: util_types_1.vec3.Parse(reader),
isValid: reader.readByte() >= 1

@@ -543,2 +545,10 @@ };

break;
case 'ClientIdentityInfo':
const id = reader.readString();
const clientUnk1 = reader.readInt32();
const clientUnk2 = reader.readByte();
const clientHashSize = reader.readInt32();
const someClientHash = Array.from(reader.readBytes(clientHashSize));
value = { id, clientUnk1, clientUnk2, clientHashSize, someClientHash };
break;
case 'InventoryItem':

@@ -548,6 +558,16 @@ value = {

itemName: reader.readString(),
unk2: reader.readString(),
unk3: reader.readString(),
properties: [],
hasItemState: reader.readInt32(),
};
if (value.hasItemState >= 1) {
const stateUnk = reader.readInt32();
const statePathName = reader.readString();
const stateBinarySize = reader.readInt32();
const itemStateRaw = Array.from(reader.readBytes(stateBinarySize));
value.itemState = {
unk: stateUnk,
pathName: statePathName,
binarySize: stateBinarySize,
itemStateRaw: itemStateRaw
};
}
break;
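When hasItemState >= 1, the inventory item value now carries an itemState block whose payload is kept as raw bytes. A sketch of the resulting shape with placeholder data (the corresponding type name, InventoryItemStructPropertyValue, is taken from the union in the .d.ts hunk above):

// Shape produced by the 'InventoryItem' read path above (all field data is placeholder):
const item = {
    itemName: 'placeholder-item-path',
    unk2: '',
    unk3: '',
    properties: [],
    hasItemState: 1,
    itemState: {
        unk: 0,
        pathName: 'placeholder-state-path',
        binarySize: 4,
        itemStateRaw: [0, 0, 0, 0], // exactly binarySize raw bytes
    },
};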

@@ -600,7 +620,7 @@ case 'FluidBox':

value = value;
(0, util_types_1.SerializeCol4BGRA)(writer, value);
util_types_1.col4.SerializeBGRA(writer, value);
break;
case 'LinearColor':
value = value;
(0, util_types_1.SerializeCol4RGBA)(writer, value);
util_types_1.col4.SerializeRGBA(writer, value);
break;

@@ -611,3 +631,3 @@ case 'Vector':

value = value;
(0, util_types_1.SerializeVec3)(writer, value);
util_types_1.vec3.Serialize(writer, value);
break;

@@ -618,8 +638,8 @@ case 'Quat':

value = value;
(0, util_types_1.SerializeVec4)(writer, value);
util_types_1.vec4.Serialize(writer, value);
break;
case 'Box':
value = value;
(0, util_types_1.SerializeVec3)(writer, value.min);
(0, util_types_1.SerializeVec3)(writer, value.max);
util_types_1.vec3.Serialize(writer, value.min);
util_types_1.vec3.Serialize(writer, value.max);
writer.writeByte(value.isValid ? 1 : 0);

@@ -642,2 +662,10 @@ break;

break;
case 'ClientIdentityInfo':
value = value;
writer.writeString(value.id);
writer.writeInt32(value.clientUnk1);
writer.writeByte(value.clientUnk2);
writer.writeInt32(value.clientHashSize);
writer.writeBytesArray(value.someClientHash);
break;
case 'InventoryItem':

@@ -647,5 +675,11 @@ value = value;

writer.writeString(value.itemName);
writer.writeString(value.unk2);
writer.writeString(value.unk3);
writer.writeInt32(value.hasItemState);
if (value.hasItemState >= 1) {
writer.writeInt32(value.itemState.unk);
writer.writeString(value.itemState.pathName);
writer.writeInt32(value.itemState.binarySize);
writer.writeBytesArray(value.itemState.itemStateRaw);
}
break;
break;
case 'FluidBox':

@@ -692,3 +726,3 @@ value = value;

const subtype = reader.readString();
reader.skipBytes();
reader.skipBytes(1);
let property;

@@ -728,2 +762,5 @@ const elementCount = reader.readInt32();

break;
case "SoftObjectProperty":
property = new ArrayProperty(subtype, new Array(elementCount).fill(0).map(() => SoftObjectReference_1.SoftObjectReference.read(reader)), ueType, index);
break;
case "StructProperty":

@@ -810,2 +847,5 @@ const name = reader.readString();

break;
case "SoftObjectProperty":
property.values.forEach(v => SoftObjectReference_1.SoftObjectReference.write(writer, v));
break;
case "StructProperty":

@@ -858,2 +898,10 @@ writer.writeString(propertyName);

break;
case "StructProperty":
if (propertyName === 'mDestroyedPickups') {
property = new SetProperty(subtype, new Array(elementCount).fill(0).map(() => GUID_1.GUID.read(reader)), ueType, index);
}
else {
throw new Error(`Not Implemented SetProperty of StructProperty for property ${propertyName}.`);
}
break;
default:

@@ -888,3 +936,12 @@ throw new Error(`Not Implemented SetProperty of ${subtype}.`);

case "StructProperty":
property.values.forEach(v => (0, util_types_1.SerializeVec3f)(writer, v));
if (property.name === 'mRemovalLocations') {
console.warn('serializing mRemovalLocations, this is still under investigation.');
property.values.forEach(v => util_types_1.vec3.SerializeF(writer, v));
}
else if (property.name === 'mDestroyedPickups') {
property.values.forEach(v => GUID_1.GUID.write(writer, v));
}
else {
throw new Error(`Not Implemented serializing SetProperty of StructProperty for property ${property.name}.`);
}
break;

@@ -902,3 +959,2 @@ default:

this.valueType = valueType;
this.structKeyProxy = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
this.modeType = 0;

@@ -921,4 +977,3 @@ this.modeUnk2 = '';

if (propertyName === 'mSaveData' || propertyName === 'mUnresolvedSaveData') {
key = propertyName;
property.structKeyProxy = Array.from(reader.readBytes(12));
key = Array.from(reader.readBytes(12));
}

@@ -967,2 +1022,5 @@ else {

break;
case 'Int64Property':
value = Int64Property.ReadValue(reader);
break;
case 'ByteProperty':

@@ -991,3 +1049,3 @@ value = ByteProperty.ReadValue(reader);

if (property.name === 'mSaveData' || property.name === 'mUnresolvedSaveData') {
writer.writeBytesArray(property.structKeyProxy);
writer.writeBytesArray(entry[0]);
}

@@ -1036,2 +1094,5 @@ else {

break;
case 'Int64Property':
Int64Property.SerializeValue(writer, entry[1]);
break;
case 'ByteProperty':

@@ -1038,0 +1099,0 @@ ByteProperty.SerializeValue(writer, entry[1]);

@@ -30,3 +30,3 @@ "use strict";

}
exports.SaveComponent = SaveComponent;
SaveComponent.TypeID = 0;
exports.SaveComponent = SaveComponent;
import { BinaryReadable } from "../../byte/binary-readable.interface";
import { ByteWriter } from "../../byte/byte-writer.class";
import { Transform } from "../structs/util.types";
import { ObjectReference } from "./ObjectReference";
import { SaveObject, SaveObjectHeader } from "./SaveObject";
import { ObjectReference } from "./values/ObjectReference";
export declare const isSaveEntity: (obj: any) => obj is SaveEntity;

@@ -7,0 +7,0 @@ export interface SaveEntityHeader extends SaveObjectHeader {

@@ -5,4 +5,4 @@ "use strict";

const util_types_1 = require("../structs/util.types");
const ObjectReference_1 = require("./ObjectReference");
const SaveObject_1 = require("./SaveObject");
const ObjectReference_1 = require("./values/ObjectReference");
const isSaveEntity = (obj) => {

@@ -34,3 +34,3 @@ return obj.type === 'SaveEntity';

obj.needTransform = reader.readInt32() == 1;
obj.transform = (0, util_types_1.ParseTransform)(reader);
obj.transform = util_types_1.Transform.Parse(reader);
obj.wasPlacedInLevel = reader.readInt32() == 1;

@@ -53,3 +53,3 @@ }

writer.writeInt32(entity.needTransform ? 1 : 0);
(0, util_types_1.SerializeTransform)(writer, entity.transform);
util_types_1.Transform.Serialize(writer, entity.transform);
writer.writeInt32(entity.wasPlacedInLevel ? 1 : 0);

@@ -68,3 +68,3 @@ }

}
exports.SaveEntity = SaveEntity;
SaveEntity.TypeID = 1;
exports.SaveEntity = SaveEntity;

@@ -1,2 +0,3 @@

import { ByteReader, ByteWriter } from '../../../..';
import { ByteReader } from '../../../byte/byte-reader.class';
import { ByteWriter } from '../../../byte/byte-writer.class';
export type MD5Hash = {

@@ -3,0 +4,0 @@ isValid: boolean;

@@ -20,3 +20,3 @@ "use strict";

};
})(MD5Hash = exports.MD5Hash || (exports.MD5Hash = {}));
})(MD5Hash || (exports.MD5Hash = MD5Hash = {}));
;
import { BinaryReadable } from "../../byte/binary-readable.interface";
import { ByteWriter } from "../../byte/byte-writer.class";
import { ObjectReference } from "../objects/ObjectReference";
import { SaveComponent } from "../objects/SaveComponent";
import { SaveEntity } from "../objects/SaveEntity";
import { SaveObject } from "../objects/SaveObject";
import { ObjectReference } from "../objects/values/ObjectReference";
import { SaveReader } from "./save-reader";

@@ -8,0 +8,0 @@ export declare class Level {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Level = void 0;
const ObjectReference_1 = require("../objects/ObjectReference");
const SaveComponent_1 = require("../objects/SaveComponent");
const SaveEntity_1 = require("../objects/SaveEntity");
const ObjectReference_1 = require("../objects/values/ObjectReference");
class Level {

@@ -8,0 +8,0 @@ constructor(name) {

import { ChunkCompressionInfo } from "../../file.types";
import { ObjectReference } from "../objects/ObjectReference";
import { ObjectReference } from "../objects/values/ObjectReference";
import { Level } from "./level.class";

@@ -4,0 +4,0 @@ import { ByteArray4, Grids } from "./save-reader";

import { ByteReader } from "../../byte/byte-reader.class";
import { ChunkCompressionInfo } from "../../file.types";
import { ObjectReference } from "../objects/ObjectReference";
import { ObjectReference } from "../objects/values/ObjectReference";
import { Level } from "./level.class";

@@ -5,0 +5,0 @@ import { RoughSaveVersion, SatisfactorySaveHeader } from "./save.types";

@@ -44,10 +44,10 @@ "use strict";

for (let i = 0; i < childrenCount; i++) {
const cellHash = this.readUint32();
const cellBinSizeMaybe = this.readUint32();
const levelInstanceName = this.readString();
grids[parentName].children[levelInstanceName] = cellHash;
grids[parentName].children[levelInstanceName] = cellBinSizeMaybe;
}
};
readGrid(1379);
readGrid(1289);
readGrid(0);
readGrid(972);
readGrid(758);
readGrid(0);

@@ -193,7 +193,11 @@ readGrid(0);

}
exports.SaveReader = SaveReader;
SaveReader.EPOCH_TICKS = 621355968000000000n;
SaveReader.getRoughSaveVersion = (saveVersion, headerTypeVersion) => {
if (headerTypeVersion >= 13) {
return 'U8+';
if (saveVersion >= 46) {
return 'U1.0+';
}
else if (headerTypeVersion >= 13) {
return 'U8';
}
else if (saveVersion >= 29) {

@@ -206,2 +210,1 @@ return 'U6/U7';

};
exports.SaveReader = SaveReader;
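getRoughSaveVersion now distinguishes Satisfactory 1.0 saves before the Update 8 check. A standalone restatement of the mapping as a sketch; the final '<U6' fallback is inferred from the RoughSaveVersion union rather than shown in this hunk:

type RoughSaveVersion = '<U6' | 'U6/U7' | 'U8' | 'U1.0+';

const getRoughSaveVersion = (saveVersion: number, headerTypeVersion: number): RoughSaveVersion => {
    if (saveVersion >= 46) {
        return 'U1.0+';          // Satisfactory 1.0 saves
    } else if (headerTypeVersion >= 13) {
        return 'U8';             // Update 8
    } else if (saveVersion >= 29) {
        return 'U6/U7';          // Updates 6 and 7
    }
    return '<U6';                // assumed fallback for older saves
};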

@@ -67,3 +67,2 @@ "use strict";

}
level_class_1.Level.SerializeCollectablesList(writer, save.trailingCollectedObjects ?? []);
}

@@ -126,2 +125,3 @@ static GenerateCompressedChunksFromData(bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, alignment = alignment_enum_1.Alignment.LITTLE_ENDIAN) {

}
exports.SaveWriter = SaveWriter;
SaveWriter.WriteSaveBodyHash = (writer, hash) => {

@@ -149,2 +149,1 @@ writer.writeInt32(0);

};
exports.SaveWriter = SaveWriter;

@@ -31,3 +31,3 @@ import { MD5Hash } from '../objects/ue/MD5Hash';

}
export type RoughSaveVersion = '<U6' | 'U6/U7' | 'U8+';
export type RoughSaveVersion = '<U6' | 'U6/U7' | 'U8' | 'U1.0+';
export type ResourceNodeEntry = {

@@ -34,0 +34,0 @@ purity: string;

@@ -9,27 +9,33 @@ import { BinaryReadable } from "../../byte/binary-readable.interface";

};
export declare const SerializeCol4RGBA: (writer: ByteWriter, value: col4) => void;
export declare const ParseCol4RGBA: (reader: BinaryReadable) => col4;
export declare const SerializeCol4BGRA: (writer: ByteWriter, value: col4) => void;
export declare const ParseCol4BGRA: (reader: BinaryReadable) => col4;
export declare namespace col4 {
const SerializeRGBA: (writer: ByteWriter, value: col4) => void;
const ParseRGBA: (reader: BinaryReadable) => col4;
const SerializeBGRA: (writer: ByteWriter, value: col4) => void;
const ParseBGRA: (reader: BinaryReadable) => col4;
}
export type vec4 = vec3 & {
w: number;
};
export declare const ParseVec4: (reader: BinaryReadable) => vec4;
export declare const SerializeVec4: (writer: ByteWriter, vec: vec4) => void;
export declare const ParseVec4f: (reader: BinaryReadable) => vec4;
export declare const SerializeVec4f: (writer: ByteWriter, vec: vec4) => void;
export declare const sub: (other: vec3, vec: vec3) => vec3;
export declare const add: (vec: vec3, other: vec3) => vec3;
export declare const length: (vec: vec3) => number;
export declare const mult: (vec: vec3, scale: number) => vec3;
export declare const norm: (vec: vec3) => vec3;
export declare namespace vec4 {
const Parse: (reader: BinaryReadable) => vec4;
const Serialize: (writer: ByteWriter, vec: vec4) => void;
const ParseF: (reader: BinaryReadable) => vec4;
const SerializeF: (writer: ByteWriter, vec: vec4) => void;
}
export type vec3 = vec2 & {
z: number;
};
export declare const ParseVec3: (reader: BinaryReadable) => vec3;
export declare const SerializeVec3: (writer: ByteWriter, vec: vec3) => void;
export declare const ParseVec3Int: (reader: BinaryReadable) => vec3;
export declare const SerializeVec3Int: (writer: ByteWriter, vec: vec3) => void;
export declare const ParseVec3f: (reader: BinaryReadable) => vec3;
export declare const SerializeVec3f: (writer: ByteWriter, vec: vec3) => void;
export declare namespace vec3 {
const Parse: (reader: BinaryReadable) => vec3;
const Serialize: (writer: ByteWriter, vec: vec3) => void;
const ParseInt: (reader: BinaryReadable) => vec3;
const SerializeInt: (writer: ByteWriter, vec: vec3) => void;
const ParseF: (reader: BinaryReadable) => vec3;
const SerializeF: (writer: ByteWriter, vec: vec3) => void;
const sub: (other: vec3, vec: vec3) => vec3;
const add: (vec: vec3, other: vec3) => vec3;
const length: (vec: vec3) => number;
const mult: (vec: vec3, scale: number) => vec3;
const norm: (vec: vec3) => vec3;
}
export type vec2 = {

@@ -39,6 +45,8 @@ x: number;

};
export declare const ParseVec2: (reader: BinaryReadable) => vec2;
export declare const SerializeVec2: (writer: ByteWriter, vec: vec2) => void;
export declare const ParseVec2f: (reader: BinaryReadable) => vec2;
export declare const SerializeVec2f: (writer: ByteWriter, vec: vec2) => void;
export declare namespace vec2 {
const Parse: (reader: BinaryReadable) => vec2;
const Serialize: (writer: ByteWriter, vec: vec2) => void;
const ParseF: (reader: BinaryReadable) => vec2;
const SerializeF: (writer: ByteWriter, vec: vec2) => void;
}
export type Transform = {

@@ -49,3 +57,5 @@ rotation: vec4;

};
export declare const ParseTransform: (reader: BinaryReadable) => Transform;
export declare const SerializeTransform: (writer: ByteWriter, transform: Transform) => void;
export declare namespace Transform {
const Parse: (reader: BinaryReadable) => Transform;
const Serialize: (writer: ByteWriter, transform: Transform) => void;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SerializeTransform = exports.ParseTransform = exports.SerializeVec2f = exports.ParseVec2f = exports.SerializeVec2 = exports.ParseVec2 = exports.SerializeVec3f = exports.ParseVec3f = exports.SerializeVec3Int = exports.ParseVec3Int = exports.SerializeVec3 = exports.ParseVec3 = exports.norm = exports.mult = exports.length = exports.add = exports.sub = exports.SerializeVec4f = exports.ParseVec4f = exports.SerializeVec4 = exports.ParseVec4 = exports.ParseCol4BGRA = exports.SerializeCol4BGRA = exports.ParseCol4RGBA = exports.SerializeCol4RGBA = void 0;
const SerializeCol4RGBA = (writer, value) => {
writer.writeFloat(value.r);
writer.writeFloat(value.g);
writer.writeFloat(value.b);
writer.writeFloat(value.a);
};
exports.SerializeCol4RGBA = SerializeCol4RGBA;
const ParseCol4RGBA = (reader) => {
return {
r: reader.readFloat32(),
g: reader.readFloat32(),
b: reader.readFloat32(),
a: reader.readFloat32(),
exports.Transform = exports.vec2 = exports.vec3 = exports.vec4 = exports.col4 = void 0;
var col4;
(function (col4) {
col4.SerializeRGBA = (writer, value) => {
writer.writeFloat(value.r);
writer.writeFloat(value.g);
writer.writeFloat(value.b);
writer.writeFloat(value.a);
};
};
exports.ParseCol4RGBA = ParseCol4RGBA;
const SerializeCol4BGRA = (writer, value) => {
writer.writeByte(value.b);
writer.writeByte(value.g);
writer.writeByte(value.r);
writer.writeByte(value.a);
};
exports.SerializeCol4BGRA = SerializeCol4BGRA;
const ParseCol4BGRA = (reader) => {
return {
b: reader.readByte(),
g: reader.readByte(),
r: reader.readByte(),
a: reader.readByte(),
col4.ParseRGBA = (reader) => {
return {
r: reader.readFloat32(),
g: reader.readFloat32(),
b: reader.readFloat32(),
a: reader.readFloat32(),
};
};
};
exports.ParseCol4BGRA = ParseCol4BGRA;
const ParseVec4 = (reader) => {
return {
...((0, exports.ParseVec3)(reader)),
w: reader.readDouble()
col4.SerializeBGRA = (writer, value) => {
writer.writeByte(value.b);
writer.writeByte(value.g);
writer.writeByte(value.r);
writer.writeByte(value.a);
};
};
exports.ParseVec4 = ParseVec4;
const SerializeVec4 = (writer, vec) => {
(0, exports.SerializeVec3)(writer, vec);
writer.writeDouble(vec.w);
};
exports.SerializeVec4 = SerializeVec4;
const ParseVec4f = (reader) => {
return {
...((0, exports.ParseVec3f)(reader)),
w: reader.readFloat32()
col4.ParseBGRA = (reader) => {
return {
b: reader.readByte(),
g: reader.readByte(),
r: reader.readByte(),
a: reader.readByte(),
};
};
};
exports.ParseVec4f = ParseVec4f;
const SerializeVec4f = (writer, vec) => {
(0, exports.SerializeVec3f)(writer, vec);
writer.writeFloat(vec.w);
};
exports.SerializeVec4f = SerializeVec4f;
const sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z });
exports.sub = sub;
const add = (vec, other) => ({ x: vec.x + other.x, y: vec.y + other.y, z: vec.z + other.z });
exports.add = add;
const length = (vec) => Math.sqrt(vec.x ** 2 + vec.y ** 2 + vec.z ** 2);
exports.length = length;
const mult = (vec, scale) => ({ x: vec.x * scale, y: vec.y * scale, z: vec.z * scale });
exports.mult = mult;
const norm = (vec) => (0, exports.mult)(vec, 1. / (0, exports.length)(vec));
exports.norm = norm;
const ParseVec3 = (reader) => {
return {
...((0, exports.ParseVec2)(reader)),
z: reader.readDouble()
})(col4 || (exports.col4 = col4 = {}));
var vec4;
(function (vec4) {
vec4.Parse = (reader) => {
return {
...(vec3.Parse(reader)),
w: reader.readDouble()
};
};
};
exports.ParseVec3 = ParseVec3;
const SerializeVec3 = (writer, vec) => {
(0, exports.SerializeVec2)(writer, vec);
writer.writeDouble(vec.z);
};
exports.SerializeVec3 = SerializeVec3;
const ParseVec3Int = (reader) => {
return {
x: reader.readInt32(),
y: reader.readInt32(),
z: reader.readInt32()
vec4.Serialize = (writer, vec) => {
vec3.Serialize(writer, vec);
writer.writeDouble(vec.w);
};
};
exports.ParseVec3Int = ParseVec3Int;
const SerializeVec3Int = (writer, vec) => {
writer.writeInt32(vec.x);
writer.writeInt32(vec.y);
writer.writeInt32(vec.z);
};
exports.SerializeVec3Int = SerializeVec3Int;
const ParseVec3f = (reader) => {
return {
...((0, exports.ParseVec2f)(reader)),
z: reader.readFloat32()
vec4.ParseF = (reader) => {
return {
...(vec3.ParseF(reader)),
w: reader.readFloat32()
};
};
};
exports.ParseVec3f = ParseVec3f;
const SerializeVec3f = (writer, vec) => {
(0, exports.SerializeVec2f)(writer, vec);
writer.writeFloat(vec.z);
};
exports.SerializeVec3f = SerializeVec3f;
const ParseVec2 = (reader) => {
return {
x: reader.readDouble(),
y: reader.readDouble(),
vec4.SerializeF = (writer, vec) => {
vec3.SerializeF(writer, vec);
writer.writeFloat(vec.w);
};
};
exports.ParseVec2 = ParseVec2;
const SerializeVec2 = (writer, vec) => {
writer.writeDouble(vec.x);
writer.writeDouble(vec.y);
};
exports.SerializeVec2 = SerializeVec2;
const ParseVec2f = (reader) => {
return {
x: reader.readFloat32(),
y: reader.readFloat32(),
})(vec4 || (exports.vec4 = vec4 = {}));
var vec3;
(function (vec3) {
vec3.Parse = (reader) => {
return {
...(vec2.Parse(reader)),
z: reader.readDouble()
};
};
};
exports.ParseVec2f = ParseVec2f;
const SerializeVec2f = (writer, vec) => {
writer.writeFloat(vec.x);
writer.writeFloat(vec.y);
};
exports.SerializeVec2f = SerializeVec2f;
const ParseTransform = (reader) => {
return {
rotation: (0, exports.ParseVec4f)(reader),
translation: (0, exports.ParseVec3f)(reader),
scale3d: (0, exports.ParseVec3f)(reader),
vec3.Serialize = (writer, vec) => {
vec2.Serialize(writer, vec);
writer.writeDouble(vec.z);
};
};
exports.ParseTransform = ParseTransform;
const SerializeTransform = (writer, transform) => {
(0, exports.SerializeVec4f)(writer, transform.rotation);
(0, exports.SerializeVec3f)(writer, transform.translation);
(0, exports.SerializeVec3f)(writer, transform.scale3d);
};
exports.SerializeTransform = SerializeTransform;
vec3.ParseInt = (reader) => {
return {
x: reader.readInt32(),
y: reader.readInt32(),
z: reader.readInt32()
};
};
vec3.SerializeInt = (writer, vec) => {
writer.writeInt32(vec.x);
writer.writeInt32(vec.y);
writer.writeInt32(vec.z);
};
vec3.ParseF = (reader) => {
return {
...(vec2.ParseF(reader)),
z: reader.readFloat32()
};
};
vec3.SerializeF = (writer, vec) => {
vec2.SerializeF(writer, vec);
writer.writeFloat(vec.z);
};
vec3.sub = (other, vec) => ({ x: other.x - vec.x, y: other.y - vec.y, z: other.z - vec.z });
vec3.add = (vec, other) => ({ x: vec.x + other.x, y: vec.y + other.y, z: vec.z + other.z });
vec3.length = (vec) => Math.sqrt(vec.x ** 2 + vec.y ** 2 + vec.z ** 2);
vec3.mult = (vec, scale) => ({ x: vec.x * scale, y: vec.y * scale, z: vec.z * scale });
vec3.norm = (vec) => vec3.mult(vec, 1. / vec3.length(vec));
})(vec3 || (exports.vec3 = vec3 = {}));
var vec2;
(function (vec2) {
vec2.Parse = (reader) => {
return {
x: reader.readDouble(),
y: reader.readDouble(),
};
};
vec2.Serialize = (writer, vec) => {
writer.writeDouble(vec.x);
writer.writeDouble(vec.y);
};
vec2.ParseF = (reader) => {
return {
x: reader.readFloat32(),
y: reader.readFloat32(),
};
};
vec2.SerializeF = (writer, vec) => {
writer.writeFloat(vec.x);
writer.writeFloat(vec.y);
};
})(vec2 || (exports.vec2 = vec2 = {}));
var Transform;
(function (Transform) {
Transform.Parse = (reader) => {
return {
rotation: vec4.ParseF(reader),
translation: vec3.ParseF(reader),
scale3d: vec3.ParseF(reader),
};
};
Transform.Serialize = (writer, transform) => {
vec4.SerializeF(writer, transform.rotation);
vec3.SerializeF(writer, transform.translation);
vec3.SerializeF(writer, transform.scale3d);
};
})(Transform || (exports.Transform = Transform = {}));
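The util.types refactor groups the former free functions into col4, vec4, vec3, vec2 and Transform namespaces with identical behavior; the vector math helpers (sub, add, length, mult, norm) move onto vec3 as well. A minimal migration sketch using the pure vector math, assuming the vec3 namespace is re-exported from the package root (otherwise it lives in build/parser/satisfactory/structs/util.types):

import { vec3 } from '@etothepii/satisfactory-file-parser'; // assumed root re-export

// 0.3.7: add(a, b), length(a), norm(a) were standalone exports of util.types.
// 0.4.3: the same helpers hang off the vec3 namespace; the reader/writer helpers
// follow the same pattern (vec3.ParseF(reader), col4.SerializeRGBA(writer, c), ...).
const a: vec3 = { x: 1, y: 2, z: 2 };
const b: vec3 = { x: 0, y: 0, z: 1 };

const sum = vec3.add(a, b);    // { x: 1, y: 2, z: 3 }
const len = vec3.length(a);    // 3
const unit = vec3.norm(a);     // a scaled to unit length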

@@ -71,3 +71,3 @@ "use strict";

const objectCountToRead = Math.min(countObjectHeaders - totalReadObjectsInLevel, batchingSizeOfObjects);
const objects = ReadableStreamParser.ReadNObjectHeaders(reader, objectCountToRead);
const objects = _a.ReadNObjectHeaders(reader, objectCountToRead);
afterHeadersOfBatch = reader.getBufferPosition();

@@ -85,3 +85,3 @@ if (totalReadObjectsInLevel === 0) {

}
ReadableStreamParser.ReadNObjects(reader, objectCountToRead, objects, buildVersion);
_a.ReadNObjects(reader, objectCountToRead, objects, buildVersion);
afterObjectsOfBatch = reader.getBufferPosition();

@@ -109,2 +109,3 @@ totalReadObjectsInLevel += objectCountToRead;

}
exports.ReadableStreamParser = ReadableStreamParser;
_a = ReadableStreamParser;

@@ -143,3 +144,3 @@ ReadableStreamParser.CreateReadableStreamForParsingSave = (onStart, onCancel, onPullRequest, highWaterMark = DEFAULT_BYTE_HIGHWATERMARK / 4) => {

};
const { stream, controller, finish } = ReadableStreamParser.CreateReadableStreamForParsingSave((controller) => {
const { stream, controller, finish } = _a.CreateReadableStreamForParsingSave((controller) => {
}, (reason) => { }, (desiredSize) => {

@@ -162,4 +163,4 @@ waitForConsumerLock.unlock();

const grids = reader.readGrids();
await ReadableStreamParser.WriteHeaderAndGrids(write, reader.compressionInfo, header, grids, gridHash);
await ReadableStreamParser.WriteLevels(write, reader, save.header.mapName, save.header.buildVersion);
await _a.WriteHeaderAndGrids(write, reader.compressionInfo, header, grids, gridHash);
await _a.WriteLevels(write, reader, save.header.mapName, save.header.buildVersion);
await write(`]}`);

@@ -209,2 +210,1 @@ finish();

};
exports.ReadableStreamParser = ReadableStreamParser;

@@ -1,6 +0,5 @@

/// <reference types="node" />
import { WritableStream } from "stream/web";
import { SatisfactorySave } from "../../..";
import { SatisfactorySave } from '../../satisfactory/save/satisfactory-save';
export declare class SaveStreamJsonStringifier {
static StreamStringifySave(save: SatisfactorySave, output: WritableStream<string>): Promise<void>;
}

@@ -1,6 +0,7 @@

/// <reference types="node" />
import { WritableStreamDefaultWriter } from "stream/web";
import { ChunkCompressionInfo, ObjectReference, SatisfactorySaveHeader } from "../../..";
import { ChunkCompressionInfo } from '../../file.types';
import { SaveObject } from "../../satisfactory/objects/SaveObject";
import { ObjectReference } from '../../satisfactory/objects/values/ObjectReference';
import { ByteArray4, Grids } from '../../satisfactory/save/save-reader';
import { SatisfactorySaveHeader } from '../../satisfactory/save/save.types';
export declare class SaveStreamWriter {

@@ -7,0 +8,0 @@ private writer;

{
"name": "@etothepii/satisfactory-file-parser",
"author": "etothepii",
"version": "0.3.7",
"version": "0.4.3",
"description": "A file parser for satisfactory files. Includes save files and blueprint files.",

@@ -6,0 +6,0 @@ "types": "./build/index.d.ts",

@@ -20,3 +20,4 @@ # Satisfactory File Parser

| U6 + U7 | ✅ 0.0.1 - 0.0.34 |
| U8 | ✅ >= 0.1.20 |
| U8 | ✅ 0.1.20 - 0.3.7 |
| U1.0 | ✅ >= 0.4.2 (Work in Progress)|

@@ -23,0 +24,0 @@

(The diffs of two further files are too large to display.)
