@babylonjs/loaders - npm Package Compare versions

Comparing version 7.30.1 to 7.31.0

package.json
{
"name": "@babylonjs/loaders",
- "version": "7.30.1",
+ "version": "7.31.0",
"main": "index.js",

@@ -21,6 +21,6 @@ "module": "index.js",

"devDependencies": {
- "@babylonjs/core": "^7.30.1",
+ "@babylonjs/core": "^7.31.0",
"@dev/build-tools": "^1.0.0",
"@lts/loaders": "^1.0.0",
- "babylonjs-gltf2interface": "^7.30.1"
+ "babylonjs-gltf2interface": "^7.31.0"
},

@@ -27,0 +27,0 @@ "peerDependencies": {
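
The visible package.json changes are version bumps only: the package itself and its @babylonjs/core and babylonjs-gltf2interface dev dependencies move from 7.30.1 to 7.31.0. As a rough consumer-side sketch (an assumption, not part of this diff), apps that depend on the loaders usually keep @babylonjs/core on the same release so the peer dependency stays satisfied:

"dependencies": {
  "@babylonjs/core": "^7.31.0",
  "@babylonjs/loaders": "^7.31.0"
}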

@@ -7,7 +7,6 @@ import { registerSceneLoaderPlugin } from "@babylonjs/core/Loading/sceneLoader.js";

import { Logger } from "@babylonjs/core/Misc/logger.js";
import { Quaternion, TmpVectors, Vector3 } from "@babylonjs/core/Maths/math.vector.js";
import { Vector3 } from "@babylonjs/core/Maths/math.vector.js";
import { PointsCloudSystem } from "@babylonjs/core/Particles/pointsCloudSystem.js";
import { Color4 } from "@babylonjs/core/Maths/math.color.js";
import { VertexData } from "@babylonjs/core/Meshes/mesh.vertexData.js";
import { Scalar } from "@babylonjs/core/Maths/math.scalar.js";
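// In 7.31.0 the Quaternion/TmpVectors imports above are dropped: the inline compressed-.ply decoding further down is replaced by a call to GaussianSplattingMesh.ConvertPLYToSplat from @babylonjs/core.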
/**

@@ -22,8 +21,2 @@ * Indicator of the parsed ply buffer. A standard ready to use splat or an array of positions for a point cloud

})(Mode || (Mode = {}));
/** @internal */
var ElementMode;
(function (ElementMode) {
ElementMode[ElementMode["Vertex"] = 0] = "Vertex";
ElementMode[ElementMode["Chunk"] = 1] = "Chunk";
})(ElementMode || (ElementMode = {}));
/**

@@ -228,8 +221,8 @@ * @experimental

const faceElement = /element face (\d+)\n/.exec(header);
const chunkElement = /element chunk (\d+)\n/.exec(header);
let faceCount = 0;
let chunkCount = 0;
if (faceElement) {
faceCount = parseInt(faceElement[1]);
}
const chunkElement = /element chunk (\d+)\n/.exec(header);
let chunkCount = 0;
if (chunkElement) {

@@ -250,2 +243,7 @@ chunkCount = parseInt(chunkElement[1]);

};
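// ElementMode distinguishes the vertex element from the compressed chunk element declared in the .ply header.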
let ElementMode;
(function (ElementMode) {
ElementMode[ElementMode["Vertex"] = 0] = "Vertex";
ElementMode[ElementMode["Chunk"] = 1] = "Chunk";
})(ElementMode || (ElementMode = {}));
let chunkMode = 1 /* ElementMode.Chunk */;

@@ -282,233 +280,5 @@ const vertexProperties = [];

const rowChunkLength = rowChunkOffset;
const rowOutputLength = 3 * 4 + 3 * 4 + 4 + 4; // Vector3 position, Vector3 scale, 1 u8 quaternion, 1 color with alpha
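// SH_C0 is the zeroth-order spherical-harmonics coefficient (1 / (2 * sqrt(pi))); it maps the f_dc_* terms to a base color below.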
const SH_C0 = 0.28209479177387814;
let offset = 0;
const buffer = GaussianSplattingMesh.ConvertPLYToSplat(data);
const dataView = new DataView(data, headerEndIndex + headerEnd.length);
const buffer = new ArrayBuffer(rowOutputLength * vertexCount);
const q = new Quaternion();
const temp3 = TmpVectors.Vector3[0];
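// Helpers for the compressed .ply layout: each packed field stores several quantized components in a single 32-bit integer.
// unpackUnorm extracts the low `bits` bits of a value and normalizes them to the [0, 1] range.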
const unpackUnorm = (value, bits) => {
const t = (1 << bits) - 1;
return (value & t) / t;
};
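// Unpacks an 11/10/11-bit packed vector: x from the top 11 bits, y from the middle 10, z from the low 11.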
const unpack111011 = (value, result) => {
result.x = unpackUnorm(value >>> 21, 11);
result.y = unpackUnorm(value >>> 11, 10);
result.z = unpackUnorm(value, 11);
};
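// Unpacks four 8-bit channels into 0-255 RGBA components.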
const unpack8888 = (value, result) => {
result[0] = unpackUnorm(value >>> 24, 8) * 255;
result[1] = unpackUnorm(value >>> 16, 8) * 255;
result[2] = unpackUnorm(value >>> 8, 8) * 255;
result[3] = unpackUnorm(value, 8) * 255;
};
// unpack quaternion with 2,10,10,10 format (largest element, 3x10bit element)
const unpackRot = (value, result) => {
const norm = 1.0 / (Math.sqrt(2) * 0.5);
const a = (unpackUnorm(value >>> 20, 10) - 0.5) * norm;
const b = (unpackUnorm(value >>> 10, 10) - 0.5) * norm;
const c = (unpackUnorm(value, 10) - 0.5) * norm;
const m = Math.sqrt(1.0 - (a * a + b * b + c * c));
switch (value >>> 30) {
case 0:
result.set(m, a, b, c);
break;
case 1:
result.set(a, m, b, c);
break;
case 2:
result.set(a, b, m, c);
break;
case 3:
result.set(a, b, c, m);
break;
}
};
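// First pass: read each chunk's min/max position and scale bounds, which are needed to dequantize the packed vertex data.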
const compressedChunks = new Array(chunkCount);
for (let i = 0; i < chunkCount; i++) {
const currentChunk = { min: new Vector3(), max: new Vector3(), minScale: new Vector3(), maxScale: new Vector3() };
compressedChunks[i] = currentChunk;
for (let propertyIndex = 0; propertyIndex < chunkProperties.length; propertyIndex++) {
const property = chunkProperties[propertyIndex];
let value;
switch (property.type) {
case "float":
value = dataView.getFloat32(property.offset + offset, true);
break;
default:
//throw new Error(`Unsupported property type: ${property.type}`);
continue;
}
switch (property.name) {
case "min_x":
currentChunk.min.x = value;
break;
case "min_y":
currentChunk.min.y = value;
break;
case "min_z":
currentChunk.min.z = value;
break;
case "max_x":
currentChunk.max.x = value;
break;
case "max_y":
currentChunk.max.y = value;
break;
case "max_z":
currentChunk.max.z = value;
break;
case "min_scale_x":
currentChunk.minScale.x = value;
break;
case "min_scale_y":
currentChunk.minScale.y = value;
break;
case "min_scale_z":
currentChunk.minScale.z = value;
break;
case "max_scale_x":
currentChunk.maxScale.x = value;
break;
case "max_scale_y":
currentChunk.maxScale.y = value;
break;
case "max_scale_z":
currentChunk.maxScale.z = value;
break;
}
}
offset += rowChunkLength;
}
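// Second pass: decode every vertex into the splat output layout (position, scale, RGBA, quantized rotation). Splats are grouped 256 per chunk, hence chunkIndex = i >> 8.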
for (let i = 0; i < vertexCount; i++) {
const position = new Float32Array(buffer, i * rowOutputLength, 3);
const scale = new Float32Array(buffer, i * rowOutputLength + 12, 3);
const rgba = new Uint8ClampedArray(buffer, i * rowOutputLength + 24, 4);
const rot = new Uint8ClampedArray(buffer, i * rowOutputLength + 28, 4);
const chunkIndex = i >> 8;
let r0 = 255;
let r1 = 0;
let r2 = 0;
let r3 = 0;
for (let propertyIndex = 0; propertyIndex < vertexProperties.length; propertyIndex++) {
const property = vertexProperties[propertyIndex];
let value;
switch (property.type) {
case "float":
value = dataView.getFloat32(offset + property.offset, true);
break;
case "int":
value = dataView.getInt32(offset + property.offset, true);
break;
case "uint":
value = dataView.getUint32(offset + property.offset, true);
break;
case "double":
value = dataView.getFloat64(offset + property.offset, true);
break;
case "uchar":
value = dataView.getUint8(offset + property.offset);
break;
default:
//throw new Error(`Unsupported property type: ${property.type}`);
continue;
}
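// Compressed files provide the packed_* properties; standard .ply splats provide plain x/y/z, scale_*, color and rot_* properties instead.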
switch (property.name) {
case "packed_position":
{
const compressedChunk = compressedChunks[chunkIndex];
unpack111011(value, temp3);
position[0] = Scalar.Lerp(compressedChunk.min.x, compressedChunk.max.x, temp3.x);
position[1] = -Scalar.Lerp(compressedChunk.min.y, compressedChunk.max.y, temp3.y);
position[2] = Scalar.Lerp(compressedChunk.min.z, compressedChunk.max.z, temp3.z);
}
break;
case "packed_rotation":
{
unpackRot(value, q);
r0 = q.w;
r1 = q.z;
r2 = q.y;
r3 = q.x;
}
break;
case "packed_scale":
{
const compressedChunk = compressedChunks[chunkIndex];
unpack111011(value, temp3);
scale[0] = Math.exp(Scalar.Lerp(compressedChunk.minScale.x, compressedChunk.maxScale.x, temp3.x));
scale[1] = Math.exp(Scalar.Lerp(compressedChunk.minScale.y, compressedChunk.maxScale.y, temp3.y));
scale[2] = Math.exp(Scalar.Lerp(compressedChunk.minScale.z, compressedChunk.maxScale.z, temp3.z));
}
break;
case "packed_color":
unpack8888(value, rgba);
break;
case "x":
position[0] = value;
break;
case "y":
position[1] = value;
break;
case "z":
position[2] = value;
break;
case "scale_0":
scale[0] = Math.exp(value);
break;
case "scale_1":
scale[1] = Math.exp(value);
break;
case "scale_2":
scale[2] = Math.exp(value);
break;
case "diffuse_red":
case "red":
rgba[0] = value;
break;
case "diffuse_green":
case "green":
rgba[1] = value;
break;
case "diffuse_blue":
case "blue":
rgba[2] = value;
break;
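// f_dc_* are zeroth-order spherical-harmonics color terms; opacity is stored as a logit and mapped through a sigmoid below.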
case "f_dc_0":
rgba[0] = (0.5 + SH_C0 * value) * 255;
break;
case "f_dc_1":
rgba[1] = (0.5 + SH_C0 * value) * 255;
break;
case "f_dc_2":
rgba[2] = (0.5 + SH_C0 * value) * 255;
break;
case "f_dc_3":
rgba[3] = (0.5 + SH_C0 * value) * 255;
break;
case "opacity":
rgba[3] = (1 / (1 + Math.exp(-value))) * 255;
break;
case "rot_0":
r0 = value;
break;
case "rot_1":
r1 = value;
break;
case "rot_2":
r2 = value;
break;
case "rot_3":
r3 = value;
break;
}
}
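// Rebuild the rotation quaternion from the rot_* (or packed) values, normalize it, and quantize each component to an unsigned byte centered on 128.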
q.set(r1, r2, r3, r0);
q.normalize();
rot[0] = q.w * 128 + 128;
rot[1] = q.x * 128 + 128;
rot[2] = q.y * 128 + 128;
rot[3] = q.z * 128 + 128;
offset += rowVertexLength;
}
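// Skip directly past the chunk and vertex payload to where the face data starts.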
let offset = rowChunkLength * chunkCount + rowVertexLength * vertexCount;
// faces

@@ -515,0 +285,0 @@ const faces = [];

Sorry, the diff of this file is not supported yet