New Research: Supply Chain Attack on Axios Pulls Malicious Dependency from npm. Details →
Socket
Book a DemoSign in
Socket

three

Package Overview
Dependencies
Maintainers
2
Versions
309
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

three - npm Package Compare versions

Comparing version
0.182.0
to
0.183.0
+59
examples/jsm/environments/ColorEnvironment.js
import {
BackSide,
Mesh,
MeshBasicMaterial,
SphereGeometry,
Scene
} from 'three';
/**
* This class represents a scene with a uniform color that can be used as
* input for {@link PMREMGenerator#fromScene}. The resulting PMREM represents
* uniform ambient lighting and can be used for Image Based Lighting by
* assigning it to {@link Scene#environment}.
*
* ```js
* const environment = new ColorEnvironment( 0x00ff00 );
* const pmremGenerator = new THREE.PMREMGenerator( renderer );
*
* const envMap = pmremGenerator.fromScene( environment ).texture;
* scene.environment = envMap;
* ```
*
* @augments Scene
* @three_import import { ColorEnvironment } from 'three/addons/environments/ColorEnvironment.js';
*/
class ColorEnvironment extends Scene {

	/**
	 * Constructs a new color environment.
	 *
	 * @param {number|Color} color - The color of the environment.
	 */
	constructor( color = 0xffffff ) {

		super();

		this.name = 'ColorEnvironment';

		// An inward-facing sphere surrounding the origin provides the
		// uniform color in every direction for PMREM sampling.
		const sphere = new SphereGeometry( 1, 16, 16 );
		const surface = new MeshBasicMaterial( { color: color, side: BackSide } );

		this.add( new Mesh( sphere, surface ) );

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the environment is no longer required.
	 */
	dispose() {

		const mesh = this.children[ 0 ];

		mesh.geometry.dispose();
		mesh.material.dispose();

	}

}
export { ColorEnvironment };
import {
BufferGeometry,
Float32BufferAttribute,
Line,
LineBasicMaterial,
Object3D,
Points,
PointsMaterial
} from 'three';
/**
* Visualizes the motion path of an animated object based on position keyframes
* from an AnimationClip.
*
* ```js
* const clip = model.animations[ 0 ];
* const helper = new AnimationPathHelper( model, clip, object );
* scene.add( helper );
* ```
*
* @augments Object3D
* @three_import import { AnimationPathHelper } from 'three/addons/helpers/AnimationPathHelper.js';
*/
class AnimationPathHelper extends Object3D {

	/**
	 * Constructs a new animation path helper.
	 *
	 * @param {Object3D} root - The root object containing the animation clips.
	 * @param {AnimationClip} clip - The animation clip containing position keyframes.
	 * @param {Object3D} object - The specific object to show the path for.
	 * @param {Object} [options={}] - Configuration options.
	 * @param {number|Color|string} [options.color=0x00ff00] - The path line color.
	 * @param {number|Color|string} [options.markerColor=0xff0000] - The keyframe marker color.
	 * @param {number} [options.divisions=100] - Number of samples for smooth path interpolation.
	 * @param {boolean} [options.showMarkers=true] - Whether to show markers at keyframe positions.
	 * @param {number} [options.markerSize=5] - Size of keyframe markers in pixels.
	 */
	constructor( root, clip, object, options = {} ) {

		super();

		const {
			color = 0x00ff00,
			markerColor = 0xff0000,
			divisions = 100,
			showMarkers = true,
			markerSize = 5
		} = options;

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isAnimationPathHelper = true;

		this.type = 'AnimationPathHelper';

		/**
		 * The root object containing the animation clips.
		 *
		 * @type {Object3D}
		 */
		this.root = root;

		/**
		 * The animation clip containing position keyframes.
		 *
		 * @type {AnimationClip}
		 */
		this.clip = clip;

		/**
		 * The object whose path is being visualized.
		 *
		 * @type {Object3D}
		 */
		this.object = object;

		/**
		 * Number of samples for smooth path interpolation.
		 *
		 * @type {number}
		 * @default 100
		 */
		this.divisions = divisions;

		/**
		 * The position track for the object.
		 *
		 * @type {KeyframeTrack|null}
		 * @private
		 */
		this._track = this._findTrackForObject( object );

		// Without a position track there is nothing to draw. Note that the
		// early return leaves `this.line` and `this.points` undefined, which
		// the other methods guard against before dereferencing.
		if ( this._track === null ) {

			console.warn( 'AnimationPathHelper: No position track found for object', object.name );
			return;

		}

		// Create line for path
		const lineGeometry = new BufferGeometry();
		const lineMaterial = new LineBasicMaterial( {
			color: color,
			toneMapped: false
		} );

		/**
		 * The line representing the animation path.
		 *
		 * @type {Line}
		 */
		this.line = new Line( lineGeometry, lineMaterial );
		this.add( this.line );

		// Create points for keyframe markers
		if ( showMarkers ) {

			const pointsGeometry = new BufferGeometry();
			const pointsMaterial = new PointsMaterial( {
				color: markerColor,
				size: markerSize,
				sizeAttenuation: false, // markers keep constant pixel size regardless of distance
				toneMapped: false
			} );

			/**
			 * Points marking keyframe positions.
			 *
			 * @type {Points|null}
			 */
			this.points = new Points( pointsGeometry, pointsMaterial );
			this.add( this.points );

		} else {

			this.points = null;

		}

		// Sync matrix with object's parent; the custom updateMatrixWorld()
		// below overwrites `matrix` directly, so automatic composition from
		// position/quaternion/scale must be disabled.
		this.matrixAutoUpdate = false;

		this._updateGeometry();

	}

	/**
	 * Finds the position track for the given object.
	 *
	 * NOTE(review): this matches tracks named '<uuid>.position' only — clips
	 * whose tracks are bound by object *name* will not be found. Verify
	 * against the naming scheme of the clips used with this helper.
	 *
	 * @private
	 * @param {Object3D} object - The object to find the track for.
	 * @returns {KeyframeTrack|null} The position track, or null if not found.
	 */
	_findTrackForObject( object ) {

		const targetName = object.uuid + '.position';

		for ( const track of this.clip.tracks ) {

			// getValueSize() === 3 restricts the match to vector3 (position) tracks
			if ( track.name === targetName && track.getValueSize() === 3 ) {

				return track;

			}

		}

		return null;

	}

	/**
	 * Samples the track at regular intervals.
	 *
	 * @private
	 * @returns {Float32Array} Array of sampled positions.
	 */
	_sampleTrack() {

		const track = this._track;
		const interpolant = track.createInterpolant();
		const duration = this.clip.duration;

		const positions = [];

		// Inclusive loop: divisions + 1 samples so the path covers t = 0
		// through t = duration exactly.
		for ( let i = 0; i <= this.divisions; i ++ ) {

			const t = ( i / this.divisions ) * duration;
			const result = interpolant.evaluate( t );
			positions.push( result[ 0 ], result[ 1 ], result[ 2 ] );

		}

		return new Float32Array( positions );

	}

	/**
	 * Updates the geometry with sampled path data.
	 *
	 * @private
	 */
	_updateGeometry() {

		// No track was found in the constructor; nothing to update.
		if ( this._track === null ) return;

		// Update line geometry
		const sampledPositions = this._sampleTrack();
		this.line.geometry.setAttribute( 'position', new Float32BufferAttribute( sampledPositions, 3 ) );
		this.line.geometry.computeBoundingSphere();

		// Update keyframe markers (raw keyframe values, not interpolated samples)
		if ( this.points !== null ) {

			this.points.geometry.setAttribute( 'position', new Float32BufferAttribute( new Float32Array( this._track.values ), 3 ) );
			this.points.geometry.computeBoundingSphere();

		}

	}

	/**
	 * Updates the helper's transform to match the object's parent.
	 *
	 * @param {boolean} force - Force matrix update.
	 */
	updateMatrixWorld( force ) {

		// Position the helper at the object's parent so the path appears in correct local space
		if ( this.object && this.object.parent ) {

			this.object.parent.updateWorldMatrix( true, false );
			this.matrix.copy( this.object.parent.matrixWorld );

		} else {

			this.matrix.identity();

		}

		// The local matrix is already a world matrix here, so copy it directly
		// instead of multiplying by a parent transform.
		this.matrixWorld.copy( this.matrix );

		// Update children
		for ( let i = 0; i < this.children.length; i ++ ) {

			this.children[ i ].updateMatrixWorld( force );

		}

	}

	/**
	 * Sets the path line color.
	 *
	 * @param {number|Color|string} color - The new color.
	 */
	setColor( color ) {

		// `this.line` is undefined when no track was found — guard first.
		if ( this.line ) this.line.material.color.set( color );

	}

	/**
	 * Sets the keyframe marker color.
	 *
	 * @param {number|Color|string} color - The new color.
	 */
	setMarkerColor( color ) {

		if ( this.points ) this.points.material.color.set( color );

	}

	/**
	 * Frees the GPU-related resources allocated by this instance.
	 */
	dispose() {

		if ( this.line ) {

			this.line.geometry.dispose();
			this.line.material.dispose();

		}

		if ( this.points ) {

			this.points.geometry.dispose();
			this.points.material.dispose();

		}

	}

}
export { AnimationPathHelper };

Sorry, the diff of this file is too big to display

import {
Color,
ColorManagement,
MathUtils,
Matrix4,
Vector3,
SRGBColorSpace
} from 'three';
/**
* Utility functions for parsing
*/
/**
 * Collects only the DIRECT children of `xml` whose nodeName matches,
 * unlike the recursive DOM getElementsByTagName().
 *
 * @param {Element} xml - The parent element.
 * @param {string} name - The node name to match.
 * @returns {Array<Element>} Matching child nodes, in document order.
 */
function getElementsByTagName( xml, name ) {

	const matches = [];

	for ( const node of xml.childNodes ) {

		if ( node.nodeName === name ) matches.push( node );

	}

	return matches;

}
/**
 * Splits a whitespace-separated token list into an array of strings.
 *
 * @param {string} text - The raw text content.
 * @returns {Array<string>} The individual tokens.
 */
function parseStrings( text ) {

	if ( text.length === 0 ) return [];

	const trimmed = text.trim();
	return trimmed.split( /\s+/ );

}
/**
 * Splits a whitespace-separated number list into an array of floats.
 *
 * @param {string} text - The raw text content.
 * @returns {Array<number>} The parsed values.
 */
function parseFloats( text ) {

	if ( text.length === 0 ) return [];

	const tokens = text.trim().split( /\s+/ );
	return tokens.map( ( token ) => parseFloat( token ) );

}
/**
 * Splits a whitespace-separated integer list into an array of ints.
 *
 * @param {string} text - The raw text content.
 * @returns {Array<number>} The parsed values.
 */
function parseInts( text ) {

	if ( text.length === 0 ) return [];

	// explicit radix 10: without it, parseInt would interpret '0x…'
	// prefixes as hexadecimal
	return text.trim().split( /\s+/ ).map( ( value ) => parseInt( value, 10 ) );

}
/**
 * Strips the leading '#' from a COLLADA URL fragment reference.
 *
 * @param {string} text - The reference, e.g. '#geometry-1'.
 * @returns {string} The bare id.
 */
function parseId( text ) {

	return text.slice( 1 );

}
/**
* ColladaParser handles XML parsing and converts Collada XML to intermediate data structures.
*/
class ColladaParser {
constructor() {

	// running counter used by generateId() to create unique fallback ids
	this.count = 0;

}
generateId() {
return 'three_default_' + ( this.count ++ );
}
/**
 * Parses the given COLLADA text into intermediate library structures.
 *
 * @param {string} text - The raw COLLADA XML document.
 * @returns {?Object} Object with `library`, `asset` and `collada` fields,
 * or null if the input is empty or fails to parse.
 */
parse( text ) {

	if ( text.length === 0 ) {

		return null;

	}

	const xml = new DOMParser().parseFromString( text, 'application/xml' );

	const collada = getElementsByTagName( xml, 'COLLADA' )[ 0 ];

	// DOMParser does not throw on invalid XML; it embeds a <parsererror>
	// element in the returned document instead.
	const parserError = xml.getElementsByTagName( 'parsererror' )[ 0 ];
	if ( parserError !== undefined ) {

		// Chrome will return parser error with a div in it
		const errorElement = getElementsByTagName( parserError, 'div' )[ 0 ];
		let errorText;

		if ( errorElement ) {

			errorText = errorElement.textContent;

		} else {

			errorText = this.parserErrorToText( parserError );

		}

		console.error( 'THREE.ColladaLoader: Failed to parse collada file.\n', errorText );

		return null;

	}

	// metadata
	const version = collada.getAttribute( 'version' );
	console.debug( 'THREE.ColladaLoader: File version', version );

	const asset = this.parseAsset( getElementsByTagName( collada, 'asset' )[ 0 ] );

	// registry for every parsed library section, keyed by element id

	const library = {
		animations: {},
		clips: {},
		controllers: {},
		images: {},
		effects: {},
		materials: {},
		cameras: {},
		lights: {},
		geometries: {},
		nodes: {},
		visualScenes: {},
		kinematicsModels: {},
		physicsModels: {},
		kinematicsScenes: {}
	};

	// stored on the instance so the individual parse* methods can register
	// their results
	this.library = library;
	this.collada = collada;

	this.parseLibrary( collada, 'library_animations', 'animation', this.parseAnimation.bind( this ) );
	this.parseLibrary( collada, 'library_animation_clips', 'animation_clip', this.parseAnimationClip.bind( this ) );
	this.parseLibrary( collada, 'library_controllers', 'controller', this.parseController.bind( this ) );
	this.parseLibrary( collada, 'library_images', 'image', this.parseImage.bind( this ) );
	this.parseLibrary( collada, 'library_effects', 'effect', this.parseEffect.bind( this ) );
	this.parseLibrary( collada, 'library_materials', 'material', this.parseMaterial.bind( this ) );
	this.parseLibrary( collada, 'library_cameras', 'camera', this.parseCamera.bind( this ) );
	this.parseLibrary( collada, 'library_lights', 'light', this.parseLight.bind( this ) );
	this.parseLibrary( collada, 'library_geometries', 'geometry', this.parseGeometry.bind( this ) );
	this.parseLibrary( collada, 'library_nodes', 'node', this.parseNode.bind( this ) );
	this.parseLibrary( collada, 'library_visual_scenes', 'visual_scene', this.parseVisualScene.bind( this ) );
	this.parseLibrary( collada, 'library_kinematics_models', 'kinematics_model', this.parseKinematicsModel.bind( this ) );
	this.parseLibrary( collada, 'library_physics_models', 'physics_model', this.parsePhysicsModel.bind( this ) );
	this.parseLibrary( collada, 'scene', 'instance_kinematics_scene', this.parseKinematicsScene.bind( this ) );

	return {
		library: library,
		asset: asset,
		collada: collada
	};

}
// convert the parser error element into text with each child elements text
// separated by new lines.
parserErrorToText( parserError ) {
const parts = [];
const stack = [ parserError ];
while ( stack.length ) {
const node = stack.shift();
if ( node.nodeType === Node.TEXT_NODE ) {
parts.push( node.textContent );
} else {
parts.push( '\n' );
stack.push( ...node.childNodes );
}
}
return parts.join( '' ).trim();
}
// asset
parseAsset( xml ) {
return {
unit: this.parseAssetUnit( getElementsByTagName( xml, 'unit' )[ 0 ] ),
upAxis: this.parseAssetUpAxis( getElementsByTagName( xml, 'up_axis' )[ 0 ] )
};
}
parseAssetUnit( xml ) {
if ( ( xml !== undefined ) && ( xml.hasAttribute( 'meter' ) === true ) ) {
return parseFloat( xml.getAttribute( 'meter' ) );
} else {
return 1; // default 1 meter
}
}
parseAssetUpAxis( xml ) {
return xml !== undefined ? xml.textContent : 'Y_UP';
}
// library
parseLibrary( xml, libraryName, nodeName, parser ) {
const library = getElementsByTagName( xml, libraryName )[ 0 ];
if ( library !== undefined ) {
const elements = getElementsByTagName( library, nodeName );
for ( let i = 0; i < elements.length; i ++ ) {
parser( elements[ i ] );
}
}
}
// animation
parseAnimation( xml ) {
const data = {
sources: {},
samplers: {},
channels: {}
};
let hasChildren = false;
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
let id;
switch ( child.nodeName ) {
case 'source':
id = child.getAttribute( 'id' );
data.sources[ id ] = this.parseSource( child );
break;
case 'sampler':
id = child.getAttribute( 'id' );
data.samplers[ id ] = this.parseAnimationSampler( child );
break;
case 'channel':
id = child.getAttribute( 'target' );
data.channels[ id ] = this.parseAnimationChannel( child );
break;
case 'animation':
// hierarchy of related animations
this.parseAnimation( child );
hasChildren = true;
break;
default:
}
}
if ( hasChildren === false ) {
// since 'id' attributes can be optional, it's necessary to generate a UUID for unique assignment
this.library.animations[ xml.getAttribute( 'id' ) || MathUtils.generateUUID() ] = data;
}
}
parseAnimationSampler( xml ) {
const data = {
inputs: {},
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const id = parseId( child.getAttribute( 'source' ) );
const semantic = child.getAttribute( 'semantic' );
data.inputs[ semantic ] = id;
break;
}
}
return data;
}
/**
 * Parses a <channel> element, decomposing its SID-addressed target
 * ('<id>/<sid>' with optional '.member' or '(index)' accessors).
 */
parseAnimationChannel( xml ) {

	const data = {};

	const target = xml.getAttribute( 'target' );

	// parsing SID Addressing Syntax
	let parts = target.split( '/' );

	const id = parts.shift();
	let sid = parts.shift();

	// check selection syntax
	const arraySyntax = ( sid.indexOf( '(' ) !== - 1 );
	const memberSyntax = ( sid.indexOf( '.' ) !== - 1 );

	if ( memberSyntax ) {

		// member selection access
		parts = sid.split( '.' );
		sid = parts.shift();
		data.member = parts.shift();

	} else if ( arraySyntax ) {

		// array-access syntax. can be used to express fields in one-dimensional vectors or two-dimensional matrices.
		const indices = sid.split( '(' );
		sid = indices.shift();

		for ( let i = 0; i < indices.length; i ++ ) {

			// strip the closing parenthesis, keeping only the numeric index
			indices[ i ] = parseInt( indices[ i ].replace( /\)/, '' ) );

		}

		data.indices = indices;

	}

	data.id = id;
	data.sid = sid;

	data.arraySyntax = arraySyntax;
	data.memberSyntax = memberSyntax;

	// the sampler that drives this channel
	data.sampler = parseId( xml.getAttribute( 'source' ) );

	return data;

}
// animation clips
parseAnimationClip( xml ) {
const data = {
name: xml.getAttribute( 'id' ) || 'default',
start: parseFloat( xml.getAttribute( 'start' ) || 0 ),
end: parseFloat( xml.getAttribute( 'end' ) || 0 ),
animations: []
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'instance_animation':
data.animations.push( parseId( child.getAttribute( 'url' ) ) );
break;
}
}
this.library.clips[ xml.getAttribute( 'id' ) ] = data;
}
// controller
parseController( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'skin':
// there is exactly one skin per controller
data.id = parseId( child.getAttribute( 'source' ) );
data.skin = this.parseSkin( child );
break;
case 'morph':
data.id = parseId( child.getAttribute( 'source' ) );
console.warn( 'THREE.ColladaLoader: Morph target animation not supported yet.' );
break;
}
}
this.library.controllers[ xml.getAttribute( 'id' ) ] = data;
}
parseSkin( xml ) {
const data = {
sources: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'bind_shape_matrix':
data.bindShapeMatrix = parseFloats( child.textContent );
break;
case 'source':
const id = child.getAttribute( 'id' );
data.sources[ id ] = this.parseSource( child );
break;
case 'joints':
data.joints = this.parseJoints( child );
break;
case 'vertex_weights':
data.vertexWeights = this.parseVertexWeights( child );
break;
}
}
return data;
}
parseJoints( xml ) {
const data = {
inputs: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const semantic = child.getAttribute( 'semantic' );
const id = parseId( child.getAttribute( 'source' ) );
data.inputs[ semantic ] = id;
break;
}
}
return data;
}
parseVertexWeights( xml ) {
const data = {
inputs: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const semantic = child.getAttribute( 'semantic' );
const id = parseId( child.getAttribute( 'source' ) );
const offset = parseInt( child.getAttribute( 'offset' ) );
data.inputs[ semantic ] = { id: id, offset: offset };
break;
case 'vcount':
data.vcount = parseInts( child.textContent );
break;
case 'v':
data.v = parseInts( child.textContent );
break;
}
}
return data;
}
// image
parseImage( xml ) {
const data = {
init_from: getElementsByTagName( xml, 'init_from' )[ 0 ].textContent
};
this.library.images[ xml.getAttribute( 'id' ) ] = data;
}
// effect
parseEffect( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'profile_COMMON':
data.profile = this.parseEffectProfileCOMMON( child );
break;
}
}
this.library.effects[ xml.getAttribute( 'id' ) ] = data;
}
parseEffectProfileCOMMON( xml ) {
const data = {
surfaces: {},
samplers: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'newparam':
this.parseEffectNewparam( child, data );
break;
case 'technique':
data.technique = this.parseEffectTechnique( child );
break;
case 'extra':
data.extra = this.parseEffectExtra( child );
break;
}
}
return data;
}
parseEffectNewparam( xml, data ) {
const sid = xml.getAttribute( 'sid' );
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'surface':
data.surfaces[ sid ] = this.parseEffectSurface( child );
break;
case 'sampler2D':
data.samplers[ sid ] = this.parseEffectSampler( child );
break;
}
}
}
parseEffectSurface( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'init_from':
data.init_from = child.textContent;
break;
}
}
return data;
}
parseEffectSampler( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'source':
data.source = child.textContent;
break;
}
}
return data;
}
parseEffectTechnique( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'constant':
case 'lambert':
case 'blinn':
case 'phong':
data.type = child.nodeName;
data.parameters = this.parseEffectParameters( child );
break;
case 'extra':
data.extra = this.parseEffectExtra( child );
break;
}
}
return data;
}
parseEffectParameters( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'emission':
case 'diffuse':
case 'specular':
case 'bump':
case 'ambient':
case 'shininess':
case 'transparency':
data[ child.nodeName ] = this.parseEffectParameter( child );
break;
case 'transparent':
data[ child.nodeName ] = {
opaque: child.hasAttribute( 'opaque' ) ? child.getAttribute( 'opaque' ) : 'A_ONE',
data: this.parseEffectParameter( child )
};
break;
}
}
return data;
}
parseEffectParameter( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'color':
data[ child.nodeName ] = parseFloats( child.textContent );
break;
case 'float':
data[ child.nodeName ] = parseFloat( child.textContent );
break;
case 'texture':
data[ child.nodeName ] = { id: child.getAttribute( 'texture' ), extra: this.parseEffectParameterTexture( child ) };
break;
}
}
return data;
}
parseEffectParameterTexture( xml ) {
const data = {
technique: {}
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'extra':
this.parseEffectParameterTextureExtra( child, data );
break;
}
}
return data;
}
parseEffectParameterTextureExtra( xml, data ) {
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique':
this.parseEffectParameterTextureExtraTechnique( child, data );
break;
}
}
}
parseEffectParameterTextureExtraTechnique( xml, data ) {
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'repeatU':
case 'repeatV':
case 'offsetU':
case 'offsetV':
data.technique[ child.nodeName ] = parseFloat( child.textContent );
break;
case 'wrapU':
case 'wrapV':
// some files have values for wrapU/wrapV which become NaN via parseInt
if ( child.textContent.toUpperCase() === 'TRUE' ) {
data.technique[ child.nodeName ] = 1;
} else if ( child.textContent.toUpperCase() === 'FALSE' ) {
data.technique[ child.nodeName ] = 0;
} else {
data.technique[ child.nodeName ] = parseInt( child.textContent );
}
break;
case 'bump':
data[ child.nodeName ] = this.parseEffectExtraTechniqueBump( child );
break;
}
}
}
parseEffectExtra( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique':
data.technique = this.parseEffectExtraTechnique( child );
break;
}
}
return data;
}
parseEffectExtraTechnique( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'double_sided':
data[ child.nodeName ] = parseInt( child.textContent );
break;
case 'bump':
data[ child.nodeName ] = this.parseEffectExtraTechniqueBump( child );
break;
}
}
return data;
}
parseEffectExtraTechniqueBump( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'texture':
data[ child.nodeName ] = { id: child.getAttribute( 'texture' ), texcoord: child.getAttribute( 'texcoord' ), extra: this.parseEffectParameterTexture( child ) };
break;
}
}
return data;
}
// material
parseMaterial( xml ) {
const data = {
name: xml.getAttribute( 'name' )
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'instance_effect':
data.url = parseId( child.getAttribute( 'url' ) );
break;
}
}
this.library.materials[ xml.getAttribute( 'id' ) ] = data;
}
// camera
parseCamera( xml ) {
const data = {
name: xml.getAttribute( 'name' )
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'optics':
data.optics = this.parseCameraOptics( child );
break;
}
}
this.library.cameras[ xml.getAttribute( 'id' ) ] = data;
}
parseCameraOptics( xml ) {
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
switch ( child.nodeName ) {
case 'technique_common':
return this.parseCameraTechnique( child );
}
}
return {};
}
parseCameraTechnique( xml ) {
const data = {};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
switch ( child.nodeName ) {
case 'perspective':
case 'orthographic':
data.technique = child.nodeName;
data.parameters = this.parseCameraParameters( child );
break;
}
}
return data;
}
parseCameraParameters( xml ) {
const data = {};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
switch ( child.nodeName ) {
case 'xfov':
case 'yfov':
case 'xmag':
case 'ymag':
case 'znear':
case 'zfar':
case 'aspect_ratio':
data[ child.nodeName ] = parseFloat( child.textContent );
break;
}
}
return data;
}
// light
parseLight( xml ) {
let data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique_common':
data = this.parseLightTechnique( child );
break;
}
}
this.library.lights[ xml.getAttribute( 'id' ) ] = data;
}
parseLightTechnique( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'directional':
case 'point':
case 'spot':
case 'ambient':
data.technique = child.nodeName;
data.parameters = this.parseLightParameters( child );
break;
}
}
return data;
}
parseLightParameters( xml ) {
const data = {};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'color':
const array = parseFloats( child.textContent );
data.color = new Color().fromArray( array );
ColorManagement.colorSpaceToWorking( data.color, SRGBColorSpace );
break;
case 'falloff_angle':
data.falloffAngle = parseFloat( child.textContent );
break;
case 'quadratic_attenuation':
const f = parseFloat( child.textContent );
data.distance = f ? Math.sqrt( 1 / f ) : 0;
break;
}
}
return data;
}
// geometry
// geometry

/**
 * Parses a <geometry> element; only <mesh> geometry is supported.
 */
parseGeometry( xml ) {

	const data = {
		name: xml.getAttribute( 'name' ),
		sources: {},
		vertices: {},
		primitives: []
	};

	const mesh = getElementsByTagName( xml, 'mesh' )[ 0 ];

	// the following tags inside geometry are not supported yet (see https://github.com/mrdoob/three.js/pull/12606): convex_mesh, spline, brep
	if ( mesh === undefined ) return;

	for ( let i = 0; i < mesh.childNodes.length; i ++ ) {

		const child = mesh.childNodes[ i ];

		if ( child.nodeType !== 1 ) continue;

		const id = child.getAttribute( 'id' );

		switch ( child.nodeName ) {

			case 'source':
				data.sources[ id ] = this.parseSource( child );
				break;

			case 'vertices':
				// data.sources[ id ] = data.sources[ parseId( getElementsByTagName( child, 'input' )[ 0 ].getAttribute( 'source' ) ) ];
				data.vertices = this.parseGeometryVertices( child );
				break;

			case 'polygons':
				console.warn( 'THREE.ColladaLoader: Unsupported primitive type: ', child.nodeName );
				break;

			case 'lines':
			case 'linestrips':
			case 'polylist':
			case 'triangles':
				data.primitives.push( this.parseGeometryPrimitive( child ) );
				break;

			default:

		}

	}

	this.library.geometries[ xml.getAttribute( 'id' ) ] = data;

}
parseSource( xml ) {
const data = {
array: [],
stride: 3
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'float_array':
data.array = parseFloats( child.textContent );
break;
case 'Name_array':
data.array = parseStrings( child.textContent );
break;
case 'technique_common':
const accessor = getElementsByTagName( child, 'accessor' )[ 0 ];
if ( accessor !== undefined ) {
data.stride = parseInt( accessor.getAttribute( 'stride' ) );
}
break;
}
}
return data;
}
parseGeometryVertices( xml ) {
const data = {};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
data[ child.getAttribute( 'semantic' ) ] = parseId( child.getAttribute( 'source' ) );
}
return data;
}
parseGeometryPrimitive( xml ) {
const primitive = {
type: xml.nodeName,
material: xml.getAttribute( 'material' ),
count: parseInt( xml.getAttribute( 'count' ) ),
inputs: {},
stride: 0,
hasUV: false
};
for ( let i = 0, l = xml.childNodes.length; i < l; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'input':
const id = parseId( child.getAttribute( 'source' ) );
const semantic = child.getAttribute( 'semantic' );
const offset = parseInt( child.getAttribute( 'offset' ) );
const set = parseInt( child.getAttribute( 'set' ) );
const inputname = ( set > 0 ? semantic + set : semantic );
primitive.inputs[ inputname ] = { id: id, offset: offset };
primitive.stride = Math.max( primitive.stride, offset + 1 );
if ( semantic === 'TEXCOORD' ) primitive.hasUV = true;
break;
case 'vcount':
primitive.vcount = parseInts( child.textContent );
break;
case 'p':
primitive.p = parseInts( child.textContent );
break;
}
}
return primitive;
}
// kinematics
parseKinematicsModel( xml ) {
const data = {
name: xml.getAttribute( 'name' ) || '',
joints: {},
links: []
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique_common':
this.parseKinematicsTechniqueCommon( child, data );
break;
}
}
this.library.kinematicsModels[ xml.getAttribute( 'id' ) ] = data;
}
parseKinematicsTechniqueCommon( xml, data ) {
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'joint':
data.joints[ child.getAttribute( 'sid' ) ] = this.parseKinematicsJoint( child );
break;
case 'link':
data.links.push( this.parseKinematicsLink( child ) );
break;
}
}
}
parseKinematicsJoint( xml ) {
let data;
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'prismatic':
case 'revolute':
data = this.parseKinematicsJointParameter( child );
break;
}
}
return data;
}
/**
 * Parses a <prismatic> or <revolute> joint definition: its axis, limits and
 * derived properties (static flag, middle position).
 */
parseKinematicsJointParameter( xml ) {

	const data = {
		sid: xml.getAttribute( 'sid' ),
		name: xml.getAttribute( 'name' ) || '',
		axis: new Vector3(),
		limits: {
			min: 0,
			max: 0
		},
		type: xml.nodeName,
		static: false,
		zeroPosition: 0,
		middlePosition: 0
	};

	for ( let i = 0; i < xml.childNodes.length; i ++ ) {

		const child = xml.childNodes[ i ];

		if ( child.nodeType !== 1 ) continue;

		switch ( child.nodeName ) {

			case 'axis':
				const array = parseFloats( child.textContent );
				data.axis.fromArray( array );
				break;

			case 'limits':
				const max = child.getElementsByTagName( 'max' )[ 0 ];
				const min = child.getElementsByTagName( 'min' )[ 0 ];

				data.limits.max = parseFloat( max.textContent );
				data.limits.min = parseFloat( min.textContent );
				break;

		}

	}

	// if min is equal to or greater than max, consider the joint static
	if ( data.limits.min >= data.limits.max ) {

		data.static = true;

	}

	// calculate middle position
	data.middlePosition = ( data.limits.min + data.limits.max ) / 2.0;

	return data;

}
parseKinematicsLink( xml ) {
const data = {
sid: xml.getAttribute( 'sid' ),
name: xml.getAttribute( 'name' ) || '',
attachments: [],
transforms: []
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'attachment_full':
data.attachments.push( this.parseKinematicsAttachment( child ) );
break;
case 'matrix':
case 'translate':
case 'rotate':
data.transforms.push( this.parseKinematicsTransform( child ) );
break;
}
}
return data;
}
parseKinematicsAttachment( xml ) {
const data = {
joint: xml.getAttribute( 'joint' ).split( '/' ).pop(),
transforms: [],
links: []
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'link':
data.links.push( this.parseKinematicsLink( child ) );
break;
case 'matrix':
case 'translate':
case 'rotate':
data.transforms.push( this.parseKinematicsTransform( child ) );
break;
}
}
return data;
}
parseKinematicsTransform( xml ) {
const data = {
type: xml.nodeName
};
const array = parseFloats( xml.textContent );
switch ( data.type ) {
case 'matrix':
data.obj = new Matrix4();
data.obj.fromArray( array ).transpose();
break;
case 'translate':
data.obj = new Vector3();
data.obj.fromArray( array );
break;
case 'rotate':
data.obj = new Vector3();
data.obj.fromArray( array );
data.angle = MathUtils.degToRad( array[ 3 ] );
break;
}
return data;
}
// physics
parsePhysicsModel( xml ) {
const data = {
name: xml.getAttribute( 'name' ) || '',
rigidBodies: {}
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'rigid_body':
data.rigidBodies[ child.getAttribute( 'name' ) ] = {};
this.parsePhysicsRigidBody( child, data.rigidBodies[ child.getAttribute( 'name' ) ] );
break;
}
}
this.library.physicsModels[ xml.getAttribute( 'id' ) ] = data;
}
parsePhysicsRigidBody( xml, data ) {
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'technique_common':
this.parsePhysicsTechniqueCommon( child, data );
break;
}
}
}
parsePhysicsTechniqueCommon( xml, data ) {
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'inertia':
data.inertia = parseFloats( child.textContent );
break;
case 'mass':
data.mass = parseFloats( child.textContent )[ 0 ];
break;
}
}
}
// scene
parseKinematicsScene( xml ) {
const data = {
bindJointAxis: []
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue;
switch ( child.nodeName ) {
case 'bind_joint_axis':
data.bindJointAxis.push( this.parseKinematicsBindJointAxis( child ) );
break;
}
}
this.library.kinematicsScenes[ parseId( xml.getAttribute( 'url' ) ) ] = data;
}
// Parses a <bind_joint_axis> element. Besides the bound target, a joint
// index is recovered from the axis parameter name by string surgery.
parseKinematicsBindJointAxis( xml ) {
const data = {
// keep only the trailing sid of the target path
target: xml.getAttribute( 'target' ).split( '/' ).pop()
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue; // element nodes only
switch ( child.nodeName ) {
case 'axis':
// NOTE(review): assumes a <param> child exists — throws otherwise.
const param = child.getElementsByTagName( 'param' )[ 0 ];
data.axis = param.textContent;
// The param name is expected to look like 'inst_<index>_axis...':
// take the substring between 'inst_' and 'axis', then drop the
// trailing separator character to obtain the joint index.
const tmpJointIndex = data.axis.split( 'inst_' ).pop().split( 'axis' )[ 0 ];
data.jointIndex = tmpJointIndex.substring( 0, tmpJointIndex.length - 1 );
break;
}
}
return data;
}
// nodes
prepareNodes( xml ) {
const elements = xml.getElementsByTagName( 'node' );
// ensure all node elements have id attributes
for ( let i = 0; i < elements.length; i ++ ) {
const element = elements[ i ];
if ( element.hasAttribute( 'id' ) === false ) {
element.setAttribute( 'id', this.generateId() );
}
}
}
// Parses a <node> element into a plain description object and registers it
// in the library (unless a node with the same id was seen before). All child
// transforms are accumulated into a single local matrix in document order,
// while the individual transform channels are also recorded per sid so that
// animations can target them later.
parseNode( xml ) {
const matrix = new Matrix4(); // scratch matrix for composing transforms
const vector = new Vector3(); // scratch vector for translate/rotate/scale
const data = {
name: xml.getAttribute( 'name' ) || '',
type: xml.getAttribute( 'type' ),
id: xml.getAttribute( 'id' ),
sid: xml.getAttribute( 'sid' ),
matrix: new Matrix4(), // accumulated local transform
nodes: [], // ids of child <node> elements
instanceCameras: [],
instanceControllers: [],
instanceLights: [],
instanceGeometries: [],
instanceNodes: [],
transforms: {}, // sid -> transform element name
transformData: {}, // sid -> raw transform values (animation targets)
transformOrder: [] // sids in document order
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
if ( child.nodeType !== 1 ) continue; // element nodes only
let array;
switch ( child.nodeName ) {
case 'node':
// child nodes are referenced by id and parsed recursively
data.nodes.push( child.getAttribute( 'id' ) );
this.parseNode( child );
break;
case 'instance_camera':
data.instanceCameras.push( parseId( child.getAttribute( 'url' ) ) );
break;
case 'instance_controller':
data.instanceControllers.push( this.parseNodeInstance( child ) );
break;
case 'instance_light':
data.instanceLights.push( parseId( child.getAttribute( 'url' ) ) );
break;
case 'instance_geometry':
data.instanceGeometries.push( this.parseNodeInstance( child ) );
break;
case 'instance_node':
data.instanceNodes.push( parseId( child.getAttribute( 'url' ) ) );
break;
case 'matrix':
// COLLADA matrices are row-major, hence the transpose()
array = parseFloats( child.textContent );
data.matrix.multiply( matrix.fromArray( array ).transpose() );
{
const sid = child.getAttribute( 'sid' );
data.transforms[ sid ] = child.nodeName;
data.transformData[ sid ] = { type: 'matrix', array: array };
data.transformOrder.push( sid );
}
break;
case 'translate':
array = parseFloats( child.textContent );
vector.fromArray( array );
data.matrix.multiply( matrix.makeTranslation( vector.x, vector.y, vector.z ) );
{
const sid = child.getAttribute( 'sid' );
data.transforms[ sid ] = child.nodeName;
data.transformData[ sid ] = { type: 'translate', x: array[ 0 ], y: array[ 1 ], z: array[ 2 ] };
data.transformOrder.push( sid );
}
break;
case 'rotate':
// axis in the first three components, angle (degrees) in the fourth
array = parseFloats( child.textContent );
{
const angle = MathUtils.degToRad( array[ 3 ] );
data.matrix.multiply( matrix.makeRotationAxis( vector.fromArray( array ), angle ) );
const sid = child.getAttribute( 'sid' );
data.transforms[ sid ] = child.nodeName;
data.transformData[ sid ] = { type: 'rotate', axis: [ array[ 0 ], array[ 1 ], array[ 2 ] ], angle: array[ 3 ] };
data.transformOrder.push( sid );
}
break;
case 'scale':
array = parseFloats( child.textContent );
data.matrix.scale( vector.fromArray( array ) );
{
const sid = child.getAttribute( 'sid' );
data.transforms[ sid ] = child.nodeName;
data.transformData[ sid ] = { type: 'scale', x: array[ 0 ], y: array[ 1 ], z: array[ 2 ] };
data.transformOrder.push( sid );
}
break;
case 'extra':
break;
default:
}
}
// duplicate ids are ignored so later lookups stay unambiguous
if ( this.hasNode( data.id ) ) {
console.warn( 'THREE.ColladaLoader: There is already a node with ID %s. Exclude current node from further processing.', data.id );
} else {
this.library.nodes[ data.id ] = data;
}
return data;
}
parseNodeInstance( xml ) {
const data = {
id: parseId( xml.getAttribute( 'url' ) ),
materials: {},
skeletons: []
};
for ( let i = 0; i < xml.childNodes.length; i ++ ) {
const child = xml.childNodes[ i ];
switch ( child.nodeName ) {
case 'bind_material':
const instances = child.getElementsByTagName( 'instance_material' );
for ( let j = 0; j < instances.length; j ++ ) {
const instance = instances[ j ];
const symbol = instance.getAttribute( 'symbol' );
const target = instance.getAttribute( 'target' );
data.materials[ symbol ] = parseId( target );
}
break;
case 'skeleton':
data.skeletons.push( parseId( child.textContent ) );
break;
default:
break;
}
}
return data;
}
// visual scenes
parseVisualScene( xml ) {
const data = {
name: xml.getAttribute( 'name' ),
children: []
};
this.prepareNodes( xml );
const elements = getElementsByTagName( xml, 'node' );
for ( let i = 0; i < elements.length; i ++ ) {
data.children.push( this.parseNode( elements[ i ] ) );
}
this.library.visualScenes[ xml.getAttribute( 'id' ) ] = data;
}
hasNode( id ) {
return this.library.nodes[ id ] !== undefined;
}
}
export { ColladaParser, getElementsByTagName, parseStrings, parseFloats, parseInts, parseId };

Sorry, the diff of this file is too big to display

import { RenderTarget, Vector2, NodeMaterial, RendererUtils, QuadMesh, TempNode, NodeUpdateType } from 'three/webgpu';
import { Fn, float, uv, uniform, convertToTexture, vec2, vec4, passTexture, luminance, abs, exp, max } from 'three/tsl';
// shared fullscreen quad used to run the two blur passes
const _quadMesh = /*@__PURE__*/ new QuadMesh();
// renderer state captured before the effect renders and restored afterwards
let _rendererState;
/**
* Post processing node for creating a bilateral blur effect.
*
* Bilateral blur smooths an image while preserving sharp edges. Unlike a
* standard Gaussian blur which blurs everything equally, bilateral blur
* analyzes the intensity/color of neighboring pixels. If a neighbor is too
* different from the center pixel (indicating an edge), it is excluded
* from the blurring process.
*
* Reference: {@link https://en.wikipedia.org/wiki/Bilateral_filter}
*
* @augments TempNode
* @three_import import { bilateralBlur } from 'three/addons/tsl/display/BilateralBlurNode.js';
*/
class BilateralBlurNode extends TempNode {

	static get type() {

		return 'BilateralBlurNode';

	}

	/**
	 * Constructs a new bilateral blur node.
	 *
	 * @param {TextureNode} textureNode - The texture node that represents the input of the effect.
	 * @param {Node<vec2|float>} directionNode - Defines the direction and radius of the blur.
	 * @param {number} sigma - Controls the spatial kernel of the blur filter. Higher values mean a wider blur radius.
	 * @param {number} sigmaColor - Controls the intensity kernel. Higher values allow more color difference to be blurred together.
	 */
	constructor( textureNode, directionNode = null, sigma = 4, sigmaColor = 0.1 ) {

		super( 'vec4' );

		/**
		 * The texture node that represents the input of the effect.
		 *
		 * @type {TextureNode}
		 */
		this.textureNode = textureNode;

		/**
		 * Defines the direction and radius of the blur.
		 *
		 * @type {Node<vec2|float>}
		 */
		this.directionNode = directionNode;

		/**
		 * Controls the spatial kernel of the blur filter. Higher values mean a wider blur radius.
		 *
		 * @type {number}
		 */
		this.sigma = sigma;

		/**
		 * Controls the color/intensity kernel. Higher values allow more color difference
		 * to be blurred together. Lower values preserve edges more strictly.
		 *
		 * @type {number}
		 */
		this.sigmaColor = sigmaColor;

		/**
		 * A uniform node holding the inverse resolution value.
		 *
		 * @private
		 * @type {UniformNode<vec2>}
		 */
		this._invSize = uniform( new Vector2() );

		/**
		 * Bilateral blur is applied in two passes (horizontal, vertical).
		 * This node controls the direction of each pass.
		 *
		 * @private
		 * @type {UniformNode<vec2>}
		 */
		this._passDirection = uniform( new Vector2() );

		/**
		 * The render target used for the horizontal pass.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._horizontalRT = new RenderTarget( 1, 1, { depthBuffer: false } );
		this._horizontalRT.texture.name = 'BilateralBlurNode.horizontal';

		/**
		 * The render target used for the vertical pass.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._verticalRT = new RenderTarget( 1, 1, { depthBuffer: false } );
		this._verticalRT.texture.name = 'BilateralBlurNode.vertical';

		/**
		 * The result of the effect is represented as a separate texture node.
		 *
		 * @private
		 * @type {PassTextureNode}
		 */
		this._textureNode = passTexture( this, this._verticalRT.texture );
		this._textureNode.uvNode = textureNode.uvNode;

		/**
		 * The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
		 * its effect once per frame in `updateBefore()`.
		 *
		 * @type {string}
		 * @default 'frame'
		 */
		this.updateBeforeType = NodeUpdateType.FRAME;

		/**
		 * The resolution scale.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.resolutionScale = 1;

	}

	/**
	 * Sets the size of the effect.
	 *
	 * @param {number} width - The width of the effect.
	 * @param {number} height - The height of the effect.
	 */
	setSize( width, height ) {

		// never let a render target collapse to zero pixels
		width = Math.max( Math.round( width * this.resolutionScale ), 1 );
		height = Math.max( Math.round( height * this.resolutionScale ), 1 );

		this._invSize.value.set( 1 / width, 1 / height );
		this._horizontalRT.setSize( width, height );
		this._verticalRT.setSize( width, height );

	}

	/**
	 * This method is used to render the effect once per frame.
	 *
	 * @param {NodeFrame} frame - The current node frame.
	 */
	updateBefore( frame ) {

		const { renderer } = frame;

		_rendererState = RendererUtils.resetRendererState( renderer, _rendererState );

		//

		const textureNode = this.textureNode;
		const currentTexture = textureNode.value; // restored after the passes

		_quadMesh.material = this._material;

		this.setSize( currentTexture.image.width, currentTexture.image.height );

		// the render targets must match the input texture type (e.g. HalfFloat)
		const textureType = currentTexture.type;

		this._horizontalRT.texture.type = textureType;
		this._verticalRT.texture.type = textureType;

		// horizontal

		renderer.setRenderTarget( this._horizontalRT );

		this._passDirection.value.set( 1, 0 );

		_quadMesh.name = 'Bilateral Blur [ Horizontal Pass ]';
		_quadMesh.render( renderer );

		// vertical

		textureNode.value = this._horizontalRT.texture;
		renderer.setRenderTarget( this._verticalRT );

		this._passDirection.value.set( 0, 1 );

		_quadMesh.name = 'Bilateral Blur [ Vertical Pass ]';
		_quadMesh.render( renderer );

		// restore

		textureNode.value = currentTexture;

		RendererUtils.restoreRendererState( renderer, _rendererState );

	}

	/**
	 * Returns the result of the effect as a texture node.
	 *
	 * @return {PassTextureNode} A texture node that represents the result of the effect.
	 */
	getTextureNode() {

		return this._textureNode;

	}

	/**
	 * This method is used to setup the effect's TSL code.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {PassTextureNode}
	 */
	setup( builder ) {

		const textureNode = this.textureNode;

		//

		const uvNode = uv();
		const directionNode = vec2( this.directionNode || 1 );

		const sampleTexture = ( uv ) => textureNode.sample( uv );

		const blur = Fn( () => {

			const kernelSize = this.sigma * 2 + 3;
			const spatialCoefficients = this._getSpatialCoefficients( kernelSize );

			const invSize = this._invSize;
			const direction = directionNode.mul( this._passDirection );

			// Sample center pixel
			const centerColor = sampleTexture( uvNode ).toVar();
			const centerLuminance = luminance( centerColor.rgb ).toVar();

			// Accumulate weighted samples
			const weightSum = float( spatialCoefficients[ 0 ] ).toVar();
			const colorSum = vec4( centerColor.mul( spatialCoefficients[ 0 ] ) ).toVar();

			// Precompute color sigma factor: -0.5 / (sigmaColor^2)
			const colorSigmaFactor = float( - 0.5 ).div( float( this.sigmaColor * this.sigmaColor ) ).toConst();

			for ( let i = 1; i < kernelSize; i ++ ) {

				const x = float( i );
				const spatialWeight = float( spatialCoefficients[ i ] );

				const uvOffset = vec2( direction.mul( invSize.mul( x ) ) ).toVar();

				// Sample in both directions (+/-)
				const sampleUv1 = uvNode.add( uvOffset );
				const sampleUv2 = uvNode.sub( uvOffset );

				const sample1 = sampleTexture( sampleUv1 );
				const sample2 = sampleTexture( sampleUv2 );

				// Compute luminance difference for edge detection
				const lum1 = luminance( sample1.rgb );
				const lum2 = luminance( sample2.rgb );

				const diff1 = abs( lum1.sub( centerLuminance ) );
				const diff2 = abs( lum2.sub( centerLuminance ) );

				// Compute color-based weights using Gaussian function
				const colorWeight1 = exp( diff1.mul( diff1 ).mul( colorSigmaFactor ) ).toVar();
				const colorWeight2 = exp( diff2.mul( diff2 ).mul( colorSigmaFactor ) ).toVar();

				// Combined bilateral weight = spatial weight * color weight
				const bilateralWeight1 = spatialWeight.mul( colorWeight1 );
				const bilateralWeight2 = spatialWeight.mul( colorWeight2 );

				colorSum.addAssign( sample1.mul( bilateralWeight1 ) );
				colorSum.addAssign( sample2.mul( bilateralWeight2 ) );
				weightSum.addAssign( bilateralWeight1 );
				weightSum.addAssign( bilateralWeight2 );

			}

			// Normalize by the total weight
			return colorSum.div( max( weightSum, 0.0001 ) );

		} );

		//

		const material = this._material || ( this._material = new NodeMaterial() );
		material.fragmentNode = blur().context( builder.getSharedContext() );
		material.name = 'Bilateral_blur';
		material.needsUpdate = true;

		//

		const properties = builder.getNodeProperties( this );
		properties.textureNode = textureNode;

		//

		return this._textureNode;

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the effect is no longer required.
	 */
	dispose() {

		this._horizontalRT.dispose();
		this._verticalRT.dispose();

		// the material is created lazily in setup() and may not exist yet
		if ( this._material !== undefined ) this._material.dispose();

	}

	/**
	 * Computes spatial (Gaussian) coefficients for the spatial component of
	 * the bilateral filter.
	 *
	 * @private
	 * @param {number} kernelRadius - The number of one-sided kernel taps (including the center tap).
	 * @return {Array<number>}
	 */
	_getSpatialCoefficients( kernelRadius ) {

		const coefficients = [];
		const sigma = kernelRadius / 3;

		for ( let i = 0; i < kernelRadius; i ++ ) {

			coefficients.push( 0.39894 * Math.exp( - 0.5 * i * i / ( sigma * sigma ) ) / sigma );

		}

		return coefficients;

	}

}
export default BilateralBlurNode;
/**
 * TSL function for creating a bilateral blur node for post processing.
 *
 * Bilateral blur smooths an image while preserving sharp edges by considering
 * both spatial distance and color/intensity differences between pixels.
 *
 * @tsl
 * @function
 * @param {Node<vec4>} node - The node that represents the input of the effect.
 * @param {Node<vec2|float>} directionNode - Defines the direction and radius of the blur.
 * @param {number} sigma - Controls the spatial kernel of the blur filter. Higher values mean a wider blur radius.
 * @param {number} sigmaColor - Controls the intensity kernel. Higher values allow more color difference to be blurred together.
 * @returns {BilateralBlurNode}
 */
export const bilateralBlur = ( node, directionNode, sigma, sigmaColor ) => {

	// the input is wrapped in a texture node so the effect can sample it freely
	return new BilateralBlurNode( convertToTexture( node ), directionNode, sigma, sigmaColor );

};
import { Fn, float, vec2, vec3, sin, screenUV, mix, clamp, dot, convertToTexture, time, uv, select } from 'three/tsl';
import { circle } from './Shape.js';
/**
 * Creates barrel-distorted UV coordinates.
 * The center of the screen appears to bulge outward (convex distortion).
 *
 * @tsl
 * @function
 * @param {Node<float>} [curvature=0.1] - The amount of curvature (0 = flat, 0.5 = very curved).
 * @param {Node<vec2>} [coord=uv()] - The input UV coordinates.
 * @return {Node<vec2>} The distorted UV coordinates.
 */
export const barrelUV = Fn( ( [ curvature = float( 0.1 ), coord = uv() ] ) => {

	// remap UVs from [0,1] to [-1,1] so the screen center becomes the origin
	const p = coord.sub( 0.5 ).mul( 2.0 );

	// squared radial distance from the center
	const radiusSq = dot( p, p );

	// dividing by (1 - r² * curvature) bulges the center outward
	const bulge = float( 1.0 ).sub( radiusSq.mul( curvature ) );

	// at the corners r² = 2, so rescale by the corner distortion to keep
	// the image edges aligned with the screen edges
	const edgeScale = float( 1.0 ).sub( curvature.mul( 2.0 ) );

	// apply the distortion and remap back into [0,1]
	return p.div( bulge ).mul( edgeScale ).mul( 0.5 ).add( 0.5 );

} );
/**
 * Checks if UV coordinates are inside the valid 0-1 range.
 * Useful for masking areas inside the distorted screen.
 *
 * @tsl
 * @function
 * @param {Node<vec2>} coord - The UV coordinates to check.
 * @return {Node<float>} 1.0 if inside bounds, 0.0 if outside.
 */
export const barrelMask = Fn( ( [ coord ] ) => {

	// the coordinate is valid when both components lie within [0,1]
	const inside = coord.x.greaterThanEqual( 0.0 )
		.and( coord.x.lessThanEqual( 1.0 ) )
		.and( coord.y.greaterThanEqual( 0.0 ) )
		.and( coord.y.lessThanEqual( 1.0 ) );

	return select( inside, float( 1.0 ), float( 0.0 ) );

} );
/**
 * Applies color bleeding effect to simulate horizontal color smearing.
 * Simulates the analog signal bleeding in CRT displays where colors
 * "leak" into adjacent pixels horizontally.
 *
 * @tsl
 * @function
 * @param {Node} color - The input texture node.
 * @param {Node<float>} [amount=0.002] - The amount of color bleeding (0-0.01).
 * @return {Node<vec3>} The color with bleeding effect applied.
 */
export const colorBleeding = Fn( ( [ color, amount = float( 0.002 ) ] ) => {

	const inputTexture = convertToTexture( color );

	// center tap
	const center = inputTexture.sample( screenUV ).rgb;

	// three taps to the left, simulating the trailing analog signal
	const tap1 = inputTexture.sample( screenUV.sub( vec2( amount, 0.0 ) ) ).rgb;
	const tap2 = inputTexture.sample( screenUV.sub( vec2( amount.mul( 2.0 ), 0.0 ) ) ).rgb;
	const tap3 = inputTexture.sample( screenUV.sub( vec2( amount.mul( 3.0 ), 0.0 ) ) ).rgb;

	// red bleeds the furthest in an analog signal
	const smearedR = center.r
		.add( tap1.r.mul( 0.4 ) )
		.add( tap2.r.mul( 0.2 ) )
		.add( tap3.r.mul( 0.1 ) );

	// green bleeds a medium distance
	const smearedG = center.g
		.add( tap1.g.mul( 0.25 ) )
		.add( tap2.g.mul( 0.1 ) );

	// blue bleeds the least
	const smearedB = center.b
		.add( tap1.b.mul( 0.15 ) );

	// renormalize each channel by its total tap weight and clamp
	const r = clamp( smearedR.div( 1.7 ), 0.0, 1.0 );
	const g = clamp( smearedG.div( 1.35 ), 0.0, 1.0 );
	const b = clamp( smearedB.div( 1.15 ), 0.0, 1.0 );

	return vec3( r, g, b );

} );
/**
 * Applies scanline effect to simulate CRT monitor horizontal lines with animation.
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The input color.
 * @param {Node<float>} [intensity=0.3] - The intensity of the scanlines (0-1).
 * @param {Node<float>} [count=240] - The number of scanlines (typically matches vertical resolution).
 * @param {Node<float>} [speed=0.0] - The scroll speed of scanlines (0 = static, 1 = normal CRT roll).
 * @param {Node<vec2>} [coord=uv()] - The UV coordinates to use for scanlines.
 * @return {Node<vec3>} The color with scanlines applied.
 */
export const scanlines = Fn( ( [ color, intensity = float( 0.3 ), count = float( 240.0 ), speed = float( 0.0 ), coord = uv() ] ) => {

	// scroll the line pattern vertically over time (CRT vertical sync roll)
	const rolledY = coord.y.sub( time.mul( speed ) );

	// sine wave produces the alternating bright/dark line pattern
	const wave = sin( rolledY.mul( count ) );

	// remap the wave from [-1,1] to [0,1] and scale by the requested intensity
	const darkening = wave.mul( 0.5 ).add( 0.5 ).mul( intensity );

	// attenuate the input color on the dark lines
	return color.mul( float( 1.0 ).sub( darkening ) );

} );
/**
 * Applies vignette effect to darken the edges of the screen.
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The input color.
 * @param {Node<float>} [intensity=0.4] - The intensity of the vignette (0-1).
 * @param {Node<float>} [smoothness=0.5] - The smoothness of the vignette falloff.
 * @param {Node<vec2>} [coord=uv()] - The UV coordinates to use for vignette calculation.
 * @return {Node<vec3>} The color with vignette applied.
 */
export const vignette = Fn( ( [ color, intensity = float( 0.4 ), smoothness = float( 0.5 ), coord = uv() ] ) => {

	// radial mask: 1 at the center, falling off toward the corners
	// (radius 1.42 ≈ √2 so the gradient reaches the full diagonal)
	const radialMask = circle( float( 1.42 ), smoothness, coord );

	// blend between the darkened edge value (1 - intensity) and full brightness
	const attenuation = mix( float( 1.0 ).sub( intensity ), float( 1.0 ), radialMask );

	return color.mul( attenuation );

} );
import { abs, color, float, Fn, Loop, mix, nodeObject, perspectiveDepthToViewZ, reference, textureSize, uv, vec2, vec4, viewZToOrthographicDepth, int, If, array, ivec2 } from 'three/tsl';
/**
 * Performs a depth-aware blend between a base scene and a secondary effect (like godrays).
 * This function uses a Poisson disk sampling pattern to detect depth discontinuities
 * in the neighborhood of the current pixel. If an edge is detected, it shifts the
 * sampling coordinate for the blend node away from the edge to prevent light leaking/haloing.
 *
 * @param {Node} baseNode - The main scene/beauty pass texture node.
 * @param {Node} blendNode - The effect to be blended (e.g., Godrays, Bloom).
 * @param {Node} depthNode - The scene depth texture node.
 * @param {Camera} camera - The camera used for the scene.
 * @param {Object} [options={}] - Configuration for the blend effect.
 * @param {Node|Color} [options.blendColor=Color(0xffffff)] - The color applied to the blend node.
 * @param {Node<int> | number} [options.edgeRadius=2] - The search radius (in pixels) for detecting depth edges.
 * @param {Node<float> | number} [options.edgeStrength=2] - How far to "push" the UV away from detected edges.
 * @returns {Node<vec4>} The resulting blended color node.
 */
export const depthAwareBlend = /*#__PURE__*/ Fn( ( [ baseNode, blendNode, depthNode, camera, options = {} ] ) => {
const uvNode = baseNode.uvNode || uv();
const cameraNear = reference( 'near', 'float', camera );
const cameraFar = reference( 'far', 'float', camera );
// option defaults (nodeObject() yields undefined for missing options)
const blendColor = nodeObject( options.blendColor ) || color( 0xffffff );
const edgeRadius = nodeObject( options.edgeRadius ) || int( 2 );
const edgeStrength = nodeObject( options.edgeStrength ) || float( 2 );
// linearize the perspective depth of the current pixel
const viewZ = perspectiveDepthToViewZ( depthNode, cameraNear, cameraFar );
const correctDepth = viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
// accumulated direction away from depth edges and number of contributing samples
const pushDir = vec2( 0.0 ).toVar();
const count = float( 0 ).toVar();
const resolution = ivec2( textureSize( baseNode ) ).toConst();
const pixelStep = vec2( 1 ).div( resolution );
// fixed Poisson disk offsets used to probe the pixel neighborhood
const poissonDisk = array( [
vec2( 0.493393, 0.394269 ),
vec2( 0.798547, 0.885922 ),
vec2( 0.259143, 0.650754 ),
vec2( 0.605322, 0.023588 ),
vec2( - 0.574681, 0.137452 ),
vec2( - 0.430397, - 0.638423 ),
vec2( - 0.849487, - 0.366258 ),
vec2( 0.170621, - 0.569941 )
] );
Loop( 8, ( { i } ) => {
const offset = poissonDisk.element( i ).mul( edgeRadius );
const sampleUv = uvNode.add( offset.mul( pixelStep ) );
const sampleDepth = depthNode.sample( sampleUv );
const sampleViewZ = perspectiveDepthToViewZ( sampleDepth, cameraNear, cameraFar );
const sampleLinearDepth = viewZToOrthographicDepth( sampleViewZ, cameraNear, cameraFar );
// samples at similar depth (within 5% of the center depth) vote for their offset
If( abs( sampleLinearDepth.sub( correctDepth ) ).lessThan( float( 0.05 ).mul( correctDepth ) ), () => {
pushDir.addAssign( offset );
count.addAssign( 1 );
} );
} );
// avoid division by zero when no sample passed the depth test
count.assign( count.equal( 0 ).select( 1, count ) );
// NOTE(review): the result of normalize() is not assigned back to pushDir —
// confirm whether the un-normalized average is intended here.
pushDir.divAssign( count ).normalize();
// push the blend lookup away from the detected edge
const sampleUv = pushDir.length().greaterThan( 0 ).select( uvNode.add( edgeStrength.mul( pushDir.div( resolution ) ) ), uvNode );
const bestChoice = blendNode.sample( sampleUv ).r;
const baseColor = baseNode.sample( uvNode );
return vec4( mix( baseColor, vec4( blendColor, 1 ), bestChoice ) );
} );
import { Frustum, Matrix4, RenderTarget, Vector2, RendererUtils, QuadMesh, TempNode, NodeMaterial, NodeUpdateType, Vector3, Plane, WebGPUCoordinateSystem } from 'three/webgpu';
import { cubeTexture, clamp, viewZToPerspectiveDepth, logarithmicDepthToViewZ, float, Loop, max, Fn, passTexture, uv, dot, uniformArray, If, getViewPosition, uniform, vec4, add, interleavedGradientNoise, screenCoordinate, round, mul, uint, mix, exp, vec3, distance, pow, reference, lightPosition, vec2, bool, texture, perspectiveDepthToViewZ, lightShadowMatrix } from 'three/tsl';
// shared fullscreen quad used to render the godrays pass
const _quadMesh = /*@__PURE__*/ new QuadMesh();
// scratch vector for the current drawing buffer size
const _size = /*@__PURE__*/ new Vector2();
// the six axis-aligned directions of a point light's shadow cube
const _DIRECTIONS = [
new Vector3( 1, 0, 0 ),
new Vector3( - 1, 0, 0 ),
new Vector3( 0, 1, 0 ),
new Vector3( 0, - 1, 0 ),
new Vector3( 0, 0, 1 ),
new Vector3( 0, 0, - 1 ),
];
// one plane per direction, updated from the light each frame
const _PLANES = _DIRECTIONS.map( () => new Plane() );
// scratch objects reused across frames to avoid per-frame allocations
const _SCRATCH_VECTOR = new Vector3();
const _SCRATCH_MAT4 = new Matrix4();
const _SCRATCH_FRUSTUM = new Frustum();
// renderer state captured before the effect renders and restored afterwards
let _rendererState;
/**
* Post-processing node for applying screen-space raymarched godrays to a scene.
*
* After the godrays have been computed, it's recommended to apply a Bilateral
* Blur to the result to mitigate raymarching and noise artifacts.
*
* The composite with the scene pass is ideally done with `depthAwareBlend()`,
* which mitigates aliasing and light leaking.
*
* ```js
* const godraysPass = godrays( scenePassDepth, camera, light );
*
* const blurPass = bilateralBlur( godraysPassColor ); // optional blur
*
* const outputBlurred = depthAwareBlend( scenePassColor, blurPassColor, scenePassDepth, camera, { blendColor, edgeRadius, edgeStrength } ); // composite
* ```
*
* Limitations:
*
* - Only point and directional lights are currently supported.
* - The effect requires a full shadow setup. Meaning shadows must be enabled in the renderer,
* 3D objects must cast and receive shadows and the main light must cast shadows.
*
* Reference: This Node is a part of [three-good-godrays](https://github.com/Ameobea/three-good-godrays).
*
* @augments TempNode
* @three_import import { godrays } from 'three/addons/tsl/display/GodraysNode.js';
*/
class GodraysNode extends TempNode {
static get type() {
return 'GodraysNode';
}
/**
* Constructs a new Godrays node.
*
* @param {TextureNode} depthNode - A texture node that represents the scene's depth.
* @param {Camera} camera - The camera the scene is rendered with.
* @param {(DirectionalLight|PointLight)} light - The light the godrays are rendered for.
*/
constructor( depthNode, camera, light ) {
super( 'vec4' );
/**
* A node that represents the beauty pass's depth.
*
* @type {TextureNode}
*/
this.depthNode = depthNode;
/**
* The number of raymarching steps
*
* @type {UniformNode<uint>}
* @default 60
*/
this.raymarchSteps = uniform( uint( 60 ) );
/**
* The rate of accumulation for the godrays. Higher values roughly equate to more humid air/denser fog.
*
* @type {UniformNode<float>}
* @default 0.7
*/
this.density = uniform( float( 0.7 ) );
/**
* The maximum density of the godrays. Limits the maximum brightness of the godrays.
*
* @type {UniformNode<float>}
* @default 0.5
*/
this.maxDensity = uniform( float( 0.5 ) );
/**
* Higher values decrease the accumulation of godrays the further away they are from the light source.
*
* @type {UniformNode<float>}
* @default 2
*/
this.distanceAttenuation = uniform( float( 2 ) );
/**
* The resolution scale.
*
* @type {number}
*/
this.resolutionScale = 0.5;
/**
* The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
* its effect once per frame in `updateBefore()`.
*
* @type {string}
* @default 'frame'
*/
this.updateBeforeType = NodeUpdateType.FRAME;
// private uniforms
/**
* Represents the world matrix of the scene's camera.
*
* @private
* @type {UniformNode<mat4>}
*/
this._cameraMatrixWorld = uniform( camera.matrixWorld );
/**
* Represents the inverse projection matrix of the scene's camera.
*
* @private
* @type {UniformNode<mat4>}
*/
this._cameraProjectionMatrixInverse = uniform( camera.projectionMatrixInverse );
/**
* Represents the shadow camera's projection matrix premultiplied with its inverse world matrix.
*
* @private
* @type {UniformNode<mat4>}
*/
this._premultipliedLightCameraMatrix = uniform( new Matrix4() );
/**
* Represents the world position of the scene's camera.
*
* @private
* @type {UniformNode<vec3>}
*/
this._cameraPosition = uniform( new Vector3() );
/**
* Represents the near value of the scene's camera.
*
* @private
* @type {ReferenceNode<float>}
*/
this._cameraNear = reference( 'near', 'float', camera );
/**
* Represents the far value of the scene's camera.
*
* @private
* @type {ReferenceNode<float>}
*/
this._cameraFar = reference( 'far', 'float', camera );
/**
* The near value of the shadow camera.
*
* @private
* @type {ReferenceNode<float>}
*/
this._shadowCameraNear = reference( 'near', 'float', light.shadow.camera );
/**
* The far value of the shadow camera.
*
* @private
* @type {ReferenceNode<float>}
*/
this._shadowCameraFar = reference( 'far', 'float', light.shadow.camera );
this._fNormals = uniformArray( _DIRECTIONS.map( () => new Vector3() ) );
this._fConstants = uniformArray( _DIRECTIONS.map( () => 0 ) );
/**
* The light the godrays are rendered for.
*
* @private
* @type {(DirectionalLight|PointLight)}
*/
this._light = light;
/**
* The camera the scene is rendered with.
*
* @private
* @type {Camera}
*/
this._camera = camera;
/**
* The render target the godrays are rendered into.
*
* @private
* @type {RenderTarget}
*/
this._godraysRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false } );
this._godraysRenderTarget.texture.name = 'Godrays';
/**
* The material that is used to render the effect.
*
* @private
* @type {NodeMaterial}
*/
this._material = new NodeMaterial();
this._material.name = 'Godrays';
/**
* The result of the effect is represented as a separate texture node.
*
* @private
* @type {PassTextureNode}
*/
this._textureNode = passTexture( this, this._godraysRenderTarget.texture );
}
/**
* Returns the result of the effect as a texture node.
*
* @return {PassTextureNode} A texture node that represents the result of the effect.
*/
getTextureNode() {
return this._textureNode;
}
/**
* Sets the size of the effect.
*
* @param {number} width - The width of the effect.
* @param {number} height - The height of the effect.
*/
setSize( width, height ) {
width = Math.round( this.resolutionScale * width );
height = Math.round( this.resolutionScale * height );
this._godraysRenderTarget.setSize( width, height );
}
/**
* This method is used to render the effect once per frame.
*
* @param {NodeFrame} frame - The current node frame.
*/
updateBefore( frame ) {
const { renderer } = frame;
_rendererState = RendererUtils.resetRendererState( renderer, _rendererState );
//
const size = renderer.getDrawingBufferSize( _size );
this.setSize( size.width, size.height );
//
_quadMesh.material = this._material;
_quadMesh.name = 'Godrays';
this._updateLightParams();
this._cameraPosition.value.setFromMatrixPosition( this._camera.matrixWorld );
// clear
renderer.setClearColor( 0xffffff, 1 );
// godrays
renderer.setRenderTarget( this._godraysRenderTarget );
_quadMesh.render( renderer );
// restore
RendererUtils.restoreRendererState( renderer, _rendererState );
}
_updateLightParams() {
const light = this._light;
const shadowCamera = light.shadow.camera;
this._premultipliedLightCameraMatrix.value.multiplyMatrices( shadowCamera.projectionMatrix, shadowCamera.matrixWorldInverse );
if ( light.isPointLight ) {
for ( let i = 0; i < _DIRECTIONS.length; i ++ ) {
const direction = _DIRECTIONS[ i ];
const plane = _PLANES[ i ];
_SCRATCH_VECTOR.copy( light.position );
_SCRATCH_VECTOR.addScaledVector( direction, shadowCamera.far );
plane.setFromNormalAndCoplanarPoint( direction, _SCRATCH_VECTOR );
this._fNormals.array[ i ].copy( plane.normal );
this._fConstants.array[ i ] = plane.constant;
}
} else if ( light.isDirectionalLight ) {
_SCRATCH_MAT4.multiplyMatrices( shadowCamera.projectionMatrix, shadowCamera.matrixWorldInverse );
_SCRATCH_FRUSTUM.setFromProjectionMatrix( _SCRATCH_MAT4 );
for ( let i = 0; i < 6; i ++ ) {
const plane = _SCRATCH_FRUSTUM.planes[ i ];
this._fNormals.array[ i ].copy( plane.normal ).multiplyScalar( - 1 );
this._fConstants.array[ i ] = plane.constant * - 1;
}
}
}
/**
 * This method is used to setup the effect's TSL code.
 *
 * The fragment program raymarches from the camera (or from the entry point of
 * the view ray into the shadow box) towards the reconstructed world position
 * and accumulates a shadow-based scattering term per sample.
 *
 * @param {NodeBuilder} builder - The current node builder.
 * @return {PassTextureNode}
 */
setup( builder ) {

	const { renderer } = builder;

	const uvNode = uv();
	const lightPos = lightPosition( this._light );

	// samples the scene depth at the given uv; converts logarithmic depth
	// back to perspective depth when the renderer uses a logarithmic depth buffer
	const sampleDepth = ( uv ) => {

		const depth = this.depthNode.sample( uv ).r;

		if ( builder.renderer.logarithmicDepthBuffer === true ) {

			const viewZ = logarithmicDepthToViewZ( depth, this._cameraNear, this._cameraFar );
			return viewZToPerspectiveDepth( viewZ, this._cameraNear, this._cameraFar );

		}

		return depth;

	};

	// signed distance of point p to the plane with normal n and constant h
	const sdPlane = ( p, n, h ) => {

		return dot( p, n ).add( h );

	};

	// ray parameter t at which the ray (origin + t * direction) hits the plane
	const intersectRayPlane = ( rayOrigin, rayDirection, planeNormal, planeDistance ) => {

		const denom = dot( planeNormal, rayDirection );

		return sdPlane( rayOrigin, planeNormal, planeDistance ).div( denom ).negate();

	};

	// transforms a world position into the light's shadow map coordinate space
	const computeShadowCoord = ( worldPos ) => {

		const shadowPosition = lightShadowMatrix( this._light ).mul( worldPos );
		const shadowCoord = shadowPosition.xyz.div( shadowPosition.w );

		let coordZ = shadowCoord.z;

		if ( renderer.coordinateSystem === WebGPUCoordinateSystem ) {

			coordZ = coordZ.mul( 2 ).sub( 1 ); // WebGPU: Conversion [ 0, 1 ] to [ - 1, 1 ]

		}

		return vec3( shadowCoord.x, shadowCoord.y.oneMinus(), coordZ );

	};

	// returns vec2( shadow term, positive view depth ) for the given world position
	const inShadow = ( worldPos ) => {

		if ( this._light.isPointLight ) {

			// point lights use a cube shadow map indexed by the light-to-position vector

			const lightToPos = worldPos.sub( lightPos ).toConst();
			const shadowPositionAbs = lightToPos.abs().toConst();

			const viewZ = shadowPositionAbs.x.max( shadowPositionAbs.y ).max( shadowPositionAbs.z ).negate();
			const depth = viewZToPerspectiveDepth( viewZ, this._shadowCameraNear, this._shadowCameraFar );

			const result = cubeTexture( this._light.shadow.map.depthTexture, lightToPos ).compare( depth ).r;

			return vec2( result.oneMinus().add( 0.005 ), viewZ.negate() );

		} else if ( this._light.isDirectionalLight ) {

			const shadowCoord = computeShadowCoord( worldPos ).toConst();

			// positions outside the shadow frustum fall back to vec2( 1, 0 )

			const frustumTest = shadowCoord.x.greaterThanEqual( 0 )
				.and( shadowCoord.x.lessThanEqual( 1 ) )
				.and( shadowCoord.y.greaterThanEqual( 0 ) )
				.and( shadowCoord.y.lessThanEqual( 1 ) )
				.and( shadowCoord.z.greaterThanEqual( 0 ) )
				.and( shadowCoord.z.lessThanEqual( 1 ) );

			const output = vec2( 1, 0 );

			If( frustumTest.equal( true ), () => {

				const result = texture( this._light.shadow.map.depthTexture, shadowCoord.xy ).compare( shadowCoord.z ).r;
				const viewZ = perspectiveDepthToViewZ( shadowCoord.z, this._shadowCameraNear, this._shadowCameraFar );

				output.assign( vec2( result.oneMinus(), viewZ.negate() ) );

			} );

			return output;

		} else {

			throw new Error( 'GodraysNode: Unsupported light type.' );

		}

	};

	const godrays = Fn( () => {

		const output = vec4( 0, 0, 0, 1 ).toVar();
		const isEarlyOut = bool( false );

		// reconstruct the world position of the current fragment from depth

		const depth = sampleDepth( uvNode ).toConst();
		const viewPosition = getViewPosition( uvNode, depth, this._cameraProjectionMatrixInverse ).toConst();
		const worldPosition = this._cameraMatrixWorld.mul( viewPosition );

		// largest signed plane distance -> negative when the camera is inside the shadow box

		const inBoxDist = float( - 10000.0 ).toVar();

		Loop( 6, ( { i } ) => {

			inBoxDist.assign( max( inBoxDist, sdPlane( this._cameraPosition, this._fNormals.element( i ), this._fConstants.element( i ) ) ) );

		} );

		const startPosition = this._cameraPosition.toVar();

		If( inBoxDist.lessThan( 0 ), () => {

			// If the ray target is outside the shadow box, move it to the nearest
			// point on the box to avoid marching through unlit space
			Loop( 6, ( { i } ) => {

				If( sdPlane( worldPosition, this._fNormals.element( i ), this._fConstants.element( i ) ).greaterThan( 0 ), () => {

					const direction = worldPosition.sub( this._cameraPosition ).toConst();
					const t = intersectRayPlane( this._cameraPosition, direction, this._fNormals.element( i ), this._fConstants.element( i ) );
					worldPosition.assign( this._cameraPosition.add( t.mul( direction ) ) );

				} );

			} );

		} ).Else( () => {

			// Find the first point where the ray intersects the shadow box (startPos)
			const direction = worldPosition.sub( this._cameraPosition ).toConst();
			const minT = float( 10000 ).toVar();

			Loop( 6, ( { i } ) => {

				const t = intersectRayPlane( this._cameraPosition, direction, this._fNormals.element( i ), this._fConstants.element( i ) );

				If( t.lessThan( minT ).and( t.greaterThan( 0 ) ), () => {

					minT.assign( t );

				} );

			} );

			// no intersection -> the ray never enters the shadow box

			If( minT.equal( 10000 ), () => {

				isEarlyOut.assign( true );

			} ).Else( () => {

				startPosition.assign( this._cameraPosition.add( minT.add( 0.001 ).mul( direction ) ) );

				// If the ray target is outside the shadow box, move it to the nearest
				// point on the box to avoid marching through unlit space
				const endInBoxDist = float( - 10000 ).toVar();

				Loop( 6, ( { i } ) => {

					endInBoxDist.assign( max( endInBoxDist, sdPlane( worldPosition, this._fNormals.element( i ), this._fConstants.element( i ) ) ) );

				} );

				If( endInBoxDist.greaterThanEqual( 0 ), () => {

					const minT = float( 10000 ).toVar();

					Loop( 6, ( { i } ) => {

						If( sdPlane( worldPosition, this._fNormals.element( i ), this._fConstants.element( i ) ).greaterThan( 0 ), () => {

							const t = intersectRayPlane( startPosition, direction, this._fNormals.element( i ), this._fConstants.element( i ) );

							If( t.lessThan( minT ).and( t.greaterThan( 0 ) ), () => {

								minT.assign( t );

							} );

						} );

					} );

					If( minT.lessThan( worldPosition.distance( startPosition ) ), () => {

						worldPosition.assign( startPosition.add( minT.mul( direction ) ) );

					} );

				} );

			} );

		} );

		If( isEarlyOut.equal( false ), () => {

			// raymarch between startPosition and worldPosition; the per-fragment
			// noise offsets the sample count to reduce banding

			const illum = float( 0 ).toVar();

			const noise = interleavedGradientNoise( screenCoordinate ).toConst();
			const samplesFloat = round( add( this.raymarchSteps, mul( this.raymarchSteps.div( 8 ).add( 2 ), noise ) ) ).toConst();
			const samples = uint( samplesFloat ).toConst();

			Loop( samples, ( { i } ) => {

				const samplePos = mix( startPosition, worldPosition, float( i ).div( samplesFloat ) ).toConst();
				const shadowInfo = inShadow( samplePos );
				const shadowAmount = shadowInfo.x.oneMinus().toConst();

				illum.addAssign( shadowAmount.mul( distance( startPosition, worldPosition ).mul( this.density.div( 100 ) ) ).mul( pow( shadowInfo.y.div( this._shadowCameraFar ).oneMinus(), this.distanceAttenuation ) ) );

			} );

			illum.divAssign( samplesFloat );

			output.assign( vec4( vec3( clamp( exp( illum.negate() ).oneMinus(), 0, this.maxDensity ) ), depth ) );

		} );

		return output;

	} );

	this._material.fragmentNode = godrays().context( builder.getSharedContext() );
	this._material.needsUpdate = true;

	return this._textureNode;

}
/**
* Frees internal resources. This method should be called
* when the effect is no longer required.
*/
dispose() {
this._godraysRenderTarget.dispose();
this._material.dispose();
}
}
export default GodraysNode;
/**
 * TSL function for creating a Godrays effect.
 *
 * @tsl
 * @function
 * @param {TextureNode} depthNode - A texture node that represents the scene's depth.
 * @param {Camera} camera - The camera the scene is rendered with.
 * @param {(DirectionalLight|PointLight)} light - The light the godrays are rendered for.
 * @returns {GodraysNode}
 */
export const godrays = ( depthNode, camera, light ) => {

	return new GodraysNode( depthNode, camera, light );

};
import { MeshBasicNodeMaterial, PassNode, UnsignedByteType, NearestFilter, CubeMapNode, MeshPhongNodeMaterial } from 'three/webgpu';
import { float, vec2, vec4, Fn, uv, varying, cameraProjectionMatrix, cameraViewMatrix, positionWorld, screenSize, materialColor, uint, texture, uniform, context, reflectVector } from 'three/tsl';
const _affineUv = varying( vec2() );
const _w = varying( float() );

// Vertex node: snaps the clip-space XY position to the pixel grid and stores
// the w-scaled UV plus w itself in varyings for affine texture sampling.
const _clipSpaceRetro = Fn( () => {

	const clipPosition = cameraProjectionMatrix
		.mul( cameraViewMatrix )
		.mul( positionWorld );

	const wTimes2 = clipPosition.w.mul( 2 );

	// round the NDC-like position to screen pixels and expand back

	const snapped = clipPosition.xy
		.div( wTimes2 )
		.mul( screenSize.xy )
		.round()
		.div( screenSize.xy )
		.mul( wTimes2 );

	_affineUv.assign( uv().mul( clipPosition.w ) );
	_w.assign( clipPosition.w );

	return vec4( snapped, clipPosition.zw );

} )();
/**
 * A post-processing pass that applies a retro PS1-style effect to the scene.
 *
 * This node renders the scene with classic PlayStation 1 visual characteristics:
 * - **Vertex snapping**: Vertices are snapped to screen pixels, creating the iconic "wobbly" geometry
 * - **Affine texture mapping**: Textures are sampled without perspective correction, resulting in distortion effects
 * - **Low resolution**: Default 0.25 scale (typical 320x240 equivalent)
 * - **Nearest-neighbor filtering**: Sharp pixelated textures without smoothing
 *
 * @augments PassNode
 */
class RetroPassNode extends PassNode {

	/**
	 * Creates a new RetroPassNode instance.
	 *
	 * @param {Scene} scene - The scene to render.
	 * @param {Camera} camera - The camera to render from.
	 * @param {Object} [options={}] - Additional options for the retro pass.
	 * @param {Node} [options.affineDistortion=null] - An optional node to apply affine distortion to UVs.
	 * @param {boolean} [options.filterTextures=false] - Whether texture mip level selection is kept. When `false`, texture level `0` is forced for sharp, unfiltered sampling.
	 */
	constructor( scene, camera, options = {} ) {

		super( PassNode.COLOR, scene, camera );

		const {
			affineDistortion = null,
			filterTextures = false
		} = options;

		// low-resolution render target with nearest-neighbor filtering

		this.setResolutionScale( .25 );

		this.renderTarget.texture.type = UnsignedByteType;
		this.renderTarget.texture.magFilter = NearestFilter;
		this.renderTarget.texture.minFilter = NearestFilter;

		/**
		 * An optional node to apply affine distortion to UVs.
		 *
		 * @type {?Node}
		 */
		this.affineDistortionNode = affineDistortion;

		/**
		 * Whether texture mip level selection is kept.
		 *
		 * @type {boolean}
		 */
		this.filterTextures = filterTextures;

		// maps source materials to { material, version } entries so retro
		// variants are rebuilt only when the source material changes
		this._materialCache = new Map();

	}

	/**
	 * Updates the retro pass before rendering.
	 *
	 * Temporarily overrides the renderer's render-object function so every
	 * object is drawn with a cached retro variant of its material.
	 *
	 * @override
	 * @param {Frame} frame - The current frame information.
	 * @returns {void}
	 */
	updateBefore( frame ) {

		const renderer = frame.renderer;

		const currentRenderObjectFunction = renderer.getRenderObjectFunction();

		renderer.setRenderObjectFunction( ( object, scene, camera, geometry, material, ...params ) => {

			const retroMaterialData = this._materialCache.get( material );

			let retroMaterial;

			// (re)build the retro material when missing or outdated

			if ( retroMaterialData === undefined || retroMaterialData.version !== material.version ) {

				if ( retroMaterialData !== undefined ) {

					retroMaterialData.material.dispose();

				}

				if ( material.isMeshBasicMaterial || material.isMeshBasicNodeMaterial ) {

					retroMaterial = new MeshBasicNodeMaterial();

				} else {

					retroMaterial = new MeshPhongNodeMaterial();

				}

				retroMaterial.opacityNode = material.opacityNode || null;
				retroMaterial.positionNode = material.positionNode || null;
				retroMaterial.vertexNode = material.vertexNode || _clipSpaceRetro;

				let colorNode = material.colorNode || materialColor;

				if ( material.isMeshStandardNodeMaterial || material.isMeshStandardMaterial ) {

					const envMap = material.envMap || scene.environment;

					if ( envMap ) {

						// approximate standard-material reflections with a
						// metalness-weighted cube map mix

						const reflection = new CubeMapNode( texture( envMap ) );

						let metalness;

						if ( material.metalnessNode ) {

							metalness = material.metalnessNode;

						} else {

							metalness = uniform( material.metalness ).onRenderUpdate( ( { material } ) => material.metalness );

							if ( material.metalnessMap ) {

								const textureUniform = texture( material.metalnessMap ).onRenderUpdate( ( { material } ) => material.metalnessMap );
								metalness = metalness.mul( textureUniform.b );

							}

						}

						colorNode = metalness.mix( colorNode, reflection );

					}

				}

				retroMaterial.colorNode = colorNode;

				//

				const contextData = {};

				if ( this.affineDistortionNode ) {

					contextData.getUV = ( texture ) => {

						let finalUV;

						if ( texture.isCubeTextureNode ) {

							finalUV = reflectVector;

						} else {

							finalUV = this.affineDistortionNode.mix( uv(), _affineUv.div( _w ) );

						}

						return finalUV;

					};

				}

				if ( this.filterTextures !== true ) {

					contextData.getTextureLevel = () => uint( 0 );

				}

				retroMaterial.contextNode = context( contextData );

				//

				this._materialCache.set( material, {
					material: retroMaterial,
					version: material.version
				} );

			} else {

				retroMaterial = retroMaterialData.material;

			}

			// mirror shared properties from the source material

			for ( const property in material ) {

				if ( retroMaterial[ property ] === undefined ) continue;

				retroMaterial[ property ] = material[ property ];

			}

			renderer.renderObject( object, scene, camera, geometry, retroMaterial, ...params );

		} );

		super.updateBefore( frame );

		renderer.setRenderObjectFunction( currentRenderObjectFunction );

	}

	/**
	 * Disposes the retro pass and its internal resources.
	 *
	 * @override
	 * @returns {void}
	 */
	dispose() {

		super.dispose();

		this._materialCache.forEach( ( data ) => {

			data.material.dispose();

		} );

		this._materialCache.clear();

	}

}
export default RetroPassNode;
/**
 * Creates a new RetroPassNode instance for PS1-style rendering.
 *
 * The retro pass applies vertex snapping, affine texture mapping, and low-resolution
 * rendering to achieve an authentic PlayStation 1 aesthetic. Combine with other
 * post-processing effects like dithering, posterization, and scanlines for full retro look.
 *
 * ```js
 * // Combined with other effects
 * let pipeline = retroPass( scene, camera );
 * pipeline = bayerDither( pipeline, 32 );
 * pipeline = posterize( pipeline, 32 );
 * renderPipeline.outputNode = pipeline;
 * ```
 *
 * @tsl
 * @function
 * @param {Scene} scene - The scene to render.
 * @param {Camera} camera - The camera to render from.
 * @param {Object} [options={}] - Additional options for the retro pass.
 * @param {Node} [options.affineDistortion=null] - An optional node to apply affine distortion to UVs.
 * @return {RetroPassNode} A new RetroPassNode instance.
 */
export const retroPass = ( scene, camera, options = {} ) => {

	return new RetroPassNode( scene, camera, options );

};
import { Fn, float, length, smoothstep, uv } from 'three/tsl';
/**
 * Returns a radial gradient from center (white) to edges (black).
 * Useful for masking effects based on distance from center.
 *
 * @tsl
 * @function
 * @param {Node<float>} [scale=1.0] - Controls the size of the gradient (0 = all black, 1 = full circle).
 * @param {Node<float>} [softness=0.5] - Controls the edge softness (0 = hard edge, 1 = soft gradient).
 * @param {Node<vec2>} [coord=uv()] - The input UV coordinates.
 * @return {Node<float>} 1.0 at center, 0.0 at edges.
 */
export const circle = Fn( ( [ scale = float( 1.0 ), softness = float( 0.5 ), coord = uv() ] ) => {

	// shift UVs so the origin sits at the quad center (range -0.5 to 0.5)
	const fromCenter = coord.sub( 0.5 );

	// doubled distance from center: 0 at center, 1 at the edge midpoints
	const radial = length( fromCenter ).mul( 2.0 );

	// the transition runs from the inner radius out to the outer radius (scale)
	const innerEdge = scale.sub( softness.mul( scale ) );

	return smoothstep( scale, innerEdge, radial );

} );
import { Interpolant } from '../Interpolant.js';
/**
 * A Bezier interpolant using cubic Bezier curves with 2D control points.
 *
 * This interpolant supports the COLLADA/Maya style of Bezier animation where
 * each keyframe has explicit in/out tangent control points specified as
 * 2D coordinates (time, value).
 *
 * The tangent data must be provided via the `settings` object:
 * - `settings.inTangents`: Float32Array with [time, value] pairs per keyframe per component
 * - `settings.outTangents`: Float32Array with [time, value] pairs per keyframe per component
 *
 * For a track with N keyframes and stride S:
 * - Each tangent array has N * S * 2 values
 * - Layout: [k0_c0_time, k0_c0_value, k0_c1_time, k0_c1_value, ..., k0_cS_time, k0_cS_value,
 *            k1_c0_time, k1_c0_value, ...]
 *
 * @augments Interpolant
 */
class BezierInterpolant extends Interpolant {

	/**
	 * Interpolates the track value at global time `t` within the keyframe
	 * interval `[ t0, t1 ]`.
	 *
	 * @param {number} i1 - The index of the right-hand keyframe.
	 * @param {number} t0 - The time of the left-hand keyframe.
	 * @param {number} t - The interpolation time.
	 * @param {number} t1 - The time of the right-hand keyframe.
	 * @return {TypedArray} The result buffer holding the interpolated values.
	 */
	interpolate_( i1, t0, t, t1 ) {

		const result = this.resultBuffer;
		const values = this.sampleValues;
		const stride = this.valueSize;

		const offset1 = i1 * stride;
		const offset0 = offset1 - stride;

		const settings = this.settings || this.DefaultSettings_;

		const inTangents = settings.inTangents;
		const outTangents = settings.outTangents;

		// If no tangent data, fall back to linear interpolation
		if ( ! inTangents || ! outTangents ) {

			const weight1 = ( t - t0 ) / ( t1 - t0 );
			const weight0 = 1 - weight1;

			for ( let i = 0; i !== stride; ++ i ) {

				result[ i ] = values[ offset0 + i ] * weight0 + values[ offset1 + i ] * weight1;

			}

			return result;

		}

		const tangentStride = stride * 2; // [time, value] pair per component
		const i0 = i1 - 1;

		for ( let i = 0; i !== stride; ++ i ) {

			const v0 = values[ offset0 + i ];
			const v1 = values[ offset1 + i ];

			// outTangent of previous keyframe (C0)
			const outTangentOffset = i0 * tangentStride + i * 2;
			const c0x = outTangents[ outTangentOffset ];
			const c0y = outTangents[ outTangentOffset + 1 ];

			// inTangent of current keyframe (C1)
			const inTangentOffset = i1 * tangentStride + i * 2;
			const c1x = inTangents[ inTangentOffset ];
			const c1y = inTangents[ inTangentOffset + 1 ];

			// Solve for Bezier parameter s where Bx(s) = t using Newton-Raphson
			let s = ( t - t0 ) / ( t1 - t0 );

			for ( let iter = 0; iter < 8; iter ++ ) {

				const oneMinusS = 1 - s;
				const oneMinusS2 = oneMinusS * oneMinusS;

				// Bezier X(s) = (1-s)³·t0 + 3(1-s)²s·c0x + 3(1-s)s²·c1x + s³·t1
				const bx = oneMinusS2 * oneMinusS * t0 + 3 * oneMinusS2 * s * c0x + 3 * oneMinusS * s * s * c1x + s * s * s * t1;
				const error = bx - t;
				if ( Math.abs( error ) < 1e-10 ) break;

				// Derivative dX/ds
				const dbx = 3 * oneMinusS2 * ( c0x - t0 ) + 6 * oneMinusS * s * ( c1x - c0x ) + 3 * s * s * ( t1 - c1x );
				if ( Math.abs( dbx ) < 1e-10 ) break;

				s = s - error / dbx;
				s = Math.max( 0, Math.min( 1, s ) );

			}

			// Evaluate Bezier Y(s) with powers recomputed from the final s.
			// (Previously the powers of the last Newton iteration were reused
			// after s had already been updated, mixing values of two different
			// parameters in the evaluation.)
			const s2 = s * s;
			const s3 = s2 * s;
			const oneMinusS = 1 - s;
			const oneMinusS2 = oneMinusS * oneMinusS;
			const oneMinusS3 = oneMinusS2 * oneMinusS;

			result[ i ] = oneMinusS3 * v0 + 3 * oneMinusS2 * s * c0y + 3 * oneMinusS * s2 * c1y + s3 * v1;

		}

		return result;

	}

}

export { BezierInterpolant };
import { UVMapping } from '../../constants.js';
import { Euler } from '../../math/Euler.js';
import { Matrix4 } from '../../math/Matrix4.js';
import { renderGroup } from '../core/UniformGroupNode.js';
import { uniform } from '../tsl/TSLBase.js';
const _e1 = /*@__PURE__*/ new Euler();
const _m1 = /*@__PURE__*/ new Matrix4();
/**
 * TSL object that represents the scene's background blurriness.
 *
 * @tsl
 * @type {Node<float>}
 */
export const backgroundBlurriness = /*@__PURE__*/ uniform( 0 ).setGroup( renderGroup ).onRenderUpdate( ( frame ) => frame.scene.backgroundBlurriness );
/**
 * TSL object that represents the scene's background intensity.
 *
 * @tsl
 * @type {Node<float>}
 */
export const backgroundIntensity = /*@__PURE__*/ uniform( 1 ).setGroup( renderGroup ).onRenderUpdate( ( frame ) => frame.scene.backgroundIntensity );
/**
 * TSL object that represents the scene's background rotation.
 *
 * @tsl
 * @type {Node<mat4>}
 */
export const backgroundRotation = /*@__PURE__*/ uniform( new Matrix4() ).setGroup( renderGroup ).onRenderUpdate( ( { scene } ) => {

	const background = scene.background;

	// rotation only applies to non-UV-mapped texture backgrounds

	if ( background === null || ! background.isTexture || background.mapping === UVMapping ) {

		_m1.identity();

	} else {

		_e1.copy( scene.backgroundRotation );

		// accommodate left-handed frame
		_e1.x *= - 1; _e1.y *= - 1; _e1.z *= - 1;

		_m1.makeRotationFromEuler( _e1 );

	}

	return _m1;

} );
/**
 * Custom error class for node-related errors, including stack trace information.
 */
class NodeError extends Error {

	/**
	 * Constructs a new node error.
	 *
	 * @param {string} message - The error message.
	 * @param {?StackTrace} [stackTrace=null] - An optional stack trace associated with the error.
	 */
	constructor( message, stackTrace = null ) {

		super( message );

		/**
		 * The stack trace associated with the error.
		 *
		 * @type {?StackTrace}
		 */
		this.stackTrace = stackTrace;

		/**
		 * The name of the error.
		 *
		 * @type {string}
		 */
		this.name = 'NodeError';

	}

}

export default NodeError;
// Pre-compiled RegExp patterns for ignored files
const IGNORED_FILES = [
	/^StackTrace\.js$/,
	/^TSLCore\.js$/,
	/^.*Node\.js$/,
	/^three\.webgpu.*\.js$/
];

/**
 * Parses the stack trace and filters out ignored files.
 * Returns an array with function name, file, line, and column.
 *
 * @param {string} stack - The raw stack trace string.
 * @return {Array<{fn: string, file: string, line: number, column: number}>} The filtered stack frames.
 */
function getFilteredStack( stack ) {

	// Pattern to extract function name, file, line, and column from different browsers
	// Chrome: "at functionName (file.js:1:2)" or "at file.js:1:2"
	// Firefox: "functionName@file.js:1:2"
	const regex = /(?:at\s+(.+?)\s+\()?(?:(.+?)@)?([^@\s()]+):(\d+):(\d+)/;

	const frames = [];

	for ( const line of stack.split( '\n' ) ) {

		const match = line.match( regex );
		if ( ! match ) continue; // Skip if line format is invalid

		// Chrome: match[1], Firefox: match[2]
		const fn = match[ 1 ] || match[ 2 ] || '';

		// Clean file name (Vite/HMR), then keep only the filename from the full path
		const fileName = match[ 3 ].split( '?' )[ 0 ].split( '/' ).pop();

		// Drop frames that belong to the ignore list
		if ( IGNORED_FILES.some( ( pattern ) => pattern.test( fileName ) ) ) continue;

		frames.push( {
			fn: fn,
			file: fileName,
			line: parseInt( match[ 4 ], 10 ),
			column: parseInt( match[ 5 ], 10 )
		} );

	}

	return frames;

}
/**
 * Class representing a stack trace for debugging purposes.
 */
class StackTrace {

	/**
	 * Creates a StackTrace instance by capturing and filtering the current stack trace.
	 *
	 * @param {Error|string|null} stackMessage - An optional stack trace to use instead of capturing a new one.
	 */
	constructor( stackMessage = null ) {

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isStackTrace = true;

		/**
		 * The stack trace.
		 *
		 * @type {Array<{fn: string, file: string, line: number, column: number}>}
		 */
		this.stack = getFilteredStack( stackMessage || new Error().stack );

	}

	/**
	 * Returns a formatted location string of the top stack frame.
	 *
	 * @returns {string} The formatted stack trace message.
	 */
	getLocation() {

		if ( this.stack.length === 0 ) return '[Unknown location]';

		const { fn, file, line } = this.stack[ 0 ];
		const prefix = fn ? `"${ fn }()" at ` : '';

		return `${prefix}"${file}:${line}"`; // :${column}

	}

	/**
	 * Returns the full error message including the stack trace.
	 *
	 * @param {string} message - The error message.
	 * @returns {string} The full error message with stack trace.
	 */
	getError( message ) {

		if ( this.stack.length === 0 ) return message;

		// Output: "Error: message\n at functionName (file.js:line:column)"
		let output = message;

		for ( const frame of this.stack ) {

			const location = `${ frame.file }:${ frame.line }:${ frame.column }`;
			output += frame.fn ? `\n at ${ frame.fn } (${ location })` : `\n at ${ location }`;

		}

		return output;

	}

}

export default StackTrace;
import { uv } from '../accessors/UV.js';
import { Fn, float, vec2 } from '../tsl/TSLBase.js';
/**
 * TSL function for computing texture coordinates for animated sprite sheets.
 *
 * ```js
 * const uvNode = spritesheetUV( vec2( 6, 6 ), uv(), time.mul( animationSpeed ) );
 *
 * material.colorNode = texture( spriteSheet, uvNode );
 * ```
 *
 * @tsl
 * @function
 * @param {Node<vec2>} countNode - The node that defines the number of sprites in the x and y direction (e.g 6x6).
 * @param {?Node<vec2>} [uvNode=uv()] - The uv node.
 * @param {?Node<float>} [frameNode=float(0)] - The node that defines the current frame/sprite.
 * @returns {Node<vec2>}
 */
export const spritesheetUV = /*@__PURE__*/ Fn( ( [ countNode, uvNode = uv(), frameNode = float( 0 ) ] ) => {

	const cols = countNode.x;
	const rows = countNode.y;

	// wrap the requested frame into the valid range [ 0, cols * rows )
	const frameIndex = frameNode.mod( cols.mul( rows ) ).floor();

	// grid cell of the current frame
	const column = frameIndex.mod( cols );
	const row = rows.sub( frameIndex.add( 1 ).div( cols ).ceil() );

	// offset into the cell, then scale down to the size of a single sprite
	return uvNode.add( vec2( column, row ) ).mul( countNode.reciprocal() );

} );
import { NormalBlending, AddEquation, SrcAlphaFactor, OneMinusSrcAlphaFactor } from '../../constants.js';
/**
 * Represents blending configuration.
 *
 * This class encapsulates all blending-related properties that control how
 * a material's colors are combined with the colors already in the frame buffer.
 */
class BlendMode {

	/**
	 * Constructs a new blending configuration.
	 *
	 * @param {(NoBlending|NormalBlending|AdditiveBlending|SubtractiveBlending|MultiplyBlending|CustomBlending|MaterialBlending)} [blending=NormalBlending] - The blending mode.
	 */
	constructor( blending = NormalBlending ) {

		/**
		 * Defines the blending type. The custom factor/equation properties
		 * below only take effect when this is set to `CustomBlending`.
		 *
		 * @type {(NoBlending|NormalBlending|AdditiveBlending|SubtractiveBlending|MultiplyBlending|CustomBlending|MaterialBlending)}
		 * @default NormalBlending
		 */
		this.blending = blending;

		/**
		 * The source factor: how the incoming fragment color is scaled before
		 * it is combined with the frame buffer. One of the blend factor constants.
		 *
		 * @type {number}
		 * @default SrcAlphaFactor
		 */
		this.blendSrc = SrcAlphaFactor;

		/**
		 * The destination factor: how the existing frame buffer color is
		 * scaled before the combination. One of the blend factor constants.
		 *
		 * @type {number}
		 * @default OneMinusSrcAlphaFactor
		 */
		this.blendDst = OneMinusSrcAlphaFactor;

		/**
		 * The equation that combines the scaled source and destination colors.
		 * One of the blend equation constants.
		 *
		 * @type {number}
		 * @default AddEquation
		 */
		this.blendEquation = AddEquation;

		/**
		 * Optional separate source factor for the alpha channel.
		 * When `null`, {@link BlendMode#blendSrc} is used for alpha as well.
		 *
		 * @type {?number}
		 * @default null
		 */
		this.blendSrcAlpha = null;

		/**
		 * Optional separate destination factor for the alpha channel.
		 * When `null`, {@link BlendMode#blendDst} is used for alpha as well.
		 *
		 * @type {?number}
		 * @default null
		 */
		this.blendDstAlpha = null;

		/**
		 * Optional separate blending equation for the alpha channel.
		 * When `null`, {@link BlendMode#blendEquation} is used for alpha as well.
		 *
		 * @type {?number}
		 * @default null
		 */
		this.blendEquationAlpha = null;

		/**
		 * Defines whether to premultiply the alpha (transparency) value.
		 * When `true`, the RGB color is multiplied by its alpha value, which is
		 * useful when the color data should already include the transparency.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.premultiplyAlpha = false;

	}

	/**
	 * Copies the blending properties from the given source to this instance.
	 *
	 * @param {BlendMode} source - The blending configuration to copy from.
	 * @return {BlendMode} A reference to this instance.
	 */
	copy( source ) {

		const keys = [
			'blending', 'blendSrc', 'blendDst', 'blendEquation',
			'blendSrcAlpha', 'blendDstAlpha', 'blendEquationAlpha',
			'premultiplyAlpha'
		];

		for ( const key of keys ) {

			this[ key ] = source[ key ];

		}

		return this;

	}

	/**
	 * Returns a clone of this blending configuration.
	 *
	 * @return {BlendMode} A new Blending instance with the same properties.
	 */
	clone() {

		return new this.constructor().copy( this );

	}

}

export default BlendMode;
import DataMap from '../DataMap.js';
import ChainMap from '../ChainMap.js';
import NodeBuilderState from './NodeBuilderState.js';
import NodeMaterial from '../../../materials/nodes/NodeMaterial.js';
import { cubeMapNode } from '../../../nodes/utils/CubeMapNode.js';
import { NodeFrame, StackTrace } from '../../../nodes/Nodes.js';
import { objectGroup, renderGroup, frameGroup, cubeTexture, texture, texture3D, vec3, fog, rangeFogFactor, densityFogFactor, reference, pmremTexture, screenUV } from '../../../nodes/TSL.js';
import { builtin } from '../../../nodes/accessors/BuiltinNode.js';
import { CubeUVReflectionMapping, EquirectangularReflectionMapping, EquirectangularRefractionMapping } from '../../../constants.js';
import { hashArray } from '../../../nodes/core/NodeUtils.js';
import { error } from '../../../utils.js';
const _outputNodeMap = new WeakMap();
const _chainKeys = [];
const _cacheKeyValues = [];
/**
* This renderer module manages node-related objects and is the
* primary interface between the renderer and the node system.
*
* @private
* @augments DataMap
*/
class NodeManager extends DataMap {
/**
 * Constructs a new nodes management component.
 *
 * @param {Renderer} renderer - The renderer.
 * @param {Backend} backend - The renderer's backend.
 */
constructor( renderer, backend ) {

	super();

	/**
	 * The renderer.
	 *
	 * @type {Renderer}
	 */
	this.renderer = renderer;

	/**
	 * The renderer's backend.
	 *
	 * @type {Backend}
	 */
	this.backend = backend;

	/**
	 * The node frame.
	 *
	 * @type {NodeFrame}
	 */
	this.nodeFrame = new NodeFrame();

	/**
	 * A cache for managing node builder states,
	 * keyed by render object cache key.
	 *
	 * @type {Map<number,NodeBuilderState>}
	 */
	this.nodeBuilderCache = new Map();

	/**
	 * A cache for managing data cache key data.
	 *
	 * @type {ChainMap}
	 */
	this.callHashCache = new ChainMap();

	/**
	 * A cache for managing node uniforms group data.
	 *
	 * @type {ChainMap}
	 */
	this.groupsData = new ChainMap();

	/**
	 * A cache for managing node objects of
	 * scene properties like fog or environments.
	 *
	 * @type {Object<string,WeakMap>}
	 */
	this.cacheLib = {};

}
/**
* Returns `true` if the given node uniforms group must be updated or not.
*
* @param {NodeUniformsGroup} nodeUniformsGroup - The node uniforms group.
* @return {boolean} Whether the node uniforms group requires an update or not.
*/
updateGroup( nodeUniformsGroup ) {
const groupNode = nodeUniformsGroup.groupNode;
const name = groupNode.name;
// objectGroup is always updated
if ( name === objectGroup.name ) return true;
// renderGroup is updated once per render/compute call
if ( name === renderGroup.name ) {
const uniformsGroupData = this.get( nodeUniformsGroup );
const renderId = this.nodeFrame.renderId;
if ( uniformsGroupData.renderId !== renderId ) {
uniformsGroupData.renderId = renderId;
return true;
}
return false;
}
// frameGroup is updated once per frame
if ( name === frameGroup.name ) {
const uniformsGroupData = this.get( nodeUniformsGroup );
const frameId = this.nodeFrame.frameId;
if ( uniformsGroupData.frameId !== frameId ) {
uniformsGroupData.frameId = frameId;
return true;
}
return false;
}
// other groups are updated just when groupNode.needsUpdate is true
_chainKeys[ 0 ] = groupNode;
_chainKeys[ 1 ] = nodeUniformsGroup;
let groupData = this.groupsData.get( _chainKeys );
if ( groupData === undefined ) this.groupsData.set( _chainKeys, groupData = {} );
_chainKeys[ 0 ] = null;
_chainKeys[ 1 ] = null;
if ( groupData.version !== groupNode.version ) {
groupData.version = groupNode.version;
return true;
}
return false;
}
/**
* Returns the cache key for the given render object.
*
* @param {RenderObject} renderObject - The render object.
* @return {number} The cache key.
*/
getForRenderCacheKey( renderObject ) {
return renderObject.initialCacheKey;
}
/**
* Returns a node builder state for the given render object.
*
* @param {RenderObject} renderObject - The render object.
* @return {NodeBuilderState} The node builder state.
*/
getForRender( renderObject ) {
const renderObjectData = this.get( renderObject );
let nodeBuilderState = renderObjectData.nodeBuilderState;
if ( nodeBuilderState === undefined ) {
const { nodeBuilderCache } = this;
const cacheKey = this.getForRenderCacheKey( renderObject );
nodeBuilderState = nodeBuilderCache.get( cacheKey );
if ( nodeBuilderState === undefined ) {
const createNodeBuilder = ( material ) => {
const nodeBuilder = this.backend.createNodeBuilder( renderObject.object, this.renderer );
nodeBuilder.scene = renderObject.scene;
nodeBuilder.material = material;
nodeBuilder.camera = renderObject.camera;
nodeBuilder.context.material = material;
nodeBuilder.lightsNode = renderObject.lightsNode;
nodeBuilder.environmentNode = this.getEnvironmentNode( renderObject.scene );
nodeBuilder.fogNode = this.getFogNode( renderObject.scene );
nodeBuilder.clippingContext = renderObject.clippingContext;
if ( this.renderer.getOutputRenderTarget() ? this.renderer.getOutputRenderTarget().multiview : false ) {
nodeBuilder.enableMultiview();
}
return nodeBuilder;
};
let nodeBuilder = createNodeBuilder( renderObject.material );
try {
nodeBuilder.build();
} catch ( e ) {
nodeBuilder = createNodeBuilder( new NodeMaterial() );
nodeBuilder.build();
let stackTrace = e.stackTrace;
if ( ! stackTrace && e.stack ) {
// Capture stack trace for JavaScript errors
stackTrace = new StackTrace( e.stack );
}
error( 'TSL: ' + e, stackTrace );
}
nodeBuilderState = this._createNodeBuilderState( nodeBuilder );
nodeBuilderCache.set( cacheKey, nodeBuilderState );
}
nodeBuilderState.usedTimes ++;
renderObjectData.nodeBuilderState = nodeBuilderState;
}
return nodeBuilderState;
}
/**
* Deletes the given object from the internal data map
*
* @param {any} object - The object to delete.
* @return {?Object} The deleted dictionary.
*/
delete( object ) {
if ( object.isRenderObject ) {
const nodeBuilderState = this.get( object ).nodeBuilderState;
nodeBuilderState.usedTimes --;
if ( nodeBuilderState.usedTimes === 0 ) {
this.nodeBuilderCache.delete( this.getForRenderCacheKey( object ) );
}
}
return super.delete( object );
}
/**
* Returns a node builder state for the given compute node.
*
* @param {Node} computeNode - The compute node.
* @return {NodeBuilderState} The node builder state.
*/
getForCompute( computeNode ) {
const computeData = this.get( computeNode );
let nodeBuilderState = computeData.nodeBuilderState;
if ( nodeBuilderState === undefined ) {
const nodeBuilder = this.backend.createNodeBuilder( computeNode, this.renderer );
nodeBuilder.build();
nodeBuilderState = this._createNodeBuilderState( nodeBuilder );
computeData.nodeBuilderState = nodeBuilderState;
}
return nodeBuilderState;
}
/**
* Creates a node builder state for the given node builder.
*
* @private
* @param {NodeBuilder} nodeBuilder - The node builder.
* @return {NodeBuilderState} The node builder state.
*/
_createNodeBuilderState( nodeBuilder ) {
return new NodeBuilderState(
nodeBuilder.vertexShader,
nodeBuilder.fragmentShader,
nodeBuilder.computeShader,
nodeBuilder.getAttributesArray(),
nodeBuilder.getBindings(),
nodeBuilder.updateNodes,
nodeBuilder.updateBeforeNodes,
nodeBuilder.updateAfterNodes,
nodeBuilder.observer,
nodeBuilder.transforms
);
}
/**
* Returns an environment node for the current configured
* scene environment.
*
* @param {Scene} scene - The scene.
* @return {Node} A node representing the current scene environment.
*/
getEnvironmentNode( scene ) {
this.updateEnvironment( scene );
let environmentNode = null;
if ( scene.environmentNode && scene.environmentNode.isNode ) {
environmentNode = scene.environmentNode;
} else {
const sceneData = this.get( scene );
if ( sceneData.environmentNode ) {
environmentNode = sceneData.environmentNode;
}
}
return environmentNode;
}
/**
* Returns a background node for the current configured
* scene background.
*
* @param {Scene} scene - The scene.
* @return {Node} A node representing the current scene background.
*/
getBackgroundNode( scene ) {
this.updateBackground( scene );
let backgroundNode = null;
if ( scene.backgroundNode && scene.backgroundNode.isNode ) {
backgroundNode = scene.backgroundNode;
} else {
const sceneData = this.get( scene );
if ( sceneData.backgroundNode ) {
backgroundNode = sceneData.backgroundNode;
}
}
return backgroundNode;
}
/**
* Returns a fog node for the current configured scene fog.
*
* @param {Scene} scene - The scene.
* @return {Node} A node representing the current scene fog.
*/
getFogNode( scene ) {
this.updateFog( scene );
return scene.fogNode || this.get( scene ).fogNode || null;
}
/**
* Returns a cache key for the given scene and lights node.
* This key is used by `RenderObject` as a part of the dynamic
* cache key (a key that must be checked every time the render
* objects is drawn).
*
* @param {Scene} scene - The scene.
* @param {LightsNode} lightsNode - The lights node.
* @return {number} The cache key.
*/
getCacheKey( scene, lightsNode ) {
_chainKeys[ 0 ] = scene;
_chainKeys[ 1 ] = lightsNode;
const callId = this.renderer.info.calls;
const cacheKeyData = this.callHashCache.get( _chainKeys ) || {};
if ( cacheKeyData.callId !== callId ) {
const environmentNode = this.getEnvironmentNode( scene );
const fogNode = this.getFogNode( scene );
if ( lightsNode ) _cacheKeyValues.push( lightsNode.getCacheKey( true ) );
if ( environmentNode ) _cacheKeyValues.push( environmentNode.getCacheKey() );
if ( fogNode ) _cacheKeyValues.push( fogNode.getCacheKey() );
_cacheKeyValues.push( this.renderer.getOutputRenderTarget() && this.renderer.getOutputRenderTarget().multiview ? 1 : 0 );
_cacheKeyValues.push( this.renderer.shadowMap.enabled ? 1 : 0 );
_cacheKeyValues.push( this.renderer.shadowMap.type );
cacheKeyData.callId = callId;
cacheKeyData.cacheKey = hashArray( _cacheKeyValues );
this.callHashCache.set( _chainKeys, cacheKeyData );
_cacheKeyValues.length = 0;
}
_chainKeys[ 0 ] = null;
_chainKeys[ 1 ] = null;
return cacheKeyData.cacheKey;
}
/**
* A boolean that indicates whether tone mapping should be enabled
* or not.
*
* @type {boolean}
*/
get isToneMappingState() {
return this.renderer.getRenderTarget() ? false : true;
}
/**
* If a scene background is configured, this method makes sure to
* represent the background with a corresponding node-based implementation.
*
* @param {Scene} scene - The scene.
*/
updateBackground( scene ) {
const sceneData = this.get( scene );
const background = scene.background;
if ( background ) {
const forceUpdate = ( scene.backgroundBlurriness === 0 && sceneData.backgroundBlurriness > 0 ) || ( scene.backgroundBlurriness > 0 && sceneData.backgroundBlurriness === 0 );
if ( sceneData.background !== background || forceUpdate ) {
const backgroundNode = this.getCacheNode( 'background', background, () => {
if ( background.isCubeTexture === true || ( background.mapping === EquirectangularReflectionMapping || background.mapping === EquirectangularRefractionMapping || background.mapping === CubeUVReflectionMapping ) ) {
if ( scene.backgroundBlurriness > 0 || background.mapping === CubeUVReflectionMapping ) {
return pmremTexture( background );
} else {
let envMap;
if ( background.isCubeTexture === true ) {
envMap = cubeTexture( background );
} else {
envMap = texture( background );
}
return cubeMapNode( envMap );
}
} else if ( background.isTexture === true ) {
return texture( background, screenUV.flipY() ).setUpdateMatrix( true );
} else if ( background.isColor !== true ) {
error( 'WebGPUNodes: Unsupported background configuration.', background );
}
}, forceUpdate );
sceneData.backgroundNode = backgroundNode;
sceneData.background = background;
sceneData.backgroundBlurriness = scene.backgroundBlurriness;
}
} else if ( sceneData.backgroundNode ) {
delete sceneData.backgroundNode;
delete sceneData.background;
}
}
/**
* This method is part of the caching of nodes which are used to represents the
* scene's background, fog or environment.
*
* @param {string} type - The type of object to cache.
* @param {Object} object - The object.
* @param {Function} callback - A callback that produces a node representation for the given object.
* @param {boolean} [forceUpdate=false] - Whether an update should be enforced or not.
* @return {Node} The node representation.
*/
getCacheNode( type, object, callback, forceUpdate = false ) {
const nodeCache = this.cacheLib[ type ] || ( this.cacheLib[ type ] = new WeakMap() );
let node = nodeCache.get( object );
if ( node === undefined || forceUpdate ) {
node = callback();
nodeCache.set( object, node );
}
return node;
}
/**
* If a scene fog is configured, this method makes sure to
* represent the fog with a corresponding node-based implementation.
*
* @param {Scene} scene - The scene.
*/
updateFog( scene ) {
const sceneData = this.get( scene );
const sceneFog = scene.fog;
if ( sceneFog ) {
if ( sceneData.fog !== sceneFog ) {
const fogNode = this.getCacheNode( 'fog', sceneFog, () => {
if ( sceneFog.isFogExp2 ) {
const color = reference( 'color', 'color', sceneFog ).setGroup( renderGroup );
const density = reference( 'density', 'float', sceneFog ).setGroup( renderGroup );
return fog( color, densityFogFactor( density ) );
} else if ( sceneFog.isFog ) {
const color = reference( 'color', 'color', sceneFog ).setGroup( renderGroup );
const near = reference( 'near', 'float', sceneFog ).setGroup( renderGroup );
const far = reference( 'far', 'float', sceneFog ).setGroup( renderGroup );
return fog( color, rangeFogFactor( near, far ) );
} else {
error( 'Renderer: Unsupported fog configuration.', sceneFog );
}
} );
sceneData.fogNode = fogNode;
sceneData.fog = sceneFog;
}
} else {
delete sceneData.fogNode;
delete sceneData.fog;
}
}
/**
* If a scene environment is configured, this method makes sure to
* represent the environment with a corresponding node-based implementation.
*
* @param {Scene} scene - The scene.
*/
updateEnvironment( scene ) {
const sceneData = this.get( scene );
const environment = scene.environment;
if ( environment ) {
if ( sceneData.environment !== environment ) {
const environmentNode = this.getCacheNode( 'environment', environment, () => {
if ( environment.isCubeTexture === true ) {
return cubeTexture( environment );
} else if ( environment.isTexture === true ) {
return texture( environment );
} else {
error( 'Nodes: Unsupported environment configuration.', environment );
}
} );
sceneData.environmentNode = environmentNode;
sceneData.environment = environment;
}
} else if ( sceneData.environmentNode ) {
delete sceneData.environmentNode;
delete sceneData.environment;
}
}
getNodeFrame( renderer = this.renderer, scene = null, object = null, camera = null, material = null ) {
const nodeFrame = this.nodeFrame;
nodeFrame.renderer = renderer;
nodeFrame.scene = scene;
nodeFrame.object = object;
nodeFrame.camera = camera;
nodeFrame.material = material;
return nodeFrame;
}
getNodeFrameForRender( renderObject ) {
return this.getNodeFrame( renderObject.renderer, renderObject.scene, renderObject.object, renderObject.camera, renderObject.material );
}
/**
* Returns the current output cache key.
*
* @return {string} The output cache key.
*/
getOutputCacheKey() {
const renderer = this.renderer;
return renderer.toneMapping + ',' + renderer.currentColorSpace + ',' + renderer.xr.isPresenting;
}
/**
* Checks if the output configuration (tone mapping and color space) for
* the given target has changed.
*
* @param {Texture} outputTarget - The output target.
* @return {boolean} Whether the output configuration has changed or not.
*/
hasOutputChange( outputTarget ) {
const cacheKey = _outputNodeMap.get( outputTarget );
return cacheKey !== this.getOutputCacheKey();
}
/**
* Returns a node that represents the output configuration (tone mapping and
* color space) for the current target.
*
* @param {Texture} outputTarget - The output target.
* @return {Node} The output node.
*/
getOutputNode( outputTarget ) {
const renderer = this.renderer;
const cacheKey = this.getOutputCacheKey();
const output = outputTarget.isArrayTexture ?
texture3D( outputTarget, vec3( screenUV, builtin( 'gl_ViewID_OVR' ) ) ).renderOutput( renderer.toneMapping, renderer.currentColorSpace ) :
texture( outputTarget, screenUV ).renderOutput( renderer.toneMapping, renderer.currentColorSpace );
_outputNodeMap.set( outputTarget, cacheKey );
return output;
}
/**
* Triggers the call of `updateBefore()` methods
* for all nodes of the given render object.
*
* @param {RenderObject} renderObject - The render object.
*/
updateBefore( renderObject ) {
const nodeBuilder = renderObject.getNodeBuilderState();
for ( const node of nodeBuilder.updateBeforeNodes ) {
// update frame state for each node
this.getNodeFrameForRender( renderObject ).updateBeforeNode( node );
}
}
/**
* Triggers the call of `updateAfter()` methods
* for all nodes of the given render object.
*
* @param {RenderObject} renderObject - The render object.
*/
updateAfter( renderObject ) {
const nodeBuilder = renderObject.getNodeBuilderState();
for ( const node of nodeBuilder.updateAfterNodes ) {
// update frame state for each node
this.getNodeFrameForRender( renderObject ).updateAfterNode( node );
}
}
/**
* Triggers the call of `update()` methods
* for all nodes of the given compute node.
*
* @param {Node} computeNode - The compute node.
*/
updateForCompute( computeNode ) {
const nodeFrame = this.getNodeFrame();
const nodeBuilder = this.getForCompute( computeNode );
for ( const node of nodeBuilder.updateNodes ) {
nodeFrame.updateNode( node );
}
}
/**
* Triggers the call of `update()` methods
* for all nodes of the given compute node.
*
* @param {RenderObject} renderObject - The render object.
*/
updateForRender( renderObject ) {
const nodeFrame = this.getNodeFrameForRender( renderObject );
const nodeBuilder = renderObject.getNodeBuilderState();
for ( const node of nodeBuilder.updateNodes ) {
nodeFrame.updateNode( node );
}
}
/**
* Returns `true` if the given render object requires a refresh.
*
* @param {RenderObject} renderObject - The render object.
* @return {boolean} Whether the given render object requires a refresh or not.
*/
needsRefresh( renderObject ) {
const nodeFrame = this.getNodeFrameForRender( renderObject );
const monitor = renderObject.getMonitor();
return monitor.needsRefresh( renderObject, nodeFrame );
}
/**
* Frees the internal resources.
*/
dispose() {
super.dispose();
this.nodeFrame = new NodeFrame();
this.nodeBuilderCache = new Map();
this.cacheLib = {};
}
}
export default NodeManager;
import Pipeline from './Pipeline.js';
/**
 * Class for representing render pipelines. A render object pipeline
 * bundles a vertex and a fragment program under a shared cache key.
 *
 * @private
 * @augments Pipeline
 */
class RenderObjectPipeline extends Pipeline {

	/**
	 * Constructs a new render object pipeline.
	 *
	 * @param {string} cacheKey - The pipeline's cache key.
	 * @param {ProgrammableStage} vertexProgram - The pipeline's vertex shader.
	 * @param {ProgrammableStage} fragmentProgram - The pipeline's fragment shader.
	 */
	constructor( cacheKey, vertexProgram, fragmentProgram ) {

		super( cacheKey );

		// The pipeline's vertex shader ({ProgrammableStage}) followed by the
		// pipeline's fragment shader ({ProgrammableStage}); assigned in the
		// same order as declared.
		Object.assign( this, { vertexProgram, fragmentProgram } );

	}

}
export default RenderObjectPipeline;
import { CubeReflectionMapping, CubeRefractionMapping, EquirectangularReflectionMapping, EquirectangularRefractionMapping } from '../../constants.js';
import { PMREMGenerator } from '../../extras/PMREMGenerator.js';
import { WebGLCubeRenderTarget } from '../WebGLCubeRenderTarget.js';
/**
 * Manages environment textures for the WebGL renderer. Equirectangular
 * textures are converted to cube maps, and — when PMREM is requested —
 * equirect/cube maps are pre-filtered with {@link PMREMGenerator}.
 * Converted render targets are cached per source texture and released
 * when the source texture is disposed.
 *
 * @param {WebGLRenderer} renderer - The renderer.
 * @return {{get: Function, dispose: Function}} The environments interface.
 */
function WebGLEnvironments( renderer ) {

	// Conversion caches, keyed by the source texture (WeakMap so unreferenced
	// textures can be garbage collected together with their render targets).
	let cubeMaps = new WeakMap();
	let pmremMaps = new WeakMap();

	// Created lazily on the first PMREM conversion.
	let pmremGenerator = null;

	/**
	 * Returns the environment texture for the given source texture.
	 *
	 * @param {?Texture} texture - The source texture.
	 * @param {boolean} [usePMREM=false] - Whether to return a PMREM-filtered result.
	 * @return {?Texture} The environment texture, or `null` if none can be produced (yet).
	 */
	function get( texture, usePMREM = false ) {

		if ( texture === null || texture === undefined ) return null;

		if ( usePMREM ) {

			return getPMREM( texture );

		}

		return getCube( texture );

	}

	// Converts an equirectangular texture to a cached cube map.
	// Non-equirect textures are passed through unchanged.
	function getCube( texture ) {

		if ( texture && texture.isTexture ) {

			const mapping = texture.mapping;

			if ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping ) {

				if ( cubeMaps.has( texture ) ) {

					const cubemap = cubeMaps.get( texture ).texture;
					return mapTextureMapping( cubemap, texture.mapping );

				} else {

					const image = texture.image;

					if ( image && image.height > 0 ) {

						const renderTarget = new WebGLCubeRenderTarget( image.height );
						renderTarget.fromEquirectangularTexture( renderer, texture );
						cubeMaps.set( texture, renderTarget );

						// release the cached render target when the source is disposed
						texture.addEventListener( 'dispose', onCubemapDispose );

						return mapTextureMapping( renderTarget.texture, texture.mapping );

					} else {

						// image not yet ready. try the conversion next frame

						return null;

					}

				}

			}

		}

		return texture;

	}

	// Converts an equirect or cube map to a cached, PMREM-filtered cubeUV
	// texture. Render-target textures are re-filtered whenever their
	// pmremVersion changes; other textures are filtered once.
	function getPMREM( texture ) {

		if ( texture && texture.isTexture ) {

			const mapping = texture.mapping;

			const isEquirectMap = ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping );
			const isCubeMap = ( mapping === CubeReflectionMapping || mapping === CubeRefractionMapping );

			// equirect/cube map to cubeUV conversion

			if ( isEquirectMap || isCubeMap ) {

				let renderTarget = pmremMaps.get( texture );

				const currentPMREMVersion = renderTarget !== undefined ? renderTarget.texture.pmremVersion : 0;

				if ( texture.isRenderTargetTexture && texture.pmremVersion !== currentPMREMVersion ) {

					// dynamic source changed: regenerate the PMREM in place
					// (the existing render target is reused when available)

					if ( pmremGenerator === null ) pmremGenerator = new PMREMGenerator( renderer );

					renderTarget = isEquirectMap ? pmremGenerator.fromEquirectangular( texture, renderTarget ) : pmremGenerator.fromCubemap( texture, renderTarget );
					renderTarget.texture.pmremVersion = texture.pmremVersion;

					pmremMaps.set( texture, renderTarget );

					return renderTarget.texture;

				} else {

					if ( renderTarget !== undefined ) {

						return renderTarget.texture;

					} else {

						const image = texture.image;

						if ( ( isEquirectMap && image && image.height > 0 ) || ( isCubeMap && image && isCubeTextureComplete( image ) ) ) {

							if ( pmremGenerator === null ) pmremGenerator = new PMREMGenerator( renderer );

							renderTarget = isEquirectMap ? pmremGenerator.fromEquirectangular( texture ) : pmremGenerator.fromCubemap( texture );
							renderTarget.texture.pmremVersion = texture.pmremVersion;

							pmremMaps.set( texture, renderTarget );

							// release the cached render target when the source is disposed
							texture.addEventListener( 'dispose', onPMREMDispose );

							return renderTarget.texture;

						} else {

							// image not yet ready. try the conversion next frame

							return null;

						}

					}

				}

			}

		}

		return texture;

	}

	// Rewrites an equirectangular mapping to the corresponding cube mapping
	// on the converted texture. NOTE: mutates the passed texture's mapping.
	function mapTextureMapping( texture, mapping ) {

		if ( mapping === EquirectangularReflectionMapping ) {

			texture.mapping = CubeReflectionMapping;

		} else if ( mapping === EquirectangularRefractionMapping ) {

			texture.mapping = CubeRefractionMapping;

		}

		return texture;

	}

	// Returns true once all six cube faces are present in the image array.
	function isCubeTextureComplete( image ) {

		let count = 0;
		const length = 6;

		for ( let i = 0; i < length; i ++ ) {

			if ( image[ i ] !== undefined ) count ++;

		}

		return count === length;

	}

	// Dispose listener: drops and disposes the cached cube render target
	// of a disposed source texture.
	function onCubemapDispose( event ) {

		const texture = event.target;

		texture.removeEventListener( 'dispose', onCubemapDispose );

		const cubemap = cubeMaps.get( texture );

		if ( cubemap !== undefined ) {

			cubeMaps.delete( texture );
			cubemap.dispose();

		}

	}

	// Dispose listener: drops and disposes the cached PMREM render target
	// of a disposed source texture.
	function onPMREMDispose( event ) {

		const texture = event.target;

		texture.removeEventListener( 'dispose', onPMREMDispose );

		const pmrem = pmremMaps.get( texture );

		if ( pmrem !== undefined ) {

			pmremMaps.delete( texture );
			pmrem.dispose();

		}

	}

	/**
	 * Frees internal resources: resets both caches and disposes the
	 * PMREM generator, if one was created.
	 */
	function dispose() {

		cubeMaps = new WeakMap();
		pmremMaps = new WeakMap();

		if ( pmremGenerator !== null ) {

			pmremGenerator.dispose();
			pmremGenerator = null;

		}

	}

	return {
		get: get,
		dispose: dispose
	};

}
export { WebGLEnvironments };
+7
-11
/**
* @license
* Copyright 2010-2025 Three.js Authors
* Copyright 2010-2026 Three.js Authors
* SPDX-License-Identifier: MIT

@@ -37,3 +37,2 @@ */

const Schlick_to_F0 = TSL.Schlick_to_F0;
const ScriptableNodeResources = TSL.ScriptableNodeResources;
const ShaderNode = TSL.ShaderNode;

@@ -67,3 +66,2 @@ const Stack = TSL.Stack;

const atan = TSL.atan;
const atan2 = TSL.atan2;
const atomicAdd = TSL.atomicAdd;

@@ -108,3 +106,2 @@ const atomicAnd = TSL.atomicAnd;

const bumpMap = TSL.bumpMap;
const burn = TSL.burn;
const builtin = TSL.builtin;

@@ -138,2 +135,3 @@ const builtinAOContext = TSL.builtinAOContext;

const clearcoatRoughness = TSL.clearcoatRoughness;
const clipSpace = TSL.clipSpace;
const code = TSL.code;

@@ -181,3 +179,2 @@ const color = TSL.color;

const div = TSL.div;
const dodge = TSL.dodge;
const dot = TSL.dot;

@@ -189,6 +186,6 @@ const drawIndex = TSL.drawIndex;

const equal = TSL.equal;
const equals = TSL.equals;
const equirectUV = TSL.equirectUV;
const exp = TSL.exp;
const exp2 = TSL.exp2;
const exponentialHeightFogFactor = TSL.exponentialHeightFogFactor;
const expression = TSL.expression;

@@ -431,3 +428,2 @@ const faceDirection = TSL.faceDirection;

const outputStruct = TSL.outputStruct;
const overlay = TSL.overlay;
const overloadingFn = TSL.overloadingFn;

@@ -502,4 +498,2 @@ const packHalf2x16 = TSL.packHalf2x16;

const screenUV = TSL.screenUV;
const scriptable = TSL.scriptable;
const scriptableValue = TSL.scriptableValue;
const select = TSL.select;

@@ -535,3 +529,2 @@ const setCurrentStack = TSL.setCurrentStack;

const storageBarrier = TSL.storageBarrier;
const storageObject = TSL.storageObject;
const storageTexture = TSL.storageTexture;

@@ -628,2 +621,4 @@ const string = TSL.string;

const viewZToPerspectiveDepth = TSL.viewZToPerspectiveDepth;
const viewZToReversedOrthographicDepth = TSL.viewZToReversedOrthographicDepth;
const viewZToReversedPerspectiveDepth = TSL.viewZToReversedPerspectiveDepth;
const viewport = TSL.viewport;

@@ -634,2 +629,3 @@ const viewportCoordinate = TSL.viewportCoordinate;

const viewportMipTexture = TSL.viewportMipTexture;
const viewportOpaqueMipTexture = TSL.viewportOpaqueMipTexture;
const viewportResolution = TSL.viewportResolution;

@@ -663,2 +659,2 @@ const viewportSafeUV = TSL.viewportSafeUV;

export { BRDF_GGX, BRDF_Lambert, BasicPointShadowFilter, BasicShadowFilter, Break, Const, Continue, DFGLUT, D_GGX, Discard, EPSILON, F_Schlick, Fn, HALF_PI, INFINITY, If, Loop, NodeAccess, NodeShaderStage, NodeType, NodeUpdateType, OnBeforeMaterialUpdate, OnBeforeObjectUpdate, OnMaterialUpdate, OnObjectUpdate, PCFShadowFilter, PCFSoftShadowFilter, PI, PI2, PointShadowFilter, Return, Schlick_to_F0, ScriptableNodeResources, ShaderNode, Stack, Switch, TBNViewMatrix, TWO_PI, VSMShadowFilter, V_GGX_SmithCorrelated, Var, VarIntent, abs, acesFilmicToneMapping, acos, add, addMethodChaining, addNodeElement, agxToneMapping, all, alphaT, and, anisotropy, anisotropyB, anisotropyT, any, append, array, arrayBuffer, asin, assign, atan, atan2, atomicAdd, atomicAnd, atomicFunc, atomicLoad, atomicMax, atomicMin, atomicOr, atomicStore, atomicSub, atomicXor, attenuationColor, attenuationDistance, attribute, attributeArray, backgroundBlurriness, backgroundIntensity, backgroundRotation, batch, bentNormalView, billboarding, bitAnd, bitNot, bitOr, bitXor, bitangentGeometry, bitangentLocal, bitangentView, bitangentWorld, bitcast, blendBurn, blendColor, blendDodge, blendOverlay, blendScreen, blur, bool, buffer, bufferAttribute, builtin, builtinAOContext, builtinShadowContext, bumpMap, burn, bvec2, bvec3, bvec4, bypass, cache, call, cameraFar, cameraIndex, cameraNear, cameraNormalMatrix, cameraPosition, cameraProjectionMatrix, cameraProjectionMatrixInverse, cameraViewMatrix, cameraViewport, cameraWorldMatrix, cbrt, cdl, ceil, checker, cineonToneMapping, clamp, clearcoat, clearcoatNormalView, clearcoatRoughness, code, color, colorSpaceToWorking, colorToDirection, compute, computeKernel, computeSkinning, context, convert, convertColorSpace, convertToTexture, cos, countLeadingZeros, countOneBits, countTrailingZeros, cross, cubeTexture, cubeTextureBase, dFdx, dFdy, dashSize, debug, decrement, decrementBefore, defaultBuildStages, defaultShaderStages, defined, degrees, deltaTime, densityFog, 
densityFogFactor, depth, depthPass, determinant, difference, diffuseColor, directPointLight, directionToColor, directionToFaceDirection, dispersion, distance, div, dodge, dot, drawIndex, dynamicBufferAttribute, element, emissive, equal, equals, equirectUV, exp, exp2, expression, faceDirection, faceForward, faceforward, float, floatBitsToInt, floatBitsToUint, floor, fog, fract, frameGroup, frameId, frontFacing, fwidth, gain, gapSize, getConstNodeType, getCurrentStack, getDirection, getDistanceAttenuation, getGeometryRoughness, getNormalFromDepth, getParallaxCorrectNormal, getRoughness, getScreenPosition, getShIrradianceAt, getShadowMaterial, getShadowRenderObjectFunction, getTextureIndex, getViewPosition, globalId, glsl, glslFn, grayscale, greaterThan, greaterThanEqual, hash, highpModelNormalViewMatrix, highpModelViewMatrix, hue, increment, incrementBefore, instance, instanceIndex, instancedArray, instancedBufferAttribute, instancedDynamicBufferAttribute, instancedMesh, int, intBitsToFloat, interleavedGradientNoise, inverse, inverseSqrt, inversesqrt, invocationLocalIndex, invocationSubgroupIndex, ior, iridescence, iridescenceIOR, iridescenceThickness, ivec2, ivec3, ivec4, js, label, length, lengthSq, lessThan, lessThanEqual, lightPosition, lightProjectionUV, lightShadowMatrix, lightTargetDirection, lightTargetPosition, lightViewPosition, lightingContext, lights, linearDepth, linearToneMapping, localId, log, log2, logarithmicDepthToViewZ, luminance, mat2, mat3, mat4, matcapUV, materialAO, materialAlphaTest, materialAnisotropy, materialAnisotropyVector, materialAttenuationColor, materialAttenuationDistance, materialClearcoat, materialClearcoatNormal, materialClearcoatRoughness, materialColor, materialDispersion, materialEmissive, materialEnvIntensity, materialEnvRotation, materialIOR, materialIridescence, materialIridescenceIOR, materialIridescenceThickness, materialLightMap, materialLineDashOffset, materialLineDashSize, materialLineGapSize, materialLineScale, 
materialLineWidth, materialMetalness, materialNormal, materialOpacity, materialPointSize, materialReference, materialReflectivity, materialRefractionRatio, materialRotation, materialRoughness, materialSheen, materialSheenRoughness, materialShininess, materialSpecular, materialSpecularColor, materialSpecularIntensity, materialSpecularStrength, materialThickness, materialTransmission, max, maxMipLevel, mediumpModelViewMatrix, metalness, min, mix, mixElement, mod, modInt, modelDirection, modelNormalMatrix, modelPosition, modelRadius, modelScale, modelViewMatrix, modelViewPosition, modelViewProjection, modelWorldMatrix, modelWorldMatrixInverse, morphReference, mrt, mul, mx_aastep, mx_add, mx_atan2, mx_cell_noise_float, mx_contrast, mx_divide, mx_fractal_noise_float, mx_fractal_noise_vec2, mx_fractal_noise_vec3, mx_fractal_noise_vec4, mx_frame, mx_heighttonormal, mx_hsvtorgb, mx_ifequal, mx_ifgreater, mx_ifgreatereq, mx_invert, mx_modulo, mx_multiply, mx_noise_float, mx_noise_vec3, mx_noise_vec4, mx_place2d, mx_power, mx_ramp4, mx_ramplr, mx_ramptb, mx_rgbtohsv, mx_rotate2d, mx_rotate3d, mx_safepower, mx_separate, mx_splitlr, mx_splittb, mx_srgb_texture_to_lin_rec709, mx_subtract, mx_timer, mx_transform_uv, mx_unifiednoise2d, mx_unifiednoise3d, mx_worley_noise_float, mx_worley_noise_vec2, mx_worley_noise_vec3, negate, neutralToneMapping, nodeArray, nodeImmutable, nodeObject, nodeObjectIntent, nodeObjects, nodeProxy, nodeProxyIntent, normalFlat, normalGeometry, normalLocal, normalMap, normalView, normalViewGeometry, normalWorld, normalWorldGeometry, normalize, not, notEqual, numWorkgroups, objectDirection, objectGroup, objectPosition, objectRadius, objectScale, objectViewPosition, objectWorldMatrix, oneMinus, or, orthographicDepthToViewZ, oscSawtooth, oscSine, oscSquare, oscTriangle, output, outputStruct, overlay, overloadingFn, packHalf2x16, packSnorm2x16, packUnorm2x16, parabola, parallaxDirection, parallaxUV, parameter, pass, passTexture, pcurve, 
perspectiveDepthToViewZ, pmremTexture, pointShadow, pointUV, pointWidth, positionGeometry, positionLocal, positionPrevious, positionView, positionViewDirection, positionWorld, positionWorldDirection, posterize, pow, pow2, pow3, pow4, premultiplyAlpha, property, radians, rand, range, rangeFog, rangeFogFactor, reciprocal, reference, referenceBuffer, reflect, reflectVector, reflectView, reflector, refract, refractVector, refractView, reinhardToneMapping, remap, remapClamp, renderGroup, renderOutput, rendererReference, replaceDefaultUV, rotate, rotateUV, roughness, round, rtt, sRGBTransferEOTF, sRGBTransferOETF, sample, sampler, samplerComparison, saturate, saturation, screen, screenCoordinate, screenDPR, screenSize, screenUV, scriptable, scriptableValue, select, setCurrentStack, setName, shaderStages, shadow, shadowPositionWorld, shapeCircle, sharedUniformGroup, sheen, sheenRoughness, shiftLeft, shiftRight, shininess, sign, sin, sinc, skinning, smoothstep, smoothstepElement, specularColor, specularF90, spherizeUV, split, spritesheetUV, sqrt, stack, step, stepElement, storage, storageBarrier, storageObject, storageTexture, string, struct, sub, subBuild, subgroupAdd, subgroupAll, subgroupAnd, subgroupAny, subgroupBallot, subgroupBroadcast, subgroupBroadcastFirst, subgroupElect, subgroupExclusiveAdd, subgroupExclusiveMul, subgroupInclusiveAdd, subgroupInclusiveMul, subgroupIndex, subgroupMax, subgroupMin, subgroupMul, subgroupOr, subgroupShuffle, subgroupShuffleDown, subgroupShuffleUp, subgroupShuffleXor, subgroupSize, subgroupXor, tan, tangentGeometry, tangentLocal, tangentView, tangentWorld, texture, texture3D, textureBarrier, textureBicubic, textureBicubicLevel, textureCubeUV, textureLevel, textureLoad, textureSize, textureStore, thickness, time, toneMapping, toneMappingExposure, toonOutlinePass, transformDirection, transformNormal, transformNormalToView, transformedClearcoatNormalView, transformedNormalView, transformedNormalWorld, transmission, transpose, 
triNoise3D, triplanarTexture, triplanarTextures, trunc, uint, uintBitsToFloat, uniform, uniformArray, uniformCubeTexture, uniformFlow, uniformGroup, uniformTexture, unpackHalf2x16, unpackSnorm2x16, unpackUnorm2x16, unpremultiplyAlpha, userData, uv, uvec2, uvec3, uvec4, varying, varyingProperty, vec2, vec3, vec4, vectorComponents, velocity, vertexColor, vertexIndex, vertexStage, vibrance, viewZToLogarithmicDepth, viewZToOrthographicDepth, viewZToPerspectiveDepth, viewport, viewportCoordinate, viewportDepthTexture, viewportLinearDepth, viewportMipTexture, viewportResolution, viewportSafeUV, viewportSharedTexture, viewportSize, viewportTexture, viewportUV, vogelDiskSample, wgsl, wgslFn, workgroupArray, workgroupBarrier, workgroupId, workingToColorSpace, xor };
export { BRDF_GGX, BRDF_Lambert, BasicPointShadowFilter, BasicShadowFilter, Break, Const, Continue, DFGLUT, D_GGX, Discard, EPSILON, F_Schlick, Fn, HALF_PI, INFINITY, If, Loop, NodeAccess, NodeShaderStage, NodeType, NodeUpdateType, OnBeforeMaterialUpdate, OnBeforeObjectUpdate, OnMaterialUpdate, OnObjectUpdate, PCFShadowFilter, PCFSoftShadowFilter, PI, PI2, PointShadowFilter, Return, Schlick_to_F0, ShaderNode, Stack, Switch, TBNViewMatrix, TWO_PI, VSMShadowFilter, V_GGX_SmithCorrelated, Var, VarIntent, abs, acesFilmicToneMapping, acos, add, addMethodChaining, addNodeElement, agxToneMapping, all, alphaT, and, anisotropy, anisotropyB, anisotropyT, any, append, array, arrayBuffer, asin, assign, atan, atomicAdd, atomicAnd, atomicFunc, atomicLoad, atomicMax, atomicMin, atomicOr, atomicStore, atomicSub, atomicXor, attenuationColor, attenuationDistance, attribute, attributeArray, backgroundBlurriness, backgroundIntensity, backgroundRotation, batch, bentNormalView, billboarding, bitAnd, bitNot, bitOr, bitXor, bitangentGeometry, bitangentLocal, bitangentView, bitangentWorld, bitcast, blendBurn, blendColor, blendDodge, blendOverlay, blendScreen, blur, bool, buffer, bufferAttribute, builtin, builtinAOContext, builtinShadowContext, bumpMap, bvec2, bvec3, bvec4, bypass, cache, call, cameraFar, cameraIndex, cameraNear, cameraNormalMatrix, cameraPosition, cameraProjectionMatrix, cameraProjectionMatrixInverse, cameraViewMatrix, cameraViewport, cameraWorldMatrix, cbrt, cdl, ceil, checker, cineonToneMapping, clamp, clearcoat, clearcoatNormalView, clearcoatRoughness, clipSpace, code, color, colorSpaceToWorking, colorToDirection, compute, computeKernel, computeSkinning, context, convert, convertColorSpace, convertToTexture, cos, countLeadingZeros, countOneBits, countTrailingZeros, cross, cubeTexture, cubeTextureBase, dFdx, dFdy, dashSize, debug, decrement, decrementBefore, defaultBuildStages, defaultShaderStages, defined, degrees, deltaTime, densityFog, densityFogFactor, depth, 
depthPass, determinant, difference, diffuseColor, directPointLight, directionToColor, directionToFaceDirection, dispersion, distance, div, dot, drawIndex, dynamicBufferAttribute, element, emissive, equal, equirectUV, exp, exp2, exponentialHeightFogFactor, expression, faceDirection, faceForward, faceforward, float, floatBitsToInt, floatBitsToUint, floor, fog, fract, frameGroup, frameId, frontFacing, fwidth, gain, gapSize, getConstNodeType, getCurrentStack, getDirection, getDistanceAttenuation, getGeometryRoughness, getNormalFromDepth, getParallaxCorrectNormal, getRoughness, getScreenPosition, getShIrradianceAt, getShadowMaterial, getShadowRenderObjectFunction, getTextureIndex, getViewPosition, globalId, glsl, glslFn, grayscale, greaterThan, greaterThanEqual, hash, highpModelNormalViewMatrix, highpModelViewMatrix, hue, increment, incrementBefore, instance, instanceIndex, instancedArray, instancedBufferAttribute, instancedDynamicBufferAttribute, instancedMesh, int, intBitsToFloat, interleavedGradientNoise, inverse, inverseSqrt, inversesqrt, invocationLocalIndex, invocationSubgroupIndex, ior, iridescence, iridescenceIOR, iridescenceThickness, ivec2, ivec3, ivec4, js, label, length, lengthSq, lessThan, lessThanEqual, lightPosition, lightProjectionUV, lightShadowMatrix, lightTargetDirection, lightTargetPosition, lightViewPosition, lightingContext, lights, linearDepth, linearToneMapping, localId, log, log2, logarithmicDepthToViewZ, luminance, mat2, mat3, mat4, matcapUV, materialAO, materialAlphaTest, materialAnisotropy, materialAnisotropyVector, materialAttenuationColor, materialAttenuationDistance, materialClearcoat, materialClearcoatNormal, materialClearcoatRoughness, materialColor, materialDispersion, materialEmissive, materialEnvIntensity, materialEnvRotation, materialIOR, materialIridescence, materialIridescenceIOR, materialIridescenceThickness, materialLightMap, materialLineDashOffset, materialLineDashSize, materialLineGapSize, materialLineScale, materialLineWidth, 
materialMetalness, materialNormal, materialOpacity, materialPointSize, materialReference, materialReflectivity, materialRefractionRatio, materialRotation, materialRoughness, materialSheen, materialSheenRoughness, materialShininess, materialSpecular, materialSpecularColor, materialSpecularIntensity, materialSpecularStrength, materialThickness, materialTransmission, max, maxMipLevel, mediumpModelViewMatrix, metalness, min, mix, mixElement, mod, modInt, modelDirection, modelNormalMatrix, modelPosition, modelRadius, modelScale, modelViewMatrix, modelViewPosition, modelViewProjection, modelWorldMatrix, modelWorldMatrixInverse, morphReference, mrt, mul, mx_aastep, mx_add, mx_atan2, mx_cell_noise_float, mx_contrast, mx_divide, mx_fractal_noise_float, mx_fractal_noise_vec2, mx_fractal_noise_vec3, mx_fractal_noise_vec4, mx_frame, mx_heighttonormal, mx_hsvtorgb, mx_ifequal, mx_ifgreater, mx_ifgreatereq, mx_invert, mx_modulo, mx_multiply, mx_noise_float, mx_noise_vec3, mx_noise_vec4, mx_place2d, mx_power, mx_ramp4, mx_ramplr, mx_ramptb, mx_rgbtohsv, mx_rotate2d, mx_rotate3d, mx_safepower, mx_separate, mx_splitlr, mx_splittb, mx_srgb_texture_to_lin_rec709, mx_subtract, mx_timer, mx_transform_uv, mx_unifiednoise2d, mx_unifiednoise3d, mx_worley_noise_float, mx_worley_noise_vec2, mx_worley_noise_vec3, negate, neutralToneMapping, nodeArray, nodeImmutable, nodeObject, nodeObjectIntent, nodeObjects, nodeProxy, nodeProxyIntent, normalFlat, normalGeometry, normalLocal, normalMap, normalView, normalViewGeometry, normalWorld, normalWorldGeometry, normalize, not, notEqual, numWorkgroups, objectDirection, objectGroup, objectPosition, objectRadius, objectScale, objectViewPosition, objectWorldMatrix, oneMinus, or, orthographicDepthToViewZ, oscSawtooth, oscSine, oscSquare, oscTriangle, output, outputStruct, overloadingFn, packHalf2x16, packSnorm2x16, packUnorm2x16, parabola, parallaxDirection, parallaxUV, parameter, pass, passTexture, pcurve, perspectiveDepthToViewZ, pmremTexture, 
pointShadow, pointUV, pointWidth, positionGeometry, positionLocal, positionPrevious, positionView, positionViewDirection, positionWorld, positionWorldDirection, posterize, pow, pow2, pow3, pow4, premultiplyAlpha, property, radians, rand, range, rangeFog, rangeFogFactor, reciprocal, reference, referenceBuffer, reflect, reflectVector, reflectView, reflector, refract, refractVector, refractView, reinhardToneMapping, remap, remapClamp, renderGroup, renderOutput, rendererReference, replaceDefaultUV, rotate, rotateUV, roughness, round, rtt, sRGBTransferEOTF, sRGBTransferOETF, sample, sampler, samplerComparison, saturate, saturation, screen, screenCoordinate, screenDPR, screenSize, screenUV, select, setCurrentStack, setName, shaderStages, shadow, shadowPositionWorld, shapeCircle, sharedUniformGroup, sheen, sheenRoughness, shiftLeft, shiftRight, shininess, sign, sin, sinc, skinning, smoothstep, smoothstepElement, specularColor, specularF90, spherizeUV, split, spritesheetUV, sqrt, stack, step, stepElement, storage, storageBarrier, storageTexture, string, struct, sub, subBuild, subgroupAdd, subgroupAll, subgroupAnd, subgroupAny, subgroupBallot, subgroupBroadcast, subgroupBroadcastFirst, subgroupElect, subgroupExclusiveAdd, subgroupExclusiveMul, subgroupInclusiveAdd, subgroupInclusiveMul, subgroupIndex, subgroupMax, subgroupMin, subgroupMul, subgroupOr, subgroupShuffle, subgroupShuffleDown, subgroupShuffleUp, subgroupShuffleXor, subgroupSize, subgroupXor, tan, tangentGeometry, tangentLocal, tangentView, tangentWorld, texture, texture3D, textureBarrier, textureBicubic, textureBicubicLevel, textureCubeUV, textureLevel, textureLoad, textureSize, textureStore, thickness, time, toneMapping, toneMappingExposure, toonOutlinePass, transformDirection, transformNormal, transformNormalToView, transformedClearcoatNormalView, transformedNormalView, transformedNormalWorld, transmission, transpose, triNoise3D, triplanarTexture, triplanarTextures, trunc, uint, uintBitsToFloat, uniform, 
uniformArray, uniformCubeTexture, uniformFlow, uniformGroup, uniformTexture, unpackHalf2x16, unpackSnorm2x16, unpackUnorm2x16, unpremultiplyAlpha, userData, uv, uvec2, uvec3, uvec4, varying, varyingProperty, vec2, vec3, vec4, vectorComponents, velocity, vertexColor, vertexIndex, vertexStage, vibrance, viewZToLogarithmicDepth, viewZToOrthographicDepth, viewZToPerspectiveDepth, viewZToReversedOrthographicDepth, viewZToReversedPerspectiveDepth, viewport, viewportCoordinate, viewportDepthTexture, viewportLinearDepth, viewportMipTexture, viewportOpaqueMipTexture, viewportResolution, viewportSafeUV, viewportSharedTexture, viewportSize, viewportTexture, viewportUV, vogelDiskSample, wgsl, wgslFn, workgroupArray, workgroupBarrier, workgroupId, workingToColorSpace, xor };
/**
* @license
* Copyright 2010-2025 Three.js Authors
* Copyright 2010-2026 Three.js Authors
* SPDX-License-Identifier: MIT
*/
import{TSL as e}from"three/webgpu";const t=e.BRDF_GGX,r=e.BRDF_Lambert,a=e.BasicPointShadowFilter,o=e.BasicShadowFilter,i=e.Break,n=e.Const,l=e.Continue,s=e.DFGLUT,c=e.D_GGX,m=e.Discard,u=e.EPSILON,p=e.F_Schlick,d=e.Fn,g=e.INFINITY,x=e.If,h=e.Loop,b=e.NodeAccess,f=e.NodeShaderStage,v=e.NodeType,w=e.NodeUpdateType,_=e.PCFShadowFilter,S=e.PCFSoftShadowFilter,T=e.PI,y=e.PI2,V=e.TWO_PI,M=e.HALF_PI,F=e.PointShadowFilter,D=e.Return,I=e.Schlick_to_F0,B=e.ScriptableNodeResources,C=e.ShaderNode,P=e.Stack,A=e.Switch,N=e.TBNViewMatrix,R=e.VSMShadowFilter,k=e.V_GGX_SmithCorrelated,O=e.Var,L=e.VarIntent,U=e.abs,G=e.acesFilmicToneMapping,j=e.acos,E=e.add,W=e.addMethodChaining,q=e.addNodeElement,z=e.agxToneMapping,Z=e.all,X=e.alphaT,H=e.and,K=e.anisotropy,Y=e.anisotropyB,J=e.anisotropyT,Q=e.any,$=e.append,ee=e.array,te=e.arrayBuffer,re=e.asin,ae=e.assign,oe=e.atan,ie=e.atan2,ne=e.atomicAdd,le=e.atomicAnd,se=e.atomicFunc,ce=e.atomicLoad,me=e.atomicMax,ue=e.atomicMin,pe=e.atomicOr,de=e.atomicStore,ge=e.atomicSub,xe=e.atomicXor,he=e.attenuationColor,be=e.attenuationDistance,fe=e.attribute,ve=e.attributeArray,we=e.backgroundBlurriness,_e=e.backgroundIntensity,Se=e.backgroundRotation,Te=e.batch,ye=e.bentNormalView,Ve=e.billboarding,Me=e.bitAnd,Fe=e.bitNot,De=e.bitOr,Ie=e.bitXor,Be=e.bitangentGeometry,Ce=e.bitangentLocal,Pe=e.bitangentView,Ae=e.bitangentWorld,Ne=e.bitcast,Re=e.blendBurn,ke=e.blendColor,Oe=e.blendDodge,Le=e.blendOverlay,Ue=e.blendScreen,Ge=e.blur,je=e.bool,Ee=e.buffer,We=e.bufferAttribute,qe=e.bumpMap,ze=e.burn,Ze=e.builtin,Xe=e.builtinAOContext,He=e.builtinShadowContext,Ke=e.bvec2,Ye=e.bvec3,Je=e.bvec4,Qe=e.bypass,$e=e.cache,et=e.call,tt=e.cameraFar,rt=e.cameraIndex,at=e.cameraNear,ot=e.cameraNormalMatrix,it=e.cameraPosition,nt=e.cameraProjectionMatrix,lt=e.cameraProjectionMatrixInverse,st=e.cameraViewMatrix,ct=e.cameraViewport,mt=e.cameraWorldMatrix,ut=e.cbrt,pt=e.cdl,dt=e.ceil,gt=e.checker,xt=e.cineonToneMapping,ht=e.clamp,bt=e.clearcoat,ft=e.clearcoatNormalView,vt=e.
clearcoatRoughness,wt=e.code,_t=e.color,St=e.colorSpaceToWorking,Tt=e.colorToDirection,yt=e.compute,Vt=e.computeKernel,Mt=e.computeSkinning,Ft=e.context,Dt=e.convert,It=e.convertColorSpace,Bt=e.convertToTexture,Ct=e.countLeadingZeros,Pt=e.countOneBits,At=e.countTrailingZeros,Nt=e.cos,Rt=e.cross,kt=e.cubeTexture,Ot=e.cubeTextureBase,Lt=e.dFdx,Ut=e.dFdy,Gt=e.dashSize,jt=e.debug,Et=e.decrement,Wt=e.decrementBefore,qt=e.defaultBuildStages,zt=e.defaultShaderStages,Zt=e.defined,Xt=e.degrees,Ht=e.deltaTime,Kt=e.densityFog,Yt=e.densityFogFactor,Jt=e.depth,Qt=e.depthPass,$t=e.determinant,er=e.difference,tr=e.diffuseColor,rr=e.directPointLight,ar=e.directionToColor,or=e.directionToFaceDirection,ir=e.dispersion,nr=e.distance,lr=e.div,sr=e.dodge,cr=e.dot,mr=e.drawIndex,ur=e.dynamicBufferAttribute,pr=e.element,dr=e.emissive,gr=e.equal,xr=e.equals,hr=e.equirectUV,br=e.exp,fr=e.exp2,vr=e.expression,wr=e.faceDirection,_r=e.faceForward,Sr=e.faceforward,Tr=e.float,yr=e.floatBitsToInt,Vr=e.floatBitsToUint,Mr=e.floor,Fr=e.fog,Dr=e.fract,Ir=e.frameGroup,Br=e.frameId,Cr=e.frontFacing,Pr=e.fwidth,Ar=e.gain,Nr=e.gapSize,Rr=e.getConstNodeType,kr=e.getCurrentStack,Or=e.getDirection,Lr=e.getDistanceAttenuation,Ur=e.getGeometryRoughness,Gr=e.getNormalFromDepth,jr=e.interleavedGradientNoise,Er=e.vogelDiskSample,Wr=e.getParallaxCorrectNormal,qr=e.getRoughness,zr=e.getScreenPosition,Zr=e.getShIrradianceAt,Xr=e.getShadowMaterial,Hr=e.getShadowRenderObjectFunction,Kr=e.getTextureIndex,Yr=e.getViewPosition,Jr=e.globalId,Qr=e.glsl,$r=e.glslFn,ea=e.grayscale,ta=e.greaterThan,ra=e.greaterThanEqual,aa=e.hash,oa=e.highpModelNormalViewMatrix,ia=e.highpModelViewMatrix,na=e.hue,la=e.increment,sa=e.incrementBefore,ca=e.instance,ma=e.instanceIndex,ua=e.instancedArray,pa=e.instancedBufferAttribute,da=e.instancedDynamicBufferAttribute,ga=e.instancedMesh,xa=e.int,ha=e.intBitsToFloat,ba=e.inverse,fa=e.inverseSqrt,va=e.inversesqrt,wa=e.invocationLocalIndex,_a=e.invocationSubgroupIndex,Sa=e.ior,Ta=e.iridescence,ya=
e.iridescenceIOR,Va=e.iridescenceThickness,Ma=e.ivec2,Fa=e.ivec3,Da=e.ivec4,Ia=e.js,Ba=e.label,Ca=e.length,Pa=e.lengthSq,Aa=e.lessThan,Na=e.lessThanEqual,Ra=e.lightPosition,ka=e.lightProjectionUV,Oa=e.lightShadowMatrix,La=e.lightTargetDirection,Ua=e.lightTargetPosition,Ga=e.lightViewPosition,ja=e.lightingContext,Ea=e.lights,Wa=e.linearDepth,qa=e.linearToneMapping,za=e.localId,Za=e.log,Xa=e.log2,Ha=e.logarithmicDepthToViewZ,Ka=e.luminance,Ya=e.mat2,Ja=e.mat3,Qa=e.mat4,$a=e.matcapUV,eo=e.materialAO,to=e.materialAlphaTest,ro=e.materialAnisotropy,ao=e.materialAnisotropyVector,oo=e.materialAttenuationColor,io=e.materialAttenuationDistance,no=e.materialClearcoat,lo=e.materialClearcoatNormal,so=e.materialClearcoatRoughness,co=e.materialColor,mo=e.materialDispersion,uo=e.materialEmissive,po=e.materialEnvIntensity,go=e.materialEnvRotation,xo=e.materialIOR,ho=e.materialIridescence,bo=e.materialIridescenceIOR,fo=e.materialIridescenceThickness,vo=e.materialLightMap,wo=e.materialLineDashOffset,_o=e.materialLineDashSize,So=e.materialLineGapSize,To=e.materialLineScale,yo=e.materialLineWidth,Vo=e.materialMetalness,Mo=e.materialNormal,Fo=e.materialOpacity,Do=e.materialPointSize,Io=e.materialReference,Bo=e.materialReflectivity,Co=e.materialRefractionRatio,Po=e.materialRotation,Ao=e.materialRoughness,No=e.materialSheen,Ro=e.materialSheenRoughness,ko=e.materialShininess,Oo=e.materialSpecular,Lo=e.materialSpecularColor,Uo=e.materialSpecularIntensity,Go=e.materialSpecularStrength,jo=e.materialThickness,Eo=e.materialTransmission,Wo=e.max,qo=e.maxMipLevel,zo=e.mediumpModelViewMatrix,Zo=e.metalness,Xo=e.min,Ho=e.mix,Ko=e.mixElement,Yo=e.mod,Jo=e.modInt,Qo=e.modelDirection,$o=e.modelNormalMatrix,ei=e.modelPosition,ti=e.modelRadius,ri=e.modelScale,ai=e.modelViewMatrix,oi=e.modelViewPosition,ii=e.modelViewProjection,ni=e.modelWorldMatrix,li=e.modelWorldMatrixInverse,si=e.morphReference,ci=e.mrt,mi=e.mul,ui=e.mx_aastep,pi=e.mx_add,di=e.mx_atan2,gi=e.mx_cell_noise_float,xi=e.mx_contrast,hi=e.mx_
divide,bi=e.mx_fractal_noise_float,fi=e.mx_fractal_noise_vec2,vi=e.mx_fractal_noise_vec3,wi=e.mx_fractal_noise_vec4,_i=e.mx_frame,Si=e.mx_heighttonormal,Ti=e.mx_hsvtorgb,yi=e.mx_ifequal,Vi=e.mx_ifgreater,Mi=e.mx_ifgreatereq,Fi=e.mx_invert,Di=e.mx_modulo,Ii=e.mx_multiply,Bi=e.mx_noise_float,Ci=e.mx_noise_vec3,Pi=e.mx_noise_vec4,Ai=e.mx_place2d,Ni=e.mx_power,Ri=e.mx_ramp4,ki=e.mx_ramplr,Oi=e.mx_ramptb,Li=e.mx_rgbtohsv,Ui=e.mx_rotate2d,Gi=e.mx_rotate3d,ji=e.mx_safepower,Ei=e.mx_separate,Wi=e.mx_splitlr,qi=e.mx_splittb,zi=e.mx_srgb_texture_to_lin_rec709,Zi=e.mx_subtract,Xi=e.mx_timer,Hi=e.mx_transform_uv,Ki=e.mx_unifiednoise2d,Yi=e.mx_unifiednoise3d,Ji=e.mx_worley_noise_float,Qi=e.mx_worley_noise_vec2,$i=e.mx_worley_noise_vec3,en=e.negate,tn=e.neutralToneMapping,rn=e.nodeArray,an=e.nodeImmutable,on=e.nodeObject,nn=e.nodeObjectIntent,ln=e.nodeObjects,sn=e.nodeProxy,cn=e.nodeProxyIntent,mn=e.normalFlat,un=e.normalGeometry,pn=e.normalLocal,dn=e.normalMap,gn=e.normalView,xn=e.normalViewGeometry,hn=e.normalWorld,bn=e.normalWorldGeometry,fn=e.normalize,vn=e.not,wn=e.notEqual,_n=e.numWorkgroups,Sn=e.objectDirection,Tn=e.objectGroup,yn=e.objectPosition,Vn=e.objectRadius,Mn=e.objectScale,Fn=e.objectViewPosition,Dn=e.objectWorldMatrix,In=e.OnBeforeObjectUpdate,Bn=e.OnBeforeMaterialUpdate,Cn=e.OnObjectUpdate,Pn=e.OnMaterialUpdate,An=e.oneMinus,Nn=e.or,Rn=e.orthographicDepthToViewZ,kn=e.oscSawtooth,On=e.oscSine,Ln=e.oscSquare,Un=e.oscTriangle,Gn=e.output,jn=e.outputStruct,En=e.overlay,Wn=e.overloadingFn,qn=e.packHalf2x16,zn=e.packSnorm2x16,Zn=e.packUnorm2x16,Xn=e.parabola,Hn=e.parallaxDirection,Kn=e.parallaxUV,Yn=e.parameter,Jn=e.pass,Qn=e.passTexture,$n=e.pcurve,el=e.perspectiveDepthToViewZ,tl=e.pmremTexture,rl=e.pointShadow,al=e.pointUV,ol=e.pointWidth,il=e.positionGeometry,nl=e.positionLocal,ll=e.positionPrevious,sl=e.positionView,cl=e.positionViewDirection,ml=e.positionWorld,ul=e.positionWorldDirection,pl=e.posterize,dl=e.pow,gl=e.pow2,xl=e.pow3,hl=e.pow4,bl=e.premultiplyAlpha,
fl=e.property,vl=e.radians,wl=e.rand,_l=e.range,Sl=e.rangeFog,Tl=e.rangeFogFactor,yl=e.reciprocal,Vl=e.reference,Ml=e.referenceBuffer,Fl=e.reflect,Dl=e.reflectVector,Il=e.reflectView,Bl=e.reflector,Cl=e.refract,Pl=e.refractVector,Al=e.refractView,Nl=e.reinhardToneMapping,Rl=e.remap,kl=e.remapClamp,Ol=e.renderGroup,Ll=e.renderOutput,Ul=e.rendererReference,Gl=e.replaceDefaultUV,jl=e.rotate,El=e.rotateUV,Wl=e.roughness,ql=e.round,zl=e.rtt,Zl=e.sRGBTransferEOTF,Xl=e.sRGBTransferOETF,Hl=e.sample,Kl=e.sampler,Yl=e.samplerComparison,Jl=e.saturate,Ql=e.saturation,$l=e.screen,es=e.screenCoordinate,ts=e.screenDPR,rs=e.screenSize,as=e.screenUV,os=e.scriptable,is=e.scriptableValue,ns=e.select,ls=e.setCurrentStack,ss=e.setName,cs=e.shaderStages,ms=e.shadow,us=e.shadowPositionWorld,ps=e.shapeCircle,ds=e.sharedUniformGroup,gs=e.sheen,xs=e.sheenRoughness,hs=e.shiftLeft,bs=e.shiftRight,fs=e.shininess,vs=e.sign,ws=e.sin,_s=e.sinc,Ss=e.skinning,Ts=e.smoothstep,ys=e.smoothstepElement,Vs=e.specularColor,Ms=e.specularF90,Fs=e.spherizeUV,Ds=e.split,Is=e.spritesheetUV,Bs=e.sqrt,Cs=e.stack,Ps=e.step,As=e.stepElement,Ns=e.storage,Rs=e.storageBarrier,ks=e.storageObject,Os=e.storageTexture,Ls=e.string,Us=e.struct,Gs=e.sub,js=e.subgroupAdd,Es=e.subgroupAll,Ws=e.subgroupAnd,qs=e.subgroupAny,zs=e.subgroupBallot,Zs=e.subgroupBroadcast,Xs=e.subgroupBroadcastFirst,Hs=e.subBuild,Ks=e.subgroupElect,Ys=e.subgroupExclusiveAdd,Js=e.subgroupExclusiveMul,Qs=e.subgroupInclusiveAdd,$s=e.subgroupInclusiveMul,ec=e.subgroupIndex,tc=e.subgroupMax,rc=e.subgroupMin,ac=e.subgroupMul,oc=e.subgroupOr,ic=e.subgroupShuffle,nc=e.subgroupShuffleDown,lc=e.subgroupShuffleUp,sc=e.subgroupShuffleXor,cc=e.subgroupSize,mc=e.subgroupXor,uc=e.tan,pc=e.tangentGeometry,dc=e.tangentLocal,gc=e.tangentView,xc=e.tangentWorld,hc=e.texture,bc=e.texture3D,fc=e.textureBarrier,vc=e.textureBicubic,wc=e.textureBicubicLevel,_c=e.textureCubeUV,Sc=e.textureLoad,Tc=e.textureSize,yc=e.textureLevel,Vc=e.textureStore,Mc=e.thickness,Fc=e.time,Dc=e.t
oneMapping,Ic=e.toneMappingExposure,Bc=e.toonOutlinePass,Cc=e.transformDirection,Pc=e.transformNormal,Ac=e.transformNormalToView,Nc=e.transformedClearcoatNormalView,Rc=e.transformedNormalView,kc=e.transformedNormalWorld,Oc=e.transmission,Lc=e.transpose,Uc=e.triNoise3D,Gc=e.triplanarTexture,jc=e.triplanarTextures,Ec=e.trunc,Wc=e.uint,qc=e.uintBitsToFloat,zc=e.uniform,Zc=e.uniformArray,Xc=e.uniformCubeTexture,Hc=e.uniformGroup,Kc=e.uniformFlow,Yc=e.uniformTexture,Jc=e.unpackHalf2x16,Qc=e.unpackSnorm2x16,$c=e.unpackUnorm2x16,em=e.unpremultiplyAlpha,tm=e.userData,rm=e.uv,am=e.uvec2,om=e.uvec3,im=e.uvec4,nm=e.varying,lm=e.varyingProperty,sm=e.vec2,cm=e.vec3,mm=e.vec4,um=e.vectorComponents,pm=e.velocity,dm=e.vertexColor,gm=e.vertexIndex,xm=e.vertexStage,hm=e.vibrance,bm=e.viewZToLogarithmicDepth,fm=e.viewZToOrthographicDepth,vm=e.viewZToPerspectiveDepth,wm=e.viewport,_m=e.viewportCoordinate,Sm=e.viewportDepthTexture,Tm=e.viewportLinearDepth,ym=e.viewportMipTexture,Vm=e.viewportResolution,Mm=e.viewportSafeUV,Fm=e.viewportSharedTexture,Dm=e.viewportSize,Im=e.viewportTexture,Bm=e.viewportUV,Cm=e.wgsl,Pm=e.wgslFn,Am=e.workgroupArray,Nm=e.workgroupBarrier,Rm=e.workgroupId,km=e.workingToColorSpace,Om=e.xor;export{t as BRDF_GGX,r as BRDF_Lambert,a as BasicPointShadowFilter,o as BasicShadowFilter,i as Break,n as Const,l as Continue,s as DFGLUT,c as D_GGX,m as Discard,u as EPSILON,p as F_Schlick,d as Fn,M as HALF_PI,g as INFINITY,x as If,h as Loop,b as NodeAccess,f as NodeShaderStage,v as NodeType,w as NodeUpdateType,Bn as OnBeforeMaterialUpdate,In as OnBeforeObjectUpdate,Pn as OnMaterialUpdate,Cn as OnObjectUpdate,_ as PCFShadowFilter,S as PCFSoftShadowFilter,T as PI,y as PI2,F as PointShadowFilter,D as Return,I as Schlick_to_F0,B as ScriptableNodeResources,C as ShaderNode,P as Stack,A as Switch,N as TBNViewMatrix,V as TWO_PI,R as VSMShadowFilter,k as V_GGX_SmithCorrelated,O as Var,L as VarIntent,U as abs,G as acesFilmicToneMapping,j as acos,E as add,W as addMethodChaining,q as 
addNodeElement,z as agxToneMapping,Z as all,X as alphaT,H as and,K as anisotropy,Y as anisotropyB,J as anisotropyT,Q as any,$ as append,ee as array,te as arrayBuffer,re as asin,ae as assign,oe as atan,ie as atan2,ne as atomicAdd,le as atomicAnd,se as atomicFunc,ce as atomicLoad,me as atomicMax,ue as atomicMin,pe as atomicOr,de as atomicStore,ge as atomicSub,xe as atomicXor,he as attenuationColor,be as attenuationDistance,fe as attribute,ve as attributeArray,we as backgroundBlurriness,_e as backgroundIntensity,Se as backgroundRotation,Te as batch,ye as bentNormalView,Ve as billboarding,Me as bitAnd,Fe as bitNot,De as bitOr,Ie as bitXor,Be as bitangentGeometry,Ce as bitangentLocal,Pe as bitangentView,Ae as bitangentWorld,Ne as bitcast,Re as blendBurn,ke as blendColor,Oe as blendDodge,Le as blendOverlay,Ue as blendScreen,Ge as blur,je as bool,Ee as buffer,We as bufferAttribute,Ze as builtin,Xe as builtinAOContext,He as builtinShadowContext,qe as bumpMap,ze as burn,Ke as bvec2,Ye as bvec3,Je as bvec4,Qe as bypass,$e as cache,et as call,tt as cameraFar,rt as cameraIndex,at as cameraNear,ot as cameraNormalMatrix,it as cameraPosition,nt as cameraProjectionMatrix,lt as cameraProjectionMatrixInverse,st as cameraViewMatrix,ct as cameraViewport,mt as cameraWorldMatrix,ut as cbrt,pt as cdl,dt as ceil,gt as checker,xt as cineonToneMapping,ht as clamp,bt as clearcoat,ft as clearcoatNormalView,vt as clearcoatRoughness,wt as code,_t as color,St as colorSpaceToWorking,Tt as colorToDirection,yt as compute,Vt as computeKernel,Mt as computeSkinning,Ft as context,Dt as convert,It as convertColorSpace,Bt as convertToTexture,Nt as cos,Ct as countLeadingZeros,Pt as countOneBits,At as countTrailingZeros,Rt as cross,kt as cubeTexture,Ot as cubeTextureBase,Lt as dFdx,Ut as dFdy,Gt as dashSize,jt as debug,Et as decrement,Wt as decrementBefore,qt as defaultBuildStages,zt as defaultShaderStages,Zt as defined,Xt as degrees,Ht as deltaTime,Kt as densityFog,Yt as densityFogFactor,Jt as depth,Qt as 
depthPass,$t as determinant,er as difference,tr as diffuseColor,rr as directPointLight,ar as directionToColor,or as directionToFaceDirection,ir as dispersion,nr as distance,lr as div,sr as dodge,cr as dot,mr as drawIndex,ur as dynamicBufferAttribute,pr as element,dr as emissive,gr as equal,xr as equals,hr as equirectUV,br as exp,fr as exp2,vr as expression,wr as faceDirection,_r as faceForward,Sr as faceforward,Tr as float,yr as floatBitsToInt,Vr as floatBitsToUint,Mr as floor,Fr as fog,Dr as fract,Ir as frameGroup,Br as frameId,Cr as frontFacing,Pr as fwidth,Ar as gain,Nr as gapSize,Rr as getConstNodeType,kr as getCurrentStack,Or as getDirection,Lr as getDistanceAttenuation,Ur as getGeometryRoughness,Gr as getNormalFromDepth,Wr as getParallaxCorrectNormal,qr as getRoughness,zr as getScreenPosition,Zr as getShIrradianceAt,Xr as getShadowMaterial,Hr as getShadowRenderObjectFunction,Kr as getTextureIndex,Yr as getViewPosition,Jr as globalId,Qr as glsl,$r as glslFn,ea as grayscale,ta as greaterThan,ra as greaterThanEqual,aa as hash,oa as highpModelNormalViewMatrix,ia as highpModelViewMatrix,na as hue,la as increment,sa as incrementBefore,ca as instance,ma as instanceIndex,ua as instancedArray,pa as instancedBufferAttribute,da as instancedDynamicBufferAttribute,ga as instancedMesh,xa as int,ha as intBitsToFloat,jr as interleavedGradientNoise,ba as inverse,fa as inverseSqrt,va as inversesqrt,wa as invocationLocalIndex,_a as invocationSubgroupIndex,Sa as ior,Ta as iridescence,ya as iridescenceIOR,Va as iridescenceThickness,Ma as ivec2,Fa as ivec3,Da as ivec4,Ia as js,Ba as label,Ca as length,Pa as lengthSq,Aa as lessThan,Na as lessThanEqual,Ra as lightPosition,ka as lightProjectionUV,Oa as lightShadowMatrix,La as lightTargetDirection,Ua as lightTargetPosition,Ga as lightViewPosition,ja as lightingContext,Ea as lights,Wa as linearDepth,qa as linearToneMapping,za as localId,Za as log,Xa as log2,Ha as logarithmicDepthToViewZ,Ka as luminance,Ya as mat2,Ja as mat3,Qa as 
mat4,$a as matcapUV,eo as materialAO,to as materialAlphaTest,ro as materialAnisotropy,ao as materialAnisotropyVector,oo as materialAttenuationColor,io as materialAttenuationDistance,no as materialClearcoat,lo as materialClearcoatNormal,so as materialClearcoatRoughness,co as materialColor,mo as materialDispersion,uo as materialEmissive,po as materialEnvIntensity,go as materialEnvRotation,xo as materialIOR,ho as materialIridescence,bo as materialIridescenceIOR,fo as materialIridescenceThickness,vo as materialLightMap,wo as materialLineDashOffset,_o as materialLineDashSize,So as materialLineGapSize,To as materialLineScale,yo as materialLineWidth,Vo as materialMetalness,Mo as materialNormal,Fo as materialOpacity,Do as materialPointSize,Io as materialReference,Bo as materialReflectivity,Co as materialRefractionRatio,Po as materialRotation,Ao as materialRoughness,No as materialSheen,Ro as materialSheenRoughness,ko as materialShininess,Oo as materialSpecular,Lo as materialSpecularColor,Uo as materialSpecularIntensity,Go as materialSpecularStrength,jo as materialThickness,Eo as materialTransmission,Wo as max,qo as maxMipLevel,zo as mediumpModelViewMatrix,Zo as metalness,Xo as min,Ho as mix,Ko as mixElement,Yo as mod,Jo as modInt,Qo as modelDirection,$o as modelNormalMatrix,ei as modelPosition,ti as modelRadius,ri as modelScale,ai as modelViewMatrix,oi as modelViewPosition,ii as modelViewProjection,ni as modelWorldMatrix,li as modelWorldMatrixInverse,si as morphReference,ci as mrt,mi as mul,ui as mx_aastep,pi as mx_add,di as mx_atan2,gi as mx_cell_noise_float,xi as mx_contrast,hi as mx_divide,bi as mx_fractal_noise_float,fi as mx_fractal_noise_vec2,vi as mx_fractal_noise_vec3,wi as mx_fractal_noise_vec4,_i as mx_frame,Si as mx_heighttonormal,Ti as mx_hsvtorgb,yi as mx_ifequal,Vi as mx_ifgreater,Mi as mx_ifgreatereq,Fi as mx_invert,Di as mx_modulo,Ii as mx_multiply,Bi as mx_noise_float,Ci as mx_noise_vec3,Pi as mx_noise_vec4,Ai as mx_place2d,Ni as mx_power,Ri as mx_ramp4,ki 
as mx_ramplr,Oi as mx_ramptb,Li as mx_rgbtohsv,Ui as mx_rotate2d,Gi as mx_rotate3d,ji as mx_safepower,Ei as mx_separate,Wi as mx_splitlr,qi as mx_splittb,zi as mx_srgb_texture_to_lin_rec709,Zi as mx_subtract,Xi as mx_timer,Hi as mx_transform_uv,Ki as mx_unifiednoise2d,Yi as mx_unifiednoise3d,Ji as mx_worley_noise_float,Qi as mx_worley_noise_vec2,$i as mx_worley_noise_vec3,en as negate,tn as neutralToneMapping,rn as nodeArray,an as nodeImmutable,on as nodeObject,nn as nodeObjectIntent,ln as nodeObjects,sn as nodeProxy,cn as nodeProxyIntent,mn as normalFlat,un as normalGeometry,pn as normalLocal,dn as normalMap,gn as normalView,xn as normalViewGeometry,hn as normalWorld,bn as normalWorldGeometry,fn as normalize,vn as not,wn as notEqual,_n as numWorkgroups,Sn as objectDirection,Tn as objectGroup,yn as objectPosition,Vn as objectRadius,Mn as objectScale,Fn as objectViewPosition,Dn as objectWorldMatrix,An as oneMinus,Nn as or,Rn as orthographicDepthToViewZ,kn as oscSawtooth,On as oscSine,Ln as oscSquare,Un as oscTriangle,Gn as output,jn as outputStruct,En as overlay,Wn as overloadingFn,qn as packHalf2x16,zn as packSnorm2x16,Zn as packUnorm2x16,Xn as parabola,Hn as parallaxDirection,Kn as parallaxUV,Yn as parameter,Jn as pass,Qn as passTexture,$n as pcurve,el as perspectiveDepthToViewZ,tl as pmremTexture,rl as pointShadow,al as pointUV,ol as pointWidth,il as positionGeometry,nl as positionLocal,ll as positionPrevious,sl as positionView,cl as positionViewDirection,ml as positionWorld,ul as positionWorldDirection,pl as posterize,dl as pow,gl as pow2,xl as pow3,hl as pow4,bl as premultiplyAlpha,fl as property,vl as radians,wl as rand,_l as range,Sl as rangeFog,Tl as rangeFogFactor,yl as reciprocal,Vl as reference,Ml as referenceBuffer,Fl as reflect,Dl as reflectVector,Il as reflectView,Bl as reflector,Cl as refract,Pl as refractVector,Al as refractView,Nl as reinhardToneMapping,Rl as remap,kl as remapClamp,Ol as renderGroup,Ll as renderOutput,Ul as rendererReference,Gl as 
replaceDefaultUV,jl as rotate,El as rotateUV,Wl as roughness,ql as round,zl as rtt,Zl as sRGBTransferEOTF,Xl as sRGBTransferOETF,Hl as sample,Kl as sampler,Yl as samplerComparison,Jl as saturate,Ql as saturation,$l as screen,es as screenCoordinate,ts as screenDPR,rs as screenSize,as as screenUV,os as scriptable,is as scriptableValue,ns as select,ls as setCurrentStack,ss as setName,cs as shaderStages,ms as shadow,us as shadowPositionWorld,ps as shapeCircle,ds as sharedUniformGroup,gs as sheen,xs as sheenRoughness,hs as shiftLeft,bs as shiftRight,fs as shininess,vs as sign,ws as sin,_s as sinc,Ss as skinning,Ts as smoothstep,ys as smoothstepElement,Vs as specularColor,Ms as specularF90,Fs as spherizeUV,Ds as split,Is as spritesheetUV,Bs as sqrt,Cs as stack,Ps as step,As as stepElement,Ns as storage,Rs as storageBarrier,ks as storageObject,Os as storageTexture,Ls as string,Us as struct,Gs as sub,Hs as subBuild,js as subgroupAdd,Es as subgroupAll,Ws as subgroupAnd,qs as subgroupAny,zs as subgroupBallot,Zs as subgroupBroadcast,Xs as subgroupBroadcastFirst,Ks as subgroupElect,Ys as subgroupExclusiveAdd,Js as subgroupExclusiveMul,Qs as subgroupInclusiveAdd,$s as subgroupInclusiveMul,ec as subgroupIndex,tc as subgroupMax,rc as subgroupMin,ac as subgroupMul,oc as subgroupOr,ic as subgroupShuffle,nc as subgroupShuffleDown,lc as subgroupShuffleUp,sc as subgroupShuffleXor,cc as subgroupSize,mc as subgroupXor,uc as tan,pc as tangentGeometry,dc as tangentLocal,gc as tangentView,xc as tangentWorld,hc as texture,bc as texture3D,fc as textureBarrier,vc as textureBicubic,wc as textureBicubicLevel,_c as textureCubeUV,yc as textureLevel,Sc as textureLoad,Tc as textureSize,Vc as textureStore,Mc as thickness,Fc as time,Dc as toneMapping,Ic as toneMappingExposure,Bc as toonOutlinePass,Cc as transformDirection,Pc as transformNormal,Ac as transformNormalToView,Nc as transformedClearcoatNormalView,Rc as transformedNormalView,kc as transformedNormalWorld,Oc as transmission,Lc as transpose,Uc 
as triNoise3D,Gc as triplanarTexture,jc as triplanarTextures,Ec as trunc,Wc as uint,qc as uintBitsToFloat,zc as uniform,Zc as uniformArray,Xc as uniformCubeTexture,Kc as uniformFlow,Hc as uniformGroup,Yc as uniformTexture,Jc as unpackHalf2x16,Qc as unpackSnorm2x16,$c as unpackUnorm2x16,em as unpremultiplyAlpha,tm as userData,rm as uv,am as uvec2,om as uvec3,im as uvec4,nm as varying,lm as varyingProperty,sm as vec2,cm as vec3,mm as vec4,um as vectorComponents,pm as velocity,dm as vertexColor,gm as vertexIndex,xm as vertexStage,hm as vibrance,bm as viewZToLogarithmicDepth,fm as viewZToOrthographicDepth,vm as viewZToPerspectiveDepth,wm as viewport,_m as viewportCoordinate,Sm as viewportDepthTexture,Tm as viewportLinearDepth,ym as viewportMipTexture,Vm as viewportResolution,Mm as viewportSafeUV,Fm as viewportSharedTexture,Dm as viewportSize,Im as viewportTexture,Bm as viewportUV,Er as vogelDiskSample,Cm as wgsl,Pm as wgslFn,Am as workgroupArray,Nm as workgroupBarrier,Rm as workgroupId,km as workingToColorSpace,Om as xor};
// Auto-generated (minified) re-export shim for the Three Shading Language.
// It imports the `TSL` namespace object from 'three/webgpu', binds each of its
// members to a short mangled constant (t, r, a, …), and re-exports every member
// under its original public name, flattening `TSL.*` into named ESM exports
// (e.g. `import { vec3, Fn, uniform } from '…'`).
// NOTE(review): generated/minified code — do not hand-edit the alias list;
// regenerate it from the TSL namespace instead.
import{TSL as e}from"three/webgpu";const t=e.BRDF_GGX,r=e.BRDF_Lambert,a=e.BasicPointShadowFilter,o=e.BasicShadowFilter,i=e.Break,n=e.Const,l=e.Continue,s=e.DFGLUT,c=e.D_GGX,m=e.Discard,u=e.EPSILON,p=e.F_Schlick,d=e.Fn,g=e.INFINITY,x=e.If,h=e.Loop,f=e.NodeAccess,b=e.NodeShaderStage,v=e.NodeType,w=e.NodeUpdateType,_=e.PCFShadowFilter,S=e.PCFSoftShadowFilter,T=e.PI,y=e.PI2,V=e.TWO_PI,M=e.HALF_PI,F=e.PointShadowFilter,D=e.Return,I=e.Schlick_to_F0,B=e.ShaderNode,C=e.Stack,P=e.Switch,R=e.TBNViewMatrix,A=e.VSMShadowFilter,O=e.V_GGX_SmithCorrelated,k=e.Var,N=e.VarIntent,L=e.abs,U=e.acesFilmicToneMapping,G=e.acos,j=e.add,E=e.addMethodChaining,W=e.addNodeElement,q=e.agxToneMapping,z=e.all,Z=e.alphaT,X=e.and,H=e.anisotropy,K=e.anisotropyB,Y=e.anisotropyT,J=e.any,Q=e.append,$=e.array,ee=e.arrayBuffer,te=e.asin,re=e.assign,ae=e.atan,oe=e.atomicAdd,ie=e.atomicAnd,ne=e.atomicFunc,le=e.atomicLoad,se=e.atomicMax,ce=e.atomicMin,me=e.atomicOr,ue=e.atomicStore,pe=e.atomicSub,de=e.atomicXor,ge=e.attenuationColor,xe=e.attenuationDistance,he=e.attribute,fe=e.attributeArray,be=e.backgroundBlurriness,ve=e.backgroundIntensity,we=e.backgroundRotation,_e=e.batch,Se=e.bentNormalView,Te=e.billboarding,ye=e.bitAnd,Ve=e.bitNot,Me=e.bitOr,Fe=e.bitXor,De=e.bitangentGeometry,Ie=e.bitangentLocal,Be=e.bitangentView,Ce=e.bitangentWorld,Pe=e.bitcast,Re=e.blendBurn,Ae=e.blendColor,Oe=e.blendDodge,ke=e.blendOverlay,Ne=e.blendScreen,Le=e.blur,Ue=e.bool,Ge=e.buffer,je=e.bufferAttribute,Ee=e.bumpMap,We=e.builtin,qe=e.builtinAOContext,ze=e.builtinShadowContext,Ze=e.bvec2,Xe=e.bvec3,He=e.bvec4,Ke=e.bypass,Ye=e.cache,Je=e.call,Qe=e.cameraFar,$e=e.cameraIndex,et=e.cameraNear,tt=e.cameraNormalMatrix,rt=e.cameraPosition,at=e.cameraProjectionMatrix,ot=e.cameraProjectionMatrixInverse,it=e.cameraViewMatrix,nt=e.cameraViewport,lt=e.cameraWorldMatrix,st=e.cbrt,ct=e.cdl,mt=e.ceil,ut=e.checker,pt=e.cineonToneMapping,dt=e.clamp,gt=e.clearcoat,xt=e.clearcoatNormalView,ht=e.clearcoatRoughness,ft=e.clipSpace,bt=e.code,vt=e.c
olor,wt=e.colorSpaceToWorking,_t=e.colorToDirection,St=e.compute,Tt=e.computeKernel,yt=e.computeSkinning,Vt=e.context,Mt=e.convert,Ft=e.convertColorSpace,Dt=e.convertToTexture,It=e.countLeadingZeros,Bt=e.countOneBits,Ct=e.countTrailingZeros,Pt=e.cos,Rt=e.cross,At=e.cubeTexture,Ot=e.cubeTextureBase,kt=e.dFdx,Nt=e.dFdy,Lt=e.dashSize,Ut=e.debug,Gt=e.decrement,jt=e.decrementBefore,Et=e.defaultBuildStages,Wt=e.defaultShaderStages,qt=e.defined,zt=e.degrees,Zt=e.deltaTime,Xt=e.densityFog,Ht=e.densityFogFactor,Kt=e.depth,Yt=e.depthPass,Jt=e.determinant,Qt=e.difference,$t=e.diffuseColor,er=e.directPointLight,tr=e.directionToColor,rr=e.directionToFaceDirection,ar=e.dispersion,or=e.distance,ir=e.div,nr=e.dot,lr=e.drawIndex,sr=e.dynamicBufferAttribute,cr=e.element,mr=e.emissive,ur=e.equal,pr=e.equirectUV,dr=e.exp,gr=e.exp2,xr=e.exponentialHeightFogFactor,hr=e.expression,fr=e.faceDirection,br=e.faceForward,vr=e.faceforward,wr=e.float,_r=e.floatBitsToInt,Sr=e.floatBitsToUint,Tr=e.floor,yr=e.fog,Vr=e.fract,Mr=e.frameGroup,Fr=e.frameId,Dr=e.frontFacing,Ir=e.fwidth,Br=e.gain,Cr=e.gapSize,Pr=e.getConstNodeType,Rr=e.getCurrentStack,Ar=e.getDirection,Or=e.getDistanceAttenuation,kr=e.getGeometryRoughness,Nr=e.getNormalFromDepth,Lr=e.interleavedGradientNoise,Ur=e.vogelDiskSample,Gr=e.getParallaxCorrectNormal,jr=e.getRoughness,Er=e.getScreenPosition,Wr=e.getShIrradianceAt,qr=e.getShadowMaterial,zr=e.getShadowRenderObjectFunction,Zr=e.getTextureIndex,Xr=e.getViewPosition,Hr=e.globalId,Kr=e.glsl,Yr=e.glslFn,Jr=e.grayscale,Qr=e.greaterThan,$r=e.greaterThanEqual,ea=e.hash,ta=e.highpModelNormalViewMatrix,ra=e.highpModelViewMatrix,aa=e.hue,oa=e.increment,ia=e.incrementBefore,na=e.instance,la=e.instanceIndex,sa=e.instancedArray,ca=e.instancedBufferAttribute,ma=e.instancedDynamicBufferAttribute,ua=e.instancedMesh,pa=e.int,da=e.intBitsToFloat,ga=e.inverse,xa=e.inverseSqrt,ha=e.inversesqrt,fa=e.invocationLocalIndex,ba=e.invocationSubgroupIndex,va=e.ior,wa=e.iridescence,_a=e.iridescenceIOR,Sa=e.irid
escenceThickness,Ta=e.ivec2,ya=e.ivec3,Va=e.ivec4,Ma=e.js,Fa=e.label,Da=e.length,Ia=e.lengthSq,Ba=e.lessThan,Ca=e.lessThanEqual,Pa=e.lightPosition,Ra=e.lightProjectionUV,Aa=e.lightShadowMatrix,Oa=e.lightTargetDirection,ka=e.lightTargetPosition,Na=e.lightViewPosition,La=e.lightingContext,Ua=e.lights,Ga=e.linearDepth,ja=e.linearToneMapping,Ea=e.localId,Wa=e.log,qa=e.log2,za=e.logarithmicDepthToViewZ,Za=e.luminance,Xa=e.mat2,Ha=e.mat3,Ka=e.mat4,Ya=e.matcapUV,Ja=e.materialAO,Qa=e.materialAlphaTest,$a=e.materialAnisotropy,eo=e.materialAnisotropyVector,to=e.materialAttenuationColor,ro=e.materialAttenuationDistance,ao=e.materialClearcoat,oo=e.materialClearcoatNormal,io=e.materialClearcoatRoughness,no=e.materialColor,lo=e.materialDispersion,so=e.materialEmissive,co=e.materialEnvIntensity,mo=e.materialEnvRotation,uo=e.materialIOR,po=e.materialIridescence,go=e.materialIridescenceIOR,xo=e.materialIridescenceThickness,ho=e.materialLightMap,fo=e.materialLineDashOffset,bo=e.materialLineDashSize,vo=e.materialLineGapSize,wo=e.materialLineScale,_o=e.materialLineWidth,So=e.materialMetalness,To=e.materialNormal,yo=e.materialOpacity,Vo=e.materialPointSize,Mo=e.materialReference,Fo=e.materialReflectivity,Do=e.materialRefractionRatio,Io=e.materialRotation,Bo=e.materialRoughness,Co=e.materialSheen,Po=e.materialSheenRoughness,Ro=e.materialShininess,Ao=e.materialSpecular,Oo=e.materialSpecularColor,ko=e.materialSpecularIntensity,No=e.materialSpecularStrength,Lo=e.materialThickness,Uo=e.materialTransmission,Go=e.max,jo=e.maxMipLevel,Eo=e.mediumpModelViewMatrix,Wo=e.metalness,qo=e.min,zo=e.mix,Zo=e.mixElement,Xo=e.mod,Ho=e.modInt,Ko=e.modelDirection,Yo=e.modelNormalMatrix,Jo=e.modelPosition,Qo=e.modelRadius,$o=e.modelScale,ei=e.modelViewMatrix,ti=e.modelViewPosition,ri=e.modelViewProjection,ai=e.modelWorldMatrix,oi=e.modelWorldMatrixInverse,ii=e.morphReference,ni=e.mrt,li=e.mul,si=e.mx_aastep,ci=e.mx_add,mi=e.mx_atan2,ui=e.mx_cell_noise_float,pi=e.mx_contrast,di=e.mx_divide,gi=e.mx_fractal_noi
se_float,xi=e.mx_fractal_noise_vec2,hi=e.mx_fractal_noise_vec3,fi=e.mx_fractal_noise_vec4,bi=e.mx_frame,vi=e.mx_heighttonormal,wi=e.mx_hsvtorgb,_i=e.mx_ifequal,Si=e.mx_ifgreater,Ti=e.mx_ifgreatereq,yi=e.mx_invert,Vi=e.mx_modulo,Mi=e.mx_multiply,Fi=e.mx_noise_float,Di=e.mx_noise_vec3,Ii=e.mx_noise_vec4,Bi=e.mx_place2d,Ci=e.mx_power,Pi=e.mx_ramp4,Ri=e.mx_ramplr,Ai=e.mx_ramptb,Oi=e.mx_rgbtohsv,ki=e.mx_rotate2d,Ni=e.mx_rotate3d,Li=e.mx_safepower,Ui=e.mx_separate,Gi=e.mx_splitlr,ji=e.mx_splittb,Ei=e.mx_srgb_texture_to_lin_rec709,Wi=e.mx_subtract,qi=e.mx_timer,zi=e.mx_transform_uv,Zi=e.mx_unifiednoise2d,Xi=e.mx_unifiednoise3d,Hi=e.mx_worley_noise_float,Ki=e.mx_worley_noise_vec2,Yi=e.mx_worley_noise_vec3,Ji=e.negate,Qi=e.neutralToneMapping,$i=e.nodeArray,en=e.nodeImmutable,tn=e.nodeObject,rn=e.nodeObjectIntent,an=e.nodeObjects,on=e.nodeProxy,nn=e.nodeProxyIntent,ln=e.normalFlat,sn=e.normalGeometry,cn=e.normalLocal,mn=e.normalMap,un=e.normalView,pn=e.normalViewGeometry,dn=e.normalWorld,gn=e.normalWorldGeometry,xn=e.normalize,hn=e.not,fn=e.notEqual,bn=e.numWorkgroups,vn=e.objectDirection,wn=e.objectGroup,_n=e.objectPosition,Sn=e.objectRadius,Tn=e.objectScale,yn=e.objectViewPosition,Vn=e.objectWorldMatrix,Mn=e.OnBeforeObjectUpdate,Fn=e.OnBeforeMaterialUpdate,Dn=e.OnObjectUpdate,In=e.OnMaterialUpdate,Bn=e.oneMinus,Cn=e.or,Pn=e.orthographicDepthToViewZ,Rn=e.oscSawtooth,An=e.oscSine,On=e.oscSquare,kn=e.oscTriangle,Nn=e.output,Ln=e.outputStruct,Un=e.overloadingFn,Gn=e.packHalf2x16,jn=e.packSnorm2x16,En=e.packUnorm2x16,Wn=e.parabola,qn=e.parallaxDirection,zn=e.parallaxUV,Zn=e.parameter,Xn=e.pass,Hn=e.passTexture,Kn=e.pcurve,Yn=e.perspectiveDepthToViewZ,Jn=e.pmremTexture,Qn=e.pointShadow,$n=e.pointUV,el=e.pointWidth,tl=e.positionGeometry,rl=e.positionLocal,al=e.positionPrevious,ol=e.positionView,il=e.positionViewDirection,nl=e.positionWorld,ll=e.positionWorldDirection,sl=e.posterize,cl=e.pow,ml=e.pow2,ul=e.pow3,pl=e.pow4,dl=e.premultiplyAlpha,gl=e.property,xl=e.radians,hl=e.rand,fl
=e.range,bl=e.rangeFog,vl=e.rangeFogFactor,wl=e.reciprocal,_l=e.reference,Sl=e.referenceBuffer,Tl=e.reflect,yl=e.reflectVector,Vl=e.reflectView,Ml=e.reflector,Fl=e.refract,Dl=e.refractVector,Il=e.refractView,Bl=e.reinhardToneMapping,Cl=e.remap,Pl=e.remapClamp,Rl=e.renderGroup,Al=e.renderOutput,Ol=e.rendererReference,kl=e.replaceDefaultUV,Nl=e.rotate,Ll=e.rotateUV,Ul=e.roughness,Gl=e.round,jl=e.rtt,El=e.sRGBTransferEOTF,Wl=e.sRGBTransferOETF,ql=e.sample,zl=e.sampler,Zl=e.samplerComparison,Xl=e.saturate,Hl=e.saturation,Kl=e.screen,Yl=e.screenCoordinate,Jl=e.screenDPR,Ql=e.screenSize,$l=e.screenUV,es=e.select,ts=e.setCurrentStack,rs=e.setName,as=e.shaderStages,os=e.shadow,is=e.shadowPositionWorld,ns=e.shapeCircle,ls=e.sharedUniformGroup,ss=e.sheen,cs=e.sheenRoughness,ms=e.shiftLeft,us=e.shiftRight,ps=e.shininess,ds=e.sign,gs=e.sin,xs=e.sinc,hs=e.skinning,fs=e.smoothstep,bs=e.smoothstepElement,vs=e.specularColor,ws=e.specularF90,_s=e.spherizeUV,Ss=e.split,Ts=e.spritesheetUV,ys=e.sqrt,Vs=e.stack,Ms=e.step,Fs=e.stepElement,Ds=e.storage,Is=e.storageBarrier,Bs=e.storageTexture,Cs=e.string,Ps=e.struct,Rs=e.sub,As=e.subgroupAdd,Os=e.subgroupAll,ks=e.subgroupAnd,Ns=e.subgroupAny,Ls=e.subgroupBallot,Us=e.subgroupBroadcast,Gs=e.subgroupBroadcastFirst,js=e.subBuild,Es=e.subgroupElect,Ws=e.subgroupExclusiveAdd,qs=e.subgroupExclusiveMul,zs=e.subgroupInclusiveAdd,Zs=e.subgroupInclusiveMul,Xs=e.subgroupIndex,Hs=e.subgroupMax,Ks=e.subgroupMin,Ys=e.subgroupMul,Js=e.subgroupOr,Qs=e.subgroupShuffle,$s=e.subgroupShuffleDown,ec=e.subgroupShuffleUp,tc=e.subgroupShuffleXor,rc=e.subgroupSize,ac=e.subgroupXor,oc=e.tan,ic=e.tangentGeometry,nc=e.tangentLocal,lc=e.tangentView,sc=e.tangentWorld,cc=e.texture,mc=e.texture3D,uc=e.textureBarrier,pc=e.textureBicubic,dc=e.textureBicubicLevel,gc=e.textureCubeUV,xc=e.textureLoad,hc=e.textureSize,fc=e.textureLevel,bc=e.textureStore,vc=e.thickness,wc=e.time,_c=e.toneMapping,Sc=e.toneMappingExposure,Tc=e.toonOutlinePass,yc=e.transformDirection,Vc=e.transform
Normal,Mc=e.transformNormalToView,Fc=e.transformedClearcoatNormalView,Dc=e.transformedNormalView,Ic=e.transformedNormalWorld,Bc=e.transmission,Cc=e.transpose,Pc=e.triNoise3D,Rc=e.triplanarTexture,Ac=e.triplanarTextures,Oc=e.trunc,kc=e.uint,Nc=e.uintBitsToFloat,Lc=e.uniform,Uc=e.uniformArray,Gc=e.uniformCubeTexture,jc=e.uniformGroup,Ec=e.uniformFlow,Wc=e.uniformTexture,qc=e.unpackHalf2x16,zc=e.unpackSnorm2x16,Zc=e.unpackUnorm2x16,Xc=e.unpremultiplyAlpha,Hc=e.userData,Kc=e.uv,Yc=e.uvec2,Jc=e.uvec3,Qc=e.uvec4,$c=e.varying,em=e.varyingProperty,tm=e.vec2,rm=e.vec3,am=e.vec4,om=e.vectorComponents,im=e.velocity,nm=e.vertexColor,lm=e.vertexIndex,sm=e.vertexStage,cm=e.vibrance,mm=e.viewZToLogarithmicDepth,um=e.viewZToOrthographicDepth,pm=e.viewZToPerspectiveDepth,dm=e.viewZToReversedOrthographicDepth,gm=e.viewZToReversedPerspectiveDepth,xm=e.viewport,hm=e.viewportCoordinate,fm=e.viewportDepthTexture,bm=e.viewportLinearDepth,vm=e.viewportMipTexture,wm=e.viewportOpaqueMipTexture,_m=e.viewportResolution,Sm=e.viewportSafeUV,Tm=e.viewportSharedTexture,ym=e.viewportSize,Vm=e.viewportTexture,Mm=e.viewportUV,Fm=e.wgsl,Dm=e.wgslFn,Im=e.workgroupArray,Bm=e.workgroupBarrier,Cm=e.workgroupId,Pm=e.workingToColorSpace,Rm=e.xor;export{t as BRDF_GGX,r as BRDF_Lambert,a as BasicPointShadowFilter,o as BasicShadowFilter,i as Break,n as Const,l as Continue,s as DFGLUT,c as D_GGX,m as Discard,u as EPSILON,p as F_Schlick,d as Fn,M as HALF_PI,g as INFINITY,x as If,h as Loop,f as NodeAccess,b as NodeShaderStage,v as NodeType,w as NodeUpdateType,Fn as OnBeforeMaterialUpdate,Mn as OnBeforeObjectUpdate,In as OnMaterialUpdate,Dn as OnObjectUpdate,_ as PCFShadowFilter,S as PCFSoftShadowFilter,T as PI,y as PI2,F as PointShadowFilter,D as Return,I as Schlick_to_F0,B as ShaderNode,C as Stack,P as Switch,R as TBNViewMatrix,V as TWO_PI,A as VSMShadowFilter,O as V_GGX_SmithCorrelated,k as Var,N as VarIntent,L as abs,U as acesFilmicToneMapping,G as acos,j as add,E as addMethodChaining,W as addNodeElement,q as 
agxToneMapping,z as all,Z as alphaT,X as and,H as anisotropy,K as anisotropyB,Y as anisotropyT,J as any,Q as append,$ as array,ee as arrayBuffer,te as asin,re as assign,ae as atan,oe as atomicAdd,ie as atomicAnd,ne as atomicFunc,le as atomicLoad,se as atomicMax,ce as atomicMin,me as atomicOr,ue as atomicStore,pe as atomicSub,de as atomicXor,ge as attenuationColor,xe as attenuationDistance,he as attribute,fe as attributeArray,be as backgroundBlurriness,ve as backgroundIntensity,we as backgroundRotation,_e as batch,Se as bentNormalView,Te as billboarding,ye as bitAnd,Ve as bitNot,Me as bitOr,Fe as bitXor,De as bitangentGeometry,Ie as bitangentLocal,Be as bitangentView,Ce as bitangentWorld,Pe as bitcast,Re as blendBurn,Ae as blendColor,Oe as blendDodge,ke as blendOverlay,Ne as blendScreen,Le as blur,Ue as bool,Ge as buffer,je as bufferAttribute,We as builtin,qe as builtinAOContext,ze as builtinShadowContext,Ee as bumpMap,Ze as bvec2,Xe as bvec3,He as bvec4,Ke as bypass,Ye as cache,Je as call,Qe as cameraFar,$e as cameraIndex,et as cameraNear,tt as cameraNormalMatrix,rt as cameraPosition,at as cameraProjectionMatrix,ot as cameraProjectionMatrixInverse,it as cameraViewMatrix,nt as cameraViewport,lt as cameraWorldMatrix,st as cbrt,ct as cdl,mt as ceil,ut as checker,pt as cineonToneMapping,dt as clamp,gt as clearcoat,xt as clearcoatNormalView,ht as clearcoatRoughness,ft as clipSpace,bt as code,vt as color,wt as colorSpaceToWorking,_t as colorToDirection,St as compute,Tt as computeKernel,yt as computeSkinning,Vt as context,Mt as convert,Ft as convertColorSpace,Dt as convertToTexture,Pt as cos,It as countLeadingZeros,Bt as countOneBits,Ct as countTrailingZeros,Rt as cross,At as cubeTexture,Ot as cubeTextureBase,kt as dFdx,Nt as dFdy,Lt as dashSize,Ut as debug,Gt as decrement,jt as decrementBefore,Et as defaultBuildStages,Wt as defaultShaderStages,qt as defined,zt as degrees,Zt as deltaTime,Xt as densityFog,Ht as densityFogFactor,Kt as depth,Yt as depthPass,Jt as 
determinant,Qt as difference,$t as diffuseColor,er as directPointLight,tr as directionToColor,rr as directionToFaceDirection,ar as dispersion,or as distance,ir as div,nr as dot,lr as drawIndex,sr as dynamicBufferAttribute,cr as element,mr as emissive,ur as equal,pr as equirectUV,dr as exp,gr as exp2,xr as exponentialHeightFogFactor,hr as expression,fr as faceDirection,br as faceForward,vr as faceforward,wr as float,_r as floatBitsToInt,Sr as floatBitsToUint,Tr as floor,yr as fog,Vr as fract,Mr as frameGroup,Fr as frameId,Dr as frontFacing,Ir as fwidth,Br as gain,Cr as gapSize,Pr as getConstNodeType,Rr as getCurrentStack,Ar as getDirection,Or as getDistanceAttenuation,kr as getGeometryRoughness,Nr as getNormalFromDepth,Gr as getParallaxCorrectNormal,jr as getRoughness,Er as getScreenPosition,Wr as getShIrradianceAt,qr as getShadowMaterial,zr as getShadowRenderObjectFunction,Zr as getTextureIndex,Xr as getViewPosition,Hr as globalId,Kr as glsl,Yr as glslFn,Jr as grayscale,Qr as greaterThan,$r as greaterThanEqual,ea as hash,ta as highpModelNormalViewMatrix,ra as highpModelViewMatrix,aa as hue,oa as increment,ia as incrementBefore,na as instance,la as instanceIndex,sa as instancedArray,ca as instancedBufferAttribute,ma as instancedDynamicBufferAttribute,ua as instancedMesh,pa as int,da as intBitsToFloat,Lr as interleavedGradientNoise,ga as inverse,xa as inverseSqrt,ha as inversesqrt,fa as invocationLocalIndex,ba as invocationSubgroupIndex,va as ior,wa as iridescence,_a as iridescenceIOR,Sa as iridescenceThickness,Ta as ivec2,ya as ivec3,Va as ivec4,Ma as js,Fa as label,Da as length,Ia as lengthSq,Ba as lessThan,Ca as lessThanEqual,Pa as lightPosition,Ra as lightProjectionUV,Aa as lightShadowMatrix,Oa as lightTargetDirection,ka as lightTargetPosition,Na as lightViewPosition,La as lightingContext,Ua as lights,Ga as linearDepth,ja as linearToneMapping,Ea as localId,Wa as log,qa as log2,za as logarithmicDepthToViewZ,Za as luminance,Xa as mat2,Ha as mat3,Ka as mat4,Ya as 
matcapUV,Ja as materialAO,Qa as materialAlphaTest,$a as materialAnisotropy,eo as materialAnisotropyVector,to as materialAttenuationColor,ro as materialAttenuationDistance,ao as materialClearcoat,oo as materialClearcoatNormal,io as materialClearcoatRoughness,no as materialColor,lo as materialDispersion,so as materialEmissive,co as materialEnvIntensity,mo as materialEnvRotation,uo as materialIOR,po as materialIridescence,go as materialIridescenceIOR,xo as materialIridescenceThickness,ho as materialLightMap,fo as materialLineDashOffset,bo as materialLineDashSize,vo as materialLineGapSize,wo as materialLineScale,_o as materialLineWidth,So as materialMetalness,To as materialNormal,yo as materialOpacity,Vo as materialPointSize,Mo as materialReference,Fo as materialReflectivity,Do as materialRefractionRatio,Io as materialRotation,Bo as materialRoughness,Co as materialSheen,Po as materialSheenRoughness,Ro as materialShininess,Ao as materialSpecular,Oo as materialSpecularColor,ko as materialSpecularIntensity,No as materialSpecularStrength,Lo as materialThickness,Uo as materialTransmission,Go as max,jo as maxMipLevel,Eo as mediumpModelViewMatrix,Wo as metalness,qo as min,zo as mix,Zo as mixElement,Xo as mod,Ho as modInt,Ko as modelDirection,Yo as modelNormalMatrix,Jo as modelPosition,Qo as modelRadius,$o as modelScale,ei as modelViewMatrix,ti as modelViewPosition,ri as modelViewProjection,ai as modelWorldMatrix,oi as modelWorldMatrixInverse,ii as morphReference,ni as mrt,li as mul,si as mx_aastep,ci as mx_add,mi as mx_atan2,ui as mx_cell_noise_float,pi as mx_contrast,di as mx_divide,gi as mx_fractal_noise_float,xi as mx_fractal_noise_vec2,hi as mx_fractal_noise_vec3,fi as mx_fractal_noise_vec4,bi as mx_frame,vi as mx_heighttonormal,wi as mx_hsvtorgb,_i as mx_ifequal,Si as mx_ifgreater,Ti as mx_ifgreatereq,yi as mx_invert,Vi as mx_modulo,Mi as mx_multiply,Fi as mx_noise_float,Di as mx_noise_vec3,Ii as mx_noise_vec4,Bi as mx_place2d,Ci as mx_power,Pi as mx_ramp4,Ri as 
mx_ramplr,Ai as mx_ramptb,Oi as mx_rgbtohsv,ki as mx_rotate2d,Ni as mx_rotate3d,Li as mx_safepower,Ui as mx_separate,Gi as mx_splitlr,ji as mx_splittb,Ei as mx_srgb_texture_to_lin_rec709,Wi as mx_subtract,qi as mx_timer,zi as mx_transform_uv,Zi as mx_unifiednoise2d,Xi as mx_unifiednoise3d,Hi as mx_worley_noise_float,Ki as mx_worley_noise_vec2,Yi as mx_worley_noise_vec3,Ji as negate,Qi as neutralToneMapping,$i as nodeArray,en as nodeImmutable,tn as nodeObject,rn as nodeObjectIntent,an as nodeObjects,on as nodeProxy,nn as nodeProxyIntent,ln as normalFlat,sn as normalGeometry,cn as normalLocal,mn as normalMap,un as normalView,pn as normalViewGeometry,dn as normalWorld,gn as normalWorldGeometry,xn as normalize,hn as not,fn as notEqual,bn as numWorkgroups,vn as objectDirection,wn as objectGroup,_n as objectPosition,Sn as objectRadius,Tn as objectScale,yn as objectViewPosition,Vn as objectWorldMatrix,Bn as oneMinus,Cn as or,Pn as orthographicDepthToViewZ,Rn as oscSawtooth,An as oscSine,On as oscSquare,kn as oscTriangle,Nn as output,Ln as outputStruct,Un as overloadingFn,Gn as packHalf2x16,jn as packSnorm2x16,En as packUnorm2x16,Wn as parabola,qn as parallaxDirection,zn as parallaxUV,Zn as parameter,Xn as pass,Hn as passTexture,Kn as pcurve,Yn as perspectiveDepthToViewZ,Jn as pmremTexture,Qn as pointShadow,$n as pointUV,el as pointWidth,tl as positionGeometry,rl as positionLocal,al as positionPrevious,ol as positionView,il as positionViewDirection,nl as positionWorld,ll as positionWorldDirection,sl as posterize,cl as pow,ml as pow2,ul as pow3,pl as pow4,dl as premultiplyAlpha,gl as property,xl as radians,hl as rand,fl as range,bl as rangeFog,vl as rangeFogFactor,wl as reciprocal,_l as reference,Sl as referenceBuffer,Tl as reflect,yl as reflectVector,Vl as reflectView,Ml as reflector,Fl as refract,Dl as refractVector,Il as refractView,Bl as reinhardToneMapping,Cl as remap,Pl as remapClamp,Rl as renderGroup,Al as renderOutput,Ol as rendererReference,kl as 
replaceDefaultUV,Nl as rotate,Ll as rotateUV,Ul as roughness,Gl as round,jl as rtt,El as sRGBTransferEOTF,Wl as sRGBTransferOETF,ql as sample,zl as sampler,Zl as samplerComparison,Xl as saturate,Hl as saturation,Kl as screen,Yl as screenCoordinate,Jl as screenDPR,Ql as screenSize,$l as screenUV,es as select,ts as setCurrentStack,rs as setName,as as shaderStages,os as shadow,is as shadowPositionWorld,ns as shapeCircle,ls as sharedUniformGroup,ss as sheen,cs as sheenRoughness,ms as shiftLeft,us as shiftRight,ps as shininess,ds as sign,gs as sin,xs as sinc,hs as skinning,fs as smoothstep,bs as smoothstepElement,vs as specularColor,ws as specularF90,_s as spherizeUV,Ss as split,Ts as spritesheetUV,ys as sqrt,Vs as stack,Ms as step,Fs as stepElement,Ds as storage,Is as storageBarrier,Bs as storageTexture,Cs as string,Ps as struct,Rs as sub,js as subBuild,As as subgroupAdd,Os as subgroupAll,ks as subgroupAnd,Ns as subgroupAny,Ls as subgroupBallot,Us as subgroupBroadcast,Gs as subgroupBroadcastFirst,Es as subgroupElect,Ws as subgroupExclusiveAdd,qs as subgroupExclusiveMul,zs as subgroupInclusiveAdd,Zs as subgroupInclusiveMul,Xs as subgroupIndex,Hs as subgroupMax,Ks as subgroupMin,Ys as subgroupMul,Js as subgroupOr,Qs as subgroupShuffle,$s as subgroupShuffleDown,ec as subgroupShuffleUp,tc as subgroupShuffleXor,rc as subgroupSize,ac as subgroupXor,oc as tan,ic as tangentGeometry,nc as tangentLocal,lc as tangentView,sc as tangentWorld,cc as texture,mc as texture3D,uc as textureBarrier,pc as textureBicubic,dc as textureBicubicLevel,gc as textureCubeUV,fc as textureLevel,xc as textureLoad,hc as textureSize,bc as textureStore,vc as thickness,wc as time,_c as toneMapping,Sc as toneMappingExposure,Tc as toonOutlinePass,yc as transformDirection,Vc as transformNormal,Mc as transformNormalToView,Fc as transformedClearcoatNormalView,Dc as transformedNormalView,Ic as transformedNormalWorld,Bc as transmission,Cc as transpose,Pc as triNoise3D,Rc as triplanarTexture,Ac as 
triplanarTextures,Oc as trunc,kc as uint,Nc as uintBitsToFloat,Lc as uniform,Uc as uniformArray,Gc as uniformCubeTexture,Ec as uniformFlow,jc as uniformGroup,Wc as uniformTexture,qc as unpackHalf2x16,zc as unpackSnorm2x16,Zc as unpackUnorm2x16,Xc as unpremultiplyAlpha,Hc as userData,Kc as uv,Yc as uvec2,Jc as uvec3,Qc as uvec4,$c as varying,em as varyingProperty,tm as vec2,rm as vec3,am as vec4,om as vectorComponents,im as velocity,nm as vertexColor,lm as vertexIndex,sm as vertexStage,cm as vibrance,mm as viewZToLogarithmicDepth,um as viewZToOrthographicDepth,pm as viewZToPerspectiveDepth,dm as viewZToReversedOrthographicDepth,gm as viewZToReversedPerspectiveDepth,xm as viewport,hm as viewportCoordinate,fm as viewportDepthTexture,bm as viewportLinearDepth,vm as viewportMipTexture,wm as viewportOpaqueMipTexture,_m as viewportResolution,Sm as viewportSafeUV,Tm as viewportSharedTexture,ym as viewportSize,Vm as viewportTexture,Mm as viewportUV,Ur as vogelDiskSample,Fm as wgsl,Dm as wgslFn,Im as workgroupArray,Bm as workgroupBarrier,Cm as workgroupId,Pm as workingToColorSpace,Rm as xor};

@@ -126,5 +126,3 @@ export * from './animation/AnimationClipCreator.js';

export * from './materials/MeshGouraudMaterial.js';
export * from './materials/LDrawConditionalLineMaterial.js';
export * from './materials/MeshPostProcessingMaterial.js';

@@ -229,3 +227,2 @@ export * from './math/Capsule.js';

export * from './shaders/GammaCorrectionShader.js';
export * from './shaders/GodRaysShader.js';
export * from './shaders/GTAOShader.js';

@@ -232,0 +229,0 @@ export * from './shaders/HalftoneShader.js';

@@ -53,3 +53,3 @@ import {

*
* @type {SkinnedMesh}
* @type {Array<CCDIKSolver~IK>}
*/

@@ -361,3 +361,3 @@ this.iks = iks;

*
* @type {SkinnedMesh}
* @type {SphereGeometry}
*/

@@ -364,0 +364,0 @@ this.sphereGeometry = new SphereGeometry( sphereSize, 16, 8 );

@@ -394,2 +394,4 @@ import {

this._cursorStyle = 'auto';
// the target DOM element for key events

@@ -466,2 +468,30 @@ this._domElementKeyEvents = null;

/**
* Defines the visual representation of the cursor.
*
* @type {('auto'|'grab')}
* @default 'auto'
*/
set cursorStyle( type ) {
this._cursorStyle = type;
if ( type === 'grab' ) {
this.domElement.style.cursor = 'grab';
} else {
this.domElement.style.cursor = 'auto';
}
}
/**
 * Returns the cursor style last assigned via the `cursorStyle` setter.
 *
 * @type {('auto'|'grab')}
 */
get cursorStyle() {
return this._cursorStyle;
}
connect( element ) {

@@ -599,2 +629,63 @@

/**
* Programmatically pan the camera.
*
* Delegates to the internal pan handler, then calls `update()` so the
* change takes effect immediately rather than on the next input event.
*
* @param {number} deltaX - The horizontal pan amount in pixels.
* @param {number} deltaY - The vertical pan amount in pixels.
*/
pan( deltaX, deltaY ) {
this._pan( deltaX, deltaY );
this.update();
}
/**
* Programmatically dolly in (zoom in for perspective camera).
*
* Delegates to the internal dolly handler, then calls `update()` so the
* change takes effect immediately rather than on the next input event.
*
* @param {number} dollyScale - The dolly scale factor.
*/
dollyIn( dollyScale ) {
this._dollyIn( dollyScale );
this.update();
}
/**
* Programmatically dolly out (zoom out for perspective camera).
*
* Delegates to the internal dolly handler, then calls `update()` so the
* change takes effect immediately rather than on the next input event.
*
* @param {number} dollyScale - The dolly scale factor.
*/
dollyOut( dollyScale ) {
this._dollyOut( dollyScale );
this.update();
}
/**
* Programmatically rotate the camera left (around the vertical axis).
*
* Delegates to the internal rotation handler, then calls `update()` so the
* change takes effect immediately rather than on the next input event.
*
* @param {number} angle - The rotation angle in radians.
*/
rotateLeft( angle ) {
this._rotateLeft( angle );
this.update();
}
/**
* Programmatically rotate the camera up (around the horizontal axis).
*
* Delegates to the internal rotation handler, then calls `update()` so the
* change takes effect immediately rather than on the next input event.
*
* @param {number} angle - The rotation angle in radians.
*/
rotateUp( angle ) {
this._rotateUp( angle );
this.update();
}
update( deltaTime = null ) {

@@ -1487,2 +1578,8 @@

if ( this._cursorStyle === 'grab' ) {
this.domElement.style.cursor = 'grabbing';
}
}

@@ -1523,2 +1620,8 @@

if ( this._cursorStyle === 'grab' ) {
this.domElement.style.cursor = 'grab';
}
break;

@@ -1525,0 +1628,0 @@

import {
LinearFilter,
MathUtils,
Matrix3,
NearestFilter,
PerspectiveCamera,
RGBAFormat,
ShaderMaterial,
StereoCamera,
Vector3,
WebGLRenderTarget
} from 'three';
import { FullScreenQuad } from '../postprocessing/Pass.js';
import { frameCorners } from '../utils/CameraUtils.js';
const _cameraL = /*@__PURE__*/ new PerspectiveCamera();
const _cameraR = /*@__PURE__*/ new PerspectiveCamera();
// Reusable vectors for screen corner calculations
const _eyeL = /*@__PURE__*/ new Vector3();
const _eyeR = /*@__PURE__*/ new Vector3();
const _screenCenter = /*@__PURE__*/ new Vector3();
const _screenBottomLeft = /*@__PURE__*/ new Vector3();
const _screenBottomRight = /*@__PURE__*/ new Vector3();
const _screenTopLeft = /*@__PURE__*/ new Vector3();
const _right = /*@__PURE__*/ new Vector3();
const _up = /*@__PURE__*/ new Vector3();
const _forward = /*@__PURE__*/ new Vector3();
/**
* A class that creates an anaglyph effect.
* A class that creates an anaglyph effect using physically-correct
* off-axis stereo projection.
*
* This implementation uses CameraUtils.frameCorners() to align stereo
* camera frustums to a virtual screen plane, providing accurate depth
* perception with zero parallax at the plane distance.
*
* Note that this class can only be used with {@link WebGLRenderer}.

@@ -45,4 +67,26 @@ * When using {@link WebGPURenderer}, use {@link AnaglyphPassNode}.

const _stereo = new StereoCamera();
/**
* The interpupillary distance (eye separation) in world units.
* Typical human IPD is 0.064 meters (64mm).
*
* @type {number}
* @default 0.064
*/
this.eyeSep = 0.064;
/**
* The distance in world units from the viewer to the virtual
* screen plane where zero parallax (screen depth) occurs.
* Objects at this distance appear at the screen surface.
* Objects closer appear in front of the screen (negative parallax).
* Objects further appear behind the screen (positive parallax).
*
* The screen dimensions are derived from the camera's FOV and aspect ratio
* at this distance, ensuring the stereo view matches the camera's field of view.
*
* @type {number}
* @default 0.5
*/
this.planeDistance = 0.5;
const _params = { minFilter: LinearFilter, magFilter: NearestFilter, format: RGBAFormat };

@@ -53,2 +97,5 @@

_cameraL.layers.enable( 1 );
_cameraR.layers.enable( 2 );
const _material = new ShaderMaterial( {

@@ -146,12 +193,60 @@

_stereo.update( camera );
// Get the camera's local coordinate axes from its world matrix
camera.matrixWorld.extractBasis( _right, _up, _forward );
_right.normalize();
_up.normalize();
_forward.normalize();
// Calculate eye positions
const halfSep = this.eyeSep / 2;
_eyeL.copy( camera.position ).addScaledVector( _right, - halfSep );
_eyeR.copy( camera.position ).addScaledVector( _right, halfSep );
// Calculate screen center (at planeDistance in front of the camera center)
_screenCenter.copy( camera.position ).addScaledVector( _forward, - this.planeDistance );
// Calculate screen dimensions from camera FOV and aspect ratio
const halfHeight = this.planeDistance * Math.tan( MathUtils.DEG2RAD * camera.fov / 2 );
const halfWidth = halfHeight * camera.aspect;
// Calculate screen corners
_screenBottomLeft.copy( _screenCenter )
.addScaledVector( _right, - halfWidth )
.addScaledVector( _up, - halfHeight );
_screenBottomRight.copy( _screenCenter )
.addScaledVector( _right, halfWidth )
.addScaledVector( _up, - halfHeight );
_screenTopLeft.copy( _screenCenter )
.addScaledVector( _right, - halfWidth )
.addScaledVector( _up, halfHeight );
// Set up left eye camera
_cameraL.position.copy( _eyeL );
_cameraL.near = camera.near;
_cameraL.far = camera.far;
frameCorners( _cameraL, _screenBottomLeft, _screenBottomRight, _screenTopLeft, true );
_cameraL.matrixWorld.compose( _cameraL.position, _cameraL.quaternion, _cameraL.scale );
_cameraL.matrixWorldInverse.copy( _cameraL.matrixWorld ).invert();
// Set up right eye camera
_cameraR.position.copy( _eyeR );
_cameraR.near = camera.near;
_cameraR.far = camera.far;
frameCorners( _cameraR, _screenBottomLeft, _screenBottomRight, _screenTopLeft, true );
_cameraR.matrixWorld.compose( _cameraR.position, _cameraR.quaternion, _cameraR.scale );
_cameraR.matrixWorldInverse.copy( _cameraR.matrixWorld ).invert();
// Render left eye
renderer.setRenderTarget( _renderTargetL );
renderer.clear();
renderer.render( scene, _stereo.cameraL );
renderer.render( scene, _cameraL );
// Render right eye
renderer.setRenderTarget( _renderTargetR );
renderer.clear();
renderer.render( scene, _stereo.cameraR );
renderer.render( scene, _cameraR );
// Composite anaglyph
renderer.setRenderTarget( null );

@@ -158,0 +253,0 @@ _quad.render( renderer );

@@ -40,2 +40,3 @@ import {

this.name = 'RoomEnvironment';
this.position.y = - 3.5;

@@ -42,0 +43,0 @@ const geometry = new BoxGeometry();

@@ -93,3 +93,3 @@ import {

if ( renderTarget.isWebGLCubeRenderTarget || renderTarget.isWebGL3DRenderTarget || renderTarget.isWebGLArrayRenderTarget ) {
if ( renderTarget.isCubeRenderTarget || renderTarget.isWebGLCubeRenderTarget || renderTarget.isWebGL3DRenderTarget || renderTarget.isWebGLArrayRenderTarget ) {

@@ -96,0 +96,0 @@ throw Error( 'EXRExporter.parse: Unsupported render target type, expected instance of WebGLRenderTarget.' );

@@ -504,3 +504,2 @@ import {

const name = getName( object, usedNames );
const transform = buildMatrix( object.matrix );

@@ -518,5 +517,25 @@ if ( object.matrix.determinant() < 0 ) {

node.addProperty( `matrix4d xformOp:transform = ${transform}` );
node.addProperty( 'uniform token[] xformOpOrder = ["xformOp:transform"]' );
if ( object.pivot !== null ) {
// Export with pivot using separate transform ops
const p = object.position;
const q = object.quaternion;
const s = object.scale;
const piv = object.pivot;
node.addProperty( `float3 xformOp:translate = (${p.x.toPrecision( PRECISION )}, ${p.y.toPrecision( PRECISION )}, ${p.z.toPrecision( PRECISION )})` );
node.addProperty( `float3 xformOp:translate:pivot = (${piv.x.toPrecision( PRECISION )}, ${piv.y.toPrecision( PRECISION )}, ${piv.z.toPrecision( PRECISION )})` );
node.addProperty( `quatf xformOp:orient = (${q.w.toPrecision( PRECISION )}, ${q.x.toPrecision( PRECISION )}, ${q.y.toPrecision( PRECISION )}, ${q.z.toPrecision( PRECISION )})` );
node.addProperty( `float3 xformOp:scale = (${s.x.toPrecision( PRECISION )}, ${s.y.toPrecision( PRECISION )}, ${s.z.toPrecision( PRECISION )})` );
node.addProperty( 'uniform token[] xformOpOrder = ["xformOp:translate", "xformOp:translate:pivot", "xformOp:orient", "xformOp:scale", "!invert!xformOp:translate:pivot"]' );
} else {
// Export as single transform matrix
const transform = buildMatrix( object.matrix );
node.addProperty( `matrix4d xformOp:transform = ${transform}` );
node.addProperty( 'uniform token[] xformOpOrder = ["xformOp:transform"]' );
}
return node;

@@ -523,0 +542,0 @@

@@ -68,2 +68,16 @@ import {

/**
* Controls the position of the helper in the viewport.
* Use `top`/`bottom` for vertical positioning and `left`/`right` for horizontal.
* If `left` is `null`, `right` is used. If `top` is `null`, `bottom` is used.
*
* @type {{top: number|null, right: number, bottom: number, left: number|null}}
*/
this.location = {
top: null,
right: 0,
bottom: 0,
left: null
};
const color1 = new Color( '#ff4466' );

@@ -146,4 +160,4 @@ const color2 = new Color( '#88ff44' );

/**
* Renders the helper in a separate view in the bottom-right corner
* of the viewport.
* Renders the helper in a separate view in the viewport.
* Position is controlled by the `location` property.
*

@@ -162,5 +176,28 @@ * @param {WebGLRenderer|WebGPURenderer} renderer - The renderer.

const x = domElement.offsetWidth - dim;
const y = renderer.isWebGPURenderer ? domElement.offsetHeight - dim : 0;
const location = this.location;
let x, y;
if ( location.left !== null ) {
x = location.left;
} else {
x = domElement.offsetWidth - dim - location.right;
}
if ( location.top !== null ) {
// Position from top
y = renderer.isWebGPURenderer ? location.top : domElement.offsetHeight - dim - location.top;
} else {
// Position from bottom
y = renderer.isWebGPURenderer ? domElement.offsetHeight - dim - location.bottom : location.bottom;
}
renderer.clearDepth();

@@ -197,7 +234,29 @@

const rect = domElement.getBoundingClientRect();
const offsetX = rect.left + ( domElement.offsetWidth - dim );
const offsetY = rect.top + ( domElement.offsetHeight - dim );
mouse.x = ( ( event.clientX - offsetX ) / ( rect.right - offsetX ) ) * 2 - 1;
mouse.y = - ( ( event.clientY - offsetY ) / ( rect.bottom - offsetY ) ) * 2 + 1;
const location = this.location;
let offsetX, offsetY;
if ( location.left !== null ) {
offsetX = rect.left + location.left;
} else {
offsetX = rect.left + domElement.offsetWidth - dim - location.right;
}
if ( location.top !== null ) {
offsetY = rect.top + location.top;
} else {
offsetY = rect.top + domElement.offsetHeight - dim - location.bottom;
}
mouse.x = ( ( event.clientX - offsetX ) / dim ) * 2 - 1;
mouse.y = - ( ( event.clientY - offsetY ) / dim ) * 2 + 1;
raycaster.setFromCamera( mouse, orthoCamera );

@@ -204,0 +263,0 @@

@@ -113,3 +113,3 @@

resolveConsole( type, message ) {
resolveConsole( type, message, stackTrace = null ) {

@@ -130,4 +130,12 @@ switch ( type ) {

console.warn( message );
if ( stackTrace && stackTrace.isStackTrace ) {
console.warn( stackTrace.getError( message ) );
} else {
console.warn( message );
}
break;

@@ -139,4 +147,12 @@

console.error( message );
if ( stackTrace && stackTrace.isStackTrace ) {
console.error( stackTrace.getError( message ) );
} else {
console.error( message );
}
break;

@@ -143,0 +159,0 @@

@@ -36,5 +36,11 @@ import { Tab } from '../ui/Tab.js';

const filtersGroup = document.createElement( 'div' );
filtersGroup.className = 'console-filters-group';
const copyButton = document.createElement( 'button' );
copyButton.className = 'console-copy-button';
copyButton.title = 'Copy all';
copyButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><rect x="9" y="9" width="13" height="13" rx="2" ry="2"></rect><path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1"></path></svg>';
copyButton.addEventListener( 'click', () => this.copyAll( copyButton ) );
const buttonsGroup = document.createElement( 'div' );
buttonsGroup.className = 'console-buttons-group';
Object.keys( this.filters ).forEach( type => {

@@ -57,7 +63,7 @@

label.append( type.charAt( 0 ).toUpperCase() + type.slice( 1 ) );
filtersGroup.appendChild( label );
buttonsGroup.appendChild( label );
} );
filtersGroup.addEventListener( 'change', ( e ) => {
buttonsGroup.addEventListener( 'change', ( e ) => {

@@ -74,4 +80,6 @@ const type = e.target.dataset.type;

buttonsGroup.appendChild( copyButton );
header.appendChild( filterInput );
header.appendChild( filtersGroup );
header.appendChild( buttonsGroup );
this.content.appendChild( header );

@@ -98,2 +106,28 @@

copyAll( button ) {
const win = this.logContainer.ownerDocument.defaultView;
const selection = win.getSelection();
const selectedText = selection.toString();
const textInConsole = selectedText && this.logContainer.contains( selection.anchorNode );
let text;
if ( textInConsole ) {
text = selectedText;
} else {
const messages = this.logContainer.querySelectorAll( '.log-message:not(.hidden)' );
text = Array.from( messages ).map( msg => msg.dataset.rawText ).join( '\n' );
}
navigator.clipboard.writeText( text );
button.classList.add( 'copied' );
setTimeout( () => button.classList.remove( 'copied' ), 350 );
}
_getIcon( type, subType ) {

@@ -100,0 +134,0 @@

@@ -16,2 +16,4 @@ import { Tab } from '../ui/Tab.js';

this.objects = [];
}

@@ -97,4 +99,13 @@

this._registerParameter( object, property, editor, subItem );
}
_registerParameter( object, property, editor, subItem ) {
this.objects.push( { object: object, key: property, editor: editor, subItem: subItem } );
}
addFolder( name ) {

@@ -296,2 +307,4 @@

this._registerParameter( object, property, editor, subItem );
return editor;

@@ -321,2 +334,4 @@

this.groups = [];
}

@@ -329,2 +344,3 @@

this.paramList.add( group.paramList );
this.groups.push( group );

@@ -331,0 +347,0 @@ return group;

@@ -1060,3 +1060,3 @@ export class Style {

.console-filters-group {
.console-buttons-group {
display: flex;

@@ -1078,2 +1078,24 @@ gap: 20px;

.console-copy-button {
background: transparent;
border: none;
color: var(--text-secondary);
cursor: pointer;
padding: 4px;
display: flex;
align-items: center;
justify-content: center;
border-radius: 4px;
transition: color 0.2s, background-color 0.2s;
}
.console-copy-button:hover {
color: var(--text-primary);
background-color: var(--profiler-hover);
}
.console-copy-button.copied {
color: var(--color-green);
}
#console-log {

@@ -1086,2 +1108,4 @@ display: flex;

flex-grow: 1;
user-select: text;
-webkit-user-select: text;
}

@@ -1088,0 +1112,0 @@

// This file is part of meshoptimizer library and is distributed under the terms of MIT License.
// Copyright (C) 2016-2024, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com)
// Copyright (C) 2016-2025, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com)
var MeshoptDecoder = (function () {
// Built with clang version 18.1.2
// Built from meshoptimizer 0.22
// Built with clang version 19.1.5-wasi-sdk
// Built from meshoptimizer 1.0
var wasm_base =
'b9H79Tebbbe8Fv9Gbb9Gvuuuuueu9Giuuub9Geueu9Giuuueuikqbeeedddillviebeoweuec:q:Odkr;leDo9TW9T9VV95dbH9F9F939H79T9F9J9H229F9Jt9VV7bb8A9TW79O9V9Wt9F9KW9J9V9KW9wWVtW949c919M9MWVbeY9TW79O9V9Wt9F9KW9J9V9KW69U9KW949c919M9MWVbdE9TW79O9V9Wt9F9KW9J9V9KW69U9KW949tWG91W9U9JWbiL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9p9JtblK9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9r919HtbvL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVT949Wbol79IV9Rbrq;w8Wqdbk;esezu8Jjjjjbcj;eb9Rgv8Kjjjjbc9:hodnadcefal0mbcuhoaiRbbc:Ge9hmbavaialfgrad9Radz1jjjbhwcj;abad9Uc;WFbGgocjdaocjd6EhDaicefhocbhqdnindndndnaeaq9nmbaDaeaq9RaqaDfae6Egkcsfglcl4cifcd4hxalc9WGgmTmecbhPawcjdfhsaohzinaraz9Rax6mvarazaxfgo9RcK6mvczhlcbhHinalgic9WfgOawcj;cbffhldndndndndnazaOco4fRbbaHcoG4ciGPlbedibkal9cb83ibalcwf9cb83ibxikalaoRblaoRbbgOco4gAaAciSgAE86bbawcj;cbfaifglcGfaoclfaAfgARbbaOcl4ciGgCaCciSgCE86bbalcVfaAaCfgARbbaOcd4ciGgCaCciSgCE86bbalc7faAaCfgARbbaOciGgOaOciSgOE86bbalctfaAaOfgARbbaoRbegOco4gCaCciSgCE86bbalc91faAaCfgARbbaOcl4ciGgCaCciSgCE86bbalc4faAaCfgARbbaOcd4ciGgCaCciSgCE86bbalc93faAaCfgARbbaOciGgOaOciSgOE86bbalc94faAaOfgARbbaoRbdgOco4gCaCciSgCE86bbalc95faAaCfgARbbaOcl4ciGgCaCciSgCE86bbalc96faAaCfgARbbaOcd4ciGgCaCciSgCE86bbalc97faAaCfgARbbaOciGgOaOciSgOE86bbalc98faAaOfgORbbaoRbigoco4gAaAciSgAE86bbalc99faOaAfgORbbaocl4ciGgAaAciSgAE86bbalc9:faOaAfgORbbaocd4ciGgAaAciSgAE86bbalcufaOaAfglRbbaociGgoaociSgoE86bbalaofhoxdkalaoRbwaoRbbgOcl4gAaAcsSgAE86bbawcj;cbfaifglcGfaocwfaAfgARbbaOcsGgOaOcsSgOE86bbalcVfaAaOfgORbbaoRbegAcl4gCaCcsSgCE86bbalc7faOaCfgORbbaAcsGgAaAcsSgAE86bbalctfaOaAfgORbbaoRbdgAcl4gCaCcsSgCE86bbalc91faOaCfgORbbaAcsGgAaAcsSgAE86bbalc4faOaAfgORbbaoRbigAcl4gCaCcsSgCE86bbalc93faOaCfgORbbaAcsGgAaAcsSgAE86bbalc94faOaAfgORbbaoRblgAcl4gCaCcsSgCE86bbalc95faOaCfgORbbaAcsGgAaAcsSgAE86bbalc96faOaAfgORbbaoRbvgAcl4gCaCcsSgCE86bbalc97faOaCfgORbbaAcsGgAaAcsSgAE86bbalc98faOaAfgORbbaoRbogAcl4gCaCcsSgCE86bbalc99faOaCfgORbbaAcsGgAaAcsSgAE86bbalc9:faOaAfgORbbaoRbrgocl4gAaAcsSgAE86bbalcufaOaAfglRbbaocsGgoaocsSgoE86bbalaofhoxekalao8Pbb83bbalcwfaocwf8Pbb83bbaoczfhokdnaiam9pmbaHcd
fhHaiczfhlarao9RcL0mekkaiam6mvaoTmvdnakTmbawaPfRbbhHawcj;cbfhlashiakhOinaialRbbgzce4cbazceG9R7aHfgH86bbaiadfhialcefhlaOcufgOmbkkascefhsaohzaPcefgPad9hmbxikkcbc99arao9Radcaadca0ESEhoxlkaoaxad2fhCdnakmbadhlinaoTmlarao9Rax6mlaoaxfhoalcufglmbkaChoxekcbhmawcjdfhAinarao9Rax6miawamfRbbhHawcj;cbfhlaAhiakhOinaialRbbgzce4cbazceG9R7aHfgH86bbaiadfhialcefhlaOcufgOmbkaAcefhAaoaxfhoamcefgmad9hmbkaChokabaqad2fawcjdfakad2z1jjjb8Aawawcjdfakcufad2fadz1jjjb8Aakaqfhqaombkc9:hoxekc9:hokavcj;ebf8Kjjjjbaok;cseHu8Jjjjjbc;ae9Rgv8Kjjjjbc9:hodnaeci9UgrcHfal0mbcuhoaiRbbgwc;WeGc;Ge9hmbawcsGgwce0mbavc;abfcFecjez:jjjjb8AavcUf9cu83ibavc8Wf9cu83ibavcyf9cu83ibavcaf9cu83ibavcKf9cu83ibavczf9cu83ibav9cu83iwav9cu83ibaialfc9WfhDaicefgqarfhidnaeTmbcmcsawceSEhkcbhxcbhmcbhPcbhwcbhlindnaiaD9nmbc9:hoxikdndnaqRbbgoc;Ve0mbavc;abfalaocu7gscl4fcsGcitfgzydlhrazydbhzdnaocsGgHak9pmbavawasfcsGcdtfydbaxaHEhoaHThsdndnadcd9hmbabaPcetfgHaz87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHazBdbaHcwfaoBdbaHclfarBdbkaxasfhxcdhHavawcdtfaoBdbawasfhwcehsalhOxdkdndnaHcsSmbaHc987aHamffcefhoxekaicefhoai8SbbgHcFeGhsdndnaHcu9mmbaohixekaicvfhiascFbGhscrhHdninao8SbbgOcFbGaHtasVhsaOcu9kmeaocefhoaHcrfgHc8J9hmbxdkkaocefhikasce4cbasceG9R7amfhokdndnadcd9hmbabaPcetfgHaz87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHazBdbaHcwfaoBdbaHclfarBdbkcdhHavawcdtfaoBdbcehsawcefhwalhOaohmxekdnaocpe0mbaxcefgHavawaDaocsGfRbbgocl49RcsGcdtfydbaocz6gzEhravawao9RcsGcdtfydbaHazfgAaocsGgHEhoaHThCdndnadcd9hmbabaPcetfgHax87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHaxBdbaHcwfaoBdbaHclfarBdbkcdhsavawcdtfaxBdbavawcefgwcsGcdtfarBdbcihHavc;abfalcitfgOaxBdlaOarBdbavawazfgwcsGcdtfaoBdbalcefcsGhOawaCfhwaxhzaAaCfhxxekaxcbaiRbbgOEgzaoc;:eSgHfhraOcsGhCaOcl4hAdndnaOcs0mbarcefhoxekarhoavawaA9RcsGcdtfydbhrkdndnaCmbaocefhxxekaohxavawaO9RcsGcdtfydbhokdndnaHTmbaicefhHxekaicdfhHai8SbegscFeGhzdnascu9kmbaicofhXazcFbGhzcrhidninaH8SbbgscFbGaitazVhzascu9kmeaHcefhHaicrfgic8J9hmbkaXhHxekaHcefhHkazce4cbazceG9R7amfgmhzkdndnaAcsSmbaHhsxekaHcefhsaH8SbbgicFeGhrdnaicu9kmbaHcvfhXarcFbGhrcrhidninas8SbbgHcFbGaitarVhraH
cu9kmeascefhsaicrfgic8J9hmbkaXhsxekascefhskarce4cbarceG9R7amfgmhrkdndnaCcsSmbashixekascefhias8SbbgocFeGhHdnaocu9kmbascvfhXaHcFbGhHcrhodninai8SbbgscFbGaotaHVhHascu9kmeaicefhiaocrfgoc8J9hmbkaXhixekaicefhikaHce4cbaHceG9R7amfgmhokdndnadcd9hmbabaPcetfgHaz87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHazBdbaHcwfaoBdbaHclfarBdbkcdhsavawcdtfazBdbavawcefgwcsGcdtfarBdbcihHavc;abfalcitfgXazBdlaXarBdbavawaOcz6aAcsSVfgwcsGcdtfaoBdbawaCTaCcsSVfhwalcefcsGhOkaqcefhqavc;abfaOcitfgOarBdlaOaoBdbavc;abfalasfcsGcitfgraoBdlarazBdbawcsGhwalaHfcsGhlaPcifgPae6mbkkcbc99aiaDSEhokavc;aef8Kjjjjbaok:flevu8Jjjjjbcz9Rhvc9:hodnaecvfal0mbcuhoaiRbbc;:eGc;qe9hmbav9cb83iwaicefhraialfc98fhwdnaeTmbdnadcdSmbcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbxdkkaocefhrkabaDcdtfaic8Etc8F91aicd47avcwfaiceGcdtVgoydbfglBdbaoalBdbaDcefgDae9hmbxdkkcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbxdkkaocefhrkabaDcetfaic8Etc8F91aicd47avcwfaiceGcdtVgoydbfgl87ebaoalBdbaDcefgDae9hmbkkcbc99arawSEhokaok:Lvoeue99dud99eud99dndnadcl9hmbaeTmeindndnabcdfgd8Sbb:Yab8Sbbgi:Ygl:l:tabcefgv8Sbbgo:Ygr:l:tgwJbb;:9cawawNJbbbbawawJbbbb9GgDEgq:mgkaqaicb9iEalMgwawNakaqaocb9iEarMgqaqNMM:r:vglNJbbbZJbbb:;aDEMgr:lJbbb9p9DTmbar:Ohixekcjjjj94hikadai86bbdndnaqalNJbbbZJbbb:;aqJbbbb9GEMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94hdkavad86bbdndnawalNJbbbZJbbb:;awJbbbb9GEMgw:lJbbb9p9DTmbaw:Ohdxekcjjjj94hdkabad86bbabclfhbaecufgembxdkkaeTmbindndnabclfgd8Ueb:Yab8Uebgi:Ygl:l:tabcdfgv8Uebgo:Ygr:l:tgwJb;:FSawawNJbbbbawawJbbbb9GgDEgq:mgkaqaicb9iEalMgwawNakaqaocb9iEarMgqaqNMM:r:vglNJbbbZJbbb:;aDEMgr:lJbbb9p9DTmbar:Ohixekcjjjj94hikadai87ebdndnaqalNJbbbZJbbb:;aqJbbbb9GEMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94hdkavad87ebdndnawalNJbbbZJbbb:;awJbbbb9GEMgw:lJbbb9p9DTmbaw:Ohdxekcjjjj94hdkabad87ebabcwfhbaecufgembkkk;oiliui99iue99dnaeTmbcbhiabhlindndnJ;Zl81Zalcof8UebgvciV:Y:vgoal8Ueb:YNgrJb;:FSNJbbbZJbbb:;arJbbbb9GEMgw:lJbbb9p9D
Tmbaw:OhDxekcjjjj94hDkalclf8Uebhqalcdf8UebhkabaiavcefciGfcetfaD87ebdndnaoak:YNgwJb;:FSNJbbbZJbbb:;awJbbbb9GEMgx:lJbbb9p9DTmbax:OhDxekcjjjj94hDkabaiavciGfgkcd7cetfaD87ebdndnaoaq:YNgoJb;:FSNJbbbZJbbb:;aoJbbbb9GEMgx:lJbbb9p9DTmbax:OhDxekcjjjj94hDkabaiavcufciGfcetfaD87ebdndnJbbjZararN:tawawN:taoaoN:tgrJbbbbarJbbbb9GE:rJb;:FSNJbbbZMgr:lJbbb9p9DTmbar:Ohvxekcjjjj94hvkabakcetfav87ebalcwfhlaiclfhiaecufgembkkk9mbdnadcd4ae2gdTmbinababydbgecwtcw91:Yaece91cjjj98Gcjjj;8if::NUdbabclfhbadcufgdmbkkk9teiucbcbydj1jjbgeabcifc98GfgbBdj1jjbdndnabZbcztgd9nmbcuhiabad9RcFFifcz4nbcuSmekaehikaik;LeeeudndnaeabVciGTmbabhixekdndnadcz9pmbabhixekabhiinaiaeydbBdbaiclfaeclfydbBdbaicwfaecwfydbBdbaicxfaecxfydbBdbaeczfheaiczfhiadc9Wfgdcs0mbkkadcl6mbinaiaeydbBdbaeclfheaiclfhiadc98fgdci0mbkkdnadTmbinaiaeRbb86bbaicefhiaecefheadcufgdmbkkabk;aeedudndnabciGTmbabhixekaecFeGc:b:c:ew2hldndnadcz9pmbabhixekabhiinaialBdbaicxfalBdbaicwfalBdbaiclfalBdbaiczfhiadc9Wfgdcs0mbkkadcl6mbinaialBdbaiclfhiadc98fgdci0mbkkdnadTmbinaiae86bbaicefhiadcufgdmbkkabkkkebcjwklzNbb'; // embed! base
'b9H79Tebbbe8Fv9Gbb9Gvuuuuueu9Giuuub9Geueu9Giuuueuixkbeeeddddillviebeoweuec:W:Odkr;Neqo9TW9T9VV95dbH9F9F939H79T9F9J9H229F9Jt9VV7bb8A9TW79O9V9Wt9F9KW9J9V9KW9wWVtW949c919M9MWVbeY9TW79O9V9Wt9F9KW9J9V9KW69U9KW949c919M9MWVbdE9TW79O9V9Wt9F9KW9J9V9KW69U9KW949tWG91W9U9JWbiL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9p9JtblK9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9r919HtbvL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVT949WboY9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVJ9V29VVbrl79IV9Rbwq;lZkdbk;jYi5ud9:du8Jjjjjbcj;kb9Rgv8Kjjjjbc9:hodnalTmbcuhoaiRbbgrc;WeGc:Ge9hmbarcsGgwce0mbc9:hoalcufadcd4cbawEgDadfgrcKcaawEgqaraq0Egk6mbaicefhxcj;abad9Uc;WFbGcjdadca0EhmaialfgPar9Rgoadfhsavaoadz:jjjjbgzceVhHcbhOdndninaeaO9nmeaPax9RaD6mdamaeaO9RaOamfgoae6EgAcsfglc9WGhCabaOad2fhXaAcethQaxaDfhiaOaeaoaeao6E9RhLalcl4cifcd4hKazcj;cbfaAfhYcbh8AazcjdfhEaHh3incbhodnawTmbaxa8Acd4fRbbhokaocFeGh5cbh8Eazcj;cbfhqinaih8Fdndndndna5a8Ecet4ciGgoc9:fPdebdkaPa8F9RaA6mrazcj;cbfa8EaA2fa8FaAz:jjjjb8Aa8FaAfhixdkazcj;cbfa8EaA2fcbaAz:kjjjb8Aa8FhixekaPa8F9RaK6mva8FaKfhidnaCTmbaPai9RcK6mbaocdtc:q1jjbfcj1jjbawEhaczhrcbhlinargoc9Wfghaqfhrdndndndndndnaaa8Fahco4fRbbalcoG4ciGcdtfydbPDbedvivvvlvkar9cb83bbarcwf9cb83bbxlkarcbaiRbdai8Xbb9c:c:qj:bw9:9c:q;c1:I1e:d9c:b:c:e1z9:gg9cjjjjjz:dg8J9qE86bbaqaofgrcGfag9c8F1:NghcKtc8F91aicdfa8J9c8N1:Nfg8KRbbG86bbarcVfcba8KahcjeGcr4fghRbbag9cjjjjjl:dg8J9qE86bbarc7fcbaha8J9c8L1:NfghRbbag9cjjjjjd:dg8J9qE86bbarctfcbaha8J9c8K1:NfghRbbag9cjjjjje:dg8J9qE86bbarc91fcbaha8J9c8J1:NfghRbbag9cjjjj;ab:dg8J9qE86bbarc4fcbaha8J9cg1:NfghRbbag9cjjjja:dg8J9qE86bbarc93fcbaha8J9ch1:NfghRbbag9cjjjjz:dgg9qE86bbarc94fcbahag9ca1:NfghRbbai8Xbe9c:c:qj:bw9:9c:q;c1:I1e:d9c:b:c:e1z9:gg9cjjjjjz:dg8J9qE86bbarc95fag9c8F1:NgicKtc8F91aha8J9c8N1:NfghRbbG86bbarc96fcbahaicjeGcr4fgiRbbag9cjjjjjl:dg8J9qE86bbarc97fcbaia8J9c8L1:NfgiRbbag9cjjjjjd:dg8J9qE86bbarc98fcbaia8J9c8K1:NfgiRbbag9cjjjjje:dg8J9qE86bbarc99fcbaia8J9c8J1:NfgiRbbag9cjjjj;ab:dg8J9qE86bbarc9:fcbaia8J9cg1:NfgiRbbag9cjjjja:dg8J9qE86bbarcufcbaia8J9ch1:NfgiRbbag9cjjjjz:dgg9qE86bbaiag9ca1:NfhixikaraiRblaiRbbghco4g8Ka8KciSg8K
E86bbaqaofgrcGfaiclfa8Kfg8KRbbahcl4ciGg8La8LciSg8LE86bbarcVfa8Ka8Lfg8KRbbahcd4ciGg8La8LciSg8LE86bbarc7fa8Ka8Lfg8KRbbahciGghahciSghE86bbarctfa8Kahfg8KRbbaiRbeghco4g8La8LciSg8LE86bbarc91fa8Ka8Lfg8KRbbahcl4ciGg8La8LciSg8LE86bbarc4fa8Ka8Lfg8KRbbahcd4ciGg8La8LciSg8LE86bbarc93fa8Ka8Lfg8KRbbahciGghahciSghE86bbarc94fa8Kahfg8KRbbaiRbdghco4g8La8LciSg8LE86bbarc95fa8Ka8Lfg8KRbbahcl4ciGg8La8LciSg8LE86bbarc96fa8Ka8Lfg8KRbbahcd4ciGg8La8LciSg8LE86bbarc97fa8Ka8Lfg8KRbbahciGghahciSghE86bbarc98fa8KahfghRbbaiRbigico4g8Ka8KciSg8KE86bbarc99faha8KfghRbbaicl4ciGg8Ka8KciSg8KE86bbarc9:faha8KfghRbbaicd4ciGg8Ka8KciSg8KE86bbarcufaha8KfgrRbbaiciGgiaiciSgiE86bbaraifhixdkaraiRbwaiRbbghcl4g8Ka8KcsSg8KE86bbaqaofgrcGfaicwfa8Kfg8KRbbahcsGghahcsSghE86bbarcVfa8KahfghRbbaiRbeg8Kcl4g8La8LcsSg8LE86bbarc7faha8LfghRbba8KcsGg8Ka8KcsSg8KE86bbarctfaha8KfghRbbaiRbdg8Kcl4g8La8LcsSg8LE86bbarc91faha8LfghRbba8KcsGg8Ka8KcsSg8KE86bbarc4faha8KfghRbbaiRbig8Kcl4g8La8LcsSg8LE86bbarc93faha8LfghRbba8KcsGg8Ka8KcsSg8KE86bbarc94faha8KfghRbbaiRblg8Kcl4g8La8LcsSg8LE86bbarc95faha8LfghRbba8KcsGg8Ka8KcsSg8KE86bbarc96faha8KfghRbbaiRbvg8Kcl4g8La8LcsSg8LE86bbarc97faha8LfghRbba8KcsGg8Ka8KcsSg8KE86bbarc98faha8KfghRbbaiRbog8Kcl4g8La8LcsSg8LE86bbarc99faha8LfghRbba8KcsGg8Ka8KcsSg8KE86bbarc9:faha8KfghRbbaiRbrgicl4g8Ka8KcsSg8KE86bbarcufaha8KfgrRbbaicsGgiaicsSgiE86bbaraifhixekarai8Pbb83bbarcwfaicwf8Pbb83bbaiczfhikdnaoaC9pmbalcdfhlaoczfhraPai9RcL0mekkaoaC6moaimexokaCmva8FTmvkaqaAfhqa8Ecefg8Ecl9hmbkdndndndnawTmbasa8Acd4fRbbgociGPlbedrbkaATmdaza8Afh8Fazcj;cbfhhcbh8EaEhaina8FRbbhraahocbhlinaoahalfRbbgqce4cbaqceG9R7arfgr86bbaoadfhoaAalcefgl9hmbkaacefhaa8Fcefh8FahaAfhha8Ecefg8Ecl9hmbxikkaATmeaza8Afhaazcj;cbfhhcbhoceh8EaYh8FinaEaofhlaa8Vbbhrcbhoinala8FaofRbbcwtahaofRbbgqVc;:FiGce4cbaqceG9R7arfgr87bbaladfhlaLaocefgofmbka8FaQfh8FcdhoaacdfhaahaQfhha8EceGhlcbh8EalmbxdkkaATmbcbaocl49Rh8Eaza8AfRbbhqcwhoa3hlinalRbbaotaqVhqalcefhlaocwfgoca9hmbkcbhhaEh8FaYhainazcj;cbfahfRbbhrcwhoaahlinalRbbaotarVhralaAfhlaocwfgoca9hmbkara8E93aq7hqcbhoa8Fhlinalaqao486bbalcefh
laocwfgoca9hmbka8Fadfh8FaacefhaahcefghaA9hmbkkaEclfhEa3clfh3a8Aclfg8Aad6mbkaXazcjdfaAad2z:jjjjb8AazazcjdfaAcufad2fadz:jjjjb8AaAaOfhOaihxaimbkc9:hoxdkcbc99aPax9RakSEhoxekc9:hokavcj;kbf8Kjjjjbaok:XseHu8Jjjjjbc;ae9Rgv8Kjjjjbc9:hodnaeci9UgrcHfal0mbcuhoaiRbbgwc;WeGc;Ge9hmbawcsGgDce0mbavc;abfcFecjez:kjjjb8AavcUf9cu83ibavc8Wf9cu83ibavcyf9cu83ibavcaf9cu83ibavcKf9cu83ibavczf9cu83ibav9cu83iwav9cu83ibaialfc9WfhqaicefgwarfhldnaeTmbcmcsaDceSEhkcbhxcbhmcbhrcbhicbhoindnalaq9nmbc9:hoxikdndnawRbbgDc;Ve0mbavc;abfaoaDcu7gPcl4fcsGcitfgsydlhzasydbhHdndnaDcsGgsak9pmbavaiaPfcsGcdtfydbaxasEhDaxasTgOfhxxekdndnascsSmbcehOasc987asamffcefhDxekalcefhDal8SbbgscFeGhPdndnascu9mmbaDhlxekalcvfhlaPcFbGhPcrhsdninaD8SbbgOcFbGastaPVhPaOcu9kmeaDcefhDascrfgsc8J9hmbxdkkaDcefhlkcehOaPce4cbaPceG9R7amfhDkaDhmkavc;abfaocitfgsaDBdbasazBdlavaicdtfaDBdbavc;abfaocefcsGcitfgsaHBdbasaDBdlaocdfhoaOaifhidnadcd9hmbabarcetfgsaH87ebasclfaD87ebascdfaz87ebxdkabarcdtfgsaHBdbascwfaDBdbasclfazBdbxekdnaDcpe0mbaxcefgOavaiaqaDcsGfRbbgscl49RcsGcdtfydbascz6gPEhDavaias9RcsGcdtfydbaOaPfgzascsGgOEhsaOThOdndnadcd9hmbabarcetfgHax87ebaHclfas87ebaHcdfaD87ebxekabarcdtfgHaxBdbaHcwfasBdbaHclfaDBdbkavaicdtfaxBdbavc;abfaocitfgHaDBdbaHaxBdlavaicefgicsGcdtfaDBdbavc;abfaocefcsGcitfgHasBdbaHaDBdlavaiaPfgicsGcdtfasBdbavc;abfaocdfcsGcitfgDaxBdbaDasBdlaocifhoaiaOfhiazaOfhxxekaxcbalRbbgHEgAaDc;:eSgDfhzaHcsGhCaHcl4hXdndnaHcs0mbazcefhOxekazhOavaiaX9RcsGcdtfydbhzkdndnaCmbaOcefhxxekaOhxavaiaH9RcsGcdtfydbhOkdndnaDTmbalcefhDxekalcdfhDal8SbegPcFeGhsdnaPcu9kmbalcofhAascFbGhscrhldninaD8SbbgPcFbGaltasVhsaPcu9kmeaDcefhDalcrfglc8J9hmbkaAhDxekaDcefhDkasce4cbasceG9R7amfgmhAkdndnaXcsSmbaDhsxekaDcefhsaD8SbbglcFeGhPdnalcu9kmbaDcvfhzaPcFbGhPcrhldninas8SbbgDcFbGaltaPVhPaDcu9kmeascefhsalcrfglc8J9hmbkazhsxekascefhskaPce4cbaPceG9R7amfgmhzkdndnaCcsSmbashlxekascefhlas8SbbgDcFeGhPdnaDcu9kmbascvfhOaPcFbGhPcrhDdninal8SbbgscFbGaDtaPVhPascu9kmealcefhlaDcrfgDc8J9hmbkaOhlxekalcefhlkaPce4cbaPceG9R7amfgmhOkdndnadcd9hmbabarcetfgDaA87ebaDclfaO87ebaDcdfaz87ebxekabarcdtfgDaABdbaDcwfaOBd
baDclfazBdbkavc;abfaocitfgDazBdbaDaABdlavaicdtfaABdbavc;abfaocefcsGcitfgDaOBdbaDazBdlavaicefgicsGcdtfazBdbavc;abfaocdfcsGcitfgDaABdbaDaOBdlavaiaHcz6aXcsSVfgicsGcdtfaOBdbaiaCTaCcsSVfhiaocifhokawcefhwaocsGhoaicsGhiarcifgrae6mbkkcbc99alaqSEhokavc;aef8Kjjjjbaok:clevu8Jjjjjbcz9Rhvdnaecvfal9nmbc9:skdnaiRbbc;:eGc;qeSmbcuskav9cb83iwaicefhoaialfc98fhrdnaeTmbdnadcdSmbcbhwindnaoar6mbc9:skaocefhlao8SbbgicFeGhddndnaicu9mmbalhoxekaocvfhoadcFbGhdcrhidninal8SbbgDcFbGaitadVhdaDcu9kmealcefhlaicrfgic8J9hmbxdkkalcefhokabawcdtfadc8Etc8F91adcd47avcwfadceGcdtVglydbfgiBdbalaiBdbawcefgwae9hmbxdkkcbhwindnaoar6mbc9:skaocefhlao8SbbgicFeGhddndnaicu9mmbalhoxekaocvfhoadcFbGhdcrhidninal8SbbgDcFbGaitadVhdaDcu9kmealcefhlaicrfgic8J9hmbxdkkalcefhokabawcetfadc8Etc8F91adcd47avcwfadceGcdtVglydbfgi87ebalaiBdbawcefgwae9hmbkkcbc99aoarSEk:Lvoeue99dud99eud99dndnadcl9hmbaeTmeindndnabcdfgd8Sbb:Yab8Sbbgi:Ygl:l:tabcefgv8Sbbgo:Ygr:l:tgwJbb;:9cawawNJbbbbawawJbbbb9GgDEgq:mgkaqaicb9iEalMgwawNakaqaocb9iEarMgqaqNMM:r:vglNJbbbZJbbb:;aDEMgr:lJbbb9p9DTmbar:Ohixekcjjjj94hikadai86bbdndnaqalNJbbbZJbbb:;aqJbbbb9GEMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94hdkavad86bbdndnawalNJbbbZJbbb:;awJbbbb9GEMgw:lJbbb9p9DTmbaw:Ohdxekcjjjj94hdkabad86bbabclfhbaecufgembxdkkaeTmbindndnabclfgd8Ueb:Yab8Uebgi:Ygl:l:tabcdfgv8Uebgo:Ygr:l:tgwJb;:FSawawNJbbbbawawJbbbb9GgDEgq:mgkaqaicb9iEalMgwawNakaqaocb9iEarMgqaqNMM:r:vglNJbbbZJbbb:;aDEMgr:lJbbb9p9DTmbar:Ohixekcjjjj94hikadai87ebdndnaqalNJbbbZJbbb:;aqJbbbb9GEMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94hdkavad87ebdndnawalNJbbbZJbbb:;awJbbbb9GEMgw:lJbbb9p9DTmbaw:Ohdxekcjjjj94hdkabad87ebabcwfhbaecufgembkkk::ioiue99dud99dud99dnaeTmbcbhiabhlindndnal8Uebgv:YgoJ:ji:1Salcof8UebgrciVgw:Y:vgDNJbbbZJbbb:;avcu9kEMgq:lJbbb9p9DTmbaq:Ohkxekcjjjj94hkkalclf8Uebhvalcdf8UebhxabaiarcefciGfcetfak87ebdndnax:YgqaDNJbbbZJbbb:;axcu9kEMgm:lJbbb9p9DTmbam:Ohxxekcjjjj94hxkabaiarciGfgkcd7cetfax87ebdndnav:YgmaDNJbbbZJbbb:;avcu9kEMgP:lJbbb9p9DTmbaP:Ohvxekcjjjj94hvkabaiarcufciGfcetfav87ebdndnawaw2:ZgPaPMaoaoN:taqaqN:tamamN:tgoJbbbbaoJbbbb9GE:raDNJbbbZMgD:
lJbbb9p9DTmbaD:Ohrxekcjjjj94hrkabakcetfar87ebalcwfhlaiclfhiaecufgembkkk9mbdnadcd4ae2gdTmbinababydbgecwtcw91:Yaece91cjjj98Gcjjj;8if::NUdbabclfhbadcufgdmbkkk:Tvirud99eudndnadcl9hmbaeTmeindndnabRbbgiabcefgl8Sbbgvabcdfgo8Sbbgrf9R:YJbbuJabcifgwRbbgdce4adVgDcd4aDVgDcl4aDVgD:Z:vgqNJbbbZMgk:lJbbb9p9DTmbak:Ohxxekcjjjj94hxkaoax86bbdndnaraif:YaqNJbbbZMgk:lJbbb9p9DTmbak:Ohoxekcjjjj94hokalao86bbdndnavaifar9R:YaqNJbbbZMgk:lJbbb9p9DTmbak:Ohixekcjjjj94hikabai86bbdndnaDadcetGadceGV:ZaqNJbbbZMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94hdkawad86bbabclfhbaecufgembxdkkaeTmbindndnab8Vebgiabcdfgl8Uebgvabclfgo8Uebgrf9R:YJbFu9habcofgw8Vebgdce4adVgDcd4aDVgDcl4aDVgDcw4aDVgD:Z:vgqNJbbbZMgk:lJbbb9p9DTmbak:Ohxxekcjjjj94hxkaoax87ebdndnaraif:YaqNJbbbZMgk:lJbbb9p9DTmbak:Ohoxekcjjjj94hokalao87ebdndnavaifar9R:YaqNJbbbZMgk:lJbbb9p9DTmbak:Ohixekcjjjj94hikabai87ebdndnaDadcetGadceGV:ZaqNJbbbZMgq:lJbbb9p9DTmbaq:Ohdxekcjjjj94hdkawad87ebabcwfhbaecufgembkkk9teiucbcbyd:K1jjbgeabcifc98GfgbBd:K1jjbdndnabZbcztgd9nmbcuhiabad9RcFFifcz4nbcuSmekaehikaik;teeeudndnaeabVciGTmbabhixekdndnadcz9pmbabhixekabhiinaiaeydbBdbaiaeydlBdlaiaeydwBdwaiaeydxBdxaeczfheaiczfhiadc9Wfgdcs0mbkkadcl6mbinaiaeydbBdbaeclfheaiclfhiadc98fgdci0mbkkdnadTmbinaiaeRbb86bbaicefhiaecefheadcufgdmbkkabk:3eedudndnabciGTmbabhixekaecFeGc:b:c:ew2hldndnadcz9pmbabhixekabhiinaialBdxaialBdwaialBdlaialBdbaiczfhiadc9Wfgdcs0mbkkadcl6mbinaialBdbaiclfhiadc98fgdci0mbkkdnadTmbinaiae86bbaicefhiadcufgdmbkkabkk81dbcjwk8Kbbbbdbbblbbbwbbbbbbbebbbdbbblbbbwbbbbc:Kwkl8WNbb'; // embed! base
var wasm_simd =
'b9H79TebbbeKl9Gbb9Gvuuuuueu9Giuuub9Geueuikqbbebeedddilve9Weeeviebeoweuec:q:6dkr;leDo9TW9T9VV95dbH9F9F939H79T9F9J9H229F9Jt9VV7bb8A9TW79O9V9Wt9F9KW9J9V9KW9wWVtW949c919M9MWVbdY9TW79O9V9Wt9F9KW9J9V9KW69U9KW949c919M9MWVblE9TW79O9V9Wt9F9KW9J9V9KW69U9KW949tWG91W9U9JWbvL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9p9JtboK9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9r919HtbrL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVT949Wbwl79IV9RbDq:p9sqlbzik9:evu8Jjjjjbcz9Rhbcbheincbhdcbhiinabcwfadfaicjuaead4ceGglE86bbaialfhiadcefgdcw9hmbkaec:q:yjjbfai86bbaecitc:q1jjbfab8Piw83ibaecefgecjd9hmbkk:N8JlHud97euo978Jjjjjbcj;kb9Rgv8Kjjjjbc9:hodnadcefal0mbcuhoaiRbbc:Ge9hmbavaialfgrad9Rad;8qbbcj;abad9UhlaicefhodnaeTmbadTmbalc;WFbGglcjdalcjd6EhwcbhDinawaeaD9RaDawfae6Egqcsfglc9WGgkci2hxakcethmalcl4cifcd4hPabaDad2fhsakc;ab6hzcbhHincbhOaohAdndninaraA9RaP6meavcj;cbfaOak2fhCaAaPfhocbhidnazmbarao9Rc;Gb6mbcbhlinaCalfhidndndndndnaAalco4fRbbgXciGPlbedibkaipxbbbbbbbbbbbbbbbbpklbxikaiaopbblaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklbaoclfaYpQbfaKc:q:yjjbfRbbfhoxdkaiaopbbwaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklbaocwfaYpQbfaKc:q:yjjbfRbbfhoxekaiaopbbbpklbaoczfhokdndndndndnaXcd4ciGPlbedibkaipxbbbbbbbbbbbbbbbbpklzxikaiaopbblaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklzaoclfaYpQbfaKc:q:yjjbfRbbfhoxdkaiaopbbwaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1j
jbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklzaocwfaYpQbfaKc:q:yjjbfRbbfhoxekaiaopbbbpklzaoczfhokdndndndndnaXcl4ciGPlbedibkaipxbbbbbbbbbbbbbbbbpklaxikaiaopbblaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklaaoclfaYpQbfaKc:q:yjjbfRbbfhoxdkaiaopbbwaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spklaaocwfaYpQbfaKc:q:yjjbfRbbfhoxekaiaopbbbpklaaoczfhokdndndndndnaXco4Plbedibkaipxbbbbbbbbbbbbbbbbpkl8WxikaiaopbblaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibaXc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spkl8WaoclfaYpQbfaXc:q:yjjbfRbbfhoxdkaiaopbbwaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9ogLpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibaXc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgXcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spkl8WaocwfaYpQbfaXc:q:yjjbfRbbfhoxekaiaopbbbpkl8Waoczfhokalc;abfhialcjefak0meaihlarao9Rc;Fb0mbkkdnaiak9pmbaici4hlinarao9RcK6miaCaifhXdndndndndnaAaico4fRbbalcoG4ciGPlbedibkaXpxbbbbbbbbbbbbbbbbpkbbxikaXaopbblaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLgQcdp:meaQpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9ogLpxiiiiiiiiiiiiiiiip8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spkbbaoclfaYpQbfaKc:q:yjjbfRbbfhoxdkaXaopbbwaopbbbgQclp:meaQpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9og
Lpxssssssssssssssssp8JgQp5b9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibaKc:q:yjjbfpbbbgYaYpmbbbbbbbbbbbbbbbbaQp5e9cjF;8;4;W;G;ab9:9cU1:NgKcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPaLaQp9spkbbaocwfaYpQbfaKc:q:yjjbfRbbfhoxekaXaopbbbpkbbaoczfhokalcdfhlaiczfgiak6mbkkaoTmeaohAaOcefgOclSmdxbkkc9:hoxlkdnakTmbavcjdfaHfhiavaHfpbdbhYcbhXinaiavcj;cbfaXfglpblbgLcep9TaLpxeeeeeeeeeeeeeeeegQp9op9Hp9rgLalakfpblbg8Acep9Ta8AaQp9op9Hp9rg8ApmbzeHdOiAlCvXoQrLgEalamfpblbg3cep9Ta3aQp9op9Hp9rg3alaxfpblbg5cep9Ta5aQp9op9Hp9rg5pmbzeHdOiAlCvXoQrLg8EpmbezHdiOAlvCXorQLgQaQpmbedibedibedibediaYp9UgYp9AdbbaiadfglaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaladfglaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaladfglaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaladfglaYaEa8EpmwDKYqk8AExm35Ps8E8FgQaQpmbedibedibedibedip9UgYp9AdbbaladfglaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaladfglaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaladfglaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaladfglaYaLa8ApmwKDYq8AkEx3m5P8Es8FgLa3a5pmwKDYq8AkEx3m5P8Es8Fg8ApmbezHdiOAlvCXorQLgQaQpmbedibedibedibedip9UgYp9AdbbaladfglaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaladfglaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaladfglaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaladfglaYaLa8ApmwDKYqk8AExm35Ps8E8FgQaQpmbedibedibedibedip9UgYp9AdbbaladfglaYaQaQpmlvorlvorlvorlvorp9UgYp9AdbbaladfglaYaQaQpmwDqkwDqkwDqkwDqkp9UgYp9AdbbaladfglaYaQaQpmxmPsxmPsxmPsxmPsp9UgYp9AdbbaladfhiaXczfgXak6mbkkaHclfgHad6mbkasavcjdfaqad2;8qbbavavcjdfaqcufad2fad;8qbbaqaDfgDae6mbkkcbc99arao9Radcaadca0ESEhokavcj;kbf8Kjjjjbaokwbz:bjjjbk::seHu8Jjjjjbc;ae9Rgv8Kjjjjbc9:hodnaeci9UgrcHfal0mbcuhoaiRbbgwc;WeGc;Ge9hmbawcsGgwce0mbavc;abfcFecje;8kbavcUf9cu83ibavc8Wf9cu83ibavcyf9cu83ibavcaf9cu83ibavcKf9cu83ibavczf9cu83ibav9cu83iwav9cu83ibaialfc9WfhDaicefgqarfhidnaeTmbcmcsawceSEhkcbhxcbhmcbhPcbhwcbhlindnaiaD9nmbc9:hoxikdndnaqRbbgoc;Ve0mbavc;abfalaocu7gscl4fcsGcitfgzydlhrazydbhzdnaocsGgHak9pmbavawasfcsGcdtfydbaxaHEhoaHThsdndnadcd9hmbabaPcetfgHaz87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHazBdbaHcwfaoBdbaHclfarBdbkaxasfhxcdhHavawcdtfaoBdbawasfhwcehsalhOxdkdndnaHcsSmbaHc987
aHamffcefhoxekaicefhoai8SbbgHcFeGhsdndnaHcu9mmbaohixekaicvfhiascFbGhscrhHdninao8SbbgOcFbGaHtasVhsaOcu9kmeaocefhoaHcrfgHc8J9hmbxdkkaocefhikasce4cbasceG9R7amfhokdndnadcd9hmbabaPcetfgHaz87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHazBdbaHcwfaoBdbaHclfarBdbkcdhHavawcdtfaoBdbcehsawcefhwalhOaohmxekdnaocpe0mbaxcefgHavawaDaocsGfRbbgocl49RcsGcdtfydbaocz6gzEhravawao9RcsGcdtfydbaHazfgAaocsGgHEhoaHThCdndnadcd9hmbabaPcetfgHax87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHaxBdbaHcwfaoBdbaHclfarBdbkcdhsavawcdtfaxBdbavawcefgwcsGcdtfarBdbcihHavc;abfalcitfgOaxBdlaOarBdbavawazfgwcsGcdtfaoBdbalcefcsGhOawaCfhwaxhzaAaCfhxxekaxcbaiRbbgOEgzaoc;:eSgHfhraOcsGhCaOcl4hAdndnaOcs0mbarcefhoxekarhoavawaA9RcsGcdtfydbhrkdndnaCmbaocefhxxekaohxavawaO9RcsGcdtfydbhokdndnaHTmbaicefhHxekaicdfhHai8SbegscFeGhzdnascu9kmbaicofhXazcFbGhzcrhidninaH8SbbgscFbGaitazVhzascu9kmeaHcefhHaicrfgic8J9hmbkaXhHxekaHcefhHkazce4cbazceG9R7amfgmhzkdndnaAcsSmbaHhsxekaHcefhsaH8SbbgicFeGhrdnaicu9kmbaHcvfhXarcFbGhrcrhidninas8SbbgHcFbGaitarVhraHcu9kmeascefhsaicrfgic8J9hmbkaXhsxekascefhskarce4cbarceG9R7amfgmhrkdndnaCcsSmbashixekascefhias8SbbgocFeGhHdnaocu9kmbascvfhXaHcFbGhHcrhodninai8SbbgscFbGaotaHVhHascu9kmeaicefhiaocrfgoc8J9hmbkaXhixekaicefhikaHce4cbaHceG9R7amfgmhokdndnadcd9hmbabaPcetfgHaz87ebaHclfao87ebaHcdfar87ebxekabaPcdtfgHazBdbaHcwfaoBdbaHclfarBdbkcdhsavawcdtfazBdbavawcefgwcsGcdtfarBdbcihHavc;abfalcitfgXazBdlaXarBdbavawaOcz6aAcsSVfgwcsGcdtfaoBdbawaCTaCcsSVfhwalcefcsGhOkaqcefhqavc;abfaOcitfgOarBdlaOaoBdbavc;abfalasfcsGcitfgraoBdlarazBdbawcsGhwalaHfcsGhlaPcifgPae6mbkkcbc99aiaDSEhokavc;aef8Kjjjjbaok:flevu8Jjjjjbcz9Rhvc9:hodnaecvfal0mbcuhoaiRbbc;:eGc;qe9hmbav9cb83iwaicefhraialfc98fhwdnaeTmbdnadcdSmbcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbxdkkaocefhrkabaDcdtfaic8Etc8F91aicd47avcwfaiceGcdtVgoydbfglBdbaoalBdbaDcefgDae9hmbxdkkcbhDindnaraw6mbc9:skarcefhoar8SbbglcFeGhidndnalcu9mmbaohrxekarcvfhraicFbGhicrhldninao8SbbgdcFbGaltaiVhiadcu9kmeaocefhoalcrfglc8J9hmbx
dkkaocefhrkabaDcetfaic8Etc8F91aicd47avcwfaiceGcdtVgoydbfgl87ebaoalBdbaDcefgDae9hmbkkcbc99arawSEhokaok:wPliuo97eue978Jjjjjbca9Rhiaec98Ghldndnadcl9hmbdnalTmbcbhvabhdinadadpbbbgocKp:RecKp:Sep;6egraocwp:RecKp:Sep;6earp;Geaoczp:RecKp:Sep;6egwp;Gep;Kep;LegDpxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgkp9op9rp;Kegrpxbb;:9cbb;:9cbb;:9cbb;:9cararp;MeaDaDp;Meawaqawakp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFbbbFbbbFbbbFbbbp9oaopxbbbFbbbFbbbFbbbFp9op9qarawp;Meaqp;Kecwp:RepxbFbbbFbbbFbbbFbbp9op9qaDawp;Meaqp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qpkbbadczfhdavclfgval6mbkkalaeSmeaipxbbbbbbbbbbbbbbbbgqpklbaiabalcdtfgdaeciGglcdtgv;8qbbdnalTmbaiaipblbgocKp:RecKp:Sep;6egraocwp:RecKp:Sep;6earp;Geaoczp:RecKp:Sep;6egwp;Gep;Kep;LegDaqp:2egqarpxbbbjbbbjbbbjbbbjgkp9op9rp;Kegrpxbb;:9cbb;:9cbb;:9cbb;:9cararp;MeaDaDp;Meawaqawakp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFbbbFbbbFbbbFbbbp9oaopxbbbFbbbFbbbFbbbFp9op9qarawp;Meaqp;Kecwp:RepxbFbbbFbbbFbbbFbbp9op9qaDawp;Meaqp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qpklbkadaiav;8qbbskdnalTmbcbhvabhdinadczfgxaxpbbbgopxbbbbbbFFbbbbbbFFgkp9oadpbbbgDaopmbediwDqkzHOAKY8AEgwczp:Reczp:Sep;6egraDaopmlvorxmPsCXQL358E8FpxFubbFubbFubbFubbp9op;6eawczp:Sep;6egwp;Gearp;Gep;Kep;Legopxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgmp9op9rp;Kegrpxb;:FSb;:FSb;:FSb;:FSararp;Meaoaop;Meawaqawamp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFFbbFFbbFFbbFFbbp9oaoawp;Meaqp;Keczp:Rep9qgoarawp;Meaqp;KepxFFbbFFbbFFbbFFbbp9ogrpmwDKYqk8AExm35Ps8E8Fp9qpkbbadaDakp9oaoarpmbezHdiOAlvCXorQLp9qpkbbadcafhdavclfgval6mbkkalaeSmbaiaeciGgvcitgdfcbcaad9R;8kbaiabalcitfglad;8qbbdnavTmbaiaipblzgopxbbbbbbFFbbbbbbFFgkp9oaipblbgDaopmbediwDqkzHOAKY8AEgwczp:Reczp:Sep;6egraDaopmlvorxmPsCXQL358E8FpxFubbFubbFubbFubbp9op;6eawczp:Sep;6egwp;Gearp;Gep;Kep;Legopxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgmp9op9rp;Kegrpxb;:FSb;:FSb;:FSb;:FSararp;Meaoaop;Meawaqawamp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFFbbFFbbFFbbFFbbp9oaoawp;Meaqp;Ke
czp:Rep9qgoarawp;Meaqp;KepxFFbbFFbbFFbbFFbbp9ogrpmwDKYqk8AExm35Ps8E8Fp9qpklzaiaDakp9oaoarpmbezHdiOAlvCXorQLp9qpklbkalaiad;8qbbkk;4wllue97euv978Jjjjjbc8W9Rhidnaec98GglTmbcbhvabhoinaiaopbbbgraoczfgwpbbbgDpmlvorxmPsCXQL358E8Fgqczp:Segkclp:RepklbaopxbbjZbbjZbbjZbbjZpx;Zl81Z;Zl81Z;Zl81Z;Zl81Zakpxibbbibbbibbbibbbp9qp;6ep;NegkaraDpmbediwDqkzHOAKY8AEgrczp:Reczp:Sep;6ep;MegDaDp;Meakarczp:Sep;6ep;Megxaxp;Meakaqczp:Reczp:Sep;6ep;Megqaqp;Mep;Kep;Kep;Lepxbbbbbbbbbbbbbbbbp:4ep;Jepxb;:FSb;:FSb;:FSb;:FSgkp;Mepxbbn0bbn0bbn0bbn0grp;KepxFFbbFFbbFFbbFFbbgmp9oaxakp;Mearp;Keczp:Rep9qgxaDakp;Mearp;Keamp9oaqakp;Mearp;Keczp:Rep9qgkpmbezHdiOAlvCXorQLgrp5baipblbpEb:T:j83ibaocwfarp5eaipblbpEe:T:j83ibawaxakpmwDKYqk8AExm35Ps8E8Fgkp5baipblbpEd:T:j83ibaocKfakp5eaipblbpEi:T:j83ibaocafhoavclfgval6mbkkdnalaeSmbaiaeciGgvcitgofcbcaao9R;8kbaiabalcitfgwao;8qbbdnavTmbaiaipblbgraipblzgDpmlvorxmPsCXQL358E8Fgqczp:Segkclp:RepklaaipxbbjZbbjZbbjZbbjZpx;Zl81Z;Zl81Z;Zl81Z;Zl81Zakpxibbbibbbibbbibbbp9qp;6ep;NegkaraDpmbediwDqkzHOAKY8AEgrczp:Reczp:Sep;6ep;MegDaDp;Meakarczp:Sep;6ep;Megxaxp;Meakaqczp:Reczp:Sep;6ep;Megqaqp;Mep;Kep;Kep;Lepxbbbbbbbbbbbbbbbbp:4ep;Jepxb;:FSb;:FSb;:FSb;:FSgkp;Mepxbbn0bbn0bbn0bbn0grp;KepxFFbbFFbbFFbbFFbbgmp9oaxakp;Mearp;Keczp:Rep9qgxaDakp;Mearp;Keamp9oaqakp;Mearp;Keczp:Rep9qgkpmbezHdiOAlvCXorQLgrp5baipblapEb:T:j83ibaiarp5eaipblapEe:T:j83iwaiaxakpmwDKYqk8AExm35Ps8E8Fgkp5baipblapEd:T:j83izaiakp5eaipblapEi:T:j83iKkawaiao;8qbbkk:Pddiue978Jjjjjbc;ab9Rhidnadcd4ae2glc98GgvTmbcbheabhdinadadpbbbgocwp:Recwp:Sep;6eaocep:SepxbbjFbbjFbbjFbbjFp9opxbbjZbbjZbbjZbbjZp:Uep;Mepkbbadczfhdaeclfgeav6mbkkdnavalSmbaialciGgecdtgdVcbc;abad9R;8kbaiabavcdtfgvad;8qbbdnaeTmbaiaipblbgocwp:Recwp:Sep;6eaocep:SepxbbjFbbjFbbjFbbjFp9opxbbjZbbjZbbjZbbjZp:Uep;Mepklbkavaiad;8qbbkk9teiucbcbydj1jjbgeabcifc98GfgbBdj1jjbdndnabZbcztgd9nmbcuhiabad9RcFFifcz4nbcuSmekaehikaikkkebcjwklz:Dbb'; // embed! simd
'b9H79TebbbeKl9Gbb9Gvuuuuueu9Giuuub9Geueuixkbbebeeddddilve9Weeeviebeoweuec:q:6dkr;Neqo9TW9T9VV95dbH9F9F939H79T9F9J9H229F9Jt9VV7bb8A9TW79O9V9Wt9F9KW9J9V9KW9wWVtW949c919M9MWVbdY9TW79O9V9Wt9F9KW9J9V9KW69U9KW949c919M9MWVblE9TW79O9V9Wt9F9KW9J9V9KW69U9KW949tWG91W9U9JWbvL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9p9JtboK9TW79O9V9Wt9F9KW9J9V9KWS9P2tWV9r919HtbrL9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVT949WbwY9TW79O9V9Wt9F9KW9J9V9KWS9P2tWVJ9V29VVbDl79IV9Rbqq:Ctklbzik9:evu8Jjjjjbcz9Rhbcbheincbhdcbhiinabcwfadfaicjuaead4ceGglE86bbaialfhiadcefgdcw9hmbkaec:q:yjjbfai86bbaecitc:q1jjbfab8Piw83ibaecefgecjd9hmbkk:183lYud97dur978Jjjjjbcj;kb9Rgv8Kjjjjbc9:hodnalTmbcuhoaiRbbgrc;WeGc:Ge9hmbarcsGgwce0mbc9:hoalcufadcd4cbawEgDadfgrcKcaawEgqaraq0Egk6mbaicefhxavaialfgmar9Rgoad;8qbbcj;abad9Uc;WFbGcjdadca0EhPdndndnadTmbaoadfhscbhzinaeaz9nmdamax9RaD6miabazad2fhHaxaDfhOaPaeaz9RazaPfae6EgAcsfgocl4cifcd4hCavcj;cbfaoc9WGgXcetfhQavcj;cbfaXci2fhLavcj;cbfaXfhKcbhYaoc;ab6h8AincbhodnawTmbaxaYcd4fRbbhokaocFeGhEcbh3avcj;cbfh5indndndndnaEa3cet4ciGgoc9:fPdebdkamaO9RaX6mwavcj;cbfa3aX2faOaX;8qbbaOaAfhOxdkavcj;cbfa3aX2fcbaX;8kbxekamaO9RaC6moaoclVcbawEhraOaCfhocbhidna8Ambamao9Rc;Gb6mbcbhlina5alfhidndndndndndnaOalco4fRbbgqciGarfPDbedibledibkaipxbbbbbbbbbbbbbbbbpklbxlkaiaopbblaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLg8Ecdp:mea8EpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9og8Fpxiiiiiiiiiiiiiiiip8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaaaoclffahc:q:yjjbfRbbfhoxikaiaopbbwaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9og8Fpxssssssssssssssssp8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaaaocwffahc:q:yjjbfRbbfhoxdkaiaopbbbpklbaoczfhoxekaiaopbbdaoRbbgacitc:q1jjbfpbibaac:q:yjjbfRbbgapsaoRbeghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPpklbaaaocdffahc:q:yjjbfRbbfhokdndndndndndnaqcd4ciGarfPDbedibledibkaiczfpxbbbbbbbbbbbbbbbbpklbxlkaiczfaopbblaopbbbg
8Eclp:mea8EpmbzeHdOiAlCvXoQrLg8Ecdp:mea8EpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9og8Fpxiiiiiiiiiiiiiiiip8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaaaoclffahc:q:yjjbfRbbfhoxikaiczfaopbbwaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9og8Fpxssssssssssssssssp8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaaaocwffahc:q:yjjbfRbbfhoxdkaiczfaopbbbpklbaoczfhoxekaiczfaopbbdaoRbbgacitc:q1jjbfpbibaac:q:yjjbfRbbgapsaoRbeghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPpklbaaaocdffahc:q:yjjbfRbbfhokdndndndndndnaqcl4ciGarfPDbedibledibkaicafpxbbbbbbbbbbbbbbbbpklbxlkaicafaopbblaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLg8Ecdp:mea8EpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9og8Fpxiiiiiiiiiiiiiiiip8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaaaoclffahc:q:yjjbfRbbfhoxikaicafaopbbwaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9og8Fpxssssssssssssssssp8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaaaocwffahc:q:yjjbfRbbfhoxdkaicafaopbbbpklbaoczfhoxekaicafaopbbdaoRbbgacitc:q1jjbfpbibaac:q:yjjbfRbbgapsaoRbeghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPpklbaaaocdffahc:q:yjjbfRbbfhokdndndndndndnaqco4arfPDbedibledibkaic8Wfpxbbbbbbbbbbbbbbbbpklbxlkaic8Wfaopbblaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLg8Ecdp:mea8EpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9og8Fpxiiiiiiiiiiiiiiiip8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngicitc:q1jjbfpbibaic:q:yjjbfRbbgipsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Ngqcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaiaoclffaqc:q:yjjbfRbbfhoxikaic8Wfaopbbwaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9og8Fpxssssssssssssssssp8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngicitc:q1jjbfpbibaic:q:yjjbfRbbgipsa8Ep5e9c
jF;8;4;W;G;ab9:9cU1:Ngqcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spklbaiaocwffaqc:q:yjjbfRbbfhoxdkaic8Wfaopbbbpklbaoczfhoxekaic8WfaopbbdaoRbbgicitc:q1jjbfpbibaic:q:yjjbfRbbgipsaoRbegqcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPpklbaiaocdffaqc:q:yjjbfRbbfhokalc;abfhialcjefaX0meaihlamao9Rc;Fb0mbkkdnaiaX9pmbaici4hlinamao9RcK6mwa5aifhqdndndndndndnaOaico4fRbbalcoG4ciGarfPDbedibledibkaqpxbbbbbbbbbbbbbbbbpkbbxlkaqaopbblaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLg8Ecdp:mea8EpmbzeHdOiAlCvXoQrLpxiiiiiiiiiiiiiiiip9og8Fpxiiiiiiiiiiiiiiiip8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spkbbaaaoclffahc:q:yjjbfRbbfhoxikaqaopbbwaopbbbg8Eclp:mea8EpmbzeHdOiAlCvXoQrLpxssssssssssssssssp9og8Fpxssssssssssssssssp8Jg8Ep5b9cjF;8;4;W;G;ab9:9cU1:Ngacitc:q1jjbfpbibaac:q:yjjbfRbbgapsa8Ep5e9cjF;8;4;W;G;ab9:9cU1:Nghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPa8Fa8Ep9spkbbaaaocwffahc:q:yjjbfRbbfhoxdkaqaopbbbpkbbaoczfhoxekaqaopbbdaoRbbgacitc:q1jjbfpbibaac:q:yjjbfRbbgapsaoRbeghcitc:q1jjbfpbibp9UpmbedilvorzHOACXQLpPpkbbaaaocdffahc:q:yjjbfRbbfhokalcdfhlaiczfgiaX6mbkkaohOaoTmoka5aXfh5a3cefg3cl9hmbkdndndndnawTmbasaYcd4fRbbglciGPlbedwbkaXTmdavcjdfaYfhlavaYfpbdbhgcbhoinalavcj;cbfaofpblbg8JaKaofpblbg8KpmbzeHdOiAlCvXoQrLg8LaQaofpblbg8MaLaofpblbg8NpmbzeHdOiAlCvXoQrLgypmbezHdiOAlvCXorQLg8Ecep9Ta8Epxeeeeeeeeeeeeeeeeg8Fp9op9Hp9rg8Eagp9Uggp9Abbbaladfglaga8Ea8Epmlvorlvorlvorlvorp9Uggp9Abbbaladfglaga8Ea8EpmwDqkwDqkwDqkwDqkp9Uggp9Abbbaladfglaga8Ea8EpmxmPsxmPsxmPsxmPsp9Uggp9Abbbaladfglaga8LaypmwDKYqk8AExm35Ps8E8Fg8Ecep9Ta8Ea8Fp9op9Hp9rg8Ep9Uggp9Abbbaladfglaga8Ea8Epmlvorlvorlvorlvorp9Uggp9Abbbaladfglaga8Ea8EpmwDqkwDqkwDqkwDqkp9Uggp9Abbbaladfglaga8Ea8EpmxmPsxmPsxmPsxmPsp9Uggp9Abbbaladfglaga8Ja8KpmwKDYq8AkEx3m5P8Es8Fg8Ja8Ma8NpmwKDYq8AkEx3m5P8Es8Fg8KpmbezHdiOAlvCXorQLg8Ecep9Ta8Ea8Fp9op9Hp9rg8Ep9Uggp9Abbbaladfglaga8Ea8Epmlvorlvorlvorlvorp9Uggp9Abbbaladfglaga8Ea8EpmwDqkwDqkwDqkwDqkp9Uggp9Abbbaladfglaga8Ea8EpmxmPsxmPsxmPsxmPsp9Uggp9Abbbaladfgl
aga8Ja8KpmwDKYqk8AExm35Ps8E8Fg8Ecep9Ta8Ea8Fp9op9Hp9rg8Ep9Ug8Fp9Abbbaladfgla8Fa8Ea8Epmlvorlvorlvorlvorp9Ug8Fp9Abbbaladfgla8Fa8Ea8EpmwDqkwDqkwDqkwDqkp9Ug8Fp9Abbbaladfgla8Fa8Ea8EpmxmPsxmPsxmPsxmPsp9Uggp9AbbbaladfhlaoczfgoaX6mbxikkaXTmeavcjdfaYfhlavaYfpbdbhgcbhoinalavcj;cbfaofpblbg8JaKaofpblbg8KpmbzeHdOiAlCvXoQrLg8LaQaofpblbg8MaLaofpblbg8NpmbzeHdOiAlCvXoQrLgypmbezHdiOAlvCXorQLg8Ecep:nea8Epxebebebebebebebebg8Fp9op:bep9rg8Eagp:oeggp9Abbbaladfglaga8Ea8Epmlvorlvorlvorlvorp:oeggp9Abbbaladfglaga8Ea8EpmwDqkwDqkwDqkwDqkp:oeggp9Abbbaladfglaga8Ea8EpmxmPsxmPsxmPsxmPsp:oeggp9Abbbaladfglaga8LaypmwDKYqk8AExm35Ps8E8Fg8Ecep:nea8Ea8Fp9op:bep9rg8Ep:oeggp9Abbbaladfglaga8Ea8Epmlvorlvorlvorlvorp:oeggp9Abbbaladfglaga8Ea8EpmwDqkwDqkwDqkwDqkp:oeggp9Abbbaladfglaga8Ea8EpmxmPsxmPsxmPsxmPsp:oeggp9Abbbaladfglaga8Ja8KpmwKDYq8AkEx3m5P8Es8Fg8Ja8Ma8NpmwKDYq8AkEx3m5P8Es8Fg8KpmbezHdiOAlvCXorQLg8Ecep:nea8Ea8Fp9op:bep9rg8Ep:oeggp9Abbbaladfglaga8Ea8Epmlvorlvorlvorlvorp:oeggp9Abbbaladfglaga8Ea8EpmwDqkwDqkwDqkwDqkp:oeggp9Abbbaladfglaga8Ea8EpmxmPsxmPsxmPsxmPsp:oeggp9Abbbaladfglaga8Ja8KpmwDKYqk8AExm35Ps8E8Fg8Ecep:nea8Ea8Fp9op:bep9rg8Ep:oeg8Fp9Abbbaladfgla8Fa8Ea8Epmlvorlvorlvorlvorp:oeg8Fp9Abbbaladfgla8Fa8Ea8EpmwDqkwDqkwDqkwDqkp:oeg8Fp9Abbbaladfgla8Fa8Ea8EpmxmPsxmPsxmPsxmPsp:oeggp9AbbbaladfhlaoczfgoaX6mbxdkkaXTmbcbhocbalcl4gl9Rc8FGhiavcjdfaYfhravaYfpbdbh8Finaravcj;cbfaofpblbggaKaofpblbg8JpmbzeHdOiAlCvXoQrLg8KaQaofpblbg8LaLaofpblbg8MpmbzeHdOiAlCvXoQrLg8NpmbezHdiOAlvCXorQLg8Eaip:Rea8Ealp:Sep9qg8Ea8Fp9rg8Fp9Abbbaradfgra8Fa8Ea8Epmlvorlvorlvorlvorp9rg8Fp9Abbbaradfgra8Fa8Ea8EpmwDqkwDqkwDqkwDqkp9rg8Fp9Abbbaradfgra8Fa8Ea8EpmxmPsxmPsxmPsxmPsp9rg8Fp9Abbbaradfgra8Fa8Ka8NpmwDKYqk8AExm35Ps8E8Fg8Eaip:Rea8Ealp:Sep9qg8Ep9rg8Fp9Abbbaradfgra8Fa8Ea8Epmlvorlvorlvorlvorp9rg8Fp9Abbbaradfgra8Fa8Ea8EpmwDqkwDqkwDqkwDqkp9rg8Fp9Abbbaradfgra8Fa8Ea8EpmxmPsxmPsxmPsxmPsp9rg8Fp9Abbbaradfgra8Faga8JpmwKDYq8AkEx3m5P8Es8Fgga8La8MpmwKDYq8AkEx3m5P8Es8Fg8JpmbezHdiOAlvCXorQLg8Eaip:Rea8Ealp:Sep9qg8Ep9rg8Fp9Abbbaradfgra8Fa8Ea8Epmlvorlvorlvorlvorp9rg8Fp9
Abbbaradfgra8Fa8Ea8EpmwDqkwDqkwDqkwDqkp9rg8Fp9Abbbaradfgra8Fa8Ea8EpmxmPsxmPsxmPsxmPsp9rg8Fp9Abbbaradfgra8Faga8JpmwDKYqk8AExm35Ps8E8Fg8Eaip:Rea8Ealp:Sep9qg8Ep9rg8Fp9Abbbaradfgra8Fa8Ea8Epmlvorlvorlvorlvorp9rg8Fp9Abbbaradfgra8Fa8Ea8EpmwDqkwDqkwDqkwDqkp9rg8Fp9Abbbaradfgra8Fa8Ea8EpmxmPsxmPsxmPsxmPsp9rg8Fp9AbbbaradfhraoczfgoaX6mbkkaYclfgYad6mbkaHavcjdfaAad2;8qbbavavcjdfaAcufad2fad;8qbbaAazfhzc9:hoaOhxaOmbxlkkaeTmbaDalfhrcbhocuhlinaralaD9RglfaD6mdaPaeao9RaoaPfae6Eaofgoae6mbkaial9Rhxkcbc99amax9RakSEhoxekc9:hokavcj;kbf8Kjjjjbaokwbz:bjjjbk:TseHu8Jjjjjbc;ae9Rgv8Kjjjjbc9:hodnaeci9UgrcHfal0mbcuhoaiRbbgwc;WeGc;Ge9hmbawcsGgDce0mbavc;abfcFecje;8kbavcUf9cu83ibavc8Wf9cu83ibavcyf9cu83ibavcaf9cu83ibavcKf9cu83ibavczf9cu83ibav9cu83iwav9cu83ibaialfc9WfhqaicefgwarfhldnaeTmbcmcsaDceSEhkcbhxcbhmcbhrcbhicbhoindnalaq9nmbc9:hoxikdndnawRbbgDc;Ve0mbavc;abfaoaDcu7gPcl4fcsGcitfgsydlhzasydbhHdndnaDcsGgsak9pmbavaiaPfcsGcdtfydbaxasEhDaxasTgOfhxxekdndnascsSmbcehOasc987asamffcefhDxekalcefhDal8SbbgscFeGhPdndnascu9mmbaDhlxekalcvfhlaPcFbGhPcrhsdninaD8SbbgOcFbGastaPVhPaOcu9kmeaDcefhDascrfgsc8J9hmbxdkkaDcefhlkcehOaPce4cbaPceG9R7amfhDkaDhmkavc;abfaocitfgsaDBdbasazBdlavaicdtfaDBdbavc;abfaocefcsGcitfgsaHBdbasaDBdlaocdfhoaOaifhidnadcd9hmbabarcetfgsaH87ebasclfaD87ebascdfaz87ebxdkabarcdtfgsaHBdbascwfaDBdbasclfazBdbxekdnaDcpe0mbaxcefgOavaiaqaDcsGfRbbgscl49RcsGcdtfydbascz6gPEhDavaias9RcsGcdtfydbaOaPfgzascsGgOEhsaOThOdndnadcd9hmbabarcetfgHax87ebaHclfas87ebaHcdfaD87ebxekabarcdtfgHaxBdbaHcwfasBdbaHclfaDBdbkavaicdtfaxBdbavc;abfaocitfgHaDBdbaHaxBdlavaicefgicsGcdtfaDBdbavc;abfaocefcsGcitfgHasBdbaHaDBdlavaiaPfgicsGcdtfasBdbavc;abfaocdfcsGcitfgDaxBdbaDasBdlaocifhoaiaOfhiazaOfhxxekaxcbalRbbgHEgAaDc;:eSgDfhzaHcsGhCaHcl4hXdndnaHcs0mbazcefhOxekazhOavaiaX9RcsGcdtfydbhzkdndnaCmbaOcefhxxekaOhxavaiaH9RcsGcdtfydbhOkdndnaDTmbalcefhDxekalcdfhDal8SbegPcFeGhsdnaPcu9kmbalcofhAascFbGhscrhldninaD8SbbgPcFbGaltasVhsaPcu9kmeaDcefhDalcrfglc8J9hmbkaAhDxekaDcefhDkasce4cbasceG9R7amfgmhAkdndnaXcsSmbaDhsxekaDcefhsaD8SbbglcFeGhPdnalcu9kmbaDcvfhzaPcF
bGhPcrhldninas8SbbgDcFbGaltaPVhPaDcu9kmeascefhsalcrfglc8J9hmbkazhsxekascefhskaPce4cbaPceG9R7amfgmhzkdndnaCcsSmbashlxekascefhlas8SbbgDcFeGhPdnaDcu9kmbascvfhOaPcFbGhPcrhDdninal8SbbgscFbGaDtaPVhPascu9kmealcefhlaDcrfgDc8J9hmbkaOhlxekalcefhlkaPce4cbaPceG9R7amfgmhOkdndnadcd9hmbabarcetfgDaA87ebaDclfaO87ebaDcdfaz87ebxekabarcdtfgDaABdbaDcwfaOBdbaDclfazBdbkavc;abfaocitfgDazBdbaDaABdlavaicdtfaABdbavc;abfaocefcsGcitfgDaOBdbaDazBdlavaicefgicsGcdtfazBdbavc;abfaocdfcsGcitfgDaABdbaDaOBdlavaiaHcz6aXcsSVfgicsGcdtfaOBdbaiaCTaCcsSVfhiaocifhokawcefhwaocsGhoaicsGhiarcifgrae6mbkkcbc99alaqSEhokavc;aef8Kjjjjbaok:clevu8Jjjjjbcz9Rhvdnaecvfal9nmbc9:skdnaiRbbc;:eGc;qeSmbcuskav9cb83iwaicefhoaialfc98fhrdnaeTmbdnadcdSmbcbhwindnaoar6mbc9:skaocefhlao8SbbgicFeGhddndnaicu9mmbalhoxekaocvfhoadcFbGhdcrhidninal8SbbgDcFbGaitadVhdaDcu9kmealcefhlaicrfgic8J9hmbxdkkalcefhokabawcdtfadc8Etc8F91adcd47avcwfadceGcdtVglydbfgiBdbalaiBdbawcefgwae9hmbxdkkcbhwindnaoar6mbc9:skaocefhlao8SbbgicFeGhddndnaicu9mmbalhoxekaocvfhoadcFbGhdcrhidninal8SbbgDcFbGaitadVhdaDcu9kmealcefhlaicrfgic8J9hmbxdkkalcefhokabawcetfadc8Etc8F91adcd47avcwfadceGcdtVglydbfgi87ebalaiBdbawcefgwae9hmbkkcbc99aoarSEk:2Pliur97eue978Jjjjjbc8W9Rhiaec98Ghldndnadcl9hmbdnalTmbcbhvabhdinadadpbbbgocKp:RecKp:Sep;6egraocwp:RecKp:Sep;6earp;Geaoczp:RecKp:Sep;6egwp;Gep;Kep;LegDpxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgkp9op9rp;Kegrpxbb;:9cbb;:9cbb;:9cbb;:9cararp;MeaDaDp;Meawaqawakp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFbbbFbbbFbbbFbbbp9oaopxbbbFbbbFbbbFbbbFp9op9qarawp;Meaqp;Kecwp:RepxbFbbbFbbbFbbbFbbp9op9qaDawp;Meaqp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qpkbbadczfhdavclfgval6mbkkalaeSmeaipxbbbbbbbbbbbbbbbbgqpklbaiabalcdtfgdaeciGglcdtgv;8qbbdnalTmbaiaipblbgocKp:RecKp:Sep;6egraocwp:RecKp:Sep;6earp;Geaoczp:RecKp:Sep;6egwp;Gep;Kep;LegDaqp:2egqarpxbbbjbbbjbbbjbbbjgkp9op9rp;Kegrpxbb;:9cbb;:9cbb;:9cbb;:9cararp;MeaDaDp;Meawaqawakp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFbbbFbbbFbbbFbbbp9oaopxbbbFbbbFbbbFbbbFp9op9qarawp;Meaqp;Kec
wp:RepxbFbbbFbbbFbbbFbbp9op9qaDawp;Meaqp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qpklbkadaiav;8qbbskaipxFubbFubbFubbFubbgxpklbdnalTmbcbhvabhdinadczfgmampbbbgopxbbbbbbFFbbbbbbFFgkp9oadpbbbgDaopmbediwDqkzHOAKY8AEgwczp:Reczp:Sep;6egraipblbaDaopmlvorxmPsCXQL358E8Fp9op;6eawczp:Sep;6egwp;Gearp;Gep;Kep;Legopxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgPp9op9rp;Kegrpxb;:FSb;:FSb;:FSb;:FSararp;Meaoaop;MeawaqawaPp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFFbbFFbbFFbbFFbbp9oaoawp;Meaqp;Keczp:Rep9qgoarawp;Meaqp;KepxFFbbFFbbFFbbFFbbp9ogrpmwDKYqk8AExm35Ps8E8Fp9qpkbbadaDakp9oaoarpmbezHdiOAlvCXorQLp9qpkbbadcafhdavclfgval6mbkkalaeSmbaiczfpxbbbbbbbbbbbbbbbbgopklbaiaopklbaiabalcitfgdaeciGglcitgv;8qbbaiaxpkladnalTmbaiaipblzgopxbbbbbbFFbbbbbbFFgkp9oaipblbgDaopmbediwDqkzHOAKY8AEgwczp:Reczp:Sep;6egraipblaaDaopmlvorxmPsCXQL358E8Fp9op;6eawczp:Sep;6egwp;Gearp;Gep;Kep;Legopxbbbbbbbbbbbbbbbbp:2egqarpxbbbjbbbjbbbjbbbjgPp9op9rp;Kegrpxb;:FSb;:FSb;:FSb;:FSararp;Meaoaop;MeawaqawaPp9op9rp;Kegrarp;Mep;Kep;Kep;Jep;Negwp;Mepxbbn0bbn0bbn0bbn0gqp;KepxFFbbFFbbFFbbFFbbp9oaoawp;Meaqp;Keczp:Rep9qgoarawp;Meaqp;KepxFFbbFFbbFFbbFFbbp9ogrpmwDKYqk8AExm35Ps8E8Fp9qpklzaiaDakp9oaoarpmbezHdiOAlvCXorQLp9qpklbkadaiav;8qbbkk:Iwllue97euo978Jjjjjbca9Rhidnaec98GglTmbcbhvabhoinaocKfpx:ji:1S:ji:1S:ji:1S:ji:1SaopbbbgraoczfgwpbbbgDpmlvorxmPsCXQL358E8Fgqczp:Segkpxibbbibbbibbbibbbp9qgxp;6ep;Negmaxaxp:1ep;7egxaxp;KearaDpmbediwDqkzHOAKY8AEgxczp:Reczp:Sep;6egrarp;Meaxczp:Sep;6egDaDp;Meaqczp:Reczp:Sep;6egqaqp;Mep;Kep;Kep;Lepxbbbbbbbbbbbbbbbbp:4ep;Jep;Mepxbbn0bbn0bbn0bbn0gxp;KepxFFbbFFbbFFbbFFbbgPp9oamaDp;Meaxp;Keczp:Rep9qgDamarp;Meaxp;KeaPp9oamaqp;Meaxp;Keczp:Rep9qgxpmwDKYqk8AExm35Ps8E8Fgrp5eakclp:RegmpEi:T:j83ibawarp5bampEd:T:j83ibaocwfaDaxpmbezHdiOAlvCXorQLgxp5eampEe:T:j83ibaoaxp5bampEb:T:j83ibaocafhoavclfgval6mbkkdnalaeSmbaiczfpxbbbbbbbbbbbbbbbbgmpklbaiampklbaiabalcitfgoaeciGgvcitgw;8qbbdnavTmbaipx:ji:1S:ji:1S:ji:1S:ji:1SaipblbgraipblzgDpmlvorxmPsCXQL358E8Fgqczp:Segkpxibbbibbbibbbibbbp9qgxp;6ep;Negmaxaxp:1ep;7egxaxp;
KearaDpmbediwDqkzHOAKY8AEgxczp:Reczp:Sep;6egrarp;Meaxczp:Sep;6egDaDp;Meaqczp:Reczp:Sep;6egqaqp;Mep;Kep;Kep;Lepxbbbbbbbbbbbbbbbbp:4ep;Jep;Mepxbbn0bbn0bbn0bbn0gxp;KepxFFbbFFbbFFbbFFbbgPp9oamaDp;Meaxp;Keczp:Rep9qgDamarp;Meaxp;KeaPp9oamaqp;Meaxp;Keczp:Rep9qgxpmwDKYqk8AExm35Ps8E8Fgrp5eakclp:RegmpEi:T:j83iKaiarp5bampEd:T:j83izaiaDaxpmbezHdiOAlvCXorQLgxp5eampEe:T:j83iwaiaxp5bampEb:T:j83ibkaoaiaw;8qbbkk;uddiue978Jjjjjbc;ab9Rhidnadcd4ae2glc98GgvTmbcbheabhdinadadpbbbgocwp:Recwp:Sep;6eaocep:SepxbbjFbbjFbbjFbbjFp9opxbbjZbbjZbbjZbbjZp:Uep;Mepkbbadczfhdaeclfgeav6mbkkdnavalSmbaic8WfpxbbbbbbbbbbbbbbbbgopklbaicafaopklbaiczfaopklbaiaopklbaiabavcdtfgdalciGgecdtgv;8qbbdnaeTmbaiaipblbgocwp:Recwp:Sep;6eaocep:SepxbbjFbbjFbbjFbbjFp9opxbbjZbbjZbbjZbbjZp:Uep;Mepklbkadaiav;8qbbkk:CPvdue97euw97eu8Jjjjjbc8W9Rhiaec98Ghldndnadcl9hmbaipxbbbbbbbbbbbbbbbbgvpklbdnalTmbcbhoabhdinadpbbbhradpxbbuJbbuJbbuJbbuJaipblbarcKp:Tep9qgwcep:Seawp9qgDcdp:SeaDp9qgDclp:SeaDp9qgqp;6ep;NegDarcwp:RecKp:SegkarpxFbbbFbbbFbbbFbbbgxp9ogmp:Uep;6ep;Mepxbbn0bbn0bbn0bbn0gPp;Kecwp:RepxbFbbbFbbbFbbbFbbp9oaDamakp:Xearczp:RecKp:Segrp:Uep;6ep;MeaPp;Keaxp9op9qaDamakarp:Uep:Xep;6ep;MeaPp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qaDaqawcep:Rep9oawpxebbbebbbebbbebbbp9op9qp;6ep;MeaPp;KecKp:Rep9qpkbbadczfhdaoclfgoal6mbkkalaeSmeaiavpklaaicafabalcdtfgdaeciGglcdtgo;8qbbaiavpklbdnalTmbaipblahraipxbbuJbbuJbbuJbbuJaipblbarcKp:Tep9qgwcep:Seawp9qgDcdp:SeaDp9qgDclp:SeaDp9qgqp;6ep;NegDarcwp:RecKp:SegkarpxFbbbFbbbFbbbFbbbgxp9ogmp:Uep;6ep;Mepxbbn0bbn0bbn0bbn0gPp;Kecwp:RepxbFbbbFbbbFbbbFbbp9oaDamakp:Xearczp:RecKp:Segrp:Uep;6ep;MeaPp;Keaxp9op9qaDamakarp:Uep:Xep;6ep;MeaPp;Keczp:RepxbbFbbbFbbbFbbbFbp9op9qaDaqawcep:Rep9oawpxebbbebbbebbbebbbp9op9qp;6ep;MeaPp;KecKp:Rep9qpklakadaicafao;8qbbskaipxbbbbbbbbbbbbbbbbgvpklbdnalTmbcbhoabhdinadczfgspxbFu9hbFu9hbFu9hbFu9hadpbbbgDaspbbbgPpmlvorxmPsCXQL358E8Fgmczp:Teaipblbp9qgrcep:Searp9qgwcdp:Seawp9qgwclp:Seawp9qgwcwp:Seawp9qgqp;6ep;NegwaDaPpmbediwDqkzHOAKY8AEgDpxFFbbFFbbFFbbFFbbgPp9ogkaDczp:Segxp:Ueamczp:Reczp:Segmp:Xep;6ep;M
epxbbn0bbn0bbn0bbn0gDp;KeaPp9oawakaxamp:Uep:Xep;6ep;MeaDp;Keczp:Rep9qgxawaqarcep:Rep9oarpxebbbebbbebbbebbbp9op9qp;6ep;MeaDp;Keczp:Reawamakp:Uep;6ep;MeaDp;KeaPp9op9qgrpmwDKYqk8AExm35Ps8E8FpkbbadaxarpmbezHdiOAlvCXorQLpkbbadcafhdaoclfgoal6mbkkalaeSmbaiczfpxbbbbbbbbbbbbbbbbgrpklbaiarpklbaiabalcitfgdaeciGglcitgo;8qbbaiavpkladnalTmbaipxbFu9hbFu9hbFu9hbFu9haipblbgDaipblzgPpmlvorxmPsCXQL358E8Fgmczp:Teaipblap9qgrcep:Searp9qgwcdp:Seawp9qgwclp:Seawp9qgwcwp:Seawp9qgqp;6ep;NegwaDaPpmbediwDqkzHOAKY8AEgDpxFFbbFFbbFFbbFFbbgPp9ogkaDczp:Segxp:Ueamczp:Reczp:Segmp:Xep;6ep;Mepxbbn0bbn0bbn0bbn0gDp;KeaPp9oawakaxamp:Uep:Xep;6ep;MeaDp;Keczp:Rep9qgxawaqarcep:Rep9oarpxebbbebbbebbbebbbp9op9qp;6ep;MeaDp;Keczp:Reawamakp:Uep;6ep;MeaDp;KeaPp9op9qgrpmwDKYqk8AExm35Ps8E8FpklzaiaxarpmbezHdiOAlvCXorQLpklbkadaiao;8qbbkk9teiucbcbydj1jjbgeabcifc98GfgbBdj1jjbdndnabZbcztgd9nmbcuhiabad9RcFFifcz4nbcuSmekaehikaikkkebcjwklz:Dbb'; // embed! simd

@@ -71,2 +71,3 @@ var detector = new Uint8Array([

EXPONENTIAL: 'meshopt_decodeFilterExp',
COLOR: 'meshopt_decodeFilterColor',
};

@@ -73,0 +74,0 @@

@@ -487,2 +487,8 @@ import {

if ( ( value === true ) !== this.worldUnits ) {
this.needsUpdate = true;
}
if ( value === true ) {

@@ -489,0 +495,0 @@

@@ -22,3 +22,3 @@ import {

} from 'three';
import * as fflate from '../libs/fflate.module.js';
import { unzipSync } from '../libs/fflate.module.js';

@@ -147,3 +147,3 @@ const COLOR_SPACE_3MF = SRGBColorSpace;

zip = fflate.unzipSync( new Uint8Array( data ) );
zip = unzipSync( new Uint8Array( data ) );

@@ -150,0 +150,0 @@ } catch ( e ) {

@@ -11,3 +11,3 @@ import {

} from 'three';
import * as fflate from '../libs/fflate.module.js';
import { unzipSync } from '../libs/fflate.module.js';

@@ -109,3 +109,3 @@ /**

zip = fflate.unzipSync( new Uint8Array( data ) );
zip = unzipSync( new Uint8Array( data ) );

@@ -112,0 +112,0 @@ } catch ( e ) {

@@ -97,3 +97,3 @@ import {

let state = { x: 0, y: 0, z: 0, e: 0, f: 0, extruding: false, relative: false };
let state = { x: 0, y: 0, z: 0, e: 0, f: 0, extruding: false, relative: false, extrusionOverride: false, extrusionRelative: false };
const layers = [];

@@ -151,2 +151,10 @@

function absoluteExtrusion( v1, v2 ) {
const relative = state.extrusionOverride ? state.extrusionRelative : state.relative;
return relative ? v1 + v2 : v2;
}
const lines = data.replace( /;.+/g, '' ).split( '\n' );

@@ -177,10 +185,10 @@

const line = {
x: args.x !== undefined ? absolute( state.x, args.x ) : state.x,
y: args.y !== undefined ? absolute( state.y, args.y ) : state.y,
z: args.z !== undefined ? absolute( state.z, args.z ) : state.z,
e: args.e !== undefined ? absolute( state.e, args.e ) : state.e,
f: args.f !== undefined ? absolute( state.f, args.f ) : state.f,
};
const line = Object.assign( {}, state ); // clone state
if ( args.x !== undefined ) line.x = absolute( state.x, args.x );
if ( args.y !== undefined ) line.y = absolute( state.y, args.y );
if ( args.z !== undefined ) line.z = absolute( state.z, args.z );
if ( args.e !== undefined ) line.e = absoluteExtrusion( state.e, args.e );
if ( args.f !== undefined ) line.f = absolute( state.f, args.f );
//Layer change detection is or made by watching Z, it's made by watching when we extrude at a new Z position

@@ -212,2 +220,5 @@ if ( delta( state.e, line.e ) > 0 ) {

// reset M82/M83 extrusion override
state.extrusionOverride = false;
} else if ( cmd === 'G91' ) {

@@ -218,2 +229,17 @@

// reset M82/M83 extrusion override
state.extrusionOverride = false;
} else if ( cmd === 'M82' ) {
//M82: Override G91 and put the E axis into absolute mode independent of the other axes
state.extrusionOverride = true;
state.extrusionRelative = false;
} else if ( cmd === 'M83' ) {
//M83: Overrides G90 and put the E axis into relative mode independent of the other axes
state.extrusionOverride = true;
state.extrusionRelative = true;
} else if ( cmd === 'G92' ) {

@@ -220,0 +246,0 @@

@@ -8,3 +8,3 @@ import {

import { ColladaLoader } from '../loaders/ColladaLoader.js';
import * as fflate from '../libs/fflate.module.js';
import { unzipSync } from '../libs/fflate.module.js';

@@ -123,7 +123,7 @@ /**

const zip = fflate.unzipSync( new Uint8Array( data ) );
const zip = unzipSync( new Uint8Array( data ) );
if ( zip[ 'doc.kml' ] ) {
const xml = new DOMParser().parseFromString( fflate.strFromU8( zip[ 'doc.kml' ] ), 'application/xml' );
const xml = new DOMParser().parseFromString( new TextDecoder().decode( zip[ 'doc.kml' ] ), 'application/xml' );

@@ -135,3 +135,3 @@ const model = xml.querySelector( 'Placemark Model Link href' );

const loader = new ColladaLoader( manager );
return loader.parse( fflate.strFromU8( zip[ model.textContent ] ) );
return loader.parse( new TextDecoder().decode( zip[ model.textContent ] ) );

@@ -151,3 +151,3 @@ }

const loader = new ColladaLoader( manager );
return loader.parse( fflate.strFromU8( zip[ path ] ) );
return loader.parse( new TextDecoder().decode( zip[ path ] ) );

@@ -154,0 +154,0 @@ }

@@ -21,3 +21,2 @@ import {

RGBA_BPTC_Format,
RGBA_S3TC_DXT3_Format,
RGBA_ETC2_EAC_Format,

@@ -256,2 +255,3 @@ R11_EAC_Format,

if ( typeof navigator !== 'undefined' &&
typeof navigator.platform !== 'undefined' && typeof navigator.userAgent !== 'undefined' &&
navigator.platform.indexOf( 'Linux' ) >= 0 && navigator.userAgent.indexOf( 'Firefox' ) >= 0 &&

@@ -998,4 +998,4 @@ this.workerConfig.astcSupported && this.workerConfig.etc2Supported &&

[ VK_FORMAT_BC3_SRGB_BLOCK ]: RGBA_S3TC_DXT3_Format,
[ VK_FORMAT_BC3_UNORM_BLOCK ]: RGBA_S3TC_DXT3_Format,
[ VK_FORMAT_BC3_SRGB_BLOCK ]: RGBA_S3TC_DXT5_Format,
[ VK_FORMAT_BC3_UNORM_BLOCK ]: RGBA_S3TC_DXT5_Format,

@@ -1041,5 +1041,5 @@ [ VK_FORMAT_BC4_SNORM_BLOCK ]: SIGNED_RED_RGTC1_Format,

[ VK_FORMAT_EAC_R11_UNORM_BLOCK ]: UnsignedByteType,
[ VK_FORMAT_EAC_R11_UNORM_BLOCK ]: UnsignedByteType,
[ VK_FORMAT_EAC_R11_SNORM_BLOCK ]: UnsignedByteType,
[ VK_FORMAT_EAC_R11G11_UNORM_BLOCK ]: UnsignedByteType,
[ VK_FORMAT_EAC_R11G11_UNORM_BLOCK ]: UnsignedByteType,
[ VK_FORMAT_EAC_R11G11_SNORM_BLOCK ]: UnsignedByteType,

@@ -1046,0 +1046,0 @@ [ VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT ]: HalfFloatType,

@@ -26,3 +26,4 @@ import {

TextureLoader,
Vector2
Vector2,
Vector3
} from 'three';

@@ -189,4 +190,2 @@

this.applyPivots( finalMeshes );
return finalMeshes;

@@ -209,35 +208,11 @@

mesh.userData.pivot = layer.pivot;
const pivot = layer.pivot;
if ( pivot[ 0 ] !== 0 || pivot[ 1 ] !== 0 || pivot[ 2 ] !== 0 ) {
return mesh;
mesh.pivot = new Vector3( pivot[ 0 ], pivot[ 1 ], pivot[ 2 ] );
}
}
// TODO: may need to be reversed in z to convert LWO to three.js coordinates
applyPivots( meshes ) {
return mesh;
meshes.forEach( function ( mesh ) {
mesh.traverse( function ( child ) {
const pivot = child.userData.pivot;
child.position.x += pivot[ 0 ];
child.position.y += pivot[ 1 ];
child.position.z += pivot[ 2 ];
if ( child.parent ) {
const parentPivot = child.parent.userData.pivot;
child.position.x -= parentPivot[ 0 ];
child.position.y -= parentPivot[ 1 ];
child.position.z -= parentPivot[ 2 ];
}
} );
} );
}

@@ -819,9 +794,2 @@

// TODO: z may need to be reversed to account for coordinate system change
geometry.translate( - layer.pivot[ 0 ], - layer.pivot[ 1 ], - layer.pivot[ 2 ] );
// let userData = geometry.userData;
// geometry = geometry.toNonIndexed()
// geometry.userData = userData;
return geometry;

@@ -828,0 +796,0 @@

@@ -7,3 +7,3 @@ import {

} from 'three';
import * as fflate from '../libs/fflate.module.js';
import { gunzipSync } from '../libs/fflate.module.js';
import { Volume } from '../misc/Volume.js';

@@ -360,3 +360,3 @@

// here we start the unzipping and get a typed Uint8Array back
_data = fflate.gunzipSync( new Uint8Array( _data ) );
_data = gunzipSync( new Uint8Array( _data ) );

@@ -363,0 +363,0 @@ } else if ( headerObject.encoding === 'ascii' || headerObject.encoding === 'text' || headerObject.encoding === 'txt' || headerObject.encoding === 'hex' ) {

@@ -521,3 +521,3 @@ import {

const labelIndex = PCDheader.fields.indexOf( 'label' );
label.push( dataview.getInt32( ( PCDheader.points * offset.label ) + PCDheader.size[ labelIndex ] * i, this.littleEndian ) );
label.push( this._getDataView( dataview, ( PCDheader.points * offset.label ) + PCDheader.size[ labelIndex ] * i, PCDheader.type[ labelIndex ], PCDheader.size[ labelIndex ] ) );

@@ -582,3 +582,4 @@ }

label.push( dataview.getInt32( row + offset.label, this.littleEndian ) );
const labelIndex = PCDheader.fields.indexOf( 'label' );
label.push( this._getDataView( dataview, row + offset.label, PCDheader.type[ labelIndex ], PCDheader.size[ labelIndex ] ) );

@@ -585,0 +586,0 @@ }

@@ -21,7 +21,8 @@ import {

* [JPEG headers]
* [XMP metadata describing the MPF container and *both* SDR and gainmap images]
* [Metadata describing the MPF container and both SDR and gainmap images]
* - XMP metadata (legacy format)
* - ISO 21496-1 metadata (current standard)
* [Optional metadata] [EXIF] [ICC Profile]
* [SDR image]
* [XMP metadata describing only the gainmap image]
* [Gainmap image]
* [Gainmap image with metadata]
*

@@ -49,3 +50,4 @@ * Each section is separated by a 0xFFXX byte followed by a descriptor byte (0xFFE0, 0xFFE1, 0xFFE2.)

* - JPEG headers (required)
* - XMP metadata (required)
* - XMP metadata (legacy format, supported)
* - ISO 21496-1 metadata (current standard, supported)
* - XMP validation (not implemented)

@@ -114,3 +116,3 @@ * - EXIF profile (not implemented)

const xmpMetadata = {
const metadata = {
version: null,

@@ -203,7 +205,7 @@ baseRenditionIsHDR: null,

/* XMP Metadata */
/* APP1: XMP Metadata */
this._parseXMPMetadata(
textDecoder.decode( new Uint8Array( section ) ),
xmpMetadata
metadata
);

@@ -213,5 +215,34 @@

/* Data Sections - MPF / EXIF / ICC Profile */
/* APP2: Data Sections - MPF / ICC Profile / ISO 21496-1 Metadata */
const sectionData = new DataView( section.buffer, section.byteOffset + 2, section.byteLength - 2 );
// Check for ISO 21496-1 namespace: "urn:iso:std:iso:ts:21496:-1\0"
const isoNameSpace = 'urn:iso:std:iso:ts:21496:-1\0';
if ( section.byteLength >= isoNameSpace.length + 2 ) {
let isISO = true;
for ( let j = 0; j < isoNameSpace.length; j ++ ) {
if ( section[ 2 + j ] !== isoNameSpace.charCodeAt( j ) ) {
isISO = false;
break;
}
}
if ( isISO ) {
// Parse ISO 21496-1 metadata
const isoData = section.subarray( 2 + isoNameSpace.length );
this._parseISOMetadata( isoData, metadata );
continue;
}
}
// Check for MPF
const sectionHeader = sectionData.getUint32( 2, false );

@@ -285,3 +316,4 @@

/* Minimal sufficient validation - https://developer.android.com/media/platform/hdr-image-format#signal_of_the_format */
if ( ! xmpMetadata.version ) {
// Version can come from either XMP or ISO metadata
if ( ! metadata.version ) {

@@ -295,3 +327,3 @@ throw new Error( 'THREE.UltraHDRLoader: Not a valid UltraHDR image' );

this._applyGainmapToSDR(
xmpMetadata,
metadata,
primaryImage,

@@ -326,2 +358,122 @@ gainmapImage,

/**
 * Parses ISO 21496-1 gainmap metadata from binary data.
 *
 * Populates the same fields that `_parseXMPMetadata()` fills from XMP
 * (version, baseRenditionIsHDR, gainMapMin/Max, gamma, offsetSDR/HDR,
 * hdrCapacityMin/Max), so the downstream gainmap math can consume either
 * metadata source interchangeably.
 *
 * @private
 * @param {Uint8Array} data - The binary ISO metadata.
 * @param {Object} metadata - The metadata object to populate.
 */
_parseISOMetadata( data, metadata ) {

	// View over the payload only; all multi-byte reads below are big-endian.
	const view = new DataView( data.buffer, data.byteOffset, data.byteLength );

	// Skip minimum version (2 bytes) and writer version (2 bytes)
	let offset = 4;

	// Read flags (1 byte)
	const flags = view.getUint8( offset );
	offset += 1;

	// NOTE(review): bit positions assumed from the ISO 21496-1 flag layout
	// (bit 2 = backward direction, bit 3 = common denominator) — confirm
	// against the published spec.
	const backwardDirection = ( flags & 0x4 ) !== 0;
	const useCommonDenominator = ( flags & 0x8 ) !== 0;

	let gainMapMin, gainMapMax, gamma, offsetSDR, offsetHDR, hdrCapacityMin, hdrCapacityMax;

	if ( useCommonDenominator ) {

		// Compact form: one shared denominator for all rational fields.

		// Read common denominator (4 bytes, unsigned)
		const commonDenominator = view.getUint32( offset, false );
		offset += 4;

		// Read baseHdrHeadroom (4 bytes, unsigned)
		const baseHdrHeadroomN = view.getUint32( offset, false );
		offset += 4;
		hdrCapacityMin = Math.log2( baseHdrHeadroomN / commonDenominator );

		// Read alternateHdrHeadroom (4 bytes, unsigned)
		const alternateHdrHeadroomN = view.getUint32( offset, false );
		offset += 4;
		hdrCapacityMax = Math.log2( alternateHdrHeadroomN / commonDenominator );

		// Read first channel (or only channel) parameters
		const gainMapMinN = view.getInt32( offset, false );
		offset += 4;
		gainMapMin = gainMapMinN / commonDenominator;

		const gainMapMaxN = view.getInt32( offset, false );
		offset += 4;
		gainMapMax = gainMapMaxN / commonDenominator;

		const gammaN = view.getUint32( offset, false );
		offset += 4;
		gamma = gammaN / commonDenominator;

		// SDR/HDR offsets are scaled to the 0-255 range used by the
		// gainmap math (XMP offsets are handled the same way).
		const offsetSDRN = view.getInt32( offset, false );
		offset += 4;
		offsetSDR = ( offsetSDRN / commonDenominator ) * 255.0;

		const offsetHDRN = view.getInt32( offset, false );
		offsetHDR = ( offsetHDRN / commonDenominator ) * 255.0;

	} else {

		// Full form: each rational field carries its own numerator and
		// denominator pair.

		// Read baseHdrHeadroom numerator and denominator
		const baseHdrHeadroomN = view.getUint32( offset, false );
		offset += 4;
		const baseHdrHeadroomD = view.getUint32( offset, false );
		offset += 4;
		hdrCapacityMin = Math.log2( baseHdrHeadroomN / baseHdrHeadroomD );

		// Read alternateHdrHeadroom numerator and denominator
		const alternateHdrHeadroomN = view.getUint32( offset, false );
		offset += 4;
		const alternateHdrHeadroomD = view.getUint32( offset, false );
		offset += 4;
		hdrCapacityMax = Math.log2( alternateHdrHeadroomN / alternateHdrHeadroomD );

		// Read first channel parameters
		const gainMapMinN = view.getInt32( offset, false );
		offset += 4;
		const gainMapMinD = view.getUint32( offset, false );
		offset += 4;
		gainMapMin = gainMapMinN / gainMapMinD;

		const gainMapMaxN = view.getInt32( offset, false );
		offset += 4;
		const gainMapMaxD = view.getUint32( offset, false );
		offset += 4;
		gainMapMax = gainMapMaxN / gainMapMaxD;

		const gammaN = view.getUint32( offset, false );
		offset += 4;
		const gammaD = view.getUint32( offset, false );
		offset += 4;
		gamma = gammaN / gammaD;

		const offsetSDRN = view.getInt32( offset, false );
		offset += 4;
		const offsetSDRD = view.getUint32( offset, false );
		offset += 4;
		offsetSDR = ( offsetSDRN / offsetSDRD ) * 255.0;

		const offsetHDRN = view.getInt32( offset, false );
		offset += 4;
		const offsetHDRD = view.getUint32( offset, false );
		offsetHDR = ( offsetHDRN / offsetHDRD ) * 255.0;

	}

	// Populate the shared metadata object. The hdrCapacity values stay in
	// log2 (stops) form, matching what the XMP path stores.
	metadata.version = '1.0'; // ISO standard doesn't encode version string, use default
	metadata.baseRenditionIsHDR = backwardDirection;
	metadata.gainMapMin = gainMapMin;
	metadata.gainMapMax = gainMapMax;
	metadata.gamma = gamma;
	metadata.offsetSDR = offsetSDR;
	metadata.offsetHDR = offsetHDR;
	metadata.hdrCapacityMin = hdrCapacityMin;
	metadata.hdrCapacityMax = hdrCapacityMax;

}
/**
* Starts loading from the given URL and passes the loaded Ultra HDR texture

@@ -394,3 +546,3 @@ * to the `onLoad()` callback.

_parseXMPMetadata( xmpDataString, xmpMetadata ) {
_parseXMPMetadata( xmpDataString, metadata ) {

@@ -420,24 +572,24 @@ const domParser = new DOMParser();

xmpMetadata.version = gainmapNode.getAttribute( 'hdrgm:Version' );
xmpMetadata.baseRenditionIsHDR =
metadata.version = gainmapNode.getAttribute( 'hdrgm:Version' );
metadata.baseRenditionIsHDR =
gainmapNode.getAttribute( 'hdrgm:BaseRenditionIsHDR' ) === 'True';
xmpMetadata.gainMapMin = parseFloat(
metadata.gainMapMin = parseFloat(
gainmapNode.getAttribute( 'hdrgm:GainMapMin' ) || 0.0
);
xmpMetadata.gainMapMax = parseFloat(
metadata.gainMapMax = parseFloat(
gainmapNode.getAttribute( 'hdrgm:GainMapMax' ) || 1.0
);
xmpMetadata.gamma = parseFloat(
metadata.gamma = parseFloat(
gainmapNode.getAttribute( 'hdrgm:Gamma' ) || 1.0
);
xmpMetadata.offsetSDR = parseFloat(
metadata.offsetSDR = parseFloat(
gainmapNode.getAttribute( 'hdrgm:OffsetSDR' ) / ( 1 / 64 )
);
xmpMetadata.offsetHDR = parseFloat(
metadata.offsetHDR = parseFloat(
gainmapNode.getAttribute( 'hdrgm:OffsetHDR' ) / ( 1 / 64 )
);
xmpMetadata.hdrCapacityMin = parseFloat(
metadata.hdrCapacityMin = parseFloat(
gainmapNode.getAttribute( 'hdrgm:HDRCapacityMin' ) || 0.0
);
xmpMetadata.hdrCapacityMax = parseFloat(
metadata.hdrCapacityMax = parseFloat(
gainmapNode.getAttribute( 'hdrgm:HDRCapacityMax' ) || 1.0

@@ -472,3 +624,3 @@ );

_applyGainmapToSDR(
xmpMetadata,
metadata,
sdrBuffer,

@@ -541,6 +693,6 @@ gainmapBuffer,

/* 1.8 instead of 2 near-perfectly rectifies approximations introduced by precalculated SRGB_TO_LINEAR values */
const maxDisplayBoost = 1.8 ** ( xmpMetadata.hdrCapacityMax * 0.5 );
const maxDisplayBoost = 1.8 ** ( metadata.hdrCapacityMax * 0.5 );
const unclampedWeightFactor =
( Math.log2( maxDisplayBoost ) - xmpMetadata.hdrCapacityMin ) /
( xmpMetadata.hdrCapacityMax - xmpMetadata.hdrCapacityMin );
( Math.log2( maxDisplayBoost ) - metadata.hdrCapacityMin ) /
( metadata.hdrCapacityMax - metadata.hdrCapacityMin );
const weightFactor = Math.min(

@@ -554,8 +706,8 @@ Math.max( unclampedWeightFactor, 0.0 ),

const dataLength = sdrData.length;
const gainMapMin = xmpMetadata.gainMapMin;
const gainMapMax = xmpMetadata.gainMapMax;
const offsetSDR = xmpMetadata.offsetSDR;
const offsetHDR = xmpMetadata.offsetHDR;
const invGamma = 1.0 / xmpMetadata.gamma;
const useGammaOne = xmpMetadata.gamma === 1.0;
const gainMapMin = metadata.gainMapMin;
const gainMapMax = metadata.gainMapMax;
const offsetSDR = metadata.offsetSDR;
const offsetHDR = metadata.offsetHDR;
const invGamma = 1.0 / metadata.gamma;
const useGammaOne = metadata.gamma === 1.0;
const isHalfFloat = this.type === HalfFloatType;

@@ -562,0 +714,0 @@ const toHalfFloat = DataUtils.toHalfFloat;

@@ -1,16 +0,5 @@

import {
BufferAttribute,
BufferGeometry,
ClampToEdgeWrapping,
Group,
NoColorSpace,
Mesh,
MeshPhysicalMaterial,
MirroredRepeatWrapping,
RepeatWrapping,
SRGBColorSpace,
TextureLoader,
Object3D,
Vector2
} from 'three';
// Pre-compiled regex patterns for performance
const DEF_MATCH_REGEX = /^def\s+(?:(\w+)\s+)?"?([^"]+)"?$/;
const VARIANT_STRING_REGEX = /^string\s+(\w+)$/;
const ATTR_MATCH_REGEX = /^(?:uniform\s+)?(\w+(?:\[\])?)\s+(.+)$/;

@@ -21,2 +10,5 @@ class USDAParser {

// Preprocess: strip comments and normalize multiline values
text = this._preprocess( text );
const root = {};

@@ -31,15 +23,19 @@

// Parse USDA file
for ( const line of lines ) {
// console.log( line );
if ( line.includes( '=' ) ) {
const assignment = line.split( '=' );
// Find the first '=' that's not inside quotes
const eqIdx = this._findAssignmentOperator( line );
const lhs = assignment[ 0 ].trim();
const rhs = assignment[ 1 ].trim();
if ( eqIdx === - 1 ) {
string = line.trim();
continue;
}
const lhs = line.slice( 0, eqIdx ).trim();
const rhs = line.slice( eqIdx + 1 ).trim();
if ( rhs.endsWith( '{' ) ) {

@@ -71,2 +67,16 @@

} else if ( line.includes( ':' ) && ! line.includes( '=' ) ) {
// Handle dictionary entries like "0: [(...)...]" for timeSamples
const colonIdx = line.indexOf( ':' );
const key = line.slice( 0, colonIdx ).trim();
const value = line.slice( colonIdx + 1 ).trim();
// Only process if key looks like a number (timeSamples frame)
if ( /^[\d.]+$/.test( key ) ) {
target[ key ] = value;
}
} else if ( line.endsWith( '{' ) ) {

@@ -104,3 +114,3 @@

} else {
} else if ( line.trim() ) {

@@ -117,62 +127,94 @@ string = line.trim();

parse( text, assets ) {
_preprocess( text ) {
const root = this.parseText( text );
// Remove block comments /* ... */
text = this._stripBlockComments( text );
// Build scene graph
// Collapse triple-quoted strings into single lines
text = this._collapseTripleQuotedStrings( text );
function findMeshGeometry( data ) {
// Remove line comments # ... (but preserve #usda header)
// Only remove # comments that aren't at the start of a line or after whitespace
const lines = text.split( '\n' );
const processed = [];
if ( ! data ) return undefined;
let inMultilineValue = false;
let bracketDepth = 0;
let parenDepth = 0;
let accumulated = '';
if ( 'prepend references' in data ) {
for ( let i = 0; i < lines.length; i ++ ) {
const reference = data[ 'prepend references' ];
const parts = reference.split( '@' );
const path = parts[ 1 ].replace( /^.\//, '' );
const id = parts[ 2 ].replace( /^<\//, '' ).replace( />$/, '' );
let line = lines[ i ];
return findGeometry( assets[ path ], id );
// Strip inline comments (but not inside strings)
line = this._stripInlineComment( line );
}
// Track bracket/paren depth for multiline values
const trimmed = line.trim();
return findGeometry( data );
if ( inMultilineValue ) {
}
// Continue accumulating multiline value
accumulated += ' ' + trimmed;
function findGeometry( data, id ) {
// Update depths
for ( const ch of trimmed ) {
if ( ! data ) return undefined;
if ( ch === '[' ) bracketDepth ++;
else if ( ch === ']' ) bracketDepth --;
else if ( ch === '(' && bracketDepth > 0 ) parenDepth ++;
else if ( ch === ')' && bracketDepth > 0 ) parenDepth --;
if ( id !== undefined ) {
}
const def = `def Mesh "${id}"`;
// Check if multiline value is complete
if ( bracketDepth === 0 && parenDepth === 0 ) {
if ( def in data ) {
processed.push( accumulated );
accumulated = '';
inMultilineValue = false;
return data[ def ];
}
}
} else {
for ( const name in data ) {
// Check if this line starts a multiline array value
// Look for patterns like "attr = [" or "attr = @path@[" without closing ]
if ( trimmed.includes( '=' ) ) {
const object = data[ name ];
const eqIdx = this._findAssignmentOperator( trimmed );
if ( name.startsWith( 'def Mesh' ) ) {
if ( eqIdx !== - 1 ) {
return object;
const rhs = trimmed.slice( eqIdx + 1 ).trim();
}
// Count brackets in the value part
let openBrackets = 0;
let closeBrackets = 0;
for ( const ch of rhs ) {
if ( typeof object === 'object' ) {
if ( ch === '[' ) openBrackets ++;
else if ( ch === ']' ) closeBrackets ++;
const geometry = findGeometry( object );
}
if ( geometry ) return geometry;
if ( openBrackets > closeBrackets ) {
// Multiline array detected
inMultilineValue = true;
bracketDepth = openBrackets - closeBrackets;
parenDepth = 0;
accumulated = trimmed;
continue;
}
}
}
processed.push( trimmed );
}

@@ -182,215 +224,197 @@

function buildGeometry( data ) {
return processed.join( '\n' );
if ( ! data ) return undefined;
}
const geometry = new BufferGeometry();
let indices = null;
let counts = null;
let uvs = null;
_stripBlockComments( text ) {
let positionsLength = - 1;
// Iteratively remove /* ... */ comments without regex backtracking
let result = '';
let i = 0;
// index
while ( i < text.length ) {
if ( 'int[] faceVertexIndices' in data ) {
// Check for block comment start
if ( text[ i ] === '/' && i + 1 < text.length && text[ i + 1 ] === '*' ) {
indices = JSON.parse( data[ 'int[] faceVertexIndices' ] );
// Find the closing */
let j = i + 2;
}
while ( j < text.length ) {
// face count
if ( text[ j ] === '*' && j + 1 < text.length && text[ j + 1 ] === '/' ) {
if ( 'int[] faceVertexCounts' in data ) {
// Found closing, skip past it
j += 2;
break;
counts = JSON.parse( data[ 'int[] faceVertexCounts' ] );
indices = toTriangleIndices( indices, counts );
}
}
j ++;
// position
}
if ( 'point3f[] points' in data ) {
// Move past the comment (or to end if unclosed)
i = j;
const positions = JSON.parse( data[ 'point3f[] points' ].replace( /[()]*/g, '' ) );
positionsLength = positions.length;
let attribute = new BufferAttribute( new Float32Array( positions ), 3 );
} else {
if ( indices !== null ) attribute = toFlatBufferAttribute( attribute, indices );
result += text[ i ];
i ++;
geometry.setAttribute( 'position', attribute );
}
// uv
}
if ( 'float2[] primvars:st' in data ) {
return result;
data[ 'texCoord2f[] primvars:st' ] = data[ 'float2[] primvars:st' ];
}
}
_collapseTripleQuotedStrings( text ) {
if ( 'texCoord2f[] primvars:st' in data ) {
let result = '';
let i = 0;
uvs = JSON.parse( data[ 'texCoord2f[] primvars:st' ].replace( /[()]*/g, '' ) );
let attribute = new BufferAttribute( new Float32Array( uvs ), 2 );
while ( i < text.length ) {
if ( indices !== null ) attribute = toFlatBufferAttribute( attribute, indices );
if ( i + 2 < text.length ) {
geometry.setAttribute( 'uv', attribute );
const triple = text.slice( i, i + 3 );
}
if ( triple === '\'\'\'' || triple === '"""' ) {
if ( 'int[] primvars:st:indices' in data && uvs !== null ) {
const quoteChar = triple;
result += quoteChar;
i += 3;
// custom uv index, overwrite uvs with new data
while ( i < text.length ) {
const attribute = new BufferAttribute( new Float32Array( uvs ), 2 );
let indices = JSON.parse( data[ 'int[] primvars:st:indices' ] );
indices = toTriangleIndices( indices, counts );
geometry.setAttribute( 'uv', toFlatBufferAttribute( attribute, indices ) );
if ( i + 2 < text.length && text.slice( i, i + 3 ) === quoteChar ) {
}
result += quoteChar;
i += 3;
break;
// normal
} else {
if ( 'normal3f[] normals' in data ) {
if ( text[ i ] === '\n' ) {
const normals = JSON.parse( data[ 'normal3f[] normals' ].replace( /[()]*/g, '' ) );
let attribute = new BufferAttribute( new Float32Array( normals ), 3 );
result += '\\n';
// normals require a special treatment in USD
} else if ( text[ i ] !== '\r' ) {
if ( normals.length === positionsLength ) {
result += text[ i ];
// raw normal and position data have equal length (like produced by USDZExporter)
}
if ( indices !== null ) attribute = toFlatBufferAttribute( attribute, indices );
i ++;
} else {
}
// unequal length, normals are independent of faceVertexIndices
}
let indices = Array.from( Array( normals.length / 3 ).keys() ); // [ 0, 1, 2, 3 ... ]
indices = toTriangleIndices( indices, counts );
attribute = toFlatBufferAttribute( attribute, indices );
continue;
}
geometry.setAttribute( 'normal', attribute );
} else {
// compute flat vertex normals
geometry.computeVertexNormals();
}
return geometry;
result += text[ i ];
i ++;
}
function toTriangleIndices( rawIndices, counts ) {
return result;
const indices = [];
}
for ( let i = 0; i < counts.length; i ++ ) {
_stripInlineComment( line ) {
const count = counts[ i ];
// Don't strip if line starts with #usda
if ( line.trim().startsWith( '#usda' ) ) return line;
const stride = i * count;
// Find # that's not inside a string
let inString = false;
let stringChar = null;
let escaped = false;
if ( count === 3 ) {
for ( let i = 0; i < line.length; i ++ ) {
const a = rawIndices[ stride + 0 ];
const b = rawIndices[ stride + 1 ];
const c = rawIndices[ stride + 2 ];
const ch = line[ i ];
indices.push( a, b, c );
if ( escaped ) {
} else if ( count === 4 ) {
escaped = false;
continue;
const a = rawIndices[ stride + 0 ];
const b = rawIndices[ stride + 1 ];
const c = rawIndices[ stride + 2 ];
const d = rawIndices[ stride + 3 ];
}
indices.push( a, b, c );
indices.push( a, c, d );
if ( ch === '\\' ) {
} else {
escaped = true;
continue;
console.warn( 'THREE.USDZLoader: Face vertex count of %s unsupported.', count );
}
}
return indices;
if ( ! inString && ( ch === '"' || ch === '\'' ) ) {
}
inString = true;
stringChar = ch;
function toFlatBufferAttribute( attribute, indices ) {
} else if ( inString && ch === stringChar ) {
const array = attribute.array;
const itemSize = attribute.itemSize;
inString = false;
stringChar = null;
const array2 = new array.constructor( indices.length * itemSize );
} else if ( ! inString && ch === '#' ) {
let index = 0, index2 = 0;
// Found comment start outside of string
return line.slice( 0, i ).trimEnd();
for ( let i = 0, l = indices.length; i < l; i ++ ) {
index = indices[ i ] * itemSize;
for ( let j = 0; j < itemSize; j ++ ) {
array2[ index2 ++ ] = array[ index ++ ];
}
}
return new BufferAttribute( array2, itemSize );
}
function findMeshMaterial( data ) {
return line;
if ( ! data ) return undefined;
}
if ( 'rel material:binding' in data ) {
_findAssignmentOperator( line ) {
const reference = data[ 'rel material:binding' ];
const id = reference.replace( /^<\//, '' ).replace( />$/, '' );
const parts = id.split( '/' );
// Find the first '=' that's not inside quotes
let inString = false;
let stringChar = null;
let escaped = false;
return findMaterial( root, ` "${ parts[ 1 ] }"` );
for ( let i = 0; i < line.length; i ++ ) {
}
const ch = line[ i ];
return findMaterial( data );
if ( escaped ) {
}
escaped = false;
continue;
function findMaterial( data, id = '' ) {
}
for ( const name in data ) {
if ( ch === '\\' ) {
const object = data[ name ];
escaped = true;
continue;
if ( name.startsWith( 'def Material' + id ) ) {
}
return object;
if ( ! inString && ( ch === '"' || ch === '\'' ) ) {
}
inString = true;
stringChar = ch;
if ( typeof object === 'object' ) {
} else if ( inString && ch === stringChar ) {
const material = findMaterial( object, id );
inString = false;
stringChar = null;
if ( material ) return material;
} else if ( ! inString && ch === '=' ) {
}
return i;

@@ -401,21 +425,37 @@ }

function setTextureParams( map, data_value ) {
return - 1;
// rotation, scale and translation
}
if ( data_value[ 'float inputs:rotation' ] ) {
/**
* Parse USDA text and return raw spec data in specsByPath format.
* Used by USDComposer for unified scene composition.
*/
parseData( text ) {
map.rotation = parseFloat( data_value[ 'float inputs:rotation' ] );
const root = this.parseText( text );
const specsByPath = {};
}
// Spec types (must match USDCParser/USDComposer)
const SpecType = {
Attribute: 1,
Prim: 6,
Relationship: 8
};
if ( data_value[ 'float2 inputs:scale' ] ) {
// Parse root metadata
const rootFields = {};
if ( '#usda 1.0' in root ) {
map.repeat = new Vector2().fromArray( JSON.parse( '[' + data_value[ 'float2 inputs:scale' ].replace( /[()]*/g, '' ) + ']' ) );
const header = root[ '#usda 1.0' ];
if ( header.upAxis ) {
rootFields.upAxis = header.upAxis.replace( /"/g, '' );
}
if ( data_value[ 'float2 inputs:translation' ] ) {
if ( header.defaultPrim ) {
map.offset = new Vector2().fromArray( JSON.parse( '[' + data_value[ 'float2 inputs:translation' ].replace( /[()]*/g, '' ) + ']' ) );
rootFields.defaultPrim = header.defaultPrim.replace( /"/g, '' );

@@ -426,192 +466,210 @@ }

function buildMaterial( data ) {
specsByPath[ '/' ] = { specType: SpecType.Prim, fields: rootFields };
const material = new MeshPhysicalMaterial();
// Walk the tree and build specsByPath
const walkTree = ( data, parentPath ) => {
if ( data !== undefined ) {
const primChildren = [];
let surface = undefined;
for ( const key in data ) {
const surfaceConnection = data[ 'token outputs:surface.connect' ];
// Skip metadata
if ( key === '#usda 1.0' ) continue;
if ( key === 'variants' ) continue;
if ( surfaceConnection ) {
// Check for primitive definitions
// Matches both 'def TypeName "name"' and 'def "name"' (no type)
const defMatch = key.match( DEF_MATCH_REGEX );
if ( defMatch ) {
const match = /(\w+)\.output/.exec( surfaceConnection );
const typeName = defMatch[ 1 ] || '';
const name = defMatch[ 2 ];
const path = parentPath === '/' ? '/' + name : parentPath + '/' + name;
if ( match ) {
primChildren.push( name );
const surfaceName = match[ 1 ];
surface = data[ `def Shader "${surfaceName}"` ];
const primFields = { typeName };
const primData = data[ key ];
}
// Extract attributes and relationships from this prim
this._extractPrimData( primData, path, primFields, specsByPath, SpecType );
}
specsByPath[ path ] = { specType: SpecType.Prim, fields: primFields };
if ( surface !== undefined ) {
// Recurse into children
walkTree( primData, path );
if ( 'color3f inputs:diffuseColor.connect' in surface ) {
}
const path = surface[ 'color3f inputs:diffuseColor.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
}
material.map = buildTexture( sampler );
material.map.colorSpace = SRGBColorSpace;
// Add primChildren to parent spec
if ( primChildren.length > 0 && specsByPath[ parentPath ] ) {
if ( 'def Shader "Transform2d_diffuse"' in data ) {
specsByPath[ parentPath ].fields.primChildren = primChildren;
setTextureParams( material.map, data[ 'def Shader "Transform2d_diffuse"' ] );
}
}
};
} else if ( 'color3f inputs:diffuseColor' in surface ) {
walkTree( root, '/' );
const color = surface[ 'color3f inputs:diffuseColor' ].replace( /[()]*/g, '' );
material.color.fromArray( JSON.parse( '[' + color + ']' ) );
return { specsByPath };
}
}
if ( 'color3f inputs:emissiveColor.connect' in surface ) {
_extractPrimData( data, path, primFields, specsByPath, SpecType ) {
const path = surface[ 'color3f inputs:emissiveColor.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
if ( ! data || typeof data !== 'object' ) return;
material.emissiveMap = buildTexture( sampler );
material.emissiveMap.colorSpace = SRGBColorSpace;
material.emissive.set( 0xffffff );
for ( const key in data ) {
if ( 'def Shader "Transform2d_emissive"' in data ) {
// Skip nested defs (handled by walkTree)
if ( key.startsWith( 'def ' ) ) continue;
setTextureParams( material.emissiveMap, data[ 'def Shader "Transform2d_emissive"' ] );
if ( key === 'prepend references' ) {
}
primFields.references = [ data[ key ] ];
continue;
} else if ( 'color3f inputs:emissiveColor' in surface ) {
}
const color = surface[ 'color3f inputs:emissiveColor' ].replace( /[()]*/g, '' );
material.emissive.fromArray( JSON.parse( '[' + color + ']' ) );
if ( key === 'payload' ) {
}
primFields.payload = data[ key ];
continue;
if ( 'normal3f inputs:normal.connect' in surface ) {
}
const path = surface[ 'normal3f inputs:normal.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
if ( key === 'variants' ) {
material.normalMap = buildTexture( sampler );
material.normalMap.colorSpace = NoColorSpace;
const variantSelection = {};
const variants = data[ key ];
if ( 'def Shader "Transform2d_normal"' in data ) {
for ( const vKey in variants ) {
setTextureParams( material.normalMap, data[ 'def Shader "Transform2d_normal"' ] );
const match = vKey.match( VARIANT_STRING_REGEX );
if ( match ) {
}
const variantSetName = match[ 1 ];
const variantValue = variants[ vKey ].replace( /"/g, '' );
variantSelection[ variantSetName ] = variantValue;
}
if ( 'float inputs:roughness.connect' in surface ) {
}
const path = surface[ 'float inputs:roughness.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
if ( Object.keys( variantSelection ).length > 0 ) {
material.roughness = 1.0;
material.roughnessMap = buildTexture( sampler );
material.roughnessMap.colorSpace = NoColorSpace;
primFields.variantSelection = variantSelection;
if ( 'def Shader "Transform2d_roughness"' in data ) {
}
setTextureParams( material.roughnessMap, data[ 'def Shader "Transform2d_roughness"' ] );
continue;
}
}
} else if ( 'float inputs:roughness' in surface ) {
if ( key.startsWith( 'rel ' ) ) {
material.roughness = parseFloat( surface[ 'float inputs:roughness' ] );
const relName = key.slice( 4 );
const relPath = path + '.' + relName;
const target = data[ key ].replace( /[<>]/g, '' );
specsByPath[ relPath ] = {
specType: SpecType.Relationship,
fields: { targetPaths: [ target ] }
};
continue;
}
}
if ( 'float inputs:metallic.connect' in surface ) {
// Handle xformOpOrder
if ( key.includes( 'xformOpOrder' ) ) {
const path = surface[ 'float inputs:metallic.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
const ops = data[ key ]
.replace( /[\[\]]/g, '' )
.split( ',' )
.map( s => s.trim().replace( /"/g, '' ) );
primFields.xformOpOrder = ops;
continue;
material.metalness = 1.0;
material.metalnessMap = buildTexture( sampler );
material.metalnessMap.colorSpace = NoColorSpace;
}
if ( 'def Shader "Transform2d_metallic"' in data ) {
// Handle typed attributes
// Format: [qualifier] type attrName (e.g., "uniform token[] joints", "float3 position")
const attrMatch = key.match( ATTR_MATCH_REGEX );
if ( attrMatch ) {
setTextureParams( material.metalnessMap, data[ 'def Shader "Transform2d_metallic"' ] );
const valueType = attrMatch[ 1 ];
const attrName = attrMatch[ 2 ];
const rawValue = data[ key ];
}
// Handle connection attributes (e.g., "inputs:normal.connect = </path>")
if ( attrName.endsWith( '.connect' ) ) {
} else if ( 'float inputs:metallic' in surface ) {
const baseAttrName = attrName.slice( 0, - 8 ); // Remove '.connect'
const attrPath = path + '.' + baseAttrName;
material.metalness = parseFloat( surface[ 'float inputs:metallic' ] );
// Parse connection path - extract from <path> format
let connPath = String( rawValue ).trim();
if ( connPath.startsWith( '<' ) ) connPath = connPath.slice( 1 );
if ( connPath.endsWith( '>' ) ) connPath = connPath.slice( 0, - 1 );
}
// Get or create the attribute spec
if ( ! specsByPath[ attrPath ] ) {
if ( 'float inputs:clearcoat.connect' in surface ) {
specsByPath[ attrPath ] = {
specType: SpecType.Attribute,
fields: { typeName: valueType }
};
const path = surface[ 'float inputs:clearcoat.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.clearcoat = 1.0;
material.clearcoatMap = buildTexture( sampler );
material.clearcoatMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_clearcoat"' in data ) {
setTextureParams( material.clearcoatMap, data[ 'def Shader "Transform2d_clearcoat"' ] );
}
} else if ( 'float inputs:clearcoat' in surface ) {
material.clearcoat = parseFloat( surface[ 'float inputs:clearcoat' ] );
}
if ( 'float inputs:clearcoatRoughness.connect' in surface ) {
specsByPath[ attrPath ].fields.connectionPaths = [ connPath ];
continue;
const path = surface[ 'float inputs:clearcoatRoughness.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
}
material.clearcoatRoughness = 1.0;
material.clearcoatRoughnessMap = buildTexture( sampler );
material.clearcoatRoughnessMap.colorSpace = NoColorSpace;
// Handle timeSamples attributes specially
if ( attrName.endsWith( '.timeSamples' ) && typeof rawValue === 'object' ) {
if ( 'def Shader "Transform2d_clearcoatRoughness"' in data ) {
const baseAttrName = attrName.slice( 0, - 12 ); // Remove '.timeSamples'
const attrPath = path + '.' + baseAttrName;
setTextureParams( material.clearcoatRoughnessMap, data[ 'def Shader "Transform2d_clearcoatRoughness"' ] );
// Parse timeSamples dictionary into times and values arrays
const times = [];
const values = [];
}
for ( const frameKey in rawValue ) {
} else if ( 'float inputs:clearcoatRoughness' in surface ) {
const frame = parseFloat( frameKey );
if ( isNaN( frame ) ) continue;
material.clearcoatRoughness = parseFloat( surface[ 'float inputs:clearcoatRoughness' ] );
times.push( frame );
values.push( this._parseAttributeValue( valueType, rawValue[ frameKey ] ) );
}
if ( 'float inputs:ior' in surface ) {
// Sort by time
const sorted = times.map( ( t, i ) => ( { t, v: values[ i ] } ) ).sort( ( a, b ) => a.t - b.t );
material.ior = parseFloat( surface[ 'float inputs:ior' ] );
specsByPath[ attrPath ] = {
specType: SpecType.Attribute,
fields: {
timeSamples: { times: sorted.map( s => s.t ), values: sorted.map( s => s.v ) },
typeName: valueType
}
};
}
} else {
if ( 'float inputs:occlusion.connect' in surface ) {
// Parse value based on type
const parsedValue = this._parseAttributeValue( valueType, rawValue );
const path = surface[ 'float inputs:occlusion.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
// Store as attribute spec
const attrPath = path + '.' + attrName;
specsByPath[ attrPath ] = {
specType: SpecType.Attribute,
fields: { default: parsedValue, typeName: valueType }
};
material.aoMap = buildTexture( sampler );
material.aoMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_occlusion"' in data ) {
setTextureParams( material.aoMap, data[ 'def Shader "Transform2d_occlusion"' ] );
}
}
}

@@ -621,125 +679,148 @@

return material;
}
function findTexture( data, id ) {
}
for ( const name in data ) {
_parseAttributeValue( valueType, rawValue ) {
const object = data[ name ];
if ( rawValue === undefined || rawValue === null ) return undefined;
if ( name.startsWith( `def Shader "${ id }"` ) ) {
const str = String( rawValue ).trim();
return object;
// Array types
if ( valueType.endsWith( '[]' ) ) {
}
// Parse JSON-like arrays
try {
if ( typeof object === 'object' ) {
// Handle arrays with parentheses like [(1,2,3), (4,5,6)]
// Remove trailing comma (valid in USDA but not JSON)
let cleaned = str.replace( /\(/g, '[' ).replace( /\)/g, ']' );
if ( cleaned.endsWith( ',' ) ) cleaned = cleaned.slice( 0, - 1 );
const parsed = JSON.parse( cleaned );
const texture = findTexture( object, id );
// Flatten nested arrays for types like point3f[]
if ( Array.isArray( parsed ) && Array.isArray( parsed[ 0 ] ) ) {
if ( texture ) return texture;
return parsed.flat();
}
}
return parsed;
}
} catch ( e ) {
function buildTexture( data ) {
// Try simple array parsing
const cleaned = str.replace( /[\[\]]/g, '' );
return cleaned.split( ',' ).map( s => {
if ( 'asset inputs:file' in data ) {
const trimmed = s.trim();
const num = parseFloat( trimmed );
return isNaN( num ) ? trimmed.replace( /"/g, '' ) : num;
const path = data[ 'asset inputs:file' ].replace( /@*/g, '' ).trim();
} );
const loader = new TextureLoader();
}
const texture = loader.load( assets[ path ] );
}
const map = {
'"clamp"': ClampToEdgeWrapping,
'"mirror"': MirroredRepeatWrapping,
'"repeat"': RepeatWrapping
};
// Vector types (double3, float3, point3f, etc.)
if ( valueType.includes( '3' ) || valueType.includes( '2' ) || valueType.includes( '4' ) ) {
if ( 'token inputs:wrapS' in data ) {
// Parse (x, y, z) format
const cleaned = str.replace( /[()]/g, '' );
const values = cleaned.split( ',' ).map( s => parseFloat( s.trim() ) );
return values;
texture.wrapS = map[ data[ 'token inputs:wrapS' ] ];
}
}
// Quaternion types (quatf, quatd, quath)
// Text format is (w, x, y, z), convert to (x, y, z, w)
if ( valueType.startsWith( 'quat' ) ) {
if ( 'token inputs:wrapT' in data ) {
const cleaned = str.replace( /[()]/g, '' );
const values = cleaned.split( ',' ).map( s => parseFloat( s.trim() ) );
return [ values[ 1 ], values[ 2 ], values[ 3 ], values[ 0 ] ];
texture.wrapT = map[ data[ 'token inputs:wrapT' ] ];
}
}
// Matrix types
if ( valueType.includes( 'matrix' ) ) {
return texture;
const cleaned = str.replace( /[()]/g, '' );
const values = cleaned.split( ',' ).map( s => parseFloat( s.trim() ) );
return values;
}
return null;
}
function buildObject( data ) {
// Scalar numeric types
if ( valueType === 'float' || valueType === 'double' || valueType === 'int' ) {
const geometry = buildGeometry( findMeshGeometry( data ) );
const material = buildMaterial( findMeshMaterial( data ) );
return parseFloat( str );
const mesh = geometry ? new Mesh( geometry, material ) : new Object3D();
}
if ( 'matrix4d xformOp:transform' in data ) {
// String/token types
if ( valueType === 'string' || valueType === 'token' ) {
const array = JSON.parse( '[' + data[ 'matrix4d xformOp:transform' ].replace( /[()]*/g, '' ) + ']' );
return this._parseString( str );
mesh.matrix.fromArray( array );
mesh.matrix.decompose( mesh.position, mesh.quaternion, mesh.scale );
}
}
// Asset path
if ( valueType === 'asset' ) {
return mesh;
return str.replace( /@/g, '' ).replace( /"/g, '' );
}
function buildHierarchy( data, group ) {
// Default: return as string with quotes removed
return this._parseString( str );
for ( const name in data ) {
}
if ( name.startsWith( 'def Scope' ) ) {
_parseString( str ) {
buildHierarchy( data[ name ], group );
// Remove surrounding quotes
if ( ( str.startsWith( '"' ) && str.endsWith( '"' ) ) ||
( str.startsWith( '\'' ) && str.endsWith( '\'' ) ) ) {
} else if ( name.startsWith( 'def Xform' ) ) {
str = str.slice( 1, - 1 );
const mesh = buildObject( data[ name ] );
}
if ( /def Xform "(\w+)"/.test( name ) ) {
// Handle escape sequences
let result = '';
let i = 0;
mesh.name = /def Xform "(\w+)"/.exec( name )[ 1 ];
while ( i < str.length ) {
}
if ( str[ i ] === '\\' && i + 1 < str.length ) {
group.add( mesh );
const next = str[ i + 1 ];
buildHierarchy( data[ name ], mesh );
switch ( next ) {
case 'n': result += '\n'; break;
case 't': result += '\t'; break;
case 'r': result += '\r'; break;
case '\\': result += '\\'; break;
case '"': result += '"'; break;
case '\'': result += '\''; break;
default: result += next; break;
}
}
i += 2;
}
} else {
function buildGroup( data ) {
result += str[ i ];
i ++;
const group = new Group();
}
buildHierarchy( data, group );
return group;
}
return buildGroup( root );
return result;

@@ -746,0 +827,0 @@ }

@@ -1,17 +0,1852 @@

import {
Group
} from 'three';
// Shared UTF-8 decoder used when extracting tokens and strings from the crate file.
const textDecoder = new TextDecoder();
// Pre-computed half-float exponent lookup table for fast conversion
// Math.pow(2, exp - 15) for exp = 0..31
const HALF_EXPONENT_TABLE = new Float32Array( 32 );
for ( let i = 0; i < 32; i ++ ) {
HALF_EXPONENT_TABLE[ i ] = Math.pow( 2, i - 15 );
}
// Pre-computed constant for denormalized half-floats: 2^-14
// NOTE(review): presumably consumed by a half-float conversion helper
// (e.g. _halfToFloat) defined outside this chunk — confirm.
const HALF_DENORM_SCALE = Math.pow( 2, - 14 );
// Type enum values from crateDataTypes.h
// Identifies the value type packed into a ValueRep (bits 48-55 of the rep).
const TypeEnum = {
Invalid: 0,
Bool: 1,
UChar: 2,
Int: 3,
UInt: 4,
Int64: 5,
UInt64: 6,
Half: 7,
Float: 8,
Double: 9,
String: 10,
Token: 11,
AssetPath: 12,
Matrix2d: 13,
Matrix3d: 14,
Matrix4d: 15,
Quatd: 16,
Quatf: 17,
Quath: 18,
Vec2d: 19,
Vec2f: 20,
Vec2h: 21,
Vec2i: 22,
Vec3d: 23,
Vec3f: 24,
Vec3h: 25,
Vec3i: 26,
Vec4d: 27,
Vec4f: 28,
Vec4h: 29,
Vec4i: 30,
Dictionary: 31,
TokenListOp: 32,
StringListOp: 33,
PathListOp: 34,
ReferenceListOp: 35,
IntListOp: 36,
Int64ListOp: 37,
UIntListOp: 38,
UInt64ListOp: 39,
PathVector: 40,
TokenVector: 41,
Specifier: 42,
Permission: 43,
Variability: 44,
VariantSelectionMap: 45,
TimeSamples: 46,
Payload: 47,
DoubleVector: 48,
LayerOffsetVector: 49,
StringVector: 50,
ValueBlock: 51,
Value: 52,
UnregisteredValue: 53,
UnregisteredValueListOp: 54,
PayloadListOp: 55,
TimeCode: 56,
PathExpression: 57,
Relocates: 58,
Spline: 59,
AnimationBlock: 60
};
// Field set terminator marker
// Sentinel index ending a run of field indices (not referenced in this chunk).
const FIELD_SET_TERMINATOR = 0xFFFFFFFF;
// Float compression type codes
const FLOAT_COMPRESSION_INT = 0x69; // 'i' - compressed as integers
const FLOAT_COMPRESSION_LUT = 0x74; // 't' - lookup table
// ============================================================================
// LZ4 Decompression (minimal implementation for USD)
// Based on LZ4 block format specification
// ============================================================================
/**
 * Decodes one raw LZ4 block (a run of literal/match sequences) from
 * input[inputOffset, inputEnd) into output[outputOffset, outputEnd).
 * Malformed data is handled by bailing out early instead of throwing.
 *
 * @param {Uint8Array} input - Compressed bytes.
 * @param {number} inputOffset - First compressed byte to read.
 * @param {number} inputEnd - One past the last compressed byte.
 * @param {Uint8Array} output - Destination buffer.
 * @param {number} outputOffset - First output byte to write.
 * @param {number} outputEnd - One past the last writable output byte.
 * @returns {number} The output offset just past the last byte written.
 */
function lz4DecompressBlock( input, inputOffset, inputEnd, output, outputOffset, outputEnd ) {

	let src = inputOffset;
	let dst = outputOffset;

	while ( src < inputEnd ) {

		// Each sequence begins with a token: high nibble = literal count,
		// low nibble = match length minus 4.
		const token = input[ src ++ ];
		if ( src > inputEnd ) break;

		// Literal run length, extended by 255-valued continuation bytes.
		let litLen = token >> 4;
		if ( litLen === 15 ) {

			let ext;
			do {

				if ( src >= inputEnd ) break;
				ext = input[ src ++ ];
				litLen += ext;

			} while ( ext === 255 && src < inputEnd );

		}

		// Copy literal bytes straight through, clamped to both buffers.
		if ( litLen > 0 ) {

			if ( src + litLen > inputEnd ) litLen = inputEnd - src;

			for ( let i = 0; i < litLen; i ++ ) {

				if ( dst >= outputEnd ) break;
				output[ dst ++ ] = input[ src ++ ];

			}

		}

		// The final sequence of a block carries no match part.
		if ( src >= inputEnd ) break;
		if ( src + 2 > inputEnd ) break;

		// 16-bit little-endian back-reference distance; zero is invalid.
		const distance = input[ src ++ ] | ( input[ src ++ ] << 8 );
		if ( distance === 0 ) break;

		// Match length (minimum 4), same continuation-byte extension.
		let matchLen = ( token & 0x0F ) + 4;
		if ( matchLen === 19 ) {

			let ext;
			do {

				if ( src >= inputEnd ) break;
				ext = input[ src ++ ];
				matchLen += ext;

			} while ( ext === 255 && src < inputEnd );

		}

		// Copy the match one byte at a time so overlapping copies
		// (distance < length) replicate data as LZ4 requires.
		const from = dst - distance;
		if ( from < 0 ) break;

		for ( let i = 0; i < matchLen; i ++ ) {

			if ( dst >= outputEnd ) break;
			output[ dst ++ ] = output[ from + i ];

		}

	}

	return dst;

}
/**
 * Inflates a TfFastCompression (OpenUSD) buffer, which wraps raw LZ4
 * blocks with a one-byte chunk-count header:
 *   byte 0 === 0 -> a single LZ4 block follows.
 *   byte 0 === n -> n little-endian uint32 compressed chunk sizes,
 *                   then n LZ4 blocks, each inflating to at most 64 KiB.
 *
 * @param {Uint8Array} input - TfFastCompression payload.
 * @param {number} uncompressedSize - Expected total decompressed size.
 * @returns {Uint8Array} Buffer of exactly uncompressedSize bytes.
 */
function decompressLZ4( input, uncompressedSize ) {

	const output = new Uint8Array( uncompressedSize );
	const chunkCount = input[ 0 ];

	if ( chunkCount === 0 ) {

		// Single-chunk layout: everything after the header byte is one block.
		lz4DecompressBlock( input, 1, input.length, output, 0, uncompressedSize );
		return output;

	}

	// Multi-chunk layout: each chunk fills a fixed 64 KiB output window
	// (the final chunk fills whatever remains).
	const CHUNK_SIZE = 65536;

	// Gather the little-endian uint32 compressed size of every chunk.
	const sizes = [];
	let cursor = 1;

	for ( let i = 0; i < chunkCount; i ++ ) {

		sizes.push( ( input[ cursor ] |
			( input[ cursor + 1 ] << 8 ) |
			( input[ cursor + 2 ] << 16 ) |
			( input[ cursor + 3 ] << 24 ) ) >>> 0 );
		cursor += 4;

	}

	// Inflate chunk by chunk into consecutive output windows.
	let srcPos = cursor;
	let dstPos = 0;

	for ( const compressedSize of sizes ) {

		const window = Math.min( CHUNK_SIZE, uncompressedSize - dstPos );
		lz4DecompressBlock( input, srcPos, srcPos + compressedSize, output, dstPos, dstPos + window );
		srcPos += compressedSize;
		dstPos += window;

	}

	return output;

}
// ============================================================================
// Integer Decompression (USD-specific delta + variable-width encoding)
// ============================================================================
/**
 * Inflates a USD compressed-integer stream: LZ4 decompression followed
 * by delta/variable-width decoding.
 *
 * @param {ArrayBuffer} compressedData - LZ4-wrapped encoded integers.
 * @param {number} numInts - Expected number of decoded integers.
 * @returns {Int32Array} The decoded values.
 */
function decompressIntegers32( compressedData, numInts ) {

	// Worst-case encoded size: 4-byte common value + 2 code bits per int
	// + up to 4 delta bytes per int.
	const workingSize = numInts * 4 + ( ( numInts * 2 + 7 ) >> 3 ) + 4;
	const decoded = decompressLZ4( new Uint8Array( compressedData ), workingSize );

	return decodeIntegers32( decoded, numInts );

}
/**
 * Decodes USD's delta-encoded integer stream into an Int32Array.
 * Layout: int32 common delta, then ceil(numInts * 2 / 8) bytes of 2-bit
 * codes (four per byte, lowest bits first), then the variable-width
 * deltas. Code 0 uses the common delta; codes 1/2/3 read an 8/16/32-bit
 * signed delta. Values are a running sum of deltas.
 *
 * @param {Uint8Array} data - Decoded (post-LZ4) byte stream.
 * @param {number} numInts - Number of integers to produce.
 * @returns {Int32Array} The reconstructed values.
 */
function decodeIntegers32( data, numInts ) {

	const view = new DataView( data.buffer, data.byteOffset, data.byteLength );

	// The shared delta applied for every code-0 entry.
	const commonDelta = view.getInt32( 0, true );

	const codeBytes = ( numInts * 2 + 7 ) >> 3;
	const result = new Int32Array( numInts );

	let codePos = 4;
	let deltaPos = 4 + codeBytes;
	let running = 0;
	let written = 0;

	while ( written < numInts ) {

		const packed = data[ codePos ++ ];

		// Four 2-bit codes per byte, least-significant bits first.
		for ( let slot = 0; slot < 4 && written < numInts; slot ++ ) {

			const code = ( packed >> ( slot * 2 ) ) & 3;

			let delta;
			if ( code === 0 ) {

				delta = commonDelta;

			} else if ( code === 1 ) {

				delta = view.getInt8( deltaPos );
				deltaPos += 1;

			} else if ( code === 2 ) {

				delta = view.getInt16( deltaPos, true );
				deltaPos += 2;

			} else {

				delta = view.getInt32( deltaPos, true );
				deltaPos += 4;

			}

			running += delta;
			result[ written ++ ] = running;

		}

	}

	return result;

}
// ============================================================================
// Binary Reader Helper
// ============================================================================
/**
 * Little-endian cursor-based reader over an ArrayBuffer. Every read
 * advances the internal offset by the number of bytes consumed.
 */
class BinaryReader {

	constructor( buffer ) {

		this.buffer = buffer;
		this.view = new DataView( buffer );
		this.offset = 0;

	}

	// Moves the cursor to an absolute byte offset.
	seek( offset ) {

		this.offset = offset;

	}

	// Returns the current absolute byte offset.
	tell() {

		return this.offset;

	}

	readUint8() {

		return this.view.getUint8( this.offset ++ );

	}

	readInt8() {

		return this.view.getInt8( this.offset ++ );

	}

	readUint16() {

		const v = this.view.getUint16( this.offset, true );
		this.offset += 2;
		return v;

	}

	readInt16() {

		const v = this.view.getInt16( this.offset, true );
		this.offset += 2;
		return v;

	}

	readUint32() {

		const v = this.view.getUint32( this.offset, true );
		this.offset += 4;
		return v;

	}

	readInt32() {

		const v = this.view.getInt32( this.offset, true );
		this.offset += 4;
		return v;

	}

	// 64-bit reads combine two 32-bit halves; exact while the value stays
	// within Number's 53-bit integer range (true for file offsets/sizes).
	readUint64() {

		const lo = this.view.getUint32( this.offset, true );
		const hi = this.view.getUint32( this.offset + 4, true );
		this.offset += 8;
		return hi * 0x100000000 + lo;

	}

	readInt64() {

		const lo = this.view.getUint32( this.offset, true );
		const hi = this.view.getInt32( this.offset + 4, true );
		this.offset += 8;
		return hi * 0x100000000 + lo;

	}

	readFloat32() {

		const v = this.view.getFloat32( this.offset, true );
		this.offset += 4;
		return v;

	}

	readFloat64() {

		const v = this.view.getFloat64( this.offset, true );
		this.offset += 8;
		return v;

	}

	// Returns a view (not a copy) of the next `length` bytes.
	readBytes( length ) {

		const slice = new Uint8Array( this.buffer, this.offset, length );
		this.offset += length;
		return slice;

	}

	// Reads a fixed-size field and decodes it as a NUL-terminated UTF-8 string.
	readString( length ) {

		const bytes = this.readBytes( length );
		let terminator = 0;
		while ( terminator < length && bytes[ terminator ] !== 0 ) terminator ++;
		return textDecoder.decode( bytes.subarray( 0, terminator ) );

	}

}
// ============================================================================
// ValueRep - 64-bit packed value representation
// ============================================================================
/**
 * Unpacked view of a USDC 64-bit value representation, held as two
 * 32-bit halves. The upper half carries flag bits and the type enum;
 * the low 48 bits form either an inline value or a file offset.
 */
class ValueRep {

	constructor( lo, hi ) {

		// Lower / upper 32 bits of the packed representation.
		this.lo = lo;
		this.hi = hi;

	}

	// Bit 63: the value is an array.
	get isArray() {

		return ( this.hi & 0x80000000 ) !== 0;

	}

	// Bit 62: the value is stored inline in the payload.
	get isInlined() {

		return ( this.hi & 0x40000000 ) !== 0;

	}

	// Bit 61: the payload points at compressed data.
	get isCompressed() {

		return ( this.hi & 0x20000000 ) !== 0;

	}

	// Bits 48-55: the TypeEnum code.
	get typeEnum() {

		return ( this.hi >> 16 ) & 0xFF;

	}

	// Bits 0-47: inline value or absolute file offset. 48-bit integers
	// are exactly representable as doubles, so no precision is lost.
	get payload() {

		return ( this.hi & 0xFFFF ) * 0x100000000 + this.lo;

	}

	// Inline scalars live entirely in the lower 32 bits.
	getInlinedValue() {

		return this.lo;

	}

}
// ============================================================================
// USDC Parser
// ============================================================================
class USDCParser {
parse( /* buffer */ ) {
/**
* Parse USDC file and return raw spec data without building Three.js scene.
* Used by USDComposer for unified scene composition.
*/
parseData( buffer ) {
// TODO
this.buffer = buffer instanceof ArrayBuffer ? buffer : buffer.buffer;
this.reader = new BinaryReader( this.buffer );
this.version = { major: 0, minor: 0, patch: 0 };
return new Group();
this._conversionBuffer = new ArrayBuffer( 4 );
this._conversionView = new DataView( this._conversionBuffer );
this._readBootstrap();
this._readTOC();
this._readTokens();
this._readStrings();
this._readFields();
this._readFieldSets();
this._readPaths();
this._readSpecs();
// Build specsByPath without building scene
this.specsByPath = {};
for ( const spec of this.specs ) {
const path = this.paths[ spec.pathIndex ];
if ( ! path ) continue;
const fields = this._getFieldsForSpec( spec );
this.specsByPath[ path ] = { specType: spec.specType, fields };
}
return { specsByPath: this.specsByPath };
}
// Reads the fixed bootstrap header at the start of the file: the 8-byte
// magic "PXR-USDC", the file-format version (major/minor/patch plus
// 5 reserved bytes), then the uint64 absolute offset of the table of
// contents. Populates this.version and this.tocOffset; throws on a bad magic.
_readBootstrap() {
const reader = this.reader;
reader.seek( 0 );
// Read magic "PXR-USDC"
const magic = reader.readString( 8 );
if ( magic !== 'PXR-USDC' ) {
throw new Error( 'Not a valid USDC file' );
}
// Read version
this.version.major = reader.readUint8();
this.version.minor = reader.readUint8();
this.version.patch = reader.readUint8();
reader.readBytes( 5 ); // Skip remaining version bytes
// Read TOC offset
this.tocOffset = reader.readUint64();
// Skip reserved bytes (rest of 128-byte header)
// Already at offset 24, skip to end of bootstrap (88 bytes total for bootstrap struct)
}
_readTOC() {
const reader = this.reader;
reader.seek( this.tocOffset );
// Read number of sections
const numSections = reader.readUint64();
this.sections = {};
for ( let i = 0; i < numSections; i ++ ) {
const name = reader.readString( 16 );
const start = reader.readUint64();
const size = reader.readUint64();
this.sections[ name ] = { start, size };
}
}
_readTokens() {
const section = this.sections[ 'TOKENS' ];
if ( ! section ) return;
const reader = this.reader;
reader.seek( section.start );
const numTokens = reader.readUint64();
this.tokens = [];
if ( this.version.major === 0 && this.version.minor < 4 ) {
// Uncompressed tokens (version < 0.4.0)
const tokensNumBytes = reader.readUint64();
const tokensData = reader.readBytes( tokensNumBytes );
let strStart = 0;
for ( let i = 0; i < numTokens; i ++ ) {
let strEnd = strStart;
while ( strEnd < tokensData.length && tokensData[ strEnd ] !== 0 ) strEnd ++;
this.tokens.push( textDecoder.decode( tokensData.subarray( strStart, strEnd ) ) );
strStart = strEnd + 1;
}
} else {
// Compressed tokens (version >= 0.4.0)
const uncompressedSize = reader.readUint64();
const compressedSize = reader.readUint64();
const compressedData = reader.readBytes( compressedSize );
const tokensData = decompressLZ4( compressedData, uncompressedSize );
let strStart = 0;
for ( let i = 0; i < numTokens; i ++ ) {
let strEnd = strStart;
while ( strEnd < tokensData.length && tokensData[ strEnd ] !== 0 ) strEnd ++;
this.tokens.push( textDecoder.decode( tokensData.subarray( strStart, strEnd ) ) );
strStart = strEnd + 1;
}
}
}
_readStrings() {
const section = this.sections[ 'STRINGS' ];
if ( ! section ) {
this.strings = [];
return;
}
const reader = this.reader;
reader.seek( section.start );
// Strings section has an 8-byte count prefix, but string indices stored
// elsewhere in the file are relative to the section start (not the data).
// So we read the entire section as uint32 values to maintain correct indexing.
const numStrings = Math.floor( section.size / 4 );
this.strings = [];
for ( let i = 0; i < numStrings; i ++ ) {
this.strings.push( reader.readUint32() );
}
}
// Reads the FIELDS section into this.fields. Each field pairs a token
// index (the field name) with a packed 64-bit ValueRep describing where
// and how its value is stored. Version >= 0.4.0 stores the token
// indices with USD integer compression and the value reps as raw LZ4.
_readFields() {
const section = this.sections[ 'FIELDS' ];
if ( ! section ) return;
const reader = this.reader;
reader.seek( section.start );
this.fields = [];
if ( this.version.major === 0 && this.version.minor < 4 ) {
// Uncompressed fields
const numFields = Math.floor( section.size / 12 ); // 4 bytes token index + 8 bytes value rep
for ( let i = 0; i < numFields; i ++ ) {
const tokenIndex = reader.readUint32();
const repLo = reader.readUint32();
const repHi = reader.readUint32();
this.fields.push( {
tokenIndex,
valueRep: new ValueRep( repLo, repHi )
} );
}
} else {
// Compressed fields (version >= 0.4.0)
const numFields = reader.readUint64();
// Read compressed token indices
const tokenIndicesCompressedSize = reader.readUint64();
const tokenIndicesCompressed = reader.readBytes( tokenIndicesCompressedSize );
// Slice to a standalone ArrayBuffer because readBytes returns a view
// into the whole file buffer.
const tokenIndices = decompressIntegers32(
tokenIndicesCompressed.buffer.slice(
tokenIndicesCompressed.byteOffset,
tokenIndicesCompressed.byteOffset + tokenIndicesCompressedSize
),
numFields
);
// Read compressed value reps (LZ4 only, no integer encoding)
const repsCompressedSize = reader.readUint64();
const repsCompressed = reader.readBytes( repsCompressedSize );
const repsData = decompressLZ4( repsCompressed, numFields * 8 );
// Each rep is 8 bytes: two little-endian uint32 halves.
const repsView = new DataView( repsData.buffer, repsData.byteOffset, repsData.byteLength );
for ( let i = 0; i < numFields; i ++ ) {
const repLo = repsView.getUint32( i * 8, true );
const repHi = repsView.getUint32( i * 8 + 4, true );
this.fields.push( {
tokenIndex: tokenIndices[ i ],
valueRep: new ValueRep( repLo, repHi )
} );
}
}
}
_readFieldSets() {
const section = this.sections[ 'FIELDSETS' ];
if ( ! section ) return;
const reader = this.reader;
reader.seek( section.start );
this.fieldSets = [];
if ( this.version.major === 0 && this.version.minor < 4 ) {
// Uncompressed field sets
const numFieldSets = Math.floor( section.size / 4 );
for ( let i = 0; i < numFieldSets; i ++ ) {
this.fieldSets.push( reader.readUint32() );
}
} else {
// Compressed field sets
const numFieldSets = reader.readUint64();
const compressedSize = reader.readUint64();
const compressed = reader.readBytes( compressedSize );
const indices = decompressIntegers32(
compressed.buffer.slice(
compressed.byteOffset,
compressed.byteOffset + compressedSize
),
numFieldSets
);
for ( let i = 0; i < numFieldSets; i ++ ) {
this.fieldSets.push( indices[ i ] );
}
}
}
_readPaths() {
const section = this.sections[ 'PATHS' ];
if ( ! section ) return;
const reader = this.reader;
reader.seek( section.start );
const numPaths = reader.readUint64();
this.paths = new Array( numPaths ).fill( '' );
if ( this.version.major === 0 && this.version.minor < 4 ) {
// Uncompressed paths - recursive tree structure
this._readPathsRecursive( '' );
} else {
// Compressed paths (version >= 0.4.0)
// Note: numPaths is stored twice - once for array sizing, once in compressed paths section
reader.readUint64(); // Read duplicate numPaths value (matches numPaths above)
const compressedSize1 = reader.readUint64();
const pathIndicesCompressed = reader.readBytes( compressedSize1 );
const pathIndices = decompressIntegers32(
pathIndicesCompressed.buffer.slice(
pathIndicesCompressed.byteOffset,
pathIndicesCompressed.byteOffset + compressedSize1
),
numPaths
);
const compressedSize2 = reader.readUint64();
const elementTokenIndicesCompressed = reader.readBytes( compressedSize2 );
const elementTokenIndices = decompressIntegers32(
elementTokenIndicesCompressed.buffer.slice(
elementTokenIndicesCompressed.byteOffset,
elementTokenIndicesCompressed.byteOffset + compressedSize2
),
numPaths
);
const compressedSize3 = reader.readUint64();
const jumpsCompressed = reader.readBytes( compressedSize3 );
const jumps = decompressIntegers32(
jumpsCompressed.buffer.slice(
jumpsCompressed.byteOffset,
jumpsCompressed.byteOffset + compressedSize3
),
numPaths
);
// Build paths from compressed data
this._buildPathsFromCompressed( pathIndices, elementTokenIndices, jumps );
}
}
// Decodes one node of the uncompressed (version < 0.4.0) path tree and
// recurses into its child and/or sibling subtrees. Each node stores its
// slot in this.paths, an element token index, and flag bits; property
// elements join the parent with '.', prim elements with '/'.
// @param {string} parentPath - Path of the enclosing prim ('' at the root).
// @param {number} depth - Recursion depth guard.
_readPathsRecursive( parentPath, depth = 0 ) {
const reader = this.reader;
// Prevent infinite recursion
if ( depth > 1000 ) return;
// Read path item header
const index = reader.readUint32();
const elementTokenIndex = reader.readUint32();
const bits = reader.readUint8();
const hasChild = ( bits & 1 ) !== 0;
const hasSibling = ( bits & 2 ) !== 0;
const isPrimProperty = ( bits & 4 ) !== 0;
// Build path
let path;
if ( parentPath === '' ) {
path = '/';
} else {
const elemToken = this.tokens[ elementTokenIndex ] || '';
if ( isPrimProperty ) {
path = parentPath + '.' + elemToken;
} else {
path = parentPath === '/' ? '/' + elemToken : parentPath + '/' + elemToken;
}
}
this.paths[ index ] = path;
// Process children and siblings
if ( hasChild && hasSibling ) {
// Read sibling offset
// NOTE(review): used as an absolute file offset when seeking below —
// confirm against the uncompressed crate layout.
const siblingOffset = reader.readUint64();
// Read child
this._readPathsRecursive( path, depth + 1 );
// Read sibling
reader.seek( siblingOffset );
this._readPathsRecursive( parentPath, depth + 1 );
} else if ( hasChild ) {
this._readPathsRecursive( path, depth + 1 );
} else if ( hasSibling ) {
this._readPathsRecursive( parentPath, depth + 1 );
}
}
// Rebuilds every path string from the three compressed-path arrays:
// pathIndices (slot in this.paths), elementTokenIndices (token id,
// negative when the element is a prim property), and jumps (tree
// structure encoding, see below).
// @param {Int32Array} pathIndices
// @param {Int32Array} elementTokenIndices
// @param {Int32Array} jumps
_buildPathsFromCompressed( pathIndices, elementTokenIndices, jumps ) {
// Jump encoding from USD:
// 0 = only sibling (no child), next entry is sibling
// -1 = only child (no sibling), next entry is child
// -2 = leaf (no child, no sibling)
// >0 = has both child and sibling, value is offset to sibling
const buildPaths = ( startIndex, parentPath ) => {
let curIndex = startIndex;
while ( curIndex < pathIndices.length ) {
const thisIndex = curIndex ++;
const pathIndex = pathIndices[ thisIndex ];
const elementTokenIndex = elementTokenIndices[ thisIndex ];
const jump = jumps[ thisIndex ];
// Build path
let path;
if ( parentPath === '' ) {
// The very first entry is the pseudo-root '/'.
path = '/';
parentPath = path;
} else {
const elemToken = this.tokens[ Math.abs( elementTokenIndex ) ] || '';
const isPrimProperty = elementTokenIndex < 0;
if ( isPrimProperty ) {
path = parentPath + '.' + elemToken;
} else {
path = parentPath === '/' ? '/' + elemToken : parentPath + '/' + elemToken;
}
}
this.paths[ pathIndex ] = path;
// Determine children and siblings
const hasChild = jump > 0 || jump === - 1;
const hasSibling = jump >= 0;
if ( hasChild ) {
if ( hasSibling ) {
// Has both child and sibling
// Recursively process sibling subtree
const siblingIndex = thisIndex + jump;
buildPaths( siblingIndex, parentPath );
}
// Child is next entry, continue with new parent path
parentPath = path;
} else if ( hasSibling ) {
// Only sibling, next entry is sibling with same parent
// Just continue loop with curIndex and same parentPath
} else {
// Leaf node, exit loop
break;
}
}
};
buildPaths( 0, '' );
}
_readSpecs() {
const section = this.sections[ 'SPECS' ];
if ( ! section ) return;
const reader = this.reader;
reader.seek( section.start );
this.specs = [];
if ( this.version.major === 0 && this.version.minor < 4 ) {
// Uncompressed specs
// Each spec: pathIndex (4), fieldSetIndex (4), specType (4) = 12 bytes
// For version 0.0.1 there may be different padding
const specSize = ( this.version.minor === 0 && this.version.patch === 1 ) ? 16 : 12;
const numSpecs = Math.floor( section.size / specSize );
for ( let i = 0; i < numSpecs; i ++ ) {
const pathIndex = reader.readUint32();
const fieldSetIndex = reader.readUint32();
const specType = reader.readUint32();
if ( specSize === 16 ) reader.readUint32(); // padding
this.specs.push( { pathIndex, fieldSetIndex, specType } );
}
} else {
// Compressed specs
const numSpecs = reader.readUint64();
const compressedSize1 = reader.readUint64();
const pathIndicesCompressed = reader.readBytes( compressedSize1 );
const pathIndices = decompressIntegers32(
pathIndicesCompressed.buffer.slice(
pathIndicesCompressed.byteOffset,
pathIndicesCompressed.byteOffset + compressedSize1
),
numSpecs
);
const compressedSize2 = reader.readUint64();
const fieldSetIndicesCompressed = reader.readBytes( compressedSize2 );
const fieldSetIndices = decompressIntegers32(
fieldSetIndicesCompressed.buffer.slice(
fieldSetIndicesCompressed.byteOffset,
fieldSetIndicesCompressed.byteOffset + compressedSize2
),
numSpecs
);
const compressedSize3 = reader.readUint64();
const specTypesCompressed = reader.readBytes( compressedSize3 );
const specTypes = decompressIntegers32(
specTypesCompressed.buffer.slice(
specTypesCompressed.byteOffset,
specTypesCompressed.byteOffset + compressedSize3
),
numSpecs
);
for ( let i = 0; i < numSpecs; i ++ ) {
this.specs.push( {
pathIndex: pathIndices[ i ],
fieldSetIndex: fieldSetIndices[ i ],
specType: specTypes[ i ]
} );
}
}
}
// ========================================================================
// Value Reading
// ========================================================================
_readValue( valueRep ) {
const type = valueRep.typeEnum;
const isArray = valueRep.isArray;
const isInlined = valueRep.isInlined;
// Handle TimeSamples specially - they have their own format
if ( type === TypeEnum.TimeSamples ) {
return this._readTimeSamples( valueRep );
}
if ( isInlined ) {
return this._readInlinedValue( valueRep );
}
// Seek to payload offset and read value
const offset = valueRep.payload;
const savedOffset = this.reader.tell();
this.reader.seek( offset );
let value;
if ( isArray ) {
value = this._readArrayValue( valueRep );
} else {
value = this._readScalarValue( type );
}
this.reader.seek( savedOffset );
return value;
}
// Unpacks a value stored inline in the lower 32 bits of a ValueRep,
// using the 4-byte scratch DataView for bit reinterpretation. Vectors
// and matrices too large to fit in 4 bytes are encoded as signed 8-bit
// components (matrices keep only their diagonal). Unknown types fall
// through to returning the raw payload bits.
_readInlinedValue( valueRep ) {
const type = valueRep.typeEnum;
const payload = valueRep.getInlinedValue();
const view = this._conversionView;
switch ( type ) {
case TypeEnum.Bool:
return payload !== 0;
case TypeEnum.UChar:
return payload & 0xFF;
case TypeEnum.Int:
case TypeEnum.UInt:
return payload;
case TypeEnum.Float: {
// Reinterpret the 32 payload bits as an IEEE 754 float.
view.setUint32( 0, payload, true );
return view.getFloat32( 0, true );
}
case TypeEnum.Double: {
// When a double is inlined, it's stored as float32 bits in the payload
view.setUint32( 0, payload, true );
return view.getFloat32( 0, true );
}
case TypeEnum.Token:
return this.tokens[ payload ] || '';
case TypeEnum.String:
// String payloads index the STRINGS table, which in turn indexes tokens.
return this.tokens[ this.strings[ payload ] ] || '';
case TypeEnum.AssetPath:
return this.tokens[ payload ] || '';
case TypeEnum.Specifier:
return payload; // 0=def, 1=over, 2=class
case TypeEnum.Permission:
case TypeEnum.Variability:
return payload;
// Vec2h: Two half-floats fit in 4 bytes, stored directly
case TypeEnum.Vec2h: {
view.setUint32( 0, payload, true );
return [ this._halfToFloat( view.getUint16( 0, true ) ), this._halfToFloat( view.getUint16( 2, true ) ) ];
}
// Inlined vectors that don't fit in 4 bytes are encoded as signed 8-bit integers
// Vec2f = 8 bytes (2x float32), Vec3f = 12 bytes, Vec4f = 16 bytes, etc.
case TypeEnum.Vec2f:
case TypeEnum.Vec2i: {
view.setUint32( 0, payload, true );
return [ view.getInt8( 0 ), view.getInt8( 1 ) ];
}
case TypeEnum.Vec3f:
case TypeEnum.Vec3i: {
view.setUint32( 0, payload, true );
return [ view.getInt8( 0 ), view.getInt8( 1 ), view.getInt8( 2 ) ];
}
case TypeEnum.Vec4f:
case TypeEnum.Vec4i: {
view.setUint32( 0, payload, true );
return [ view.getInt8( 0 ), view.getInt8( 1 ), view.getInt8( 2 ), view.getInt8( 3 ) ];
}
case TypeEnum.Matrix2d: {
// Inlined Matrix2d stores diagonal values as 2 signed int8 values
view.setUint32( 0, payload, true );
const d0 = view.getInt8( 0 ), d1 = view.getInt8( 1 );
return [ d0, 0, 0, d1 ];
}
case TypeEnum.Matrix3d: {
// Inlined Matrix3d stores diagonal values as 3 signed int8 values
view.setUint32( 0, payload, true );
const d0 = view.getInt8( 0 ), d1 = view.getInt8( 1 ), d2 = view.getInt8( 2 );
return [ d0, 0, 0, 0, d1, 0, 0, 0, d2 ];
}
case TypeEnum.Matrix4d: {
// Inlined Matrix4d stores diagonal values as 4 signed int8 values
view.setUint32( 0, payload, true );
const d0 = view.getInt8( 0 ), d1 = view.getInt8( 1 ), d2 = view.getInt8( 2 ), d3 = view.getInt8( 3 );
return [ d0, 0, 0, 0, 0, d1, 0, 0, 0, 0, d2, 0, 0, 0, 0, d3 ];
}
default:
return payload;
}
}
/**
 * Reads a TimeSamples value (time-varying attribute data).
 *
 * Layout at the payload offset (mirrors OpenUSD's `_RecursiveRead`; both
 * offsets are RELATIVE to the int64 field that holds them):
 * - int64 timesOffset → at (timesStart + timesOffset): a ValueRep for the
 *   times array.
 * - Immediately after that ValueRep: int64 valuesOffset → at
 *   (valuesStart + valuesOffset): uint64 numValues followed by one ValueRep
 *   per sample value.
 *
 * The reader position is saved on entry and restored before returning.
 *
 * @param {ValueRep} valueRep - Representation whose payload is the absolute
 * file offset of the TimeSamples blob.
 * @returns {{times: Array<number>, values: Array}} Parallel arrays of sample
 * times and their resolved values.
 */
_readTimeSamples( valueRep ) {

	const reader = this.reader;
	const offset = valueRep.payload;
	const savedOffset = reader.tell();

	reader.seek( offset );

	// Read the times relative offset and resolve it against the position of
	// the int64 itself (not against the start of the blob).
	const timesStart = reader.tell();
	const timesRelOffset = reader.readInt64();

	reader.seek( timesStart + timesRelOffset );

	const timesRepLo = reader.readUint32();
	const timesRepHi = reader.readUint32();
	const timesRep = new ValueRep( timesRepLo, timesRepHi );

	// Resolve times array (may itself be stored elsewhere in the file).
	const times = this._readValue( timesRep );

	// The second _RecursiveRead continues from the position right after the
	// 8-byte timesRep, NOT from the beginning of the blob. _readValue may
	// have moved the reader, so seek there explicitly.
	const afterTimesRep = timesStart + timesRelOffset + 8;

	reader.seek( afterTimesRep );

	// Read the values relative offset, again relative to the int64 itself.
	const valuesStart = reader.tell();
	const valuesRelOffset = reader.readInt64();

	reader.seek( valuesStart + valuesRelOffset );

	// Read number of values.
	const numValues = reader.readUint64();

	// Read all ValueReps first — resolving a value can seek elsewhere, so
	// the reps must be collected before any value is resolved.
	const valueReps = [];

	for ( let i = 0; i < numValues; i ++ ) {

		const repLo = reader.readUint32();
		const repHi = reader.readUint32();
		valueReps.push( new ValueRep( repLo, repHi ) );

	}

	// Resolve each value.
	const values = [];

	for ( let i = 0; i < numValues; i ++ ) {

		values.push( this._readValue( valueReps[ i ] ) );

	}

	reader.seek( savedOffset );

	// Normalize times to a plain array (scalar, plain array, or Float64Array
	// depending on how the times ValueRep was stored).
	const timesArray = times instanceof Float64Array ? Array.from( times ) : ( Array.isArray( times ) ? times : [ times ] );

	return { times: timesArray, values };

}
/**
 * Reads a single (non-array) value of the given type from the reader's
 * current position, advancing the reader past it. Count-prefixed helper
 * types (TokenVector, PathVector, DoubleVector, Dictionary, list ops,
 * VariantSelectionMap) are handled here as well; the Dictionary case seeks
 * to per-element value offsets but restores the reader position afterwards.
 *
 * @param {number} type - A TypeEnum value identifying how to decode the bytes.
 * @returns {*} The decoded value, or null for invalid/unsupported types.
 */
_readScalarValue( type ) {

	const reader = this.reader;

	switch ( type ) {

		case TypeEnum.Invalid:
			return null;

		case TypeEnum.Bool:
			return reader.readUint8() !== 0;

		case TypeEnum.UChar:
			return reader.readUint8();

		case TypeEnum.Int:
			return reader.readInt32();

		case TypeEnum.UInt:
			return reader.readUint32();

		case TypeEnum.Int64:
			return reader.readInt64();

		case TypeEnum.UInt64:
			return reader.readUint64();

		case TypeEnum.Half:
			return this._readHalf();

		case TypeEnum.Float:
			return reader.readFloat32();

		case TypeEnum.Double:
			return reader.readFloat64();

		// Strings, tokens and asset paths are stored as indices into the
		// tokens table; fall back to '' for out-of-range indices.
		case TypeEnum.String:
		case TypeEnum.Token: {

			const index = reader.readUint32();
			return this.tokens[ index ] || '';

		}

		case TypeEnum.AssetPath: {

			const index = reader.readUint32();
			return this.tokens[ index ] || '';

		}

		// Vectors and quaternions are returned as flat component arrays.
		case TypeEnum.Vec2f:
			return [ reader.readFloat32(), reader.readFloat32() ];

		case TypeEnum.Vec2d:
			return [ reader.readFloat64(), reader.readFloat64() ];

		case TypeEnum.Vec2i:
			return [ reader.readInt32(), reader.readInt32() ];

		case TypeEnum.Vec3f:
			return [ reader.readFloat32(), reader.readFloat32(), reader.readFloat32() ];

		case TypeEnum.Vec3d:
			return [ reader.readFloat64(), reader.readFloat64(), reader.readFloat64() ];

		case TypeEnum.Vec3i:
			return [ reader.readInt32(), reader.readInt32(), reader.readInt32() ];

		case TypeEnum.Vec4f:
			return [ reader.readFloat32(), reader.readFloat32(), reader.readFloat32(), reader.readFloat32() ];

		case TypeEnum.Vec4d:
			return [ reader.readFloat64(), reader.readFloat64(), reader.readFloat64(), reader.readFloat64() ];

		case TypeEnum.Quatf:
			return [ reader.readFloat32(), reader.readFloat32(), reader.readFloat32(), reader.readFloat32() ];

		case TypeEnum.Quatd:
			return [ reader.readFloat64(), reader.readFloat64(), reader.readFloat64(), reader.readFloat64() ];

		case TypeEnum.Matrix4d: {

			// 16 doubles, stored in row-major order.
			const m = [];
			for ( let i = 0; i < 16; i ++ ) m.push( reader.readFloat64() );
			return m;

		}

		// uint64 count followed by count uint32 token indices.
		case TypeEnum.TokenVector: {

			const count = reader.readUint64();
			const tokens = [];

			for ( let i = 0; i < count; i ++ ) {

				const index = reader.readUint32();
				tokens.push( this.tokens[ index ] || '' );

			}

			return tokens;

		}

		// uint64 count followed by count uint32 path indices.
		case TypeEnum.PathVector: {

			const count = reader.readUint64();
			const paths = [];

			for ( let i = 0; i < count; i ++ ) {

				const index = reader.readUint32();
				paths.push( this.paths[ index ] || '' );

			}

			return paths;

		}

		case TypeEnum.DoubleVector: {

			// DoubleVector is a count-prefixed array of doubles.
			const count = reader.readUint64();
			const arr = new Float64Array( count );
			for ( let i = 0; i < count; i ++ ) arr[ i ] = reader.readFloat64();
			return arr;

		}

		case TypeEnum.Dictionary: {

			// Dictionary format:
			// u64 elementCount
			// For each element: u32 keyIndex + i64 valueOffset (relative)
			const elementCount = reader.readUint64();
			const dict = {};

			for ( let i = 0; i < elementCount; i ++ ) {

				const keyIdx = reader.readUint32();
				const key = this.tokens[ keyIdx ];

				// Value offset is relative to the position of the int64 itself.
				const currentPos = reader.position;
				const valueOffset = reader.readInt64();
				const valuePos = currentPos + valueOffset;

				// Save position (now past the int64), read the value at its
				// offset, then restore so the next element can be read.
				const savedPos = reader.position;
				reader.position = valuePos;

				// Read the value representation at the offset.
				// NOTE(review): ValueRep is constructed here from a single
				// 64-bit number, while elsewhere it is built from (lo, hi)
				// uint32 pairs — assumes the constructor supports both forms;
				// verify. readUint64 as a JS number also loses precision for
				// values above 2^53.
				const valueRepData = reader.readUint64();
				const valueRep = new ValueRep( valueRepData );

				// Read the value based on the representation.
				let value = null;

				if ( valueRep.isInlined ) {

					value = this._readInlinedValue( valueRep );

				} else if ( valueRep.isArray ) {

					reader.position = valueRep.payload;
					value = this._readArrayValue( valueRep );

				} else {

					reader.position = valueRep.payload;
					value = this._readScalarValue( valueRep.typeEnum );

				}

				reader.position = savedPos;

				if ( key !== undefined && value !== null ) {

					dict[ key ] = value;

				}

			}

			return dict;

		}

		case TypeEnum.TokenListOp:
		case TypeEnum.StringListOp:
		case TypeEnum.IntListOp:
		case TypeEnum.Int64ListOp:
		case TypeEnum.UIntListOp:
		case TypeEnum.UInt64ListOp:
			// These complex types are not needed for geometry loading.
			// Skip them silently.
			return null;

		case TypeEnum.PathListOp: {

			// PathListOp format (from AOUSD Core Spec 16.3.10.25):
			// Header byte bitmask:
			// - bit 0 (0x01): Make Explicit (clears list)
			// - bit 1 (0x02): Add Explicit Items
			// - bit 2 (0x04): Add Items
			// - bit 3 (0x08): Delete Items
			// - bit 4 (0x10): Reorder Items
			// - bit 5 (0x20): Prepend Items
			// - bit 6 (0x40): Append Items
			// Arrays follow in order: Explicit, Add, Prepend, Append, Delete, Reorder
			// Each array: uint64 count + count * uint32 path indices
			const flags = reader.readUint8();

			const hasExplicitItems = ( flags & 0x02 ) !== 0;
			const hasAddItems = ( flags & 0x04 ) !== 0;
			const hasDeleteItems = ( flags & 0x08 ) !== 0;
			const hasReorderItems = ( flags & 0x10 ) !== 0;
			const hasPrependItems = ( flags & 0x20 ) !== 0;
			const hasAppendItems = ( flags & 0x40 ) !== 0;

			const readPathList = () => {

				const itemCount = reader.readUint64();
				const paths = [];

				for ( let i = 0; i < itemCount; i ++ ) {

					const pathIdx = reader.readUint32();
					paths.push( this.paths[ pathIdx ] );

				}

				return paths;

			};

			// Read arrays in spec order: Explicit, Add, Prepend, Append, Delete, Reorder.
			// Delete/Reorder lists must still be consumed to keep the reader aligned.
			let explicitPaths = null;
			let addPaths = null;
			let prependPaths = null;
			let appendPaths = null;

			if ( hasExplicitItems ) explicitPaths = readPathList();
			if ( hasAddItems ) addPaths = readPathList();
			if ( hasPrependItems ) prependPaths = readPathList();
			if ( hasAppendItems ) appendPaths = readPathList();
			if ( hasDeleteItems ) readPathList(); // Skip delete items
			if ( hasReorderItems ) readPathList(); // Skip reorder items

			// Heuristic: return the first non-empty list rather than composing
			// the ops (connections are typically prepended).
			if ( prependPaths && prependPaths.length > 0 ) return prependPaths;
			if ( explicitPaths && explicitPaths.length > 0 ) return explicitPaths;
			if ( appendPaths && appendPaths.length > 0 ) return appendPaths;
			if ( addPaths && addPaths.length > 0 ) return addPaths;

			return null;

		}

		case TypeEnum.VariantSelectionMap: {

			// u64 count of (key, value) pairs; both sides are uint32 indices
			// that go through the strings table into the tokens table.
			const elementCount = reader.readUint64();
			const map = {};

			for ( let i = 0; i < elementCount; i ++ ) {

				const keyIdx = reader.readUint32();
				const valueIdx = reader.readUint32();
				const key = this.tokens[ this.strings[ keyIdx ] ];
				const value = this.tokens[ this.strings[ valueIdx ] ];
				if ( key && value ) map[ key ] = value;

			}

			return map;

		}

		default:
			console.warn( 'USDCParser: Unsupported scalar type', type );
			return null;

	}

}
_readArrayValue( valueRep ) {
const reader = this.reader;
const type = valueRep.typeEnum;
const isCompressed = valueRep.isCompressed;
// Read array size
let size;
if ( this.version.major === 0 && this.version.minor < 7 ) {
size = reader.readUint32();
} else {
size = reader.readUint64();
}
if ( size === 0 ) return [];
// Handle compressed arrays
if ( isCompressed ) {
return this._readCompressedArray( type, size );
}
// Read uncompressed array
switch ( type ) {
case TypeEnum.Int: {
const arr = new Int32Array( size );
for ( let i = 0; i < size; i ++ ) arr[ i ] = reader.readInt32();
return arr;
}
case TypeEnum.UInt: {
const arr = new Uint32Array( size );
for ( let i = 0; i < size; i ++ ) arr[ i ] = reader.readUint32();
return arr;
}
case TypeEnum.Float: {
const arr = new Float32Array( size );
for ( let i = 0; i < size; i ++ ) arr[ i ] = reader.readFloat32();
return arr;
}
case TypeEnum.Double: {
const arr = new Float64Array( size );
for ( let i = 0; i < size; i ++ ) arr[ i ] = reader.readFloat64();
return arr;
}
case TypeEnum.Vec2f: {
const arr = new Float32Array( size * 2 );
for ( let i = 0; i < size * 2; i ++ ) arr[ i ] = reader.readFloat32();
return arr;
}
case TypeEnum.Vec3f: {
const arr = new Float32Array( size * 3 );
for ( let i = 0; i < size * 3; i ++ ) arr[ i ] = reader.readFloat32();
return arr;
}
case TypeEnum.Vec4f: {
const arr = new Float32Array( size * 4 );
for ( let i = 0; i < size * 4; i ++ ) arr[ i ] = reader.readFloat32();
return arr;
}
case TypeEnum.Vec3h: {
// Half-precision vec3 array (used for scales in skeletal animation)
const arr = new Float32Array( size * 3 );
for ( let i = 0; i < size * 3; i ++ ) arr[ i ] = this._readHalf();
return arr;
}
case TypeEnum.Quatf: {
const arr = new Float32Array( size * 4 );
for ( let i = 0; i < size * 4; i ++ ) arr[ i ] = reader.readFloat32();
return arr;
}
case TypeEnum.Quath: {
// Half-precision quaternion array
const arr = new Float32Array( size * 4 );
for ( let i = 0; i < size * 4; i ++ ) arr[ i ] = this._readHalf();
return arr;
}
case TypeEnum.Matrix4d: {
// 4x4 matrix array (16 doubles per matrix, row-major)
const arr = new Float64Array( size * 16 );
for ( let i = 0; i < size * 16; i ++ ) arr[ i ] = reader.readFloat64();
return arr;
}
case TypeEnum.Token: {
const arr = [];
for ( let i = 0; i < size; i ++ ) {
const index = reader.readUint32();
arr.push( this.tokens[ index ] || '' );
}
return arr;
}
case TypeEnum.Half: {
const arr = new Float32Array( size );
for ( let i = 0; i < size; i ++ ) arr[ i ] = this._readHalf();
return arr;
}
default:
console.warn( 'USDCParser: Unsupported array type', type );
return [];
}
}
_readCompressedArray( type, size ) {
const reader = this.reader;
switch ( type ) {
case TypeEnum.Int:
case TypeEnum.UInt: {
const compressedSize = reader.readUint64();
const compressed = reader.readBytes( compressedSize );
return decompressIntegers32(
compressed.buffer.slice(
compressed.byteOffset,
compressed.byteOffset + compressedSize
),
size
);
}
case TypeEnum.Float: {
// Float compression: 'i' = compressed as ints, 't' = lookup table
const code = reader.readInt8();
if ( code === FLOAT_COMPRESSION_INT ) {
const compressedSize = reader.readUint64();
const compressed = reader.readBytes( compressedSize );
const ints = decompressIntegers32(
compressed.buffer.slice(
compressed.byteOffset,
compressed.byteOffset + compressedSize
),
size
);
const floats = new Float32Array( size );
for ( let i = 0; i < size; i ++ ) floats[ i ] = ints[ i ];
return floats;
} else if ( code === FLOAT_COMPRESSION_LUT ) {
const lutSize = reader.readUint32();
const lut = new Float32Array( lutSize );
for ( let i = 0; i < lutSize; i ++ ) lut[ i ] = reader.readFloat32();
const compressedSize = reader.readUint64();
const compressed = reader.readBytes( compressedSize );
const indices = decompressIntegers32(
compressed.buffer.slice(
compressed.byteOffset,
compressed.byteOffset + compressedSize
),
size
);
const floats = new Float32Array( size );
for ( let i = 0; i < size; i ++ ) floats[ i ] = lut[ indices[ i ] ];
return floats;
}
console.warn( 'USDCParser: Unknown float compression code', code );
return new Float32Array( size );
}
default:
console.warn( 'USDCParser: Unsupported compressed array type', type );
return [];
}
}
_readHalf() {
return this._halfToFloat( this.reader.readUint16() );
}
_halfToFloat( h ) {
const sign = ( h & 0x8000 ) >> 15;
const exp = ( h & 0x7C00 ) >> 10;
const frac = h & 0x03FF;
if ( exp === 0 ) {
// Zero or denormalized number
if ( frac === 0 ) {
return sign ? - 0 : 0;
}
// Denormalized: value = ±2^-14 × (frac/1024)
return ( sign ? - 1 : 1 ) * HALF_DENORM_SCALE * ( frac / 1024 );
} else if ( exp === 31 ) {
return frac ? NaN : ( sign ? - Infinity : Infinity );
}
return ( sign ? - 1 : 1 ) * HALF_EXPONENT_TABLE[ exp ] * ( 1 + frac / 1024 );
}
_getFieldsForSpec( spec ) {
const fields = {};
let fieldSetIndex = spec.fieldSetIndex;
// Field sets are terminated by FIELD_SET_TERMINATOR
// Limit iterations to prevent infinite loops from malformed data
const maxIterations = 10000;
let iterations = 0;
while ( fieldSetIndex < this.fieldSets.length && iterations < maxIterations ) {
const fieldIndex = this.fieldSets[ fieldSetIndex ];
// Terminator
if ( fieldIndex === FIELD_SET_TERMINATOR || fieldIndex === - 1 ) break;
const field = this.fields[ fieldIndex ];
if ( field ) {
const name = this.tokens[ field.tokenIndex ];
const value = this._readValue( field.valueRep );
fields[ name ] = value;
}
fieldSetIndex ++;
iterations ++;
}
return fields;
}
}
export { USDCParser };

@@ -6,14 +6,16 @@ import {

import * as fflate from '../libs/fflate.module.js';
import { unzipSync } from '../libs/fflate.module.js';
import { USDAParser } from './usd/USDAParser.js';
import { USDCParser } from './usd/USDCParser.js';
import { USDComposer } from './usd/USDComposer.js';
/**
* A loader for the USDZ format.
* A loader for the USD format (USD, USDA, USDC, USDZ).
*
* USDZ files that use USDC internally are not yet supported, only USDA.
* Supports both ASCII (USDA) and binary (USDC) USD files, as well as
* USDZ archives containing either format.
*
* ```js
* const loader = new USDZLoader();
* const model = await loader.loadAsync( 'saeukkang.usdz' );
* const loader = new USDLoader();
* const model = await loader.loadAsync( 'model.usdz' );
* scene.add( model );

@@ -101,5 +103,6 @@ * ```

if ( filename.endsWith( 'png' ) ) {
if ( filename.endsWith( 'png' ) || filename.endsWith( 'jpg' ) || filename.endsWith( 'jpeg' ) ) {
const blob = new Blob( [ zip[ filename ] ], { type: 'image/png' } );
const type = filename.endsWith( 'png' ) ? 'image/png' : 'image/jpeg';
const blob = new Blob( [ zip[ filename ] ], { type } );
data[ filename ] = URL.createObjectURL( blob );

@@ -109,2 +112,6 @@

}
for ( const filename in zip ) {
if ( filename.endsWith( 'usd' ) || filename.endsWith( 'usda' ) || filename.endsWith( 'usdc' ) ) {

@@ -114,8 +121,15 @@

data[ filename ] = usdc.parse( zip[ filename ].buffer, data );
// Store parsed data (specsByPath) for on-demand composition
const parsedData = usdc.parseData( zip[ filename ].buffer );
data[ filename ] = parsedData;
// Store raw buffer for re-parsing with variant selections
data[ filename + ':buffer' ] = zip[ filename ].buffer;
} else {
const text = fflate.strFromU8( zip[ filename ] );
data[ filename ] = usda.parseText( text );
const text = new TextDecoder().decode( zip[ filename ] );
// Store parsed data (specsByPath) for on-demand composition
data[ filename ] = usda.parseData( text );
// Store raw text for re-parsing with variant selections
data[ filename + ':text' ] = text;

@@ -152,3 +166,3 @@ }

if ( zip.length < 1 ) return undefined;
if ( zip.length < 1 ) return { file: undefined, basePath: '' };

@@ -158,6 +172,9 @@ const firstFileName = Object.keys( zip )[ 0 ];

const lastSlash = firstFileName.lastIndexOf( '/' );
const basePath = lastSlash >= 0 ? firstFileName.slice( 0, lastSlash ) : '';
// As per the USD specification, the first entry in the zip archive is used as the main file ("UsdStage").
// ASCII files can end in either .usda or .usd.
// See https://openusd.org/release/spec_usdz.html#layout
if ( firstFileName.endsWith( 'usda' ) ) return zip[ firstFileName ];
if ( firstFileName.endsWith( 'usda' ) ) return { file: zip[ firstFileName ], basePath };

@@ -173,3 +190,3 @@ if ( firstFileName.endsWith( 'usdc' ) ) {

return zip[ firstFileName ];
return { file: zip[ firstFileName ], basePath };

@@ -186,38 +203,69 @@ } else {

return zip[ firstFileName ];
return { file: zip[ firstFileName ], basePath };
}
return { file: undefined, basePath: '' };
}
// USDA
const scope = this;
// USDA (standalone)
if ( typeof buffer === 'string' ) {
return usda.parse( buffer, {} );
const composer = new USDComposer( scope.manager );
const data = usda.parseData( buffer );
return composer.compose( data, {} );
}
// USDC
// USDC (standalone)
if ( isCrateFile( buffer ) ) {
return usdc.parse( buffer );
const composer = new USDComposer( scope.manager );
const data = usdc.parseData( buffer );
return composer.compose( data, {} );
}
const bytes = new Uint8Array( buffer );
// USDZ
const zip = fflate.unzipSync( new Uint8Array( buffer ) );
if ( bytes[ 0 ] === 0x50 && bytes[ 1 ] === 0x4B ) {
const assets = parseAssets( zip );
const zip = unzipSync( bytes );
// console.log( assets );
const assets = parseAssets( zip );
const file = findUSD( zip );
const { file, basePath } = findUSD( zip );
const text = fflate.strFromU8( file );
const composer = new USDComposer( scope.manager );
let data;
return usda.parse( text, assets );
if ( isCrateFile( file ) ) {
data = usdc.parseData( file.buffer );
} else {
const text = new TextDecoder().decode( file );
data = usda.parseData( text );
}
return composer.compose( data, assets, {}, basePath );
}
// USDA (standalone, as ArrayBuffer)
const composer = new USDComposer( scope.manager );
const text = new TextDecoder().decode( bytes );
const data = usda.parseData( text );
return composer.compose( data, {} );
}

@@ -224,0 +272,0 @@

@@ -10,3 +10,3 @@ import {

} from 'three';
import * as fflate from '../libs/fflate.module.js';
import { unzlibSync } from '../libs/fflate.module.js';

@@ -116,5 +116,17 @@ /**

// pattern for reading vertices, 3 floats or integers
const pat3Floats = /(\-?\d+\.?[\d\-\+e]*)\s+(\-?\d+\.?[\d\-\+e]*)\s+(\-?\d+\.?[\d\-\+e]*)/g;
function parseFloats( line ) {
const result = [];
const parts = line.split( /\s+/ );
for ( let i = 0; i < parts.length; i ++ ) {
if ( parts[ i ] !== '' ) result.push( parseFloat( parts[ i ] ) );
}
return result;
}
// pattern for connectivity, an integer followed by any number of ints

@@ -170,11 +182,12 @@ // the first integer is the number of polygon nodes

// get the vertices
while ( ( result = pat3Floats.exec( line ) ) !== null ) {
if ( patWord.exec( line ) === null ) {
if ( patWord.exec( line ) !== null ) break;
const values = parseFloats( line );
const x = parseFloat( result[ 1 ] );
const y = parseFloat( result[ 2 ] );
const z = parseFloat( result[ 3 ] );
positions.push( x, y, z );
for ( let k = 0; k + 2 < values.length; k += 3 ) {
positions.push( values[ k ], values[ k + 1 ], values[ k + 2 ] );
}
}

@@ -249,13 +262,12 @@

while ( ( result = pat3Floats.exec( line ) ) !== null ) {
if ( patWord.exec( line ) === null ) {
if ( patWord.exec( line ) !== null ) break;
const values = parseFloats( line );
const r = parseFloat( result[ 1 ] );
const g = parseFloat( result[ 2 ] );
const b = parseFloat( result[ 3 ] );
for ( let k = 0; k + 2 < values.length; k += 3 ) {
color.setRGB( r, g, b, SRGBColorSpace );
color.setRGB( values[ k ], values[ k + 1 ], values[ k + 2 ], SRGBColorSpace );
colors.push( color.r, color.g, color.b );
colors.push( color.r, color.g, color.b );
}

@@ -268,11 +280,12 @@ }

while ( ( result = pat3Floats.exec( line ) ) !== null ) {
if ( patWord.exec( line ) === null ) {
if ( patWord.exec( line ) !== null ) break;
const values = parseFloats( line );
const nx = parseFloat( result[ 1 ] );
const ny = parseFloat( result[ 2 ] );
const nz = parseFloat( result[ 3 ] );
normals.push( nx, ny, nz );
for ( let k = 0; k + 2 < values.length; k += 3 ) {
normals.push( values[ k ], values[ k + 1 ], values[ k + 2 ] );
}
}

@@ -408,3 +421,3 @@

const s = [];
while ( c !== 10 ) {
while ( c !== 10 && index < buffer.length ) {

@@ -855,3 +868,3 @@ s.push( String.fromCharCode( c ) );

const data = fflate.unzlibSync( byteData.slice( dataOffsets[ i ], dataOffsets[ i + 1 ] ) );
const data = unzlibSync( byteData.slice( dataOffsets[ i ], dataOffsets[ i + 1 ] ) );
content = data.buffer;

@@ -858,0 +871,0 @@

@@ -1,3 +0,3 @@

import { Color } from 'three/webgpu';
import { attribute, cameraProjectionMatrix, dot, float, Fn, modelViewMatrix, modelViewProjection, NodeMaterial, normalize, positionGeometry, sign, uniform, varyingProperty, vec2, vec4 } from 'three/tsl';
import { Color, NodeMaterial } from 'three/webgpu';
import { attribute, cameraProjectionMatrix, dot, float, Fn, modelViewMatrix, modelViewProjection, normalize, positionGeometry, sign, uniform, varyingProperty, vec2, vec4 } from 'three/tsl';

@@ -4,0 +4,0 @@ /**

@@ -218,3 +218,3 @@ import {

viewport.multiplyScalar( window.devicePixelRatio );
viewport.multiplyScalar( renderer.getPixelRatio() ).floor();

@@ -221,0 +221,0 @@ const renderTarget = renderer.getRenderTarget();

@@ -75,3 +75,9 @@ import {

'sunPosition': { value: new Vector3() },
'up': { value: new Vector3( 0, 1, 0 ) }
'up': { value: new Vector3( 0, 1, 0 ) },
'cloudScale': { value: 0.0002 },
'cloudSpeed': { value: 0.0001 },
'cloudCoverage': { value: 0.4 },
'cloudDensity': { value: 0.4 },
'cloudElevation': { value: 0.5 },
'time': { value: 0.0 }
},

@@ -154,3 +160,2 @@

varying vec3 vSunDirection;
varying float vSunfade;
varying vec3 vBetaR;

@@ -162,3 +167,36 @@ varying vec3 vBetaM;

uniform vec3 up;
uniform float cloudScale;
uniform float cloudSpeed;
uniform float cloudCoverage;
uniform float cloudDensity;
uniform float cloudElevation;
uniform float time;
// Cloud noise functions
float hash( vec2 p ) {
return fract( sin( dot( p, vec2( 127.1, 311.7 ) ) ) * 43758.5453123 );
}
float noise( vec2 p ) {
vec2 i = floor( p );
vec2 f = fract( p );
f = f * f * ( 3.0 - 2.0 * f );
float a = hash( i );
float b = hash( i + vec2( 1.0, 0.0 ) );
float c = hash( i + vec2( 0.0, 1.0 ) );
float d = hash( i + vec2( 1.0, 1.0 ) );
return mix( mix( a, b, f.x ), mix( c, d, f.x ), f.y );
}
float fbm( vec2 p ) {
float value = 0.0;
float amplitude = 0.5;
for ( int i = 0; i < 5; i ++ ) {
value += amplitude * noise( p );
p *= 2.0;
amplitude *= 0.5;
}
return value;
}
// constants for atmospheric scattering

@@ -229,6 +267,40 @@ const float pi = 3.141592653589793238462643383279502884197169;

vec3 retColor = pow( texColor, vec3( 1.0 / ( 1.2 + ( 1.2 * vSunfade ) ) ) );
// Clouds
if ( direction.y > 0.0 && cloudCoverage > 0.0 ) {
gl_FragColor = vec4( retColor, 1.0 );
// Project to cloud plane (higher elevation = clouds appear lower/closer)
float elevation = mix( 1.0, 0.1, cloudElevation );
vec2 cloudUV = direction.xz / ( direction.y * elevation );
cloudUV *= cloudScale;
cloudUV += time * cloudSpeed;
// Multi-octave noise for fluffy clouds
float cloudNoise = fbm( cloudUV * 1000.0 );
cloudNoise += 0.5 * fbm( cloudUV * 2000.0 + 3.7 );
cloudNoise = cloudNoise * 0.5 + 0.5;
// Apply coverage threshold
float cloudMask = smoothstep( 1.0 - cloudCoverage, 1.0 - cloudCoverage + 0.3, cloudNoise );
// Fade clouds near horizon (adjusted by elevation)
float horizonFade = smoothstep( 0.0, 0.1 + 0.2 * cloudElevation, direction.y );
cloudMask *= horizonFade;
// Cloud lighting based on sun position
float sunInfluence = dot( direction, vSunDirection ) * 0.5 + 0.5;
float daylight = max( 0.0, vSunDirection.y * 2.0 );
// Base cloud color affected by atmosphere
vec3 atmosphereColor = Lin * 0.04;
vec3 cloudColor = mix( vec3( 0.3 ), vec3( 1.0 ), daylight );
cloudColor = mix( cloudColor, atmosphereColor + vec3( 1.0 ), sunInfluence * 0.5 );
cloudColor *= vSunE * 0.00002;
// Blend clouds with sky
texColor = mix( texColor, cloudColor, cloudMask * cloudDensity );
}
gl_FragColor = vec4( texColor, 1.0 );
#include <tonemapping_fragment>

@@ -235,0 +307,0 @@ #include <colorspace_fragment>

@@ -9,3 +9,3 @@ import {

import { Fn, float, vec3, acos, add, mul, clamp, cos, dot, exp, max, mix, modelViewProjection, normalize, positionWorld, pow, smoothstep, sub, varyingProperty, vec4, uniform, cameraPosition } from 'three/tsl';
import { Fn, float, vec2, vec3, acos, add, mul, clamp, cos, dot, exp, max, mix, modelViewProjection, normalize, positionWorld, pow, smoothstep, sub, varyingProperty, vec4, uniform, cameraPosition, fract, floor, sin, time, Loop, If } from 'three/tsl';

@@ -87,2 +87,37 @@ /**

/**
* The cloud scale uniform.
*
* @type {UniformNode<float>}
*/
this.cloudScale = uniform( 0.0002 );
/**
* The cloud speed uniform.
*
* @type {UniformNode<float>}
*/
this.cloudSpeed = uniform( 0.0001 );
/**
* The cloud coverage uniform.
*
* @type {UniformNode<float>}
*/
this.cloudCoverage = uniform( 0.4 );
/**
* The cloud density uniform.
*
* @type {UniformNode<float>}
*/
this.cloudDensity = uniform( 0.4 );
/**
* The cloud elevation uniform.
*
* @type {UniformNode<float>}
*/
this.cloudElevation = uniform( 0.5 );
/**
* This flag can be used for type testing.

@@ -110,3 +145,2 @@ *

const vSunE = varyingProperty( 'float' );
const vSunfade = varyingProperty( 'float' );
const vBetaR = varyingProperty( 'vec3' );

@@ -152,6 +186,5 @@ const vBetaM = varyingProperty( 'vec3' );

// varying sun fade
// sun fade
const sunfade = float( 1.0 ).sub( clamp( float( 1.0 ).sub( exp( this.sunPosition.y.div( 450000.0 ) ) ), 0, 1 ) );
vSunfade.assign( sunfade );

@@ -239,8 +272,82 @@ // varying vBetaR

const texColor = add( Lin, L0 ).mul( 0.04 ).add( vec3( 0.0, 0.0003, 0.00075 ) );
const texColor = add( Lin, L0 ).mul( 0.04 ).add( vec3( 0.0, 0.0003, 0.00075 ) ).toVar();
const retColor = pow( texColor, vec3( float( 1.0 ).div( float( 1.2 ).add( vSunfade.mul( 1.2 ) ) ) ) );
// Cloud noise functions
const hash = Fn( ( [ p ] ) => {
return vec4( retColor, 1.0 );
return fract( sin( dot( p, vec2( 127.1, 311.7 ) ) ).mul( 43758.5453123 ) );
} );
const noise = Fn( ( [ p_immutable ] ) => {
const p = vec2( p_immutable ).toVar();
const i = floor( p );
const f = fract( p );
const ff = f.mul( f ).mul( sub( 3.0, f.mul( 2.0 ) ) );
const a = hash( i );
const b = hash( add( i, vec2( 1.0, 0.0 ) ) );
const c = hash( add( i, vec2( 0.0, 1.0 ) ) );
const d = hash( add( i, vec2( 1.0, 1.0 ) ) );
return mix( mix( a, b, ff.x ), mix( c, d, ff.x ), ff.y );
} );
const fbm = Fn( ( [ p_immutable ] ) => {
const p = vec2( p_immutable ).toVar();
const value = float( 0.0 ).toVar();
const amplitude = float( 0.5 ).toVar();
Loop( 5, () => {
value.addAssign( amplitude.mul( noise( p ) ) );
p.mulAssign( 2.0 );
amplitude.mulAssign( 0.5 );
} );
return value;
} );
// Clouds
If( direction.y.greaterThan( 0.0 ).and( this.cloudCoverage.greaterThan( 0.0 ) ), () => {
// Project to cloud plane (higher elevation = clouds appear lower/closer)
const elevation = mix( 1.0, 0.1, this.cloudElevation );
const cloudUV = direction.xz.div( direction.y.mul( elevation ) ).toVar();
cloudUV.mulAssign( this.cloudScale );
cloudUV.addAssign( time.mul( this.cloudSpeed ) );
// Multi-octave noise for fluffy clouds
const cloudNoise = fbm( cloudUV.mul( 1000.0 ) ).add( fbm( cloudUV.mul( 2000.0 ).add( 3.7 ) ).mul( 0.5 ) ).toVar();
cloudNoise.assign( cloudNoise.mul( 0.5 ).add( 0.5 ) );
// Apply coverage threshold
const cloudMask = smoothstep( sub( 1.0, this.cloudCoverage ), sub( 1.0, this.cloudCoverage ).add( 0.3 ), cloudNoise ).toVar();
// Fade clouds near horizon (adjusted by elevation)
const horizonFade = smoothstep( 0.0, add( 0.1, mul( 0.2, this.cloudElevation ) ), direction.y );
cloudMask.mulAssign( horizonFade );
// Cloud lighting based on sun position
const sunInfluence = dot( direction, vSunDirection ).mul( 0.5 ).add( 0.5 );
const daylight = max( 0.0, vSunDirection.y.mul( 2.0 ) );
// Base cloud color affected by atmosphere
const atmosphereColor = Lin.mul( 0.04 );
const cloudColor = mix( vec3( 0.3 ), vec3( 1.0 ), daylight ).toVar();
cloudColor.assign( mix( cloudColor, atmosphereColor.add( vec3( 1.0 ) ), sunInfluence.mul( 0.5 ) ) );
cloudColor.mulAssign( vSunE.mul( 0.00002 ) );
// Blend clouds with sky
texColor.assign( mix( texColor, cloudColor, cloudMask.mul( this.cloudDensity ) ) );
} );
return vec4( texColor, 1.0 );
} )();

@@ -247,0 +354,0 @@

import {
Color,
FrontSide,
HalfFloatType,
Matrix4,

@@ -87,3 +88,3 @@ Mesh,

const renderTarget = new WebGLRenderTarget( textureWidth, textureHeight );
const renderTarget = new WebGLRenderTarget( textureWidth, textureHeight, { type: HalfFloatType } );

@@ -200,6 +201,6 @@ const mirrorShader = {

float theta = max( dot( eyeDirection, surfaceNormal ), 0.0 );
float rf0 = 0.3;
float rf0 = 0.02;
float reflectance = rf0 + ( 1.0 - rf0 ) * pow( ( 1.0 - theta ), 5.0 );
vec3 scatter = max( 0.0, dot( surfaceNormal, eyeDirection ) ) * waterColor;
vec3 albedo = mix( ( sunColor * diffuseLight * 0.3 + scatter ) * getShadowMask(), ( vec3( 0.1 ) + reflectionSample * 0.9 + reflectionSample * specularLight ), reflectance);
vec3 albedo = mix( ( sunColor * diffuseLight * 0.3 + scatter ) * getShadowMask(), reflectionSample + specularLight, reflectance );
vec3 outgoingLight = albedo;

@@ -206,0 +207,0 @@ gl_FragColor = vec4( outgoingLight, alpha );

import {
Clock,
Timer,
Color,

@@ -77,3 +77,3 @@ Matrix4,

const textureMatrix = new Matrix4();
const clock = new Clock();
const timer = new Timer();

@@ -184,3 +184,3 @@ // internal components

const delta = clock.getDelta();
const delta = timer.getDelta();
const config = scope.material.uniforms[ 'config' ];

@@ -212,2 +212,4 @@

timer.update();
updateTextureMatrix( camera );

@@ -214,0 +216,0 @@ updateFlow();

@@ -5,6 +5,6 @@ import {

Vector3,
MeshLambertNodeMaterial
NodeMaterial
} from 'three/webgpu';
import { Fn, add, cameraPosition, div, normalize, positionWorld, sub, time, texture, vec2, vec3, max, dot, reflect, pow, length, float, uniform, reflector, mul, mix, diffuseColor } from 'three/tsl';
import { Fn, add, cameraPosition, div, normalize, positionWorld, sub, time, texture, vec2, max, dot, reflect, pow, length, float, uniform, reflector, mul, mix } from 'three/tsl';

@@ -36,3 +36,3 @@ /**

const material = new MeshLambertNodeMaterial();
const material = new NodeMaterial();

@@ -160,4 +160,2 @@ super( geometry, material );

material.setupOutgoingLight = () => diffuseColor.rgb; // backwards compatibility
material.colorNode = Fn( () => {

@@ -172,6 +170,6 @@

const theta = max( dot( eyeDirection, surfaceNormal ), 0.0 );
const rf0 = float( 0.3 );
const rf0 = float( 0.02 );
const reflectance = mul( pow( float( 1.0 ).sub( theta ), 5.0 ), float( 1.0 ).sub( rf0 ) ).add( rf0 );
const scatter = max( 0.0, dot( surfaceNormal, eyeDirection ) ).mul( this.waterColor );
const albedo = mix( this.sunColor.mul( diffuseLight ).mul( 0.3 ).add( scatter ), mirrorSampler.rgb.mul( specularLight ).add( mirrorSampler.rgb.mul( 0.9 ) ).add( vec3( 0.1 ) ), reflectance );
const albedo = mix( this.sunColor.mul( diffuseLight ).mul( 0.3 ).add( scatter ), mirrorSampler.rgb.add( specularLight ), reflectance );

@@ -178,0 +176,0 @@ return albedo;

@@ -1,4 +0,4 @@

import { Clock, Vector3, Quaternion, Matrix4 } from 'three';
import { Timer, Vector3, Quaternion, Matrix4 } from 'three';
const JOLT_PATH = 'https://cdn.jsdelivr.net/npm/jolt-physics@0.23.0/dist/jolt-physics.wasm-compat.js';
const JOLT_PATH = 'https://cdn.jsdelivr.net/npm/jolt-physics@1.0.0/dist/jolt-physics.wasm-compat.js';

@@ -80,3 +80,3 @@ const frameRate = 60;

const { default: initJolt } = await import( `${JOLT_PATH}` );
const { default: initJolt } = await import( JOLT_PATH /* @vite-ignore */ );
Jolt = await initJolt();

@@ -226,8 +226,10 @@

const clock = new Clock();
const timer = new Timer();
function step() {
let deltaTime = clock.getDelta();
timer.update();
let deltaTime = timer.getDelta();
// Don't go below 30 Hz to prevent spiral of death

@@ -234,0 +236,0 @@ deltaTime = Math.min( deltaTime, 1.0 / 30.0 );

@@ -1,2 +0,2 @@

import { Clock, Vector3, Quaternion, Matrix4 } from 'three';
import { Timer, Vector3, Quaternion, Matrix4 } from 'three';

@@ -100,3 +100,3 @@ const RAPIER_PATH = 'https://cdn.skypack.dev/@dimforge/rapier3d-compat@0.17.3';

RAPIER = await import( `${RAPIER_PATH}` );
RAPIER = await import( RAPIER_PATH /* @vite-ignore */ );
await RAPIER.init();

@@ -305,7 +305,9 @@

const clock = new Clock();
const timer = new Timer();
function step() {
world.timestep = clock.getDelta();
timer.update();
world.timestep = timer.getDelta();
world.step();

@@ -312,0 +314,0 @@

import {
Clock,
HalfFloatType,
NoBlending,
Timer,
Vector2,

@@ -124,8 +124,8 @@ WebGLRenderTarget

/**
* The internal clock for managing time data.
* The internal timer for managing time data.
*
* @private
* @type {Clock}
* @type {Timer}
*/
this.clock = new Clock();
this.timer = new Timer();

@@ -219,5 +219,7 @@ }

this.timer.update();
if ( deltaTime === undefined ) {
deltaTime = this.clock.getDelta();
deltaTime = this.timer.getDelta();

@@ -224,0 +226,0 @@ }

@@ -117,3 +117,3 @@ import {

/**
* Sets the texture threshold. This value defined how strong the texture effects
* Sets the texture threshold. This value defines how strong the texture effects
* the transition. Must be in the range `[0,1]` (0 means full effect, 1 means no effect).

@@ -120,0 +120,0 @@ *

@@ -144,3 +144,3 @@ import {

* `CSS3DRenderer` is particularly interesting if you want to apply 3D effects to a website without
* canvas based rendering. It can also be used in order to combine DOM elements with WebGLcontent.
* canvas based rendering. It can also be used in order to combine DOM elements with WebGL content.
*

@@ -147,0 +147,0 @@ * There are, however, some important limitations:

@@ -191,4 +191,4 @@ import {

/**
* Sets the render quality. Setting to `high` means This value indicates that the browser
* tries to improve the SVG quality over rendering speed and geometric precision.
* Sets the render quality. Setting to `high` makes the browser improve SVG quality
* over rendering speed and geometric precision.
*

@@ -195,0 +195,0 @@ * @param {('low'|'high')} quality - The quality.

@@ -94,4 +94,8 @@ import {

vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
vec3 getViewPosition( const in vec2 screenPosition, const in float depth ) {
#ifdef USE_REVERSED_DEPTH_BUFFER
vec4 clipSpacePosition = vec4( vec2( screenPosition ) * 2.0 - 1.0, depth, 1.0 );
#else
vec4 clipSpacePosition = vec4( vec3( screenPosition, depth ) * 2.0 - 1.0, 1.0 );
#endif
vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;

@@ -158,6 +162,15 @@ return viewSpacePosition.xyz / viewSpacePosition.w;

float depth = getDepth(vUv.xy);
if (depth >= 1.0) {
discard;
return;
}
#ifdef USE_REVERSED_DEPTH_BUFFER
if (depth <= 0.0) {
discard;
return;
}
#else
if (depth >= 1.0) {
discard;
return;
}
#endif
vec3 viewPos = getViewPosition(vUv, depth);

@@ -164,0 +177,0 @@ vec3 viewNormal = getViewNormal(vUv);

@@ -11,3 +11,3 @@ /**

*
* Shape (1 = Dot, 2 = Ellipse, 3 = Line, 4 = Square)
* Shape (1 = Dot, 2 = Ellipse, 3 = Line, 4 = Square, 5 = Diamond)
* Blending Mode (1 = Linear, 2 = Multiply, 3 = Add, 4 = Lighter, 5 = Darker)

@@ -53,2 +53,3 @@ *

#define SQRT2_HALF_MINUS_ONE 0.20710678
#define PI 3.14159265
#define PI2 6.28318531

@@ -59,2 +60,3 @@ #define SHAPE_DOT 1

#define SHAPE_SQUARE 4
#define SHAPE_DIAMOND 5
#define BLENDING_LINEAR 1

@@ -135,2 +137,11 @@ #define BLENDING_MULTIPLY 2

} else if ( shape == SHAPE_DIAMOND ) {
float angle45 = PI / 4.0;
float theta = atan( p.y - coord.y, p.x - coord.x ) - angle - angle45;
float sin_t = abs( sin( theta ) );
float cos_t = abs( cos( theta ) );
rad = pow( abs( rad ), 1.4 );
rad = rad_max * ( rad + ( ( sin_t > cos_t ) ? rad - sin_t * rad : rad - cos_t * rad ) );
}

@@ -137,0 +148,0 @@

@@ -89,4 +89,8 @@ import {

vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
vec3 getViewPosition( const in vec2 screenPosition, const in float depth ) {
#ifdef USE_REVERSED_DEPTH_BUFFER
vec4 clipSpacePosition = vec4( vec2( screenPosition ) * 2.0 - 1.0, depth, 1.0 );
#else
vec4 clipSpacePosition = vec4( vec3( screenPosition, depth ) * 2.0 - 1.0, 1.0 );
#endif
vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;

@@ -93,0 +97,0 @@ return viewSpacePosition.xyz / viewSpacePosition.w;

@@ -156,5 +156,11 @@ import {

float sampleDepth = getDepth( sampleUv );
if( sampleDepth >= ( 1.0 - EPSILON ) ) {
#ifdef USE_REVERSED_DEPTH_BUFFER
if( sampleDepth <= 0.0 + EPSILON ) {
continue;
}
#else
if( sampleDepth >= 1.0 - EPSILON ) {
continue;
}
#endif

@@ -174,5 +180,12 @@ float sampleViewZ = getViewZ( sampleDepth );

float centerDepth = getDepth( vUv );
if( centerDepth >= ( 1.0 - EPSILON ) ) {
discard;
}
#ifdef USE_REVERSED_DEPTH_BUFFER
if( centerDepth <= 0.0 + EPSILON ) {
discard;
}
#else
if( centerDepth >= 1.0 - EPSILON ) {
discard;
}
#endif

@@ -179,0 +192,0 @@ float centerViewZ = getViewZ( centerDepth );

@@ -82,2 +82,12 @@ import {

#ifdef USE_REVERSED_DEPTH_BUFFER
const float depthThreshold = 0.0;
#else
const float depthThreshold = 1.0;
#endif
float getDepth( const in vec2 screenPosition ) {

@@ -141,3 +151,3 @@

if ( depth == 1.0 ) {
if ( depth == depthThreshold ) {

@@ -144,0 +154,0 @@ gl_FragColor = vec4( 1.0 ); // don't influence background

@@ -99,3 +99,3 @@ import {

float d=-(a*x+b*y+c*z);
float distance=(a*x0+b*y0+c*z0+d)/sqrt(a*a+b*b+c*c);
float distance=a*x0+b*y0+c*z0+d;
return distance;

@@ -174,3 +174,2 @@ }

float totalLen=length(d1-d0);
float xLen=d1.x-d0.x;

@@ -181,7 +180,8 @@ float yLen=d1.y-d0.y;

float ySpan=yLen/totalStep;
for(float i=0.;i<float(MAX_STEP);i++){
float sStep=1./totalStep;
float s=sStep; // start at sStep since loop starts at i=1
for(float i=1.;i<float(MAX_STEP);i++){
if(i>=totalStep) break;
vec2 xy=vec2(d0.x+i*xSpan,d0.y+i*ySpan);
if(xy.x<0.||xy.x>resolution.x||xy.y<0.||xy.y>resolution.y) break;
float s=length(xy-d0)/totalLen;
vec2 uv=xy/resolution;

@@ -227,3 +227,3 @@

vec3 vN=getViewNormal( uv );
if(dot(viewReflectDir,vN)>=0.) continue;
if(dot(viewReflectDir,vN)>=0.) break; // treat backfaces as opaque
float distance=pointPlaneDistance(vP,viewPosition,viewNormal);

@@ -247,2 +247,3 @@ if(distance>maxDistance) break;

}
s+=sStep;
}

@@ -249,0 +250,0 @@ }

@@ -7,3 +7,3 @@ /**

/**
* Based on [PaintEffect postprocess from ro.me](http://code.google.com/p/3-dreams-of-black/source/browse/deploy/js/effects/PaintEffect.js).
* Based on [PaintEffect postprocess from ro.me](https://github.com/dataarts/3-dreams-of-black/blob/master/deploy/js/effects/PaintEffect.js).
*

@@ -10,0 +10,0 @@ * @constant

@@ -242,4 +242,4 @@ import { RenderTarget, Vector2, QuadMesh, NodeMaterial, RendererUtils, TempNode, NodeUpdateType } from 'three/webgpu';

*/
export const afterImage = ( node, damp ) => nodeObject( new AfterImageNode( convertToTexture( node ), nodeObject( damp ) ) );
export const afterImage = ( node, damp ) => new AfterImageNode( convertToTexture( node ), nodeObject( damp ) );
export default AfterImageNode;

@@ -1,10 +0,282 @@

import { Matrix3, NodeMaterial } from 'three/webgpu';
import { clamp, nodeObject, Fn, vec4, uv, uniform, max } from 'three/tsl';
import { Matrix3, NodeMaterial, Vector3 } from 'three/webgpu';
import { clamp, Fn, vec4, uv, uniform, max } from 'three/tsl';
import StereoCompositePassNode from './StereoCompositePassNode.js';
import { frameCorners } from '../../utils/CameraUtils.js';
const _eyeL = /*@__PURE__*/ new Vector3();
const _eyeR = /*@__PURE__*/ new Vector3();
const _screenBottomLeft = /*@__PURE__*/ new Vector3();
const _screenBottomRight = /*@__PURE__*/ new Vector3();
const _screenTopLeft = /*@__PURE__*/ new Vector3();
const _right = /*@__PURE__*/ new Vector3();
const _up = /*@__PURE__*/ new Vector3();
const _forward = /*@__PURE__*/ new Vector3();
const _screenCenter = /*@__PURE__*/ new Vector3();
/**
* A render pass node that creates an anaglyph effect.
* Anaglyph algorithm types.
* @readonly
* @enum {string}
*/
const AnaglyphAlgorithm = {
TRUE: 'true',
GREY: 'grey',
COLOUR: 'colour',
HALF_COLOUR: 'halfColour',
DUBOIS: 'dubois',
OPTIMISED: 'optimised',
COMPROMISE: 'compromise'
};
/**
* Anaglyph color modes.
* @readonly
* @enum {string}
*/
const AnaglyphColorMode = {
RED_CYAN: 'redCyan',
MAGENTA_CYAN: 'magentaCyan',
MAGENTA_GREEN: 'magentaGreen'
};
/**
* Standard luminance coefficients (ITU-R BT.601).
* @private
*/
const LUMINANCE = { R: 0.299, G: 0.587, B: 0.114 };
/**
* Creates an anaglyph matrix pair from left and right channel specifications.
* This provides a more intuitive way to define how source RGB channels map to output RGB channels.
*
* Each specification object has keys 'r', 'g', 'b' for output channels.
* Each output channel value is [rCoef, gCoef, bCoef] defining how much of each input channel contributes.
*
* @private
* @param {Object} leftSpec - Specification for left eye contribution
* @param {Object} rightSpec - Specification for right eye contribution
* @returns {{left: number[], right: number[]}} Column-major arrays for Matrix3
*/
function createMatrixPair( leftSpec, rightSpec ) {
// Convert row-major specification to column-major array for Matrix3
// Matrix3.fromArray expects [col0row0, col0row1, col0row2, col1row0, col1row1, col1row2, col2row0, col2row1, col2row2]
// Which represents:
// | col0row0 col1row0 col2row0 | | m[0] m[3] m[6] |
// | col0row1 col1row1 col2row1 | = | m[1] m[4] m[7] |
// | col0row2 col1row2 col2row2 | | m[2] m[5] m[8] |
function specToColumnMajor( spec ) {
const r = spec.r || [ 0, 0, 0 ]; // Output red channel coefficients [fromR, fromG, fromB]
const g = spec.g || [ 0, 0, 0 ]; // Output green channel coefficients
const b = spec.b || [ 0, 0, 0 ]; // Output blue channel coefficients
// Row-major matrix would be:
// | r[0] r[1] r[2] | (how input RGB maps to output R)
// | g[0] g[1] g[2] | (how input RGB maps to output G)
// | b[0] b[1] b[2] | (how input RGB maps to output B)
// Column-major for Matrix3:
return [
r[ 0 ], g[ 0 ], b[ 0 ], // Column 0: coefficients for input R
r[ 1 ], g[ 1 ], b[ 1 ], // Column 1: coefficients for input G
r[ 2 ], g[ 2 ], b[ 2 ] // Column 2: coefficients for input B
];
}
return {
left: specToColumnMajor( leftSpec ),
right: specToColumnMajor( rightSpec )
};
}
/**
* Shorthand for luminance coefficients.
* @private
*/
const LUM = [ LUMINANCE.R, LUMINANCE.G, LUMINANCE.B ];
/**
* Conversion matrices for different anaglyph algorithms.
* Based on research from "Introducing a New Anaglyph Method: Compromise Anaglyph" by Jure Ahtik
* and various other sources.
*
* Matrices are defined using createMatrixPair for clarity:
* - Each spec object defines how input RGB maps to output RGB
* - Keys 'r', 'g', 'b' represent output channels
* - Values are [rCoef, gCoef, bCoef] for input channel contribution
*
* @private
*/
const ANAGLYPH_MATRICES = {
// True Anaglyph - Red channel from left, luminance to cyan channel for right
// Paper: Left=[R,0,0], Right=[0,0,Lum]
[ AnaglyphAlgorithm.TRUE ]: {
[ AnaglyphColorMode.RED_CYAN ]: createMatrixPair(
{ r: [ 1, 0, 0 ] }, // Left: R -> outR
{ g: LUM, b: LUM } // Right: Lum -> outG, Lum -> outB
),
[ AnaglyphColorMode.MAGENTA_CYAN ]: createMatrixPair(
{ r: [ 1, 0, 0 ], b: [ 0, 0, 0.5 ] }, // Left: R -> outR, partial B -> outB
{ g: LUM, b: [ 0, 0, 0.5 ] } // Right: Lum -> outG, partial B
),
[ AnaglyphColorMode.MAGENTA_GREEN ]: createMatrixPair(
{ r: [ 1, 0, 0 ], b: LUM }, // Left: R -> outR, Lum -> outB
{ g: LUM } // Right: Lum -> outG
)
},
// Grey Anaglyph - Luminance-based, no color, minimal ghosting
// Paper: Left=[Lum,0,0], Right=[0,0,Lum]
[ AnaglyphAlgorithm.GREY ]: {
[ AnaglyphColorMode.RED_CYAN ]: createMatrixPair(
{ r: LUM }, // Left: Lum -> outR
{ g: LUM, b: LUM } // Right: Lum -> outG, Lum -> outB
),
[ AnaglyphColorMode.MAGENTA_CYAN ]: createMatrixPair(
{ r: LUM, b: [ 0.15, 0.29, 0.06 ] }, // Left: Lum -> outR, half-Lum -> outB
{ g: LUM, b: [ 0.15, 0.29, 0.06 ] } // Right: Lum -> outG, half-Lum -> outB
),
[ AnaglyphColorMode.MAGENTA_GREEN ]: createMatrixPair(
{ r: LUM, b: LUM }, // Left: Lum -> outR, Lum -> outB
{ g: LUM } // Right: Lum -> outG
)
},
// Colour Anaglyph - Full color, high retinal rivalry
// Paper: Left=[R,0,0], Right=[0,G,B]
[ AnaglyphAlgorithm.COLOUR ]: {
[ AnaglyphColorMode.RED_CYAN ]: createMatrixPair(
{ r: [ 1, 0, 0 ] }, // Left: R -> outR
{ g: [ 0, 1, 0 ], b: [ 0, 0, 1 ] } // Right: G -> outG, B -> outB
),
[ AnaglyphColorMode.MAGENTA_CYAN ]: createMatrixPair(
{ r: [ 1, 0, 0 ], b: [ 0, 0, 0.5 ] }, // Left: R -> outR, partial B -> outB
{ g: [ 0, 1, 0 ], b: [ 0, 0, 0.5 ] } // Right: G -> outG, partial B -> outB
),
[ AnaglyphColorMode.MAGENTA_GREEN ]: createMatrixPair(
{ r: [ 1, 0, 0 ], b: [ 0, 0, 1 ] }, // Left: R -> outR, B -> outB
{ g: [ 0, 1, 0 ] } // Right: G -> outG
)
},
// Half-Colour Anaglyph - Luminance for left red, full color for right cyan
// Paper: Left=[Lum,0,0], Right=[0,G,B]
[ AnaglyphAlgorithm.HALF_COLOUR ]: {
[ AnaglyphColorMode.RED_CYAN ]: createMatrixPair(
{ r: LUM }, // Left: Lum -> outR
{ g: [ 0, 1, 0 ], b: [ 0, 0, 1 ] } // Right: G -> outG, B -> outB
),
[ AnaglyphColorMode.MAGENTA_CYAN ]: createMatrixPair(
{ r: LUM, b: [ 0.15, 0.29, 0.06 ] }, // Left: Lum -> outR, half-Lum -> outB
{ g: [ 0, 1, 0 ], b: [ 0.15, 0.29, 0.06 ] } // Right: G -> outG, half-Lum -> outB
),
[ AnaglyphColorMode.MAGENTA_GREEN ]: createMatrixPair(
{ r: LUM, b: LUM }, // Left: Lum -> outR, Lum -> outB
{ g: [ 0, 1, 0 ] } // Right: G -> outG
)
},
// Dubois Anaglyph - Least-squares optimized for specific glasses
// From https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.7.6968&rep=rep1&type=pdf
[ AnaglyphAlgorithm.DUBOIS ]: {
[ AnaglyphColorMode.RED_CYAN ]: createMatrixPair(
{
r: [ 0.4561, 0.500484, 0.176381 ],
g: [ - 0.0400822, - 0.0378246, - 0.0157589 ],
b: [ - 0.0152161, - 0.0205971, - 0.00546856 ]
},
{
r: [ - 0.0434706, - 0.0879388, - 0.00155529 ],
g: [ 0.378476, 0.73364, - 0.0184503 ],
b: [ - 0.0721527, - 0.112961, 1.2264 ]
}
),
[ AnaglyphColorMode.MAGENTA_CYAN ]: createMatrixPair(
{
r: [ 0.4561, 0.500484, 0.176381 ],
g: [ - 0.0400822, - 0.0378246, - 0.0157589 ],
b: [ 0.088, 0.088, - 0.003 ]
},
{
r: [ - 0.0434706, - 0.0879388, - 0.00155529 ],
g: [ 0.378476, 0.73364, - 0.0184503 ],
b: [ 0.088, 0.088, 0.613 ]
}
),
[ AnaglyphColorMode.MAGENTA_GREEN ]: createMatrixPair(
{
r: [ 0.4561, 0.500484, 0.176381 ],
b: [ - 0.0434706, - 0.0879388, - 0.00155529 ]
},
{
g: [ 0.378476 + 0.4561, 0.73364 + 0.500484, - 0.0184503 + 0.176381 ]
}
)
},
// Optimised Anaglyph - Improved color with reduced retinal rivalry
// Paper: Left=[0,0.7G+0.3B,0,0], Right=[0,G,B]
[ AnaglyphAlgorithm.OPTIMISED ]: {
[ AnaglyphColorMode.RED_CYAN ]: createMatrixPair(
{ r: [ 0, 0.7, 0.3 ] }, // Left: 0.7G+0.3B -> outR
{ g: [ 0, 1, 0 ], b: [ 0, 0, 1 ] } // Right: G -> outG, B -> outB
),
[ AnaglyphColorMode.MAGENTA_CYAN ]: createMatrixPair(
{ r: [ 0, 0.7, 0.3 ], b: [ 0, 0, 0.5 ] }, // Left: 0.7G+0.3B -> outR, partial B
{ g: [ 0, 1, 0 ], b: [ 0, 0, 0.5 ] } // Right: G -> outG, partial B
),
[ AnaglyphColorMode.MAGENTA_GREEN ]: createMatrixPair(
{ r: [ 0, 0.7, 0.3 ], b: [ 0, 0, 1 ] }, // Left: 0.7G+0.3B -> outR, B -> outB
{ g: [ 0, 1, 0 ] } // Right: G -> outG
)
},
// Compromise Anaglyph - Best balance of color and stereo effect
// From Ahtik, J., "Techniques of Rendering Anaglyphs for Use in Art"
// Paper matrix [8]: Left=[0.439R+0.447G+0.148B, 0, 0], Right=[0, 0.095R+0.934G+0.005B, 0.018R+0.028G+1.057B]
[ AnaglyphAlgorithm.COMPROMISE ]: {
[ AnaglyphColorMode.RED_CYAN ]: createMatrixPair(
{ r: [ 0.439, 0.447, 0.148 ] }, // Left: weighted RGB -> outR
{
g: [ 0.095, 0.934, 0.005 ], // Right: weighted RGB -> outG
b: [ 0.018, 0.028, 1.057 ] // Right: weighted RGB -> outB
}
),
[ AnaglyphColorMode.MAGENTA_CYAN ]: createMatrixPair(
{
r: [ 0.439, 0.447, 0.148 ],
b: [ 0.009, 0.014, 0.074 ] // Partial blue from left
},
{
g: [ 0.095, 0.934, 0.005 ],
b: [ 0.009, 0.014, 0.528 ] // Partial blue from right
}
),
[ AnaglyphColorMode.MAGENTA_GREEN ]: createMatrixPair(
{
r: [ 0.439, 0.447, 0.148 ],
b: [ 0.018, 0.028, 1.057 ]
},
{
g: [ 0.095 + 0.439, 0.934 + 0.447, 0.005 + 0.148 ]
}
)
}
};
/**
* A render pass node that creates an anaglyph effect using physically-correct
* off-axis stereo projection.
*
* This implementation uses CameraUtils.frameCorners() to align stereo
* camera frustums to a virtual screen plane, providing accurate depth
* perception with zero parallax at the plane distance.
*
* @augments StereoCompositePassNode
* @three_import import { anaglyphPass } from 'three/addons/tsl/display/AnaglyphPassNode.js';
* @three_import import { anaglyphPass, AnaglyphAlgorithm, AnaglyphColorMode } from 'three/addons/tsl/display/AnaglyphPassNode.js';
*/

@@ -38,5 +310,45 @@ class AnaglyphPassNode extends StereoCompositePassNode {

// Dubois matrices from https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.7.6968&rep=rep1&type=pdf#page=4
/**
* The interpupillary distance (eye separation) in world units.
* Typical human IPD is 0.064 meters (64mm).
*
* @type {number}
* @default 0.064
*/
this.eyeSep = 0.064;
/**
* The distance in world units from the viewer to the virtual
* screen plane where zero parallax (screen depth) occurs.
* Objects at this distance appear at the screen surface.
* Objects closer appear in front of the screen (negative parallax).
* Objects further appear behind the screen (positive parallax).
*
* The screen dimensions are derived from the camera's FOV and aspect ratio
* at this distance, ensuring the stereo view matches the camera's field of view.
*
* @type {number}
* @default 0.5
*/
this.planeDistance = 0.5;
/**
* The current anaglyph algorithm.
*
* @private
* @type {string}
* @default 'dubois'
*/
this._algorithm = AnaglyphAlgorithm.DUBOIS;
/**
* The current color mode.
*
* @private
* @type {string}
* @default 'redCyan'
*/
this._colorMode = AnaglyphColorMode.RED_CYAN;
/**
* Color matrix node for the left eye.

@@ -47,7 +359,3 @@ *

*/
this._colorMatrixLeft = uniform( new Matrix3().fromArray( [
0.456100, - 0.0400822, - 0.0152161,
0.500484, - 0.0378246, - 0.0205971,
0.176381, - 0.0157589, - 0.00546856
] ) );
this._colorMatrixLeft = uniform( new Matrix3() );

@@ -60,11 +368,141 @@ /**

*/
this._colorMatrixRight = uniform( new Matrix3().fromArray( [
- 0.0434706, 0.378476, - 0.0721527,
- 0.0879388, 0.73364, - 0.112961,
- 0.00155529, - 0.0184503, 1.2264
] ) );
this._colorMatrixRight = uniform( new Matrix3() );
// Initialize with default matrices
this._updateMatrices();
}
/**
* Gets the current anaglyph algorithm.
*
* @type {string}
*/
get algorithm() {
return this._algorithm;
}
/**
* Sets the anaglyph algorithm.
*
* @type {string}
*/
set algorithm( value ) {
if ( this._algorithm !== value ) {
this._algorithm = value;
this._updateMatrices();
}
}
/**
* Gets the current color mode.
*
* @type {string}
*/
get colorMode() {
return this._colorMode;
}
/**
* Sets the color mode.
*
* @type {string}
*/
set colorMode( value ) {
if ( this._colorMode !== value ) {
this._colorMode = value;
this._updateMatrices();
}
}
/**
* Updates the color matrices based on current algorithm and color mode.
*
* @private
*/
_updateMatrices() {
const matrices = ANAGLYPH_MATRICES[ this._algorithm ][ this._colorMode ];
this._colorMatrixLeft.value.fromArray( matrices.left );
this._colorMatrixRight.value.fromArray( matrices.right );
}
/**
* Updates the internal stereo camera using frameCorners for
* physically-correct off-axis projection.
*
* @param {number} coordinateSystem - The current coordinate system.
*/
updateStereoCamera( coordinateSystem ) {
const { stereo, camera } = this;
stereo.cameraL.coordinateSystem = coordinateSystem;
stereo.cameraR.coordinateSystem = coordinateSystem;
// Get the camera's local coordinate axes from its world matrix
camera.matrixWorld.extractBasis( _right, _up, _forward );
_right.normalize();
_up.normalize();
_forward.normalize();
// Calculate eye positions
const halfSep = this.eyeSep / 2;
_eyeL.copy( camera.position ).addScaledVector( _right, - halfSep );
_eyeR.copy( camera.position ).addScaledVector( _right, halfSep );
// Calculate screen center (at planeDistance in front of the camera center)
_screenCenter.copy( camera.position ).addScaledVector( _forward, - this.planeDistance );
// Calculate screen dimensions from camera FOV and aspect ratio
const DEG2RAD = Math.PI / 180;
const halfHeight = this.planeDistance * Math.tan( DEG2RAD * camera.fov / 2 );
const halfWidth = halfHeight * camera.aspect;
// Calculate screen corners
_screenBottomLeft.copy( _screenCenter )
.addScaledVector( _right, - halfWidth )
.addScaledVector( _up, - halfHeight );
_screenBottomRight.copy( _screenCenter )
.addScaledVector( _right, halfWidth )
.addScaledVector( _up, - halfHeight );
_screenTopLeft.copy( _screenCenter )
.addScaledVector( _right, - halfWidth )
.addScaledVector( _up, halfHeight );
// Set up left eye camera
stereo.cameraL.position.copy( _eyeL );
stereo.cameraL.near = camera.near;
stereo.cameraL.far = camera.far;
frameCorners( stereo.cameraL, _screenBottomLeft, _screenBottomRight, _screenTopLeft, true );
stereo.cameraL.matrixWorld.compose( stereo.cameraL.position, stereo.cameraL.quaternion, stereo.cameraL.scale );
stereo.cameraL.matrixWorldInverse.copy( stereo.cameraL.matrixWorld ).invert();
// Set up right eye camera
stereo.cameraR.position.copy( _eyeR );
stereo.cameraR.near = camera.near;
stereo.cameraR.far = camera.far;
frameCorners( stereo.cameraR, _screenBottomLeft, _screenBottomRight, _screenTopLeft, true );
stereo.cameraR.matrixWorld.compose( stereo.cameraR.position, stereo.cameraR.quaternion, stereo.cameraR.scale );
stereo.cameraR.matrixWorldInverse.copy( stereo.cameraR.matrixWorld ).invert();
}
/**
* This method is used to setup the effect's TSL code.

@@ -103,2 +541,4 @@ *

export { AnaglyphAlgorithm, AnaglyphColorMode };
/**

@@ -113,2 +553,2 @@ * TSL function for creating an anaglyph pass node.

*/
export const anaglyphPass = ( scene, camera ) => nodeObject( new AnaglyphPassNode( scene, camera ) );
export const anaglyphPass = ( scene, camera ) => new AnaglyphPassNode( scene, camera );

@@ -280,4 +280,4 @@ import { RenderTarget, Vector2, TempNode, QuadMesh, NodeMaterial, RendererUtils } from 'three/webgpu';

*/
export const anamorphic = ( node, threshold = .9, scale = 3, samples = 32 ) => nodeObject( new AnamorphicNode( convertToTexture( node ), nodeObject( threshold ), nodeObject( scale ), samples ) );
export const anamorphic = ( node, threshold = .9, scale = 3, samples = 32 ) => new AnamorphicNode( convertToTexture( node ), nodeObject( threshold ), nodeObject( scale ), samples );
export default AnamorphicNode;

@@ -15,3 +15,3 @@ import { HalfFloatType, RenderTarget, Vector2, Vector3, TempNode, QuadMesh, NodeMaterial, RendererUtils, NodeUpdateType } from 'three/webgpu';

* ```js
* const postProcessing = new THREE.PostProcessing( renderer );
* const renderPipeline = new THREE.RenderPipeline( renderer );
*

@@ -23,3 +23,3 @@ * const scenePass = pass( scene, camera );

*
* postProcessing.outputNode = scenePassColor.add( bloomPass );
* renderPipeline.outputNode = scenePassColor.add( bloomPass );
* ```

@@ -30,3 +30,3 @@ * By default, the node affects the entire image. For a selective bloom,

* ```js
* const postProcessing = new THREE.PostProcessing( renderer );
* const renderPipeline = new THREE.RenderPipeline( renderer );
*

@@ -43,3 +43,3 @@ * const scenePass = pass( scene, camera );

* const bloomPass = bloom( emissivePass );
* postProcessing.outputNode = scenePassColor.add( bloomPass );
* renderPipeline.outputNode = scenePassColor.add( bloomPass );
* ```

@@ -536,4 +536,4 @@ * @augments TempNode

*/
export const bloom = ( node, strength, radius, threshold ) => nodeObject( new BloomNode( nodeObject( node ), strength, radius, threshold ) );
export const bloom = ( node, strength, radius, threshold ) => new BloomNode( nodeObject( node ), strength, radius, threshold );
export default BloomNode;

@@ -334,2 +334,2 @@ import { DataTexture, RepeatWrapping, Vector2, Vector3, TempNode } from 'three/webgpu';

*/
export const denoise = ( node, depthNode, normalNode, camera ) => nodeObject( new DenoiseNode( convertToTexture( node ), nodeObject( depthNode ), nodeObject( normalNode ), camera ) );
export const denoise = ( node, depthNode, normalNode, camera ) => new DenoiseNode( convertToTexture( node ), nodeObject( depthNode ), nodeObject( normalNode ), camera );

@@ -554,2 +554,2 @@ import { TempNode, NodeMaterial, NodeUpdateType, RenderTarget, Vector2, HalfFloatType, RedFormat, QuadMesh, RendererUtils } from 'three/webgpu';

*/
export const dof = ( node, viewZNode, focusDistance = 1, focalLength = 1, bokehScale = 1 ) => nodeObject( new DepthOfFieldNode( convertToTexture( node ), nodeObject( viewZNode ), nodeObject( focusDistance ), nodeObject( focalLength ), nodeObject( bokehScale ) ) );
export const dof = ( node, viewZNode, focusDistance = 1, focalLength = 1, bokehScale = 1 ) => new DepthOfFieldNode( convertToTexture( node ), nodeObject( viewZNode ), nodeObject( focusDistance ), nodeObject( focalLength ), nodeObject( bokehScale ) );

@@ -104,2 +104,2 @@ import { TempNode } from 'three/webgpu';

*/
export const dotScreen = ( node, angle, scale ) => nodeObject( new DotScreenNode( nodeObject( node ), angle, scale ) );
export const dotScreen = ( node, angle, scale ) => new DotScreenNode( nodeObject( node ), angle, scale );
import { Vector2, TempNode } from 'three/webgpu';
import { nodeObject, Fn, uniformArray, select, float, NodeUpdateType, uv, dot, clamp, uniform, convertToTexture, smoothstep, bool, vec2, vec3, If, Loop, max, min, Break, abs } from 'three/tsl';
import { Fn, uniformArray, select, float, NodeUpdateType, uv, dot, clamp, uniform, convertToTexture, smoothstep, bool, vec2, vec3, If, Loop, max, min, Break, abs } from 'three/tsl';

@@ -365,2 +365,2 @@ /**

*/
export const fxaa = ( node ) => nodeObject( new FXAANode( convertToTexture( node ) ) );
export const fxaa = ( node ) => new FXAANode( convertToTexture( node ) );
import { RenderTarget, Vector2, NodeMaterial, RendererUtils, QuadMesh, TempNode, NodeUpdateType } from 'three/webgpu';
import { nodeObject, Fn, float, uv, uniform, convertToTexture, vec2, vec4, passTexture, premultiplyAlpha, unpremultiplyAlpha } from 'three/tsl';
import { Fn, float, uv, uniform, convertToTexture, vec2, vec4, passTexture, premultiplyAlpha, unpremultiplyAlpha } from 'three/tsl';

@@ -127,2 +127,11 @@ const _quadMesh = /*@__PURE__*/ new QuadMesh();

/**
* This flag can be used for type testing.
*
* @type {boolean}
* @default true
* @readonly
*/
this.isGaussianBlurNode = true;
}

@@ -362,3 +371,3 @@

*/
export const gaussianBlur = ( node, directionNode, sigma, options = {} ) => nodeObject( new GaussianBlurNode( convertToTexture( node ), directionNode, sigma, options ) );
export const gaussianBlur = ( node, directionNode, sigma, options = {} ) => new GaussianBlurNode( convertToTexture( node ), directionNode, sigma, options );

@@ -365,0 +374,0 @@ /**

@@ -15,3 +15,3 @@ import { DataTexture, RenderTarget, RepeatWrapping, Vector2, Vector3, TempNode, QuadMesh, NodeMaterial, RendererUtils, RedFormat } from 'three/webgpu';

* ```js
* const postProcessing = new THREE.PostProcessing( renderer );
* const renderPipeline = new THREE.RenderPipeline( renderer );
*

@@ -30,3 +30,3 @@ * const scenePass = pass( scene, camera );

*
* postProcessing.outputNod = aoPass.getTextureNode().mul( scenePassColor );
* renderPipeline.outputNode = aoPass.getTextureNode().mul( scenePassColor );
* ```

@@ -33,0 +33,0 @@ *

@@ -279,2 +279,2 @@ import { RenderTarget, Vector2, TempNode, NodeUpdateType, QuadMesh, RendererUtils, NodeMaterial } from 'three/webgpu';

*/
export const lensflare = ( node, params ) => nodeObject( new LensflareNode( convertToTexture( node ), params ) );
export const lensflare = ( node, params ) => new LensflareNode( convertToTexture( node ), params );

@@ -109,2 +109,2 @@ import { TempNode } from 'three/webgpu';

*/
export const lut3D = ( node, lut, size, intensity ) => nodeObject( new Lut3DNode( nodeObject( node ), nodeObject( lut ), size, nodeObject( intensity ) ) );
export const lut3D = ( node, lut, size, intensity ) => new Lut3DNode( nodeObject( node ), nodeObject( lut ), size, nodeObject( intensity ) );

@@ -16,3 +16,3 @@ import { DepthTexture, FloatType, RenderTarget, Vector2, TempNode, QuadMesh, NodeMaterial, RendererUtils, NodeUpdateType } from 'three/webgpu';

* ```js
* const postProcessing = new THREE.PostProcessing( renderer );
* const renderPipeline = new THREE.RenderPipeline( renderer );
*

@@ -40,3 +40,3 @@ * const scenePass = pass( scene, camera );

*
* postProcessing.outputNode = outlineColor.add( scenePass );
* renderPipeline.outputNode = outlineColor.add( scenePass );
* ```

@@ -764,2 +764,2 @@ *

*/
export const outline = ( scene, camera, params ) => nodeObject( new OutlineNode( scene, camera, params ) );
export const outline = ( scene, camera, params ) => new OutlineNode( scene, camera, params );
import { NodeMaterial } from 'three/webgpu';
import { nodeObject, Fn, vec4, uv, If, mod, screenCoordinate } from 'three/tsl';
import { Fn, vec4, uv, If, mod, screenCoordinate } from 'three/tsl';
import StereoCompositePassNode from './StereoCompositePassNode.js';

@@ -89,2 +89,2 @@

*/
export const parallaxBarrierPass = ( scene, camera ) => nodeObject( new ParallaxBarrierPassNode( scene, camera ) );
export const parallaxBarrierPass = ( scene, camera ) => new ParallaxBarrierPassNode( scene, camera );

@@ -83,3 +83,3 @@ import { NearestFilter, Vector4, TempNode, NodeUpdateType, PassNode } from 'three/webgpu';

/**
* The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node updates
* The `updateType` is set to `NodeUpdateType.FRAME` since the node updates
* its internal uniforms once per frame in `updateBefore()`.

@@ -90,3 +90,3 @@ *

*/
this.updateBeforeType = NodeUpdateType.FRAME;
this.updateType = NodeUpdateType.FRAME;

@@ -100,3 +100,3 @@ }

*/
updateBefore() {
update() {

@@ -220,3 +220,3 @@ const map = this.textureNode.value;

const pixelation = ( node, depthNode, normalNode, pixelSize = 6, normalEdgeStrength = 0.3, depthEdgeStrength = 0.4 ) => nodeObject( new PixelationNode( convertToTexture( node ), convertToTexture( depthNode ), convertToTexture( normalNode ), nodeObject( pixelSize ), nodeObject( normalEdgeStrength ), nodeObject( depthEdgeStrength ) ) );
const pixelation = ( node, depthNode, normalNode, pixelSize = 6, normalEdgeStrength = 0.3, depthEdgeStrength = 0.4 ) => new PixelationNode( convertToTexture( node ), convertToTexture( depthNode ), convertToTexture( normalNode ), nodeObject( pixelSize ), nodeObject( normalEdgeStrength ), nodeObject( depthEdgeStrength ) );

@@ -337,4 +337,4 @@ /**

*/
export const pixelationPass = ( scene, camera, pixelSize, normalEdgeStrength, depthEdgeStrength ) => nodeObject( new PixelationPassNode( scene, camera, pixelSize, normalEdgeStrength, depthEdgeStrength ) );
export const pixelationPass = ( scene, camera, pixelSize, normalEdgeStrength, depthEdgeStrength ) => new PixelationPassNode( scene, camera, pixelSize, normalEdgeStrength, depthEdgeStrength );
export default PixelationPassNode;
import { TempNode } from 'three/webgpu';
import { nodeObject, Fn, uv, uniform, vec2, sin, cos, vec4, convertToTexture } from 'three/tsl';
import { Fn, uv, uniform, vec2, sin, cos, vec4, convertToTexture } from 'three/tsl';

@@ -96,2 +96,2 @@ /**

*/
export const rgbShift = ( node, amount, angle ) => nodeObject( new RGBShiftNode( convertToTexture( node ), amount, angle ) );
export const rgbShift = ( node, amount, angle ) => new RGBShiftNode( convertToTexture( node ), amount, angle );
import { Vector2, TempNode, NodeUpdateType } from 'three/webgpu';
import { nodeObject, Fn, uv, uniform, convertToTexture, vec2, vec3, vec4, mat3, luminance, add } from 'three/tsl';
import { Fn, uv, uniform, convertToTexture, vec2, vec3, vec4, mat3, luminance, add } from 'three/tsl';

@@ -168,2 +168,2 @@ /**

*/
export const sobel = ( node ) => nodeObject( new SobelOperatorNode( convertToTexture( node ) ) );
export const sobel = ( node ) => new SobelOperatorNode( convertToTexture( node ) );
import { AdditiveBlending, Color, Vector2, RendererUtils, PassNode, QuadMesh, NodeMaterial } from 'three/webgpu';
import { nodeObject, uniform, mrt, texture, getTextureIndex, unpremultiplyAlpha } from 'three/tsl';
import { uniform, mrt, texture, getTextureIndex, unpremultiplyAlpha } from 'three/tsl';

@@ -358,2 +358,2 @@ const _size = /*@__PURE__*/ new Vector2();

*/
export const ssaaPass = ( scene, camera ) => nodeObject( new SSAAPassNode( scene, camera ) );
export const ssaaPass = ( scene, camera ) => new SSAAPassNode( scene, camera );
import { RenderTarget, Vector2, TempNode, QuadMesh, NodeMaterial, RendererUtils, MathUtils } from 'three/webgpu';
import { clamp, normalize, reference, nodeObject, Fn, NodeUpdateType, uniform, vec4, passTexture, uv, logarithmicDepthToViewZ, viewZToPerspectiveDepth, getViewPosition, screenCoordinate, float, sub, fract, dot, vec2, rand, vec3, Loop, mul, PI, cos, sin, uint, cross, acos, sign, pow, luminance, If, max, abs, Break, sqrt, HALF_PI, div, ceil, shiftRight, convertToTexture, bool, getNormalFromDepth, countOneBits, interleavedGradientNoise } from 'three/tsl';
import { clamp, normalize, reference, Fn, NodeUpdateType, uniform, vec4, passTexture, uv, logarithmicDepthToViewZ, viewZToPerspectiveDepth, getViewPosition, screenCoordinate, float, sub, fract, dot, vec2, rand, vec3, Loop, mul, PI, cos, sin, uint, cross, acos, sign, pow, luminance, If, max, abs, Break, sqrt, HALF_PI, div, ceil, shiftRight, convertToTexture, bool, getNormalFromDepth, countOneBits, interleavedGradientNoise } from 'three/tsl';

@@ -642,2 +642,2 @@ const _quadMesh = /*@__PURE__*/ new QuadMesh();

*/
export const ssgi = ( beautyNode, depthNode, normalNode, camera ) => nodeObject( new SSGINode( convertToTexture( beautyNode ), depthNode, normalNode, camera ) );
export const ssgi = ( beautyNode, depthNode, normalNode, camera ) => new SSGINode( convertToTexture( beautyNode ), depthNode, normalNode, camera );
import { HalfFloatType, RenderTarget, Vector2, RendererUtils, QuadMesh, TempNode, NodeMaterial, NodeUpdateType, LinearFilter, LinearMipmapLinearFilter } from 'three/webgpu';
import { texture, reference, viewZToPerspectiveDepth, logarithmicDepthToViewZ, getScreenPosition, getViewPosition, sqrt, mul, div, cross, float, Continue, Break, Loop, int, max, abs, sub, If, dot, reflect, normalize, screenCoordinate, nodeObject, Fn, passTexture, uv, uniform, perspectiveDepthToViewZ, orthographicDepthToViewZ, vec2, vec3, vec4 } from 'three/tsl';
import { texture, reference, viewZToPerspectiveDepth, logarithmicDepthToViewZ, getScreenPosition, getViewPosition, mul, div, cross, float, Continue, Break, Loop, int, max, abs, sub, If, dot, reflect, normalize, screenCoordinate, nodeObject, Fn, passTexture, uv, uniform, perspectiveDepthToViewZ, orthographicDepthToViewZ, vec2, vec3, vec4 } from 'three/tsl';
import { boxBlur } from './boxBlur.js';

@@ -278,3 +278,4 @@

const mips = this._blurRenderTarget.texture.mipmaps.length - 1;
const lod = float( this.roughnessNode ).mul( mips ).clamp( 0, mips );
const r = float( this.roughnessNode );
const lod = r.mul( r ).mul( mips ).clamp( 0, mips );

@@ -403,6 +404,5 @@ blurredTextureNode = passTexture( this, this._blurRenderTarget.texture ).level( lod );

// planeNormal is already normalized, so denominator is 1
const d = mul( planeNormal.x, planePoint.x ).add( mul( planeNormal.y, planePoint.y ) ).add( mul( planeNormal.z, planePoint.z ) ).negate().toVar();
const denominator = sqrt( mul( planeNormal.x, planeNormal.x, ).add( mul( planeNormal.y, planeNormal.y ) ).add( mul( planeNormal.z, planeNormal.z ) ) ).toVar();
const distance = div( mul( planeNormal.x, point.x ).add( mul( planeNormal.y, point.y ) ).add( mul( planeNormal.z, point.z ) ).add( d ), denominator );
const distance = mul( planeNormal.x, point.x ).add( mul( planeNormal.y, point.y ) ).add( mul( planeNormal.z, point.z ) ).add( d );
return distance;

@@ -594,3 +594,3 @@

const reflectColor = this.colorNode.sample( uvNode );
output.assign( vec4( reflectColor.rgb, op ) );
output.assign( vec4( reflectColor.rgb.mul( op ), 1 ) );
Break();

@@ -659,2 +659,2 @@

*/
export const ssr = ( colorNode, depthNode, normalNode, metalnessNode, roughnessNode = null, camera = null ) => nodeObject( new SSRNode( nodeObject( colorNode ), nodeObject( depthNode ), nodeObject( normalNode ), nodeObject( metalnessNode ), nodeObject( roughnessNode ), camera ) );
export const ssr = ( colorNode, depthNode, normalNode, metalnessNode, roughnessNode = null, camera = null ) => new SSRNode( nodeObject( colorNode ), nodeObject( depthNode ), nodeObject( normalNode ), nodeObject( metalnessNode ), nodeObject( roughnessNode ), camera );
import { RedFormat, RenderTarget, Vector2, RendererUtils, QuadMesh, TempNode, NodeMaterial, NodeUpdateType, UnsignedByteType } from 'three/webgpu';
import { reference, viewZToPerspectiveDepth, logarithmicDepthToViewZ, getScreenPosition, getViewPosition, float, Break, Loop, int, max, abs, If, interleavedGradientNoise, screenCoordinate, nodeObject, Fn, passTexture, uv, uniform, perspectiveDepthToViewZ, orthographicDepthToViewZ, vec2, lightPosition, lightTargetPosition, fract, rand, mix } from 'three/tsl';
import { reference, viewZToPerspectiveDepth, logarithmicDepthToViewZ, getScreenPosition, getViewPosition, float, Break, Loop, int, max, abs, If, interleavedGradientNoise, screenCoordinate, Fn, passTexture, uv, uniform, perspectiveDepthToViewZ, orthographicDepthToViewZ, vec2, lightPosition, lightTargetPosition, fract, rand, mix } from 'three/tsl';

@@ -490,2 +490,2 @@ const _quadMesh = /*@__PURE__*/ new QuadMesh();

*/
export const sss = ( depthNode, camera, mainLight ) => nodeObject( new SSSNode( depthNode, camera, mainLight ) );
export const sss = ( depthNode, camera, mainLight ) => new SSSNode( depthNode, camera, mainLight );
import { StereoCamera, Vector2, PassNode, RendererUtils } from 'three/webgpu';
import { nodeObject } from 'three/tsl';

@@ -120,2 +119,2 @@ const _size = /*@__PURE__*/ new Vector2();

*/
export const stereoPass = ( scene, camera ) => nodeObject( new StereoPassNode( scene, camera ) );
export const stereoPass = ( scene, camera ) => new StereoPassNode( scene, camera );
import { HalfFloatType, Vector2, RenderTarget, RendererUtils, QuadMesh, NodeMaterial, TempNode, NodeUpdateType, Matrix4, DepthTexture } from 'three/webgpu';
import { add, float, If, Fn, max, nodeObject, texture, uniform, uv, vec2, vec4, luminance, convertToTexture, passTexture, velocity, getViewPosition, viewZToPerspectiveDepth, struct, ivec2, mix } from 'three/tsl';
import { add, float, If, Fn, max, texture, uniform, uv, vec2, vec4, luminance, convertToTexture, passTexture, velocity, getViewPosition, viewZToPerspectiveDepth, struct, ivec2, mix } from 'three/tsl';

@@ -379,10 +379,7 @@ const _quadMesh = /*@__PURE__*/ new QuadMesh();

// bind and clear render target to make sure they are initialized after the resize which triggers a dispose()
// make sure render targets are initialized after the resize which triggers a dispose()
renderer.setRenderTarget( this._historyRenderTarget );
renderer.clear();
renderer.initRenderTarget( this._historyRenderTarget );
renderer.initRenderTarget( this._resolveRenderTarget );
renderer.setRenderTarget( this._resolveRenderTarget );
renderer.clear();
// make sure to reset the history with the contents of the beauty buffer otherwise subsequent frames after the

@@ -438,9 +435,9 @@ // resize will fade from a darker color to the correct one because the history was cleared with black.

const postProcessing = builder.context.postProcessing;
const renderPipeline = builder.context.renderPipeline;
if ( postProcessing ) {
if ( renderPipeline ) {
this._needsPostProcessingSync = true;
postProcessing.context.onBeforePostProcessing = () => {
renderPipeline.context.onBeforeRenderPipeline = () => {

@@ -452,3 +449,3 @@ const size = builder.renderer.getDrawingBufferSize( _size );

postProcessing.context.onAfterPostProcessing = () => {
renderPipeline.context.onAfterRenderPipeline = () => {

@@ -732,2 +729,2 @@ this.clearViewOffset();

*/
export const traa = ( beautyNode, depthNode, velocityNode, camera ) => nodeObject( new TRAANode( convertToTexture( beautyNode ), depthNode, velocityNode, camera ) );
export const traa = ( beautyNode, depthNode, velocityNode, camera ) => new TRAANode( convertToTexture( beautyNode ), depthNode, velocityNode, camera );

@@ -141,2 +141,2 @@ import { TempNode } from 'three/webgpu';

*/
export const transition = ( nodeA, nodeB, mixTextureNode, mixRatio, threshold, useTexture ) => nodeObject( new TransitionNode( convertToTexture( nodeA ), convertToTexture( nodeB ), convertToTexture( mixTextureNode ), nodeObject( mixRatio ), nodeObject( threshold ), nodeObject( useTexture ) ) );
export const transition = ( nodeA, nodeB, mixTextureNode, mixRatio, threshold, useTexture ) => new TransitionNode( convertToTexture( nodeA ), convertToTexture( nodeB ), convertToTexture( mixTextureNode ), nodeObject( mixRatio ), nodeObject( threshold ), nodeObject( useTexture ) );
import { TextureLoader } from 'three';
import { Fn, int, ivec2, textureLoad } from 'three/tsl';
import { Fn, int, ivec2, textureLoad, screenUV, screenSize, mod, floor, float, vec3 } from 'three/tsl';

@@ -35,1 +35,40 @@ /**

} );
/**
 * This TSL function applies ordered (Bayer) dithering to a color input. It
 * evaluates the classic 4x4 Bayer index matrix per fragment and adds the
 * resulting structured offset to the color before quantization, which helps
 * reduce visible color banding when limiting color depth.
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The input color to apply dithering to.
 * @param {Node<float>} [steps=32] - The number of color steps per channel.
 * @return {Node<vec3>} The dithered color ready for quantization.
 *
 * @example
 * // Apply dithering with posterize
 * const ditheredColor = bayerDither( inputColor, 32 );
 * const finalColor = posterize( ditheredColor, 32 );
 */
export const bayerDither = Fn( ( [ color, steps = float( 32.0 ) ] ) => {

	const screenPos = screenUV.mul( screenSize );
	const x = mod( floor( screenPos.x ), float( 4.0 ) );
	const y = mod( floor( screenPos.y ), float( 4.0 ) );

	// Low two bits of the 4x4 cell coordinates, extracted with floor/mod arithmetic.
	const x0 = mod( x, 2.0 );
	const x1 = mod( floor( x.div( 2.0 ) ), 2.0 );
	const y0 = mod( y, 2.0 );
	const y1 = mod( floor( y.div( 2.0 ) ), 2.0 );

	// Single-bit xor implemented as mod( a + b, 2 ).
	const xor0 = mod( x0.add( y0 ), 2.0 );
	const xor1 = mod( x1.add( y1 ), 2.0 );

	// Closed form of the 4x4 Bayer matrix: index = xor0 * 8 + y0 * 4 + xor1 * 2 + y1.
	// Reproduces [ [ 0, 8, 2, 10 ], [ 12, 4, 14, 6 ], [ 3, 11, 1, 9 ], [ 15, 7, 13, 5 ] ].
	const bayerIndex = xor0.mul( 8.0 ).add( y0.mul( 4.0 ) ).add( xor1.mul( 2.0 ) ).add( y1 );

	// Map 0..15 into the symmetric range [ -0.5, 0.5 ).
	const bayer = bayerIndex.div( 16.0 ).sub( 0.5 );

	// Scale the dither offset to one quantization step.
	const ditherOffset = bayer.div( steps );

	return vec3(
		color.r.add( ditherOffset ),
		color.g.add( ditherOffset ),
		color.b.add( ditherOffset )
	);

} );

@@ -122,3 +122,3 @@ import {

if ( c.isMesh | c.isLineSegments ) {
if ( c.isMesh || c.isLineSegments ) {

@@ -125,0 +125,0 @@ const elemSize = c.isMesh ? 3 : 2;

The MIT License
Copyright © 2010-2025 three.js authors
Copyright © 2010-2026 three.js authors

@@ -5,0 +5,0 @@ Permission is hereby granted, free of charge, to any person obtaining a copy

{
"name": "three",
"version": "0.182.0",
"version": "0.183.0",
"description": "JavaScript 3D library",

@@ -49,8 +49,7 @@ "type": "module",

"build": "rollup -c utils/build/rollup.config.js",
"build-docs": "jsdoc -c utils/docs/jsdoc.config.json && npm run build-llms",
"build-llms": "node utils/llms/build.js",
"build-module": "rollup -c utils/build/rollup.config.js --configOnlyModule",
"build-docs": "jsdoc -c utils/docs/jsdoc.config.json",
"dev": "node utils/build/dev.js && servez -p 8080",
"dev-ssl": "node utils/build/dev.js && servez -p 8080 --ssl",
"preview": "concurrently --names \"ROLLUP,HTTP\" -c \"bgBlue.bold,bgGreen.bold\" \"rollup -c utils/build/rollup.config.js -w -m inline\" \"servez -p 8080\"",
"preview-ssl": "concurrently --names \"ROLLUP,HTTPS\" -c \"bgBlue.bold,bgGreen.bold\" \"rollup -c utils/build/rollup.config.js -w -m inline\" \"servez -p 8080 --ssl\"",
"dev": "node utils/build/dev.js && node utils/server.js -p 8080",
"preview": "node utils/build/preview.js",
"lint-core": "eslint src",

@@ -60,3 +59,2 @@ "lint-addons": "eslint examples/jsm",

"lint-editor": "eslint editor",
"lint-playground": "eslint playground",
"lint-manual": "eslint manual",

@@ -66,3 +64,3 @@ "lint-test": "eslint test",

"lint": "npm run lint-core",
"lint-fix": "npm run lint-core -- --fix && npm run lint-addons -- --fix && npm run lint-examples -- --fix && npm run lint-docs -- --fix && npm run lint-editor -- --fix && npm run lint-playground -- --fix && npm run lint-manual -- --fix && npm run lint-test -- --fix && npm run lint-utils -- --fix",
"lint-fix": "npm run lint-core -- --fix && npm run lint-addons -- --fix && npm run lint-examples -- --fix && npm run lint-editor -- --fix && npm run lint-manual -- --fix && npm run lint-test -- --fix && npm run lint-utils -- --fix",
"test-unit": "qunit test/unit/three.source.unit.js",

@@ -102,3 +100,2 @@ "test-unit-addons": "qunit test/unit/three.addons.unit.js",

"@rollup/plugin-terser": "^0.4.0",
"concurrently": "^9.0.0",
"eslint": "^9.0.0",

@@ -108,18 +105,13 @@ "eslint-config-mdcs": "^5.0.0",

"eslint-plugin-html": "^8.1.3",
"eslint-plugin-import": "^2.32.0",
"eslint-plugin-jsdoc": "^61.4.1",
"globals": "^16.5.0",
"jimp": "^1.6.0",
"eslint-plugin-jsdoc": "^62.0.0",
"globals": "^17.0.0",
"jpeg-js": "^0.4.4",
"jsdoc": "^4.0.5",
"magic-string": "^0.30.0",
"pixelmatch": "^7.0.0",
"pngjs": "^7.0.0",
"puppeteer": "^24.25.0",
"qunit": "^2.19.4",
"rollup": "^4.6.0",
"rollup-plugin-filesize": "^10.0.0",
"servez": "^2.2.4"
"turndown": "^7.2.2"
},
"overrides": {
"jpeg-js": "^0.4.4"
},
"jspm": {

@@ -126,0 +118,0 @@ "files": [

@@ -551,3 +551,3 @@ import { WrapAroundEnding, ZeroCurvatureEnding, ZeroSlopeEnding, LoopPingPong, LoopOnce, LoopRepeat, NormalAnimationBlendMode, AdditiveAnimationBlendMode } from '../constants.js';

// Interna
// Internal

@@ -554,0 +554,0 @@ _update( time, deltaTime, timeDirection, accuIndex ) {

@@ -304,3 +304,3 @@ import * as AnimationUtils from './AnimationUtils.js';

* @param {Object} animation - A serialized animation clip as JSON.
* @param {Array<Bones>} bones - An array of bones.
* @param {Array<Bone>} bones - An array of bones.
* @return {?AnimationClip} The new animation clip.

@@ -307,0 +307,0 @@ */

@@ -51,2 +51,8 @@ import { AnimationAction } from './AnimationAction.js';

if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) {
__THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'observe', { detail: this } ) );
}
}

@@ -53,0 +59,0 @@

import {
InterpolateLinear,
InterpolateSmooth,
InterpolateDiscrete
InterpolateDiscrete,
InterpolateBezier
} from '../constants.js';

@@ -9,2 +10,3 @@ import { CubicInterpolant } from '../math/interpolants/CubicInterpolant.js';

import { DiscreteInterpolant } from '../math/interpolants/DiscreteInterpolant.js';
import { BezierInterpolant } from '../math/interpolants/BezierInterpolant.js';
import * as AnimationUtils from './AnimationUtils.js';

@@ -14,3 +16,3 @@ import { warn, error } from '../utils.js';

/**
* Represents s a timed sequence of keyframes, which are composed of lists of
* Represents a timed sequence of keyframes, which are composed of lists of
* times and related values, and which are used to animate a specific property

@@ -27,3 +29,3 @@ * of an object.

* @param {Array<number|string|boolean>} values - A list of keyframe values.
* @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} [interpolation] - The interpolation type.
* @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth|InterpolateBezier)} [interpolation] - The interpolation type.
*/

@@ -147,5 +149,32 @@ constructor( name, times, values, interpolation ) {

/**
* Factory method for creating a new Bezier interpolant.
*
* The Bezier interpolant requires tangent data to be set via the `settings` property
* on the track before creating the interpolant. The settings should contain:
* - `inTangents`: Float32Array with [time, value] pairs per keyframe per component
* - `outTangents`: Float32Array with [time, value] pairs per keyframe per component
*
* @static
* @param {TypedArray} [result] - The result buffer.
* @return {BezierInterpolant} The new interpolant.
*/
InterpolantFactoryMethodBezier( result ) {
const interpolant = new BezierInterpolant( this.times, this.values, this.getValueSize(), result );
// Pass tangent data from track settings to interpolant
if ( this.settings ) {
interpolant.settings = this.settings;
}
return interpolant;
}
/**
* Defines the interpolation factor method for this keyframe track.
*
* @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} interpolation - The interpolation type.
* @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth|InterpolateBezier)} interpolation - The interpolation type.
* @return {KeyframeTrack} A reference to this keyframe track.

@@ -177,2 +206,8 @@ */

case InterpolateBezier:
factoryMethod = this.InterpolantFactoryMethodBezier;
break;
}

@@ -214,3 +249,3 @@

*
* @return {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} The interpolation type.
* @return {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth|InterpolateBezier)} The interpolation type.
*/

@@ -233,2 +268,6 @@ getInterpolation() {

case this.InterpolantFactoryMethodBezier:
return InterpolateBezier;
}

@@ -435,3 +474,3 @@

*
* @return {AnimationClip} A reference to this animation clip.
* @return {KeyframeTrack} A reference to this keyframe track.
*/

@@ -600,3 +639,3 @@ optimize() {

*
* @type {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)}
* @type {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth|InterpolateBezier)}
* @default InterpolateLinear

@@ -603,0 +642,0 @@ */

@@ -91,3 +91,3 @@ import { Quaternion } from '../math/Quaternion.js';

/**
* TODO
* Accumulated weight of the property binding.
*

@@ -100,3 +100,3 @@ * @type {number}

/**
* TODO
* Accumulated additive weight of the property binding.
*

@@ -109,3 +109,3 @@ * @type {number}

/**
* TODO
* Number of active keyframe tracks currently using this property binding.
*

@@ -118,3 +118,3 @@ * @type {number}

/**
* TODO
* Number of keyframe tracks referencing this property binding.
*

@@ -121,0 +121,0 @@ * @type {number}

@@ -194,3 +194,3 @@ import { Object3D } from '../core/Object3D.js';

*
* The property is automatically by one of the `set*()` methods.
* The property is automatically set by one of the `set*()` methods.
*

@@ -197,0 +197,0 @@ * @type {('empty'|'audioNode'|'mediaNode'|'mediaStreamNode'|'buffer')}

import { WebGLCoordinateSystem } from '../constants.js';
import { Matrix4 } from '../math/Matrix4.js';
import { Object3D } from '../core/Object3D.js';
import { Vector3 } from '../math/Vector3.js';
import { Quaternion } from '../math/Quaternion.js';
const _position = /*@__PURE__*/ new Vector3();
const _quaternion = /*@__PURE__*/ new Quaternion();
const _scale = /*@__PURE__*/ new Vector3();
/**

@@ -110,4 +116,16 @@ * Abstract base class for cameras. This class should always be inherited

this.matrixWorldInverse.copy( this.matrixWorld ).invert();
// exclude scale from view matrix to be glTF conform
this.matrixWorld.decompose( _position, _quaternion, _scale );
if ( _scale.x === 1 && _scale.y === 1 && _scale.z === 1 ) {
this.matrixWorldInverse.copy( this.matrixWorld ).invert();
} else {
this.matrixWorldInverse.compose( _position, _quaternion, _scale.set( 1, 1, 1 ) ).invert();
}
}

@@ -119,4 +137,16 @@

this.matrixWorldInverse.copy( this.matrixWorld ).invert();
// exclude scale from view matrix to be glTF conform
this.matrixWorld.decompose( _position, _quaternion, _scale );
if ( _scale.x === 1 && _scale.y === 1 && _scale.z === 1 ) {
this.matrixWorldInverse.copy( this.matrixWorld ).invert();
} else {
this.matrixWorldInverse.compose( _position, _quaternion, _scale.set( 1, 1, 1 ) ).invert();
}
}

@@ -123,0 +153,0 @@

@@ -206,15 +206,34 @@ import { WebGLCoordinateSystem, WebGPUCoordinateSystem } from '../constants.js';

// https://github.com/mrdoob/three.js/issues/31413#issuecomment-3095966812
let reversedDepthBuffer = false;
if ( renderer.isWebGLRenderer === true ) {
reversedDepthBuffer = renderer.state.buffers.depth.getReversed();
} else {
reversedDepthBuffer = renderer.reversedDepthBuffer;
}
renderer.setRenderTarget( renderTarget, 0, activeMipmapLevel );
if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth();
renderer.render( scene, cameraPX );
renderer.setRenderTarget( renderTarget, 1, activeMipmapLevel );
if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth();
renderer.render( scene, cameraNX );
renderer.setRenderTarget( renderTarget, 2, activeMipmapLevel );
if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth();
renderer.render( scene, cameraPY );
renderer.setRenderTarget( renderTarget, 3, activeMipmapLevel );
if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth();
renderer.render( scene, cameraNY );
renderer.setRenderTarget( renderTarget, 4, activeMipmapLevel );
if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth();
renderer.render( scene, cameraPZ );

@@ -228,2 +247,3 @@

renderer.setRenderTarget( renderTarget, 5, activeMipmapLevel );
if ( reversedDepthBuffer && renderer.autoClear === false ) renderer.clearDepth();
renderer.render( scene, cameraNZ );

@@ -230,0 +250,0 @@

@@ -1,2 +0,2 @@

export const REVISION = '182';
export const REVISION = '183';

@@ -159,2 +159,10 @@ /**

/**
* Represents material blending.
*
* @type {number}
* @constant
*/
export const MaterialBlending = 6;
/**
* A `source + destination` blending equation.

@@ -1162,2 +1170,13 @@ *

/**
* Bezier interpolation mode for keyframe tracks.
*
* Uses cubic Bezier curves with explicit 2D control points.
* Requires tangent data to be set on the track.
*
* @type {number}
* @constant
*/
export const InterpolateBezier = 2303;
/**
* Zero curvature ending for animations.

@@ -1686,2 +1705,12 @@ *

/**
* Compatibility flags for features that may not be supported across all platforms.
*
* @type {Object}
* @constant
*/
export const Compatibility = {
TEXTURE_COMPARE: 'depthTextureCompare'
};
/**
* This type represents mouse buttons and interaction types in context of controls.

@@ -1688,0 +1717,0 @@ *

@@ -0,3 +1,7 @@

import { warn } from '../utils.js';
/**
* Class for keeping track of time.
*
* @deprecated since r183.
*/

@@ -9,2 +13,3 @@ class Clock {

*
* @deprecated since 183.
* @param {boolean} [autoStart=true] - Whether to automatically start the clock when

@@ -57,2 +62,4 @@ * `getDelta()` is called for the first time.

warn( 'THREE.Clock: This module has been deprecated. Please use THREE.Timer instead.' ); // @deprecated, r183
}

@@ -59,0 +66,0 @@

@@ -245,3 +245,4 @@ import { Quaternion } from '../math/Quaternion.js';

* When set to `true`, the engine automatically computes the local matrix from position,
* rotation and scale every frame.
* rotation and scale every frame. If set to `false`, the app is responsible for recomputing
* the local matrix by calling `updateMatrix()`.
*

@@ -257,3 +258,4 @@ * The default values for all 3D objects is defined by `Object3D.DEFAULT_MATRIX_AUTO_UPDATE`.

* When set to `true`, the engine automatically computes the world matrix from the current local
* matrix and the object's transformation hierarchy.
* matrix and the object's transformation hierarchy. If set to `false`, the app is responsible for
* recomputing the world matrix by directly updating the `matrixWorld` property.
*

@@ -360,2 +362,15 @@ * The default values for all 3D objects is defined by `Object3D.DEFAULT_MATRIX_WORLD_AUTO_UPDATE`.

/**
* Whether the 3D object is supposed to be static or not. If set to `true`, it means
* the 3D object is not going to be changed after the initial renderer. This includes
* geometry and material settings. A static 3D object can be processed by the renderer
* slightly faster since certain state checks can be bypassed.
*
* Only relevant in context of {@link WebGPURenderer}.
*
* @type {boolean}
* @default false
*/
this.static = false;
/**
* An object that can be used to store custom data about the 3D object. It

@@ -368,2 +383,12 @@ * should not hold references to functions as these will not be cloned.

/**
* The pivot point for rotation and scale transformations.
* When set, rotation and scale are applied around this point
* instead of the object's origin.
*
* @type {?Vector3}
* @default null
*/
this.pivot = null;
}

@@ -651,3 +676,3 @@

/**
* Converts the given vector from this 3D object's word space to local space.
* Converts the given vector from this 3D object's world space to local space.
*

@@ -1117,2 +1142,15 @@ * @param {Vector3} vector - The vector to convert.

const pivot = this.pivot;
if ( pivot !== null ) {
const px = pivot.x, py = pivot.y, pz = pivot.z;
const te = this.matrix.elements;
te[ 12 ] += px - te[ 0 ] * px - te[ 4 ] * py - te[ 8 ] * pz;
te[ 13 ] += py - te[ 1 ] * px - te[ 5 ] * py - te[ 9 ] * pz;
te[ 14 ] += pz - te[ 2 ] * px - te[ 6 ] * py - te[ 10 ] * pz;
}
this.matrixWorldNeedsUpdate = true;

@@ -1131,3 +1169,3 @@

* @param {boolean} [force=false] - When set to `true`, a recomputation of world matrices is forced even
* when {@link Object3D#matrixWorldAutoUpdate} is set to `false`.
* when {@link Object3D#matrixWorldNeedsUpdate} is `false`.
*/

@@ -1277,2 +1315,3 @@ updateMatrixWorld( force ) {

if ( this.renderOrder !== 0 ) object.renderOrder = this.renderOrder;
if ( this.static !== false ) object.static = this.static;
if ( Object.keys( this.userData ).length > 0 ) object.userData = this.userData;

@@ -1284,4 +1323,9 @@

if ( this.pivot !== null ) object.pivot = this.pivot.toArray();
if ( this.matrixAutoUpdate === false ) object.matrixAutoUpdate = false;
if ( this.morphTargetDictionary !== undefined ) object.morphTargetDictionary = Object.assign( {}, this.morphTargetDictionary );
if ( this.morphTargetInfluences !== undefined ) object.morphTargetInfluences = this.morphTargetInfluences.slice();
// object specific properties

@@ -1560,2 +1604,8 @@

if ( source.pivot !== null ) {
this.pivot = source.pivot.clone();
}
this.matrix.copy( source.matrix );

@@ -1578,2 +1628,4 @@ this.matrixWorld.copy( source.matrixWorld );

this.static = source.static;
this.animations = source.animations.slice();

@@ -1580,0 +1632,0 @@

@@ -127,6 +127,2 @@ import { EventDispatcher } from './EventDispatcher.js';

const image = { width: width, height: height, depth: options.depth };
const texture = new Texture( image );
/**

@@ -140,2 +136,5 @@ * An array of textures. Each color attachment is represented as a separate texture.

const image = { width: width, height: height, depth: options.depth };
const texture = new Texture( image );
const count = options.count;

@@ -142,0 +141,0 @@ for ( let i = 0; i < count; i ++ ) {

@@ -848,9 +848,5 @@ import {

// Section 3.2: Transform view direction to hemisphere configuration
vec3 Vh = normalize(vec3(alpha * V.x, alpha * V.y, V.z));
// Section 4.1: Orthonormal basis
float lensq = Vh.x * Vh.x + Vh.y * Vh.y;
vec3 T1 = lensq > 0.0 ? vec3(-Vh.y, Vh.x, 0.0) / sqrt(lensq) : vec3(1.0, 0.0, 0.0);
vec3 T2 = cross(Vh, T1);
vec3 T1 = vec3(1.0, 0.0, 0.0);
vec3 T2 = cross(V, T1);

@@ -862,7 +858,7 @@ // Section 4.2: Parameterization of projected area

float t2 = r * sin(phi);
float s = 0.5 * (1.0 + Vh.z);
float s = 0.5 * (1.0 + V.z);
t2 = (1.0 - s) * sqrt(1.0 - t1 * t1) + s * t2;
// Section 4.3: Reprojection onto hemisphere
vec3 Nh = t1 * T1 + t2 * T2 + sqrt(max(0.0, 1.0 - t1 * t1 - t2 * t2)) * Vh;
vec3 Nh = t1 * T1 + t2 * T2 + sqrt(max(0.0, 1.0 - t1 * t1 - t2 * t2)) * V;

@@ -869,0 +865,0 @@ // Section 3.4: Transform back to ellipsoid configuration

@@ -28,4 +28,6 @@ import { BufferGeometry } from '../core/BufferGeometry.js';

* @param {number} [arc=Math.PI*2] - Central angle in radians.
* @param {number} [thetaStart=0] - Start of the tubular sweep in radians.
* @param {number} [thetaLength=Math.PI*2] - Length of the tubular sweep in radians.
*/
constructor( radius = 1, tube = 0.4, radialSegments = 12, tubularSegments = 48, arc = Math.PI * 2 ) {
constructor( radius = 1, tube = 0.4, radialSegments = 12, tubularSegments = 48, arc = Math.PI * 2, thetaStart = 0, thetaLength = Math.PI * 2 ) {

@@ -48,3 +50,5 @@ super();

tubularSegments: tubularSegments,
arc: arc
arc: arc,
thetaStart: thetaStart,
thetaLength: thetaLength,
};

@@ -72,6 +76,7 @@

const v = thetaStart + ( j / radialSegments ) * thetaLength;
for ( let i = 0; i <= tubularSegments; i ++ ) {
const u = i / tubularSegments * arc;
const v = j / radialSegments * Math.PI * 2;

@@ -78,0 +83,0 @@ // vertex

@@ -21,2 +21,5 @@ import { Camera } from '../cameras/Camera.js';

*
* When the camera is transformed or its projection matrix is changed, it's necessary
* to call the `update()` method of the respective helper.
*
* ```js

@@ -23,0 +26,0 @@ * const camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000 );

@@ -14,5 +14,8 @@ import { Vector3 } from '../math/Vector3.js';

* Helper object to assist with visualizing a {@link DirectionalLight}'s
* effect on the scene. This consists of plane and a line representing the
* effect on the scene. This consists of a plane and a line representing the
* light's position and direction.
*
* When the directional light or its target are transformed or light properties
* are changed, it's necessary to call the `update()` method of the respective helper.
*
* ```js

@@ -19,0 +22,0 @@ * const light = new THREE.DirectionalLight( 0xFFFFFF );

@@ -17,2 +17,5 @@ import { Vector3 } from '../math/Vector3.js';

*
* When the hemisphere light is transformed or its light properties are changed,
* it's necessary to call the `update()` method of the respective helper.
*
* ```js

@@ -19,0 +22,0 @@ * const light = new THREE.HemisphereLight( 0xffffbb, 0x080820, 1 );

@@ -60,28 +60,4 @@ import { Mesh } from '../objects/Mesh.js';

/*
// TODO: delete this comment?
const distanceGeometry = new THREE.IcosahedronGeometry( 1, 2 );
const distanceMaterial = new THREE.MeshBasicMaterial( { color: hexColor, fog: false, wireframe: true, opacity: 0.1, transparent: true } );
this.lightSphere = new THREE.Mesh( bulbGeometry, bulbMaterial );
this.lightDistance = new THREE.Mesh( distanceGeometry, distanceMaterial );
const d = light.distance;
if ( d === 0.0 ) {
this.lightDistance.visible = false;
} else {
this.lightDistance.scale.set( d, d, d );
}
this.add( this.lightDistance );
*/
}
/**

@@ -88,0 +64,0 @@ * Frees the GPU-related resources allocated by this instance. Call this

@@ -13,2 +13,5 @@ import { Vector3 } from '../math/Vector3.js';

*
* When the spot light or its target are transformed or light properties are
* changed, it's necessary to call the `update()` method of the respective helper.
*
* ```js

@@ -15,0 +18,0 @@ * const spotLight = new THREE.SpotLight( 0xffffff );

@@ -6,3 +6,3 @@ import { Matrix4 } from '../math/Matrix4.js';

import { Frustum } from '../math/Frustum.js';
import { UnsignedByteType } from '../constants.js';
import { UnsignedByteType, WebGPUCoordinateSystem } from '../constants.js';

@@ -57,2 +57,12 @@ const _projScreenMatrix = /*@__PURE__*/ new Matrix4();

/**
* A node version of `bias`. Only supported with `WebGPURenderer`.
*
* If a bias node is defined, `bias` has no effect.
*
* @type {?Node<float>}
* @default null
*/
this.biasNode = null;
/**
* Defines how much the position used to query the shadow map is offset along

@@ -208,3 +218,3 @@ * the object normal. The default is `0`. Increasing this value can be used to

if ( shadowCamera.reversedDepth ) {
if ( shadowCamera.coordinateSystem === WebGPUCoordinateSystem || shadowCamera.reversedDepth ) {

@@ -214,3 +224,3 @@ shadowMatrix.set(

0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 1.0, 0.0, // Identity Z (preserving the correct [0, 1] range from the projection matrix)
0.0, 0.0, 0.0, 1.0

@@ -299,2 +309,4 @@ );

this.biasNode = source.biasNode;
return this;

@@ -301,0 +313,0 @@

@@ -25,3 +25,4 @@ import { SpotLight } from '../SpotLight.js';

/**
* TODO
* The IES map. It's a lookup table that stores normalized attenuation factors
* (0.0 to 1.0) that represent the light's intensity at a specific angle.
*

@@ -28,0 +29,0 @@ * @type {?Texture}

@@ -38,2 +38,4 @@ /**

if ( isBlobURL( key ) ) return;
// log( 'Cache', 'Adding key:', key );

@@ -56,2 +58,4 @@

if ( isBlobURL( key ) ) return;
// log( 'Cache', 'Checking key:', key );

@@ -88,3 +92,27 @@

/**
* Returns true if the given cache key contains the blob: scheme.
*
* @private
* @param {string} key - The cache key.
* @return {boolean} Whether the given cache key contains the blob: scheme or not.
*/
function isBlobURL( key ) {
try {
const urlString = key.slice( key.indexOf( ':' ) + 1 ); // remove type identifier
const url = new URL( urlString );
return url.protocol === 'blob:';
} catch ( e ) {
// If the string is not a valid URL, it throws an error
return false;
}
}
export { Cache };

@@ -46,3 +46,3 @@ import { Cache } from './Cache.js';

* The expected mime type. Valid values can be found
* [here](hhttps://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString#mimetype)
* [here](https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString#mimetype)
*

@@ -49,0 +49,0 @@ * @type {string}

@@ -13,8 +13,13 @@ import { Cache } from './Cache.js';

* Note that {@link Texture#flipY} and {@link Texture#premultiplyAlpha} are ignored with image bitmaps.
* They needs these configuration on bitmap creation unlike regular images need them on uploading to GPU.
* These options need to be configured via {@link ImageBitmapLoader#setOptions} prior to loading,
* unlike regular images which can be configured on the Texture to set these options on GPU upload instead.
*
* You need to set the equivalent options via {@link ImageBitmapLoader#setOptions} instead.
* To match the default behaviour of {@link Texture}, the following options are needed:
*
* Also note that unlike {@link FileLoader}, this loader avoids multiple concurrent requests to the same URL only if `Cache` is enabled.
* ```js
* { imageOrientation: 'flipY', premultiplyAlpha: 'none' }
* ```
*
* Also note that unlike {@link FileLoader}, this loader will only avoid multiple concurrent requests to the same URL if {@link Cache} is enabled.
*
* ```js

@@ -21,0 +26,0 @@ * const loader = new THREE.ImageBitmapLoader();

@@ -64,2 +64,8 @@ import { DefaultLoadingManager } from './LoadingManager.js';

if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) {
__THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'observe', { detail: this } ) );
}
}

@@ -66,0 +72,0 @@

@@ -22,2 +22,3 @@ import {

import { Color } from '../math/Color.js';
import { Vector3 } from '../math/Vector3.js';
import { Object3D } from '../core/Object3D.js';

@@ -173,4 +174,14 @@ import { Group } from '../objects/Group.js';

const json = JSON.parse( text );
let json;
try {
json = JSON.parse( text );
} catch ( e ) {
throw new Error( 'ObjectLoader: Can\'t parse ' + url + '. ' + e.message );
}
const metadata = json.metadata;

@@ -1119,2 +1130,7 @@

if ( data.pivot !== undefined ) object.pivot = new Vector3().fromArray( data.pivot );
if ( data.morphTargetDictionary !== undefined ) object.morphTargetDictionary = Object.assign( {}, data.morphTargetDictionary );
if ( data.morphTargetInfluences !== undefined ) object.morphTargetInfluences = data.morphTargetInfluences.slice();
if ( data.castShadow !== undefined ) object.castShadow = data.castShadow;

@@ -1137,2 +1153,3 @@ if ( data.receiveShadow !== undefined ) object.receiveShadow = data.receiveShadow;

if ( data.renderOrder !== undefined ) object.renderOrder = data.renderOrder;
if ( data.static !== undefined ) object.static = data.static;
if ( data.userData !== undefined ) object.userData = data.userData;

@@ -1139,0 +1156,0 @@ if ( data.layers !== undefined ) object.layers.mask = data.layers;

@@ -271,2 +271,10 @@ import { MultiplyOperation, TangentSpaceNormalMap } from '../constants.js';

/**
* Scales the effect of the environment map by multiplying its color.
*
* @type {number}
* @default 1
*/
this.envMapIntensity = 1.0;
/**
* The index of refraction (IOR) of air (approximately 1) divided by the

@@ -377,2 +385,3 @@ * index of refraction of the material. It is used with environment mapping

this.reflectivity = source.reflectivity;
this.envMapIntensity = source.envMapIntensity;
this.refractionRatio = source.refractionRatio;

@@ -379,0 +388,0 @@

@@ -288,2 +288,10 @@ import { MultiplyOperation, TangentSpaceNormalMap } from '../constants.js';

/**
* Scales the effect of the environment map by multiplying its color.
*
* @type {number}
* @default 1
*/
this.envMapIntensity = 1.0;
/**
* The index of refraction (IOR) of air (approximately 1) divided by the

@@ -396,2 +404,3 @@ * index of refraction of the material. It is used with environment mapping

this.reflectivity = source.reflectivity;
this.envMapIntensity = source.envMapIntensity;
this.refractionRatio = source.refractionRatio;

@@ -398,0 +407,0 @@

@@ -12,3 +12,3 @@ import NodeMaterial from './NodeMaterial.js';

import { screenDPR, viewport } from '../../nodes/display/ScreenNode.js';
import { viewportSharedTexture } from '../../nodes/display/ViewportSharedTextureNode.js';
import { viewportOpaqueMipTexture } from '../../nodes/display/ViewportTextureNode.js';

@@ -60,3 +60,3 @@ import { LineDashedMaterial } from '../LineDashedMaterial.js';

*/
this.useColor = parameters.vertexColors;
this.vertexColors = parameters.vertexColors;

@@ -138,3 +138,3 @@ /**

const useAlphaToCoverage = this._useAlphaToCoverage;
const useColor = this.useColor;
const vertexColors = this.vertexColors;
const useDash = this._useDash;

@@ -440,3 +440,3 @@ const useWorldUnits = this._useWorldUnits;

if ( useColor ) {
if ( vertexColors ) {

@@ -466,3 +466,3 @@ const instanceColorStart = attribute( 'instanceColorStart' );

this.outputNode = vec4( this.colorNode.rgb.mul( opacityNode ).add( viewportSharedTexture().rgb.mul( opacityNode.oneMinus() ) ), this.colorNode.a );
this.outputNode = vec4( this.colorNode.rgb.mul( opacityNode ).add( viewportOpaqueMipTexture().rgb.mul( opacityNode.oneMinus() ) ), this.colorNode.a );

@@ -469,0 +469,0 @@ }

@@ -176,2 +176,3 @@ const refreshUniforms = [

attributes: this.getAttributesData( geometry.attributes ),
indexId: geometry.index ? geometry.index.id : null,
indexVersion: geometry.index ? geometry.index.version : null,

@@ -236,3 +237,4 @@ drawRange: { start: geometry.drawRange.start, count: geometry.drawRange.count }

attributesData[ name ] = {
version: attribute.version
id: attribute.id,
version: attribute.version,
};

@@ -428,4 +430,5 @@

if ( storedAttributeData.version !== attribute.version ) {
if ( storedAttributeData.id !== attribute.id || storedAttributeData.version !== attribute.version ) {
storedAttributeData.id = attribute.id;
storedAttributeData.version = attribute.version;

@@ -441,7 +444,10 @@ return false;

const index = geometry.index;
const storedIndexId = storedGeometryData.id;
const storedIndexVersion = storedGeometryData.indexVersion;
const currentIndexId = index ? index.id : null;
const currentIndexVersion = index ? index.version : null;
if ( storedIndexVersion !== currentIndexVersion ) {
if ( storedIndexId !== currentIndexId || storedIndexVersion !== currentIndexVersion ) {
storedGeometryData.id = currentIndexId;
storedGeometryData.indexVersion = currentIndexVersion;

@@ -448,0 +454,0 @@ return false;

@@ -505,2 +505,4 @@ import { clearcoat, clearcoatRoughness, sheen, sheenRoughness, iridescence, iridescenceIOR, iridescenceThickness, specularColor, specularColorBlended, specularF90, diffuseColor, metalness, roughness, anisotropy, alphaT, anisotropyT, anisotropyB, ior, transmission, thickness, attenuationDistance, attenuationColor, dispersion } from '../../nodes/core/PropertyNode.js';

this.iorNode = source.iorNode;
this.transmissionNode = source.transmissionNode;

@@ -507,0 +509,0 @@ this.thicknessNode = source.thicknessNode;

@@ -28,3 +28,3 @@ import { Material } from '../Material.js';

import { subBuild } from '../../nodes/core/SubBuildNode.js';
import { error, warn } from '../../utils.js';
import { error } from '../../utils.js';

@@ -245,2 +245,10 @@ /**

/**
* This node can be used to implement a shadow mask for the material.
*
* @type {?Node<bool>}
* @default null
*/
this.maskShadowNode = null;
/**
* The local vertex positions are computed based on multiple factors like the

@@ -395,22 +403,2 @@ * attribute data, morphing or skinning. This node property allows to overwrite

// Deprecated properties
Object.defineProperty( this, 'shadowPositionNode', { // @deprecated, r176
get: () => {
return this.receivedShadowPositionNode;
},
set: ( value ) => {
warn( 'NodeMaterial: ".shadowPositionNode" was renamed to ".receivedShadowPositionNode".' );
this.receivedShadowPositionNode = value;
}
} );
}

@@ -533,6 +521,8 @@

const mvp = subBuild( this.setupVertex( builder ), 'VERTEX' );
const mvp = this.setupVertex( builder );
const vertexNode = this.vertexNode || mvp;
const vertexNode = subBuild( this.vertexNode || mvp, 'VERTEX' );
builder.context.clipSpace = vertexNode;
builder.stack.outputNode = vertexNode;

@@ -802,3 +792,3 @@

builder.context.vertex = builder.removeStack();
builder.context.position = builder.removeStack();

@@ -1347,2 +1337,3 @@ return modelViewProjection;

this.maskNode = source.maskNode;
this.maskShadowNode = source.maskShadowNode;

@@ -1349,0 +1340,0 @@ this.positionNode = source.positionNode;

@@ -287,9 +287,7 @@ import { Vector3 } from './Vector3.js';

c1.copy( p1 ).add( _d1.multiplyScalar( s ) );
c2.copy( p2 ).add( _d2.multiplyScalar( t ) );
c1.copy( p1 ).addScaledVector( _d1, s );
c2.copy( p2 ).addScaledVector( _d2, t );
c1.sub( c2 );
return c1.distanceToSquared( c2 );
return c1.dot( c1 );
}

@@ -296,0 +294,0 @@

@@ -67,3 +67,3 @@ import { warn } from '../utils.js';

* Performs a linear mapping from range `<a1, a2>` to range `<b1, b2>`
* for the given value.
* for the given value. `a2` must be greater than `a1`.
*

@@ -163,5 +163,5 @@ * @param {number} x - The value to be mapped.

*
* @param {number} x - The value to evaluate based on its position between min and max.
* @param {number} min - The min value. Any x value below min will be `0`.
* @param {number} max - The max value. Any x value above max will be `1`.
* @param {number} x - The value to evaluate based on its position between `min` and `max`.
* @param {number} min - The min value. Any `x` value below `min` will be `0`. `min` must be lower than `max`.
* @param {number} max - The max value. Any `x` value above `max` will be `1`. `max` must be greater than `min`.
* @return {number} The alternated value.

@@ -182,7 +182,7 @@ */

* A [variation on smoothstep](https://en.wikipedia.org/wiki/Smoothstep#Variations)
* that has zero 1st and 2nd order derivatives at x=0 and x=1.
* that has zero 1st and 2nd order derivatives at `x=0` and `x=1`.
*
* @param {number} x - The value to evaluate based on its position between min and max.
* @param {number} min - The min value. Any x value below min will be `0`.
* @param {number} max - The max value. Any x value above max will be `1`.
* @param {number} x - The value to evaluate based on its position between `min` and `max`.
* @param {number} min - The min value. Any `x` value below `min` will be `0`. `min` must be lower than `max`.
* @param {number} max - The max value. Any `x` value above `max` will be `1`. `max` must be greater than `min`.
* @return {number} The alternated value.

@@ -300,3 +300,3 @@ */

*
* @param {number} value - The value to find a POT for.
* @param {number} value - The value to find a POT for. Must be greater than `0`.
* @return {number} The smallest power of two that is greater than or equal to the given number.

@@ -313,3 +313,3 @@ */

*
* @param {number} value - The value to find a POT for.
* @param {number} value - The value to find a POT for. Must be greater than `0`.
* @return {number} The largest power of two that is less than or equal to the given number.

@@ -316,0 +316,0 @@ */

@@ -710,3 +710,3 @@ import { WebGLCoordinateSystem, WebGPUCoordinateSystem } from '../constants.js';

// based on http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.htm
// based on https://github.com/toji/gl-matrix
const te = this.elements,

@@ -719,8 +719,16 @@

t11 = n23 * n34 * n42 - n24 * n33 * n42 + n24 * n32 * n43 - n22 * n34 * n43 - n23 * n32 * n44 + n22 * n33 * n44,
t12 = n14 * n33 * n42 - n13 * n34 * n42 - n14 * n32 * n43 + n12 * n34 * n43 + n13 * n32 * n44 - n12 * n33 * n44,
t13 = n13 * n24 * n42 - n14 * n23 * n42 + n14 * n22 * n43 - n12 * n24 * n43 - n13 * n22 * n44 + n12 * n23 * n44,
t14 = n14 * n23 * n32 - n13 * n24 * n32 - n14 * n22 * n33 + n12 * n24 * n33 + n13 * n22 * n34 - n12 * n23 * n34;
t1 = n11 * n22 - n21 * n12,
t2 = n11 * n32 - n31 * n12,
t3 = n11 * n42 - n41 * n12,
t4 = n21 * n32 - n31 * n22,
t5 = n21 * n42 - n41 * n22,
t6 = n31 * n42 - n41 * n32,
t7 = n13 * n24 - n23 * n14,
t8 = n13 * n34 - n33 * n14,
t9 = n13 * n44 - n43 * n14,
t10 = n23 * n34 - n33 * n24,
t11 = n23 * n44 - n43 * n24,
t12 = n33 * n44 - n43 * n34;
const det = n11 * t11 + n21 * t12 + n31 * t13 + n41 * t14;
const det = t1 * t12 - t2 * t11 + t3 * t10 + t4 * t9 - t5 * t8 + t6 * t7;

@@ -731,21 +739,21 @@ if ( det === 0 ) return this.set( 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 );

te[ 0 ] = t11 * detInv;
te[ 1 ] = ( n24 * n33 * n41 - n23 * n34 * n41 - n24 * n31 * n43 + n21 * n34 * n43 + n23 * n31 * n44 - n21 * n33 * n44 ) * detInv;
te[ 2 ] = ( n22 * n34 * n41 - n24 * n32 * n41 + n24 * n31 * n42 - n21 * n34 * n42 - n22 * n31 * n44 + n21 * n32 * n44 ) * detInv;
te[ 3 ] = ( n23 * n32 * n41 - n22 * n33 * n41 - n23 * n31 * n42 + n21 * n33 * n42 + n22 * n31 * n43 - n21 * n32 * n43 ) * detInv;
te[ 0 ] = ( n22 * t12 - n32 * t11 + n42 * t10 ) * detInv;
te[ 1 ] = ( n31 * t11 - n21 * t12 - n41 * t10 ) * detInv;
te[ 2 ] = ( n24 * t6 - n34 * t5 + n44 * t4 ) * detInv;
te[ 3 ] = ( n33 * t5 - n23 * t6 - n43 * t4 ) * detInv;
te[ 4 ] = t12 * detInv;
te[ 5 ] = ( n13 * n34 * n41 - n14 * n33 * n41 + n14 * n31 * n43 - n11 * n34 * n43 - n13 * n31 * n44 + n11 * n33 * n44 ) * detInv;
te[ 6 ] = ( n14 * n32 * n41 - n12 * n34 * n41 - n14 * n31 * n42 + n11 * n34 * n42 + n12 * n31 * n44 - n11 * n32 * n44 ) * detInv;
te[ 7 ] = ( n12 * n33 * n41 - n13 * n32 * n41 + n13 * n31 * n42 - n11 * n33 * n42 - n12 * n31 * n43 + n11 * n32 * n43 ) * detInv;
te[ 4 ] = ( n32 * t9 - n12 * t12 - n42 * t8 ) * detInv;
te[ 5 ] = ( n11 * t12 - n31 * t9 + n41 * t8 ) * detInv;
te[ 6 ] = ( n34 * t3 - n14 * t6 - n44 * t2 ) * detInv;
te[ 7 ] = ( n13 * t6 - n33 * t3 + n43 * t2 ) * detInv;
te[ 8 ] = t13 * detInv;
te[ 9 ] = ( n14 * n23 * n41 - n13 * n24 * n41 - n14 * n21 * n43 + n11 * n24 * n43 + n13 * n21 * n44 - n11 * n23 * n44 ) * detInv;
te[ 10 ] = ( n12 * n24 * n41 - n14 * n22 * n41 + n14 * n21 * n42 - n11 * n24 * n42 - n12 * n21 * n44 + n11 * n22 * n44 ) * detInv;
te[ 11 ] = ( n13 * n22 * n41 - n12 * n23 * n41 - n13 * n21 * n42 + n11 * n23 * n42 + n12 * n21 * n43 - n11 * n22 * n43 ) * detInv;
te[ 8 ] = ( n12 * t11 - n22 * t9 + n42 * t7 ) * detInv;
te[ 9 ] = ( n21 * t9 - n11 * t11 - n41 * t7 ) * detInv;
te[ 10 ] = ( n14 * t5 - n24 * t3 + n44 * t1 ) * detInv;
te[ 11 ] = ( n23 * t3 - n13 * t5 - n43 * t1 ) * detInv;
te[ 12 ] = t14 * detInv;
te[ 13 ] = ( n13 * n24 * n31 - n14 * n23 * n31 + n14 * n21 * n33 - n11 * n24 * n33 - n13 * n21 * n34 + n11 * n23 * n34 ) * detInv;
te[ 14 ] = ( n14 * n22 * n31 - n12 * n24 * n31 - n14 * n21 * n32 + n11 * n24 * n32 + n12 * n21 * n34 - n11 * n22 * n34 ) * detInv;
te[ 15 ] = ( n12 * n23 * n31 - n13 * n22 * n31 + n13 * n21 * n32 - n11 * n23 * n32 - n12 * n21 * n33 + n11 * n22 * n33 ) * detInv;
te[ 12 ] = ( n22 * t8 - n12 * t10 - n32 * t7 ) * detInv;
te[ 13 ] = ( n11 * t10 - n21 * t8 + n31 * t7 ) * detInv;
te[ 14 ] = ( n24 * t2 - n14 * t4 - n34 * t1 ) * detInv;
te[ 15 ] = ( n13 * t4 - n23 * t2 + n33 * t1 ) * detInv;

@@ -1052,4 +1060,6 @@ return this;

if ( this.determinant() === 0 ) {
const det = this.determinant();
if ( det === 0 ) {
scale.set( 1, 1, 1 );

@@ -1066,4 +1076,3 @@ quaternion.identity();

// if determine is negative, we need to invert one scale
const det = this.determinant();
// if determinant is negative, we need to invert one scale
if ( det < 0 ) sx = - sx;

@@ -1070,0 +1079,0 @@

@@ -57,3 +57,3 @@ import { clamp } from './MathUtils.js';

* @param {number} srcOffset1 - An offset into the second source array.
* @param {number} t - The interpolation factor in the range `[0,1]`.
* @param {number} t - The interpolation factor. A value in the range `[0,1]` will interpolate. A value outside the range `[0,1]` will extrapolate.
* @see {@link Quaternion#slerp}

@@ -73,24 +73,2 @@ */

if ( t <= 0 ) {
dst[ dstOffset + 0 ] = x0;
dst[ dstOffset + 1 ] = y0;
dst[ dstOffset + 2 ] = z0;
dst[ dstOffset + 3 ] = w0;
return;
}
if ( t >= 1 ) {
dst[ dstOffset + 0 ] = x1;
dst[ dstOffset + 1 ] = y1;
dst[ dstOffset + 2 ] = z1;
dst[ dstOffset + 3 ] = w1;
return;
}
if ( w0 !== w1 || x0 !== x1 || y0 !== y1 || z0 !== z1 ) {

@@ -737,6 +715,6 @@

/**
* Performs a spherical linear interpolation between quaternions.
* Performs a spherical linear interpolation between this quaternion and the target quaternion.
*
* @param {Quaternion} qb - The target quaternion.
* @param {number} t - The interpolation factor in the closed interval `[0, 1]`.
* @param {number} t - The interpolation factor. A value in the range `[0,1]` will interpolate. A value outside the range `[0,1]` will extrapolate.
* @return {Quaternion} A reference to this quaternion.

@@ -746,6 +724,2 @@ */

if ( t <= 0 ) return this;
if ( t >= 1 ) return this.copy( qb ); // copy calls _onChangeCallback()
let x = qb._x, y = qb._y, z = qb._z, w = qb._w;

@@ -752,0 +726,0 @@

@@ -109,3 +109,3 @@ import { clamp } from './MathUtils.js';

/**
* Sets the vector's x component to the given value
* Sets the vector's x component to the given value.
*

@@ -124,3 +124,3 @@ * @param {number} x - The value to set.

/**
* Sets the vector's y component to the given value
* Sets the vector's y component to the given value.
*

@@ -139,3 +139,3 @@ * @param {number} y - The value to set.

/**
* Sets the vector's z component to the given value
* Sets the vector's z component to the given value.
*

@@ -142,0 +142,0 @@ * @param {number} z - The value to set.

@@ -64,3 +64,3 @@ import StorageInstancedBufferAttribute from '../../renderers/common/StorageInstancedBufferAttribute.js';

const buffer = new StorageInstancedBufferAttribute( count, itemSize, typedArray );
const node = storage( buffer, type, count );
const node = storage( buffer, type, buffer.count );

@@ -67,0 +67,0 @@ return node;

@@ -16,7 +16,7 @@ import { Fn } from '../tsl/TSLCore.js';

*/
const getBitangent = /*@__PURE__*/ Fn( ( [ crossNormalTangent, varyingName ], { subBuildFn, material } ) => {
const getBitangent = /*@__PURE__*/ Fn( ( [ crossNormalTangent, varyingName ], builder ) => {
let bitangent = crossNormalTangent.mul( tangentGeometry.w ).xyz;
if ( subBuildFn === 'NORMAL' && material.flatShading !== true ) {
if ( builder.subBuildFn === 'NORMAL' && builder.isFlatShading() !== true ) {

@@ -53,7 +53,7 @@ bitangent = bitangent.toVarying( varyingName );

*/
export const bitangentView = /*@__PURE__*/ ( Fn( ( { subBuildFn, geometry, material } ) => {
export const bitangentView = /*@__PURE__*/ ( Fn( ( builder ) => {
let node;
if ( subBuildFn === 'VERTEX' || geometry.hasAttribute( 'tangent' ) ) {
if ( builder.subBuildFn === 'VERTEX' || builder.geometry.hasAttribute( 'tangent' ) ) {

@@ -68,3 +68,3 @@ node = getBitangent( normalView.cross( tangentView ), 'v_bitangentView' ).normalize();

if ( material.flatShading !== true ) {
if ( builder.isFlatShading() !== true ) {

@@ -71,0 +71,0 @@ node = directionToFaceDirection( node );

@@ -369,3 +369,3 @@ import InputNode from '../core/InputNode.js';

return new BufferAttributeNode( array, type, stride, offset );
return new BufferAttributeNode( array, type, stride, offset ).setUsage( usage );

@@ -372,0 +372,0 @@ }

@@ -9,2 +9,25 @@ import { uniform } from '../core/UniformNode.js';

// Cache node uniforms
let _cameraProjectionMatrixBase = null;
let _cameraProjectionMatrixArray = null;
let _cameraProjectionMatrixInverseBase = null;
let _cameraProjectionMatrixInverseArray = null;
let _cameraViewMatrixBase = null;
let _cameraViewMatrixArray = null;
let _cameraWorldMatrixBase = null;
let _cameraWorldMatrixArray = null;
let _cameraNormalMatrixBase = null;
let _cameraNormalMatrixArray = null;
let _cameraPositionBase = null;
let _cameraPositionArray = null;
let _cameraViewportBase = null;
let _cameraViewportArray = null;
/**

@@ -54,10 +77,24 @@ * TSL object that represents the current `index` value of the camera if used ArrayCamera.

const cameraProjectionMatrices = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraProjectionMatrices' );
if ( _cameraProjectionMatrixArray === null ) {
cameraProjectionMatrix = cameraProjectionMatrices.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraProjectionMatrix' );
_cameraProjectionMatrixArray = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraProjectionMatrices' );
} else {
_cameraProjectionMatrixArray.array = matrices;
}
cameraProjectionMatrix = _cameraProjectionMatrixArray.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraProjectionMatrix' );
} else {
cameraProjectionMatrix = uniform( 'mat4' ).setName( 'cameraProjectionMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.projectionMatrix );
if ( _cameraProjectionMatrixBase === null ) {
_cameraProjectionMatrixBase = uniform( camera.projectionMatrix ).setName( 'cameraProjectionMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.projectionMatrix );
}
cameraProjectionMatrix = _cameraProjectionMatrixBase;
}

@@ -89,10 +126,24 @@

const cameraProjectionMatricesInverse = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraProjectionMatricesInverse' );
if ( _cameraProjectionMatrixInverseArray === null ) {
cameraProjectionMatrixInverse = cameraProjectionMatricesInverse.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraProjectionMatrixInverse' );
_cameraProjectionMatrixInverseArray = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraProjectionMatricesInverse' );
} else {
_cameraProjectionMatrixInverseArray.array = matrices;
}
cameraProjectionMatrixInverse = _cameraProjectionMatrixInverseArray.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraProjectionMatrixInverse' );
} else {
cameraProjectionMatrixInverse = uniform( 'mat4' ).setName( 'cameraProjectionMatrixInverse' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.projectionMatrixInverse );
if ( _cameraProjectionMatrixInverseBase === null ) {
_cameraProjectionMatrixInverseBase = uniform( camera.projectionMatrixInverse ).setName( 'cameraProjectionMatrixInverse' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.projectionMatrixInverse );
}
cameraProjectionMatrixInverse = _cameraProjectionMatrixInverseBase;
}

@@ -124,10 +175,24 @@

const cameraViewMatrices = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraViewMatrices' );
if ( _cameraViewMatrixArray === null ) {
cameraViewMatrix = cameraViewMatrices.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraViewMatrix' );
_cameraViewMatrixArray = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraViewMatrices' );
} else {
_cameraViewMatrixArray.array = matrices;
}
cameraViewMatrix = _cameraViewMatrixArray.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraViewMatrix' );
} else {
cameraViewMatrix = uniform( 'mat4' ).setName( 'cameraViewMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.matrixWorldInverse );
if ( _cameraViewMatrixBase === null ) {
_cameraViewMatrixBase = uniform( camera.matrixWorldInverse ).setName( 'cameraViewMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.matrixWorldInverse );
}
cameraViewMatrix = _cameraViewMatrixBase;
}

@@ -159,10 +224,24 @@

const cameraWorldMatrices = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraWorldMatrices' );
if ( _cameraWorldMatrixArray === null ) {
cameraWorldMatrix = cameraWorldMatrices.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraWorldMatrix' );
_cameraWorldMatrixArray = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraWorldMatrices' );
} else {
_cameraWorldMatrixArray.array = matrices;
}
cameraWorldMatrix = _cameraWorldMatrixArray.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraWorldMatrix' );
} else {
cameraWorldMatrix = uniform( 'mat4' ).setName( 'cameraWorldMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.matrixWorld );
if ( _cameraWorldMatrixBase === null ) {
_cameraWorldMatrixBase = uniform( camera.matrixWorld ).setName( 'cameraWorldMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.matrixWorld );
}
cameraWorldMatrix = _cameraWorldMatrixBase;
}

@@ -194,10 +273,24 @@

const cameraNormalMatrices = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraNormalMatrices' );
if ( _cameraNormalMatrixArray === null ) {
cameraNormalMatrix = cameraNormalMatrices.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraNormalMatrix' );
_cameraNormalMatrixArray = uniformArray( matrices ).setGroup( renderGroup ).setName( 'cameraNormalMatrices' );
} else {
_cameraNormalMatrixArray.array = matrices;
}
cameraNormalMatrix = _cameraNormalMatrixArray.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraNormalMatrix' );
} else {
cameraNormalMatrix = uniform( 'mat3' ).setName( 'cameraNormalMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.normalMatrix );
if ( _cameraNormalMatrixBase === null ) {
_cameraNormalMatrixBase = uniform( camera.normalMatrix ).setName( 'cameraNormalMatrix' ).setGroup( renderGroup ).onRenderUpdate( ( { camera } ) => camera.normalMatrix );
}
cameraNormalMatrix = _cameraNormalMatrixBase;
}

@@ -229,21 +322,35 @@

const cameraPositions = uniformArray( positions ).setGroup( renderGroup ).setName( 'cameraPositions' ).onRenderUpdate( ( { camera }, self ) => {
if ( _cameraPositionArray === null ) {
const subCameras = camera.cameras;
const array = self.array;
_cameraPositionArray = uniformArray( positions ).setGroup( renderGroup ).setName( 'cameraPositions' ).onRenderUpdate( ( { camera }, self ) => {
for ( let i = 0, l = subCameras.length; i < l; i ++ ) {
const subCameras = camera.cameras;
const array = self.array;
array[ i ].setFromMatrixPosition( subCameras[ i ].matrixWorld );
for ( let i = 0, l = subCameras.length; i < l; i ++ ) {
}
array[ i ].setFromMatrixPosition( subCameras[ i ].matrixWorld );
} );
}
cameraPosition = cameraPositions.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraPosition' );
} );
} else {
_cameraPositionArray.array = positions;
}
cameraPosition = _cameraPositionArray.element( camera.isMultiViewCamera ? builtin( 'gl_ViewID_OVR' ) : cameraIndex ).toConst( 'cameraPosition' );
} else {
cameraPosition = uniform( new Vector3() ).setName( 'cameraPosition' ).setGroup( renderGroup ).onRenderUpdate( ( { camera }, self ) => self.value.setFromMatrixPosition( camera.matrixWorld ) );
if ( _cameraPositionBase === null ) {
_cameraPositionBase = uniform( new Vector3() ).setName( 'cameraPosition' ).setGroup( renderGroup ).onRenderUpdate( ( { camera }, self ) => self.value.setFromMatrixPosition( camera.matrixWorld ) );
}
cameraPosition = _cameraPositionBase;
}

@@ -276,11 +383,25 @@

const cameraViewports = uniformArray( viewports, 'vec4' ).setGroup( renderGroup ).setName( 'cameraViewports' );
if ( _cameraViewportArray === null ) {
cameraViewport = cameraViewports.element( cameraIndex ).toConst( 'cameraViewport' );
_cameraViewportArray = uniformArray( viewports, 'vec4' ).setGroup( renderGroup ).setName( 'cameraViewports' );
} else {
_cameraViewportArray.array = viewports;
}
cameraViewport = _cameraViewportArray.element( cameraIndex ).toConst( 'cameraViewport' );
} else {
// Fallback for single camera
cameraViewport = vec4( 0, 0, screenSize.x, screenSize.y ).toConst( 'cameraViewport' );
if ( _cameraViewportBase === null ) {
// Fallback for single camera
_cameraViewportBase = vec4( 0, 0, screenSize.x, screenSize.y ).toConst( 'cameraViewport' );
}
cameraViewport = _cameraViewportBase;
}

@@ -287,0 +408,0 @@

@@ -5,3 +5,3 @@ import Node from '../core/Node.js';

import { normalLocal, transformNormal } from './Normal.js';
import { positionLocal } from './Position.js';
import { positionLocal, positionPrevious } from './Position.js';
import { nodeProxy, vec3, mat4 } from '../tsl/TSLBase.js';

@@ -103,2 +103,10 @@ import { NodeUpdateType } from '../core/constants.js';

/**
* The previous instance matrices. Required for computing motion vectors.
*
* @type {?Node}
* @default null
*/
this.previousInstanceMatrixNode = null;
}

@@ -141,47 +149,18 @@

const { instanceMatrix, instanceColor, isStorageMatrix, isStorageColor } = this;
let { instanceMatrixNode, instanceColorNode } = this;
const { count } = instanceMatrix;
// instance matrix
let { instanceMatrixNode, instanceColorNode } = this;
if ( instanceMatrixNode === null ) {
if ( isStorageMatrix ) {
instanceMatrixNode = this._createInstanceMatrixNode( true, builder );
instanceMatrixNode = storage( instanceMatrix, 'mat4', Math.max( count, 1 ) ).element( instanceIndex );
this.instanceMatrixNode = instanceMatrixNode;
} else {
}
// Both backends have ~64kb UBO limit; fallback to attributes above 1000 matrices.
// instance color
if ( count <= 1000 ) {
const { instanceColor, isStorageColor } = this;
instanceMatrixNode = buffer( instanceMatrix.array, 'mat4', Math.max( count, 1 ) ).element( instanceIndex );
} else {
const interleaved = new InstancedInterleavedBuffer( instanceMatrix.array, 16, 1 );
this.buffer = interleaved;
const bufferFn = instanceMatrix.usage === DynamicDrawUsage ? instancedDynamicBufferAttribute : instancedBufferAttribute;
const instanceBuffers = [
bufferFn( interleaved, 'vec4', 16, 0 ),
bufferFn( interleaved, 'vec4', 16, 4 ),
bufferFn( interleaved, 'vec4', 16, 8 ),
bufferFn( interleaved, 'vec4', 16, 12 )
];
instanceMatrixNode = mat4( ...instanceBuffers );
}
}
this.instanceMatrixNode = instanceMatrixNode;
}
if ( instanceColor && instanceColorNode === null ) {

@@ -214,2 +193,8 @@

if ( builder.needsPreviousData() ) {
positionPrevious.assign( this.getPreviousInstancedPosition( builder ) );
}
// NORMAL

@@ -242,3 +227,3 @@

*/
update( /*frame*/ ) {
update( frame ) {

@@ -252,3 +237,3 @@ if ( this.buffer !== null && this.isStorageMatrix !== true ) {

if ( this.instanceMatrix.usage !== DynamicDrawUsage && this.instanceMatrix.version !== this.buffer.version ) {
if ( this.instanceMatrix.version !== this.buffer.version ) {

@@ -266,3 +251,3 @@ this.buffer.version = this.instanceMatrix.version;

if ( this.instanceColor.usage !== DynamicDrawUsage && this.instanceColor.version !== this.bufferColor.version ) {
if ( this.instanceColor.version !== this.bufferColor.version ) {

@@ -275,4 +260,84 @@ this.bufferColor.version = this.instanceColor.version;

if ( this.previousInstanceMatrixNode !== null ) {
frame.object.previousInstanceMatrix.array.set( this.instanceMatrix.array );
}
}
/**
* Computes the transformed/instanced vertex position of the previous frame.
* Required for motion vector computation (e.g. a `velocity` MRT output).
*
* @param {NodeBuilder} builder - The current node builder.
* @return {Node<vec3>} The instanced position from the previous frame.
*/
getPreviousInstancedPosition( builder ) {
const instancedMesh = builder.object;
if ( this.previousInstanceMatrixNode === null ) {
// Lazily snapshot the current instance matrices onto the mesh so the
// per-frame update can copy the latest state into it afterwards.
instancedMesh.previousInstanceMatrix = this.instanceMatrix.clone();
// NOTE(review): the node is built from `this.instanceMatrix` here — presumably it
// should sample the cloned `previousInstanceMatrix` buffer instead; confirm upstream.
this.previousInstanceMatrixNode = this._createInstanceMatrixNode( false, builder );
}
return this.previousInstanceMatrixNode.mul( positionPrevious ).xyz;
}
/**
* Creates a node representing the instance matrix data.
*
* @private
* @param {boolean} assignBuffer - Whether the created interleaved buffer should be assigned to the `buffer` member or not.
* @param {NodeBuilder} builder - A reference to the current node builder.
* @return {Node} The instance matrix node.
*/
_createInstanceMatrixNode( assignBuffer, builder ) {
let instanceMatrixNode;
const { instanceMatrix } = this;
const { count } = instanceMatrix;
if ( this.isStorageMatrix ) {
instanceMatrixNode = storage( instanceMatrix, 'mat4', Math.max( count, 1 ) ).element( instanceIndex );
} else {
const uniformBufferSize = count * 16 * 4; // count * 16 components * 4 bytes (float)
if ( uniformBufferSize <= builder.getUniformBufferLimit() ) {
instanceMatrixNode = buffer( instanceMatrix.array, 'mat4', Math.max( count, 1 ) ).element( instanceIndex );
} else {
const interleaved = new InstancedInterleavedBuffer( instanceMatrix.array, 16, 1 );
if ( assignBuffer === true ) this.buffer = interleaved;
const bufferFn = instanceMatrix.usage === DynamicDrawUsage ? instancedDynamicBufferAttribute : instancedBufferAttribute;
const instanceBuffers = [
bufferFn( interleaved, 'vec4', 16, 0 ),
bufferFn( interleaved, 'vec4', 16, 4 ),
bufferFn( interleaved, 'vec4', 16, 8 ),
bufferFn( interleaved, 'vec4', 16, 12 )
];
instanceMatrixNode = mat4( ...instanceBuffers );
}
}
return instanceMatrixNode;
}
}

@@ -279,0 +344,0 @@

@@ -55,3 +55,3 @@ import { attribute } from '../core/AttributeNode.js';

if ( builder.material.flatShading === true ) {
if ( builder.isFlatShading() ) {

@@ -80,3 +80,3 @@ node = normalFlat;

if ( builder.material.flatShading !== true ) {
if ( builder.isFlatShading() !== true ) {

@@ -97,11 +97,11 @@ normal = normal.toVarying( 'v_normalWorldGeometry' );

*/
export const normalView = /*@__PURE__*/ ( Fn( ( { subBuildFn, material, context } ) => {
export const normalView = /*@__PURE__*/ ( Fn( ( builder ) => {
let node;
if ( subBuildFn === 'NORMAL' || subBuildFn === 'VERTEX' ) {
if ( builder.subBuildFn === 'NORMAL' || builder.subBuildFn === 'VERTEX' ) {
node = normalViewGeometry;
if ( material.flatShading !== true ) {
if ( builder.isFlatShading() !== true ) {

@@ -114,5 +114,5 @@ node = directionToFaceDirection( node );

// Use getUV context to avoid side effects from nodes overwriting getUV in the context (e.g. EnvironmentNode)
// Use custom context to avoid side effects from nodes overwriting getUV, getTextureLevel in the context (e.g. EnvironmentNode)
node = context.setupNormal().context( { getUV: null } );
node = builder.context.setupNormal().context( { getUV: null, getTextureLevel: null } );

@@ -149,5 +149,5 @@ }

// Use getUV context to avoid side effects from nodes overwriting getUV in the context (e.g. EnvironmentNode)
// Use custom context to avoid side effects from nodes overwriting getUV, getTextureLevel in the context (e.g. EnvironmentNode)
node = context.setupClearcoatNormal().context( { getUV: null } );
node = context.setupClearcoatNormal().context( { getUV: null, getTextureLevel: null } );

@@ -154,0 +154,0 @@ }

import { attribute } from '../core/AttributeNode.js';
import { Fn, vec3 } from '../tsl/TSLCore.js';
import { Fn, vec3, vec4 } from '../tsl/TSLCore.js';
import { modelWorldMatrix } from './ModelNode.js';
import { cameraProjectionMatrixInverse } from './Camera.js';
import { warnOnce } from '../../utils.js';
/**
 * TSL object that represents the clip space position of the current rendered object.
 *
 * Only meaningful in the fragment stage; in any other stage a warning is
 * emitted and an empty vec4 is returned.
 *
 * @tsl
 * @type {VaryingNode<vec4>}
 */
export const clipSpace = /*@__PURE__*/ ( Fn( ( builder ) => {

	if ( builder.shaderStage === 'fragment' ) {

		return builder.context.clipSpace.toVarying( 'v_clipSpace' );

	}

	warnOnce( 'TSL: `clipSpace` is only available in fragment stage.' );

	return vec4();

} ).once() )();
/**
* TSL object that represents the position attribute of the current rendered object.

@@ -64,5 +86,15 @@ *

if ( builder.shaderStage === 'fragment' && builder.material.vertexNode ) {
// reconstruct view position from clip space
const viewPos = cameraProjectionMatrixInverse.mul( clipSpace );
return viewPos.xyz.div( viewPos.w ).toVar( 'positionView' );
}
return builder.context.setupPositionView().toVarying( 'v_positionView' );
}, 'vec3' ).once( [ 'POSITION' ] ) )();
}, 'vec3' ).once( [ 'POSITION', 'VERTEX' ] ) )();

@@ -69,0 +101,0 @@ /**

@@ -12,3 +12,2 @@ import Node from '../core/Node.js';

import { buffer } from './BufferNode.js';
import { getDataFromObject } from '../core/NodeUtils.js';
import { storage } from './StorageBufferNode.js';

@@ -151,9 +150,10 @@ import { InstancedBufferAttribute } from '../../core/InstancedBufferAttribute.js';

/**
* Transforms the given vertex normal via skinning.
* Transforms the given vertex normal and tangent via skinning.
*
* @param {Node} [boneMatrices=this.boneMatricesNode] - The bone matrices
* @param {Node<vec3>} [normal=normalLocal] - The vertex normal in local space.
* @return {Node<vec3>} The transformed vertex normal.
* @param {Node<vec3>} [tangent=tangentLocal] - The vertex tangent in local space.
* @return {{skinNormal: Node<vec3>, skinTangent:Node<vec3>}} The transformed vertex normal and tangent.
*/
getSkinnedNormal( boneMatrices = this.boneMatricesNode, normal = normalLocal ) {
getSkinnedNormalAndTangent( boneMatrices = this.boneMatricesNode, normal = normalLocal, tangent = tangentLocal ) {

@@ -167,3 +167,3 @@ const { skinIndexNode, skinWeightNode, bindMatrixNode, bindMatrixInverseNode } = this;

// NORMAL
// NORMAL and TANGENT

@@ -179,4 +179,7 @@ let skinMatrix = add(

return skinMatrix.transformDirection( normal ).xyz;
const skinNormal = skinMatrix.transformDirection( normal ).xyz;
const skinTangent = skinMatrix.transformDirection( tangent ).xyz;
return { skinNormal, skinTangent };
}

@@ -207,17 +210,2 @@

/**
* Returns `true` if bone matrices from the previous frame are required. Relevant
* when computing motion vectors with {@link VelocityNode}.
*
* @param {NodeBuilder} builder - The current node builder.
* @return {boolean} Whether bone matrices from the previous frame are required or not.
*/
needsPreviousBoneMatrices( builder ) {
const mrt = builder.renderer.getMRT();
return ( mrt && mrt.has( 'velocity' ) ) || getDataFromObject( builder.object ).useVelocity === true;
}
/**
* Setups the skinning node by assigning the transformed vertex data to predefined node variables.

@@ -230,3 +218,3 @@ *

if ( this.needsPreviousBoneMatrices( builder ) ) {
if ( builder.needsPreviousData() ) {

@@ -245,3 +233,3 @@ positionPrevious.assign( this.getPreviousSkinnedPosition( builder ) );

const skinNormal = this.getSkinnedNormal();
const { skinNormal, skinTangent } = this.getSkinnedNormalAndTangent();

@@ -252,3 +240,3 @@ normalLocal.assign( skinNormal );

tangentLocal.assign( skinNormal );
tangentLocal.assign( skinTangent );

@@ -255,0 +243,0 @@ }

@@ -7,3 +7,2 @@ import BufferNode from './BufferNode.js';

import { getTypeFromLength } from '../core/NodeUtils.js';
import { warn } from '../../utils.js';

@@ -402,19 +401,1 @@ /**

export const storage = ( value, type = null, count = 0 ) => new StorageBufferNode( value, type, count );
/**
 * Deprecated helper for creating a storage buffer node with PBO enabled.
 *
 * @tsl
 * @function
 * @deprecated since r171. Use `storage().setPBO( true )` instead.
 *
 * @param {StorageBufferAttribute|StorageInstancedBufferAttribute|BufferAttribute} value - The buffer data.
 * @param {?string} type - The buffer type (e.g. `'vec3'`).
 * @param {number} count - The buffer count.
 * @returns {StorageBufferNode}
 */
export const storageObject = ( value, type, count ) => { // @deprecated, r171

	warn( 'TSL: "storageObject()" is deprecated. Use "storage().setPBO( true )" instead.' );

	const node = storage( value, type, count );

	return node.setPBO( true );

};

@@ -225,2 +225,3 @@ import TextureNode from './TextureNode.js';

newNode.mipLevel = this.mipLevel;
newNode.access = this.access;
return newNode;

@@ -259,4 +260,17 @@

const node = storageTexture( value, uvNode, storeNode );
let node;
if ( value.isStorageTextureNode === true ) {
// Derive new storage texture node from existing one
node = value.clone();
node.uvNode = uvNode;
node.storeNode = storeNode;
} else {
node = storageTexture( value, uvNode, storeNode );
}
if ( storeNode !== null ) node.toStack();

@@ -263,0 +277,0 @@

@@ -30,7 +30,7 @@ import { attribute } from '../core/AttributeNode.js';

*/
export const tangentView = /*@__PURE__*/ ( Fn( ( { subBuildFn, geometry, material } ) => {
export const tangentView = /*@__PURE__*/ ( Fn( ( builder ) => {
let node;
if ( subBuildFn === 'VERTEX' || geometry.hasAttribute( 'tangent' ) ) {
if ( builder.subBuildFn === 'VERTEX' || builder.geometry.hasAttribute( 'tangent' ) ) {

@@ -45,3 +45,3 @@ node = modelViewMatrix.mul( vec4( tangentLocal, 0 ) ).xyz.toVarying( 'v_tangentView' ).normalize();

if ( material.flatShading !== true ) {
if ( builder.isFlatShading() !== true ) {

@@ -48,0 +48,0 @@ node = directionToFaceDirection( node );

import TextureNode from './TextureNode.js';
import { nodeProxy, vec3, Fn, If, int } from '../tsl/TSLBase.js';
import { textureSize } from './TextureSizeNode.js';
import { nodeProxy, vec3, Fn, If } from '../tsl/TSLBase.js';

@@ -119,31 +118,2 @@ const normal = Fn( ( { texture, uv } ) => {

/**
* Overwrites the default implementation to return the unmodified uv node.
*
* @param {NodeBuilder} builder - The current node builder.
* @param {Node} uvNode - The uv node to setup.
* @return {Node} The unmodified uv node.
*/
setupUV( builder, uvNode ) {
const texture = this.value;
if ( builder.isFlipY() && ( texture.isRenderTargetTexture === true || texture.isFramebufferTexture === true ) ) {
if ( this.sampler ) {
uvNode = uvNode.flipY();
} else {
uvNode = uvNode.setY( int( textureSize( this, this.levelNode ).y ).sub( uvNode.y ).sub( 1 ) );
}
}
return uvNode;
}
/**
* Generates the uv code snippet.

@@ -175,6 +145,8 @@ *

/**
* TODO.
* Computes the normal for the given uv. These texture coordinates represent a
* position inside the 3D texture. Unlike geometric normals, this normal
* represents a slope or gradient of scalar data inside the 3D texture.
*
* @param {Node<vec3>} uvNode - The uv node .
* @return {Node<vec3>} TODO.
* @param {Node<vec3>} uvNode - The uv node that defines a position in the 3D texture.
* @return {Node<vec3>} The normal representing the slope/gradient in the data.
*/

@@ -181,0 +153,0 @@ normal( uvNode ) {

@@ -8,9 +8,12 @@ import UniformNode, { uniform } from '../core/UniformNode.js';

import { nodeProxy, vec3, nodeObject, int, Fn } from '../tsl/TSLBase.js';
import { step } from '../math/MathNode.js';
import { NodeUpdateType } from '../core/constants.js';
import { IntType, NearestFilter, UnsignedIntType } from '../../constants.js';
import { Compatibility, IntType, LessCompare, NearestFilter, UnsignedIntType } from '../../constants.js';
import { Texture } from '../../textures/Texture.js';
import { warn } from '../../utils.js';
import { warn, warnOnce } from '../../utils.js';
import NodeError from '../core/NodeError.js';
const EmptyTexture = /*@__PURE__*/ new Texture();

@@ -347,3 +350,3 @@

throw new Error( 'THREE.TSL: `texture( value )` function expects a valid instance of THREE.Texture().' );
throw new NodeError( 'THREE.TSL: `texture( value )` function expects a valid instance of THREE.Texture().', this.stackTrace );

@@ -396,6 +399,34 @@ }

let compareNode = null;
let compareStepNode = null;
if ( this.compareNode !== null ) {
if ( builder.renderer.hasCompatibility( Compatibility.TEXTURE_COMPARE ) ) {
compareNode = this.compareNode;
} else {
if ( this.value.compareFunction === null || this.value.compareFunction === LessCompare ) {
compareStepNode = this.compareNode;
} else {
compareNode = this.compareNode;
warnOnce( 'TSL: Only "LessCompare" is supported for depth texture comparison fallback.' );
}
}
}
properties.uvNode = uvNode;
properties.levelNode = levelNode;
properties.biasNode = this.biasNode;
properties.compareNode = this.compareNode;
properties.compareNode = compareNode;
properties.compareStepNode = compareStepNode;
properties.gradNode = this.gradNode;

@@ -509,2 +540,4 @@ properties.depthNode = this.depthNode;

const nodeType = this.getNodeType( builder );
let propertyName = nodeData.propertyName;

@@ -514,3 +547,3 @@

const { uvNode, levelNode, biasNode, compareNode, depthNode, gradNode, offsetNode } = properties;
const { uvNode, levelNode, biasNode, compareNode, compareStepNode, depthNode, gradNode, offsetNode } = properties;

@@ -522,2 +555,3 @@ const uvSnippet = this.generateUV( builder, uvNode );

const compareSnippet = compareNode ? compareNode.build( builder, 'float' ) : null;
const compareStepSnippet = compareStepNode ? compareStepNode.build( builder, 'float' ) : null;
const gradSnippet = gradNode ? [ gradNode[ 0 ].build( builder, 'vec2' ), gradNode[ 1 ].build( builder, 'vec2' ) ] : null;

@@ -530,4 +564,10 @@ const offsetSnippet = offsetNode ? this.generateOffset( builder, offsetNode ) : null;

const snippet = this.generateSnippet( builder, textureProperty, uvSnippet, levelSnippet, biasSnippet, depthSnippet, compareSnippet, gradSnippet, offsetSnippet );
let snippet = this.generateSnippet( builder, textureProperty, uvSnippet, levelSnippet, biasSnippet, depthSnippet, compareSnippet, gradSnippet, offsetSnippet );
if ( compareStepSnippet !== null ) {
snippet = step( expression( compareStepSnippet, 'float' ), expression( snippet, nodeType ) ).build( builder, nodeType );
}
builder.addLineFlowCode( `${propertyName} = ${snippet}`, this );

@@ -541,3 +581,2 @@

let snippet = propertyName;
const nodeType = this.getNodeType( builder );

@@ -584,17 +623,2 @@ if ( builder.needsToWorkingColorSpace( texture ) ) {

/**
* @function
* @deprecated since r172. Use {@link TextureNode#sample} instead.
*
* @param {Node} uvNode - The uv node.
* @return {TextureNode} A texture node representing the texture sample.
*/
uv( uvNode ) { // @deprecated, r172
warn( 'TextureNode: .uv() has been renamed. Use .sample() instead.' );
return this.sample( uvNode );
}
/**
* Samples the texture with the given uv node.

@@ -601,0 +625,0 @@ *

@@ -318,2 +318,4 @@ import { nodeObject } from '../tsl/TSLBase.js';

this.update(); // initialize the buffer values
return super.setup( builder );

@@ -320,0 +322,0 @@

import OutputStructNode from './OutputStructNode.js';
import { nodeProxy, vec4 } from '../tsl/TSLBase.js';
import { MaterialBlending, NoBlending } from '../../constants.js';
import BlendMode from '../../renderers/common/BlendMode.js';
// Predefined blend modes for MRT nodes.
const _noBlending = /**@__PURE__*/ new BlendMode( NoBlending );
const _materialBlending = /**@__PURE__*/ new BlendMode( MaterialBlending );
/**

@@ -34,3 +40,3 @@ * Returns the MRT texture index for the given name.

* normal: normalView
* } ) );
* } ) ;
* ```

@@ -68,2 +74,11 @@ * The MRT output is defined as a dictionary.

/**
* A dictionary storing the blend modes for each output.
*
* @type {Object<string, BlendMode>}
*/
this.blendModes = {
output: _materialBlending
};
/**
* This flag can be used for type testing.

@@ -80,2 +95,29 @@ *

/**
* Sets the blend mode for the given output name.
*
* @param {string} name - The name of the output.
* @param {BlendMode} blend - The blending mode.
* @return {MRTNode} The current MRT node.
*/
setBlendMode( name, blend ) {
this.blendModes[ name ] = blend;
return this;
}
/**
* Returns the blend mode for the given output name.
*
* @param {string} name - The name of the output.
* @return {BlendMode} The blend mode.
*/
getBlendMode( name ) {
return this.blendModes[ name ] || _noBlending;
}
/**
* Returns `true` if the MRT node has an output with the given name.

@@ -113,5 +155,9 @@ *

const outputs = { ...this.outputNodes, ...mrtNode.outputNodes };
const blendings = { ...this.blendModes, ...mrtNode.blendModes };
return mrt( outputs );
const mrtTarget = mrt( outputs );
mrtTarget.blendings = blendings;
return mrtTarget;
}

@@ -118,0 +164,0 @@

@@ -8,2 +8,4 @@ import { NodeUpdateType } from './constants.js';

import StackTrace from './StackTrace.js';
const _parentBuildStage = {

@@ -136,3 +138,3 @@ analyze: 'setup',

/**
* The cache key 's version.
* The cache key's version.
*

@@ -147,2 +149,16 @@ * @private

/**
* The stack trace of the node for debugging purposes.
*
* @type {?string}
* @default null
*/
this.stackTrace = null;
if ( Node.captureStackTrace === true ) {
this.stackTrace = new StackTrace();
}
}

@@ -776,3 +792,2 @@

builder.addNode( this );
builder.addChain( this );

@@ -791,2 +806,4 @@

builder.addNode( this );
this.updateReference( builder );

@@ -828,2 +845,4 @@

builder.addSequentialNode( this );
}

@@ -898,3 +917,2 @@

builder.removeChain( this );
builder.addSequentialNode( this );

@@ -1090,2 +1108,10 @@ return result;

/**
* Enables or disables the automatic capturing of stack traces for nodes.
*
* @type {boolean}
* @default false
*/
Node.captureStackTrace = false;
export default Node;

@@ -8,3 +8,5 @@ import { Color } from '../../math/Color.js';

import { Vector4 } from '../../math/Vector4.js';
import { error } from '../../utils.js';
import StackTrace from '../core/StackTrace.js';

@@ -158,3 +160,3 @@ // cyrb53 (c) 2018 bryc (github.com/bryc). License: Public domain. Attribution appreciated.

error( 'TSL: Unsupported type:', type );
error( `TSL: Unsupported type: ${ type }`, new StackTrace() );

@@ -181,3 +183,3 @@ }

error( 'TSL: Unsupported type:', type );
error( `TSL: Unsupported type: ${ type }`, new StackTrace() );

@@ -204,3 +206,3 @@ }

error( 'TSL: Unsupported type:', type );
error( `TSL: Unsupported type: ${ type }`, new StackTrace() );

@@ -207,0 +209,0 @@ }

@@ -45,8 +45,14 @@ import Node from './Node.js';

getNodeType( builder ) {
getNodeType( /*builder*/ ) {
const properties = builder.getNodeProperties( this );
return 'OutputType';
if ( properties.membersLayout === undefined ) {
}
generate( builder ) {
const nodeData = builder.getDataFromNode( this );
if ( nodeData.membersLayout === undefined ) {
const members = this.members;

@@ -64,13 +70,9 @@ const membersLayout = [];

properties.membersLayout = membersLayout;
properties.structType = builder.getOutputStructTypeFromNode( this, properties.membersLayout );
nodeData.membersLayout = membersLayout;
nodeData.structType = builder.getOutputStructTypeFromNode( this, nodeData.membersLayout );
}
return properties.structType.name;
//
}
generate( builder ) {
const propertyName = builder.getOutputStructName();

@@ -77,0 +79,0 @@ const members = this.members;

import { error } from '../../utils.js';
import StackTrace from '../core/StackTrace.js';
import PropertyNode from './PropertyNode.js';

@@ -58,3 +59,3 @@

error( `TSL: Member "${ name }" not found in struct "${ type }".` );
error( `TSL: Member "${ name }" not found in struct "${ type }".`, new StackTrace() );

@@ -61,0 +62,0 @@ memberType = 'float';

import Node from './Node.js';
import StackTrace from '../core/StackTrace.js';
import { select } from '../math/ConditionalNode.js';

@@ -93,3 +94,3 @@ import { ShaderNode, nodeProxy, getCurrentStack, setCurrentStack, nodeObject } from '../tsl/TSLBase.js';

return this.hasOutput ? this.outputNode.getElementType( builder ) : 'void';
return this.hasOutput( builder ) ? this.outputNode.getElementType( builder ) : 'void';

@@ -100,3 +101,3 @@ }

return this.hasOutput ? this.outputNode.getNodeType( builder ) : 'void';
return this.hasOutput( builder ) ? this.outputNode.getNodeType( builder ) : 'void';

@@ -107,3 +108,3 @@ }

return this.hasOutput ? this.outputNode.getMemberType( builder, name ) : 'void';
return this.hasOutput( builder ) ? this.outputNode.getMemberType( builder, name ) : 'void';

@@ -123,3 +124,3 @@ }

error( 'TSL: Invalid node added to stack.' );
error( 'TSL: Invalid node added to stack.', new StackTrace() );
return this;

@@ -236,3 +237,3 @@

error( 'TSL: Invalid parameter length. Case() requires at least two parameters.' );
error( 'TSL: Invalid parameter length. Case() requires at least two parameters.', new StackTrace() );

@@ -319,5 +320,5 @@ }

get hasOutput() {
hasOutput( builder ) {
return this.outputNode && this.outputNode.isNode;
return this.outputNode && this.outputNode.isNode && this.outputNode.getNodeType( builder ) !== 'void';

@@ -401,3 +402,3 @@ }

if ( this.hasOutput ) {
if ( this.hasOutput( builder ) ) {

@@ -404,0 +405,0 @@ result = this.outputNode.build( builder, ...params );

@@ -55,2 +55,17 @@ import Node from './Node.js';

_getChildren() {
// Ensure struct type is the last child for correct code generation order
const children = super._getChildren();
const structTypeProperty = children.find( child => child.childNode === this.structTypeNode );
children.splice( children.indexOf( structTypeProperty ), 1 );
children.push( structTypeProperty );
return children;
}
generate( builder ) {

@@ -57,0 +72,0 @@

@@ -89,2 +89,2 @@ import Node from './Node.js';

*/
export const subBuild = ( node, name, type = null ) => nodeObject( new SubBuildNode( nodeObject( node ), name, type ) );
export const subBuild = ( node, name, type = null ) => new SubBuildNode( nodeObject( node ), name, type );
import InputNode from './InputNode.js';
import StackTrace from '../core/StackTrace.js';
import { objectGroup } from './UniformGroupNode.js';

@@ -81,3 +82,3 @@ import { getConstNodeType } from '../tsl/TSLCore.js';

warn( 'TSL: "label()" has been deprecated. Use "setName()" instead.' ); // @deprecated r179
warn( 'TSL: "label()" has been deprecated. Use "setName()" instead.', new StackTrace() ); // @deprecated r179

@@ -84,0 +85,0 @@ return this.setName( name );

@@ -257,3 +257,3 @@ import Node from './Node.js';

error( 'TSL: ".toVar()" can not be used with void type.' );
error( 'TSL: ".toVar()" can not be used with void type.', this.stackTrace );

@@ -260,0 +260,0 @@ }

@@ -5,3 +5,2 @@ import Node from './Node.js';

import { subBuild } from './SubBuildNode.js';
import { warn } from '../../utils.js';

@@ -41,3 +40,3 @@ /**

*/
this.node = node;
this.node = subBuild( node, 'VERTEX' );

@@ -214,17 +213,1 @@ /**

addMethodChaining( 'toVertexStage', vertexStage );
// Deprecated
addMethodChaining( 'varying', ( ...params ) => { // @deprecated, r173
warn( 'TSL: .varying() has been renamed to .toVarying().' );
return varying( ...params );
} );
addMethodChaining( 'vertexStage', ( ...params ) => { // @deprecated, r173
warn( 'TSL: .vertexStage() has been renamed to .toVertexStage().' );
return varying( ...params );
} );
import { Fn, If, vec4 } from '../tsl/TSLBase.js';
import { mix, min, step } from '../math/MathNode.js';
import { warn } from '../../utils.js';

@@ -173,64 +172,1 @@ /**

}, { color: 'vec4', return: 'vec4' } );
// Deprecated
/**
* @tsl
* @function
* @deprecated since r171. Use {@link blendBurn} instead.
*
* @param {...any} params
* @returns {Function}
*/
export const burn = ( ...params ) => { // @deprecated, r171
warn( 'TSL: "burn" has been renamed. Use "blendBurn" instead.' );
// NOTE(review): params is forwarded as an array (legacy Fn array-call form) — confirm blendBurn accepts it.
return blendBurn( params );
};
/**
* @tsl
* @function
* @deprecated since r171. Use {@link blendDodge} instead.
*
* @param {...any} params
* @returns {Function}
*/
export const dodge = ( ...params ) => { // @deprecated, r171
warn( 'TSL: "dodge" has been renamed. Use "blendDodge" instead.' );
// NOTE(review): params is forwarded as an array (legacy Fn array-call form) — confirm blendDodge accepts it.
return blendDodge( params );
};
/**
* @tsl
* @function
* @deprecated since r171. Use {@link blendScreen} instead.
*
* @param {...any} params
* @returns {Function}
*/
export const screen = ( ...params ) => { // @deprecated, r171
warn( 'TSL: "screen" has been renamed. Use "blendScreen" instead.' );
// NOTE(review): params is forwarded as an array (legacy Fn array-call form) — confirm blendScreen accepts it.
return blendScreen( params );
};
/**
* @tsl
* @function
* @deprecated since r171. Use {@link blendOverlay} instead.
*
* @param {...any} params
* @returns {Function}
*/
export const overlay = ( ...params ) => { // @deprecated, r171
warn( 'TSL: "overlay" has been renamed. Use "blendOverlay" instead.' );
// NOTE(review): params is forwarded as an array (legacy Fn array-call form) — confirm blendOverlay accepts it.
return blendOverlay( params );
};

@@ -142,1 +142,18 @@ import { dot, max, mix } from '../math/MathNode.js';

} );
/**
 * TSL function for creating a posterize effect which reduces the number of colors
 * in an image, resulting in a more blocky and stylized appearance.
 *
 * @tsl
 * @function
 * @param {Node} sourceNode - The input color.
 * @param {Node} stepsNode - Controls the intensity of the posterization effect. A lower number results in a more blocky appearance.
 * @returns {Node} The posterized color.
 */
export const posterize = Fn( ( [ source, steps ] ) => {

	// Quantize to the given number of steps, then renormalize.
	const quantized = source.mul( steps ).floor();

	return quantized.div( steps );

} );

@@ -138,3 +138,3 @@ import TempNode from '../core/TempNode.js';

*/
export const workingToColorSpace = ( node, targetColorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), WORKING_COLOR_SPACE, targetColorSpace ) );
export const workingToColorSpace = ( node, targetColorSpace ) => new ColorSpaceNode( nodeObject( node ), WORKING_COLOR_SPACE, targetColorSpace );

@@ -150,3 +150,3 @@ /**

*/
export const colorSpaceToWorking = ( node, sourceColorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), sourceColorSpace, WORKING_COLOR_SPACE ) );
export const colorSpaceToWorking = ( node, sourceColorSpace ) => new ColorSpaceNode( nodeObject( node ), sourceColorSpace, WORKING_COLOR_SPACE );

@@ -163,5 +163,5 @@ /**

*/
export const convertColorSpace = ( node, sourceColorSpace, targetColorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), sourceColorSpace, targetColorSpace ) );
export const convertColorSpace = ( node, sourceColorSpace, targetColorSpace ) => new ColorSpaceNode( nodeObject( node ), sourceColorSpace, targetColorSpace );
addMethodChaining( 'workingToColorSpace', workingToColorSpace );
addMethodChaining( 'colorSpaceToWorking', colorSpaceToWorking );

@@ -72,3 +72,3 @@ import TempNode from '../core/TempNode.js';

setup( { material } ) {
setup( builder ) {

@@ -109,3 +109,3 @@ const { normalMapType, scaleNode, unpackNormalMode } = this;

if ( material.flatShading === true ) {
if ( builder.isFlatShading() === true ) {

@@ -112,0 +112,0 @@ scale = directionToFaceDirection( scale );

@@ -47,2 +47,11 @@ import TempNode from '../core/TempNode.js';

/**
* This flag can be used for type testing.
*
* @type {boolean}
* @default true
* @readonly
*/
this.isPassTextureNode = true;
this.setUpdateMatrix( false );

@@ -54,3 +63,4 @@

this.passNode.build( builder );
const properties = builder.getNodeProperties( this );
properties.passNode = this.passNode;

@@ -112,2 +122,11 @@ return super.setup( builder );

/**
* This flag can be used for type testing.
*
* @type {boolean}
* @default true
* @readonly
*/
this.isPassMultipleTextureNode = true;
}

@@ -156,3 +175,3 @@

* ```js
* const postProcessing = new PostProcessing( renderer );
* const postProcessing = new RenderPipeline( renderer );
*

@@ -159,0 +178,0 @@ * const scenePass = pass( scene, camera );

@@ -16,6 +16,6 @@ import TempNode from '../core/TempNode.js';

* When applying tone mapping and color space conversion manually with this node,
* you have to set {@link PostProcessing#outputColorTransform} to `false`.
* you have to set {@link RenderPipeline#outputColorTransform} to `false`.
*
* ```js
* const postProcessing = new PostProcessing( renderer );
* const postProcessing = new RenderPipeline( renderer );
* postProcessing.outputColorTransform = false;

@@ -149,4 +149,4 @@ *

*/
export const renderOutput = ( color, toneMapping = null, outputColorSpace = null ) => nodeObject( new RenderOutputNode( nodeObject( color ), toneMapping, outputColorSpace ) );
export const renderOutput = ( color, toneMapping = null, outputColorSpace = null ) => new RenderOutputNode( nodeObject( color ), toneMapping, outputColorSpace );
addMethodChaining( 'renderOutput', renderOutput );
import Node from '../core/Node.js';
import StackTrace from '../core/StackTrace.js';
import { NodeUpdateType } from '../core/constants.js';

@@ -287,3 +288,3 @@ import { uniform } from '../core/UniformNode.js';

warn( 'TSL: "viewportResolution" is deprecated. Use "screenSize" instead.' );
warn( 'TSL: "viewportResolution" is deprecated. Use "screenSize" instead.', new StackTrace() );

@@ -290,0 +291,0 @@ return screenSize;

@@ -137,3 +137,3 @@ import TempNode from '../core/TempNode.js';

*/
export const toneMapping = ( mapping, exposure, color ) => nodeObject( new ToneMappingNode( mapping, nodeObject( exposure ), nodeObject( color ) ) );
export const toneMapping = ( mapping, exposure, color ) => new ToneMappingNode( mapping, nodeObject( exposure ), nodeObject( color ) );

@@ -140,0 +140,0 @@ /**

@@ -17,3 +17,3 @@ import { float, nodeObject, normalize, vec4 } from '../tsl/TSLBase.js';

* ```js
* const postProcessing = new PostProcessing( renderer );
* const postProcessing = new RenderPipeline( renderer );
*

@@ -192,2 +192,2 @@ * const scenePass = toonOutlinePass( scene, camera );

*/
export const toonOutlinePass = ( scene, camera, color = new Color( 0, 0, 0 ), thickness = 0.003, alpha = 1 ) => nodeObject( new ToonOutlinePassNode( scene, camera, nodeObject( color ), nodeObject( thickness ), nodeObject( alpha ) ) );
export const toonOutlinePass = ( scene, camera, color = new Color( 0, 0, 0 ), thickness = 0.003, alpha = 1 ) => new ToonOutlinePassNode( scene, camera, nodeObject( color ), nodeObject( thickness ), nodeObject( alpha ) );
import Node from '../core/Node.js';
import { float, log, log2, nodeImmutable, nodeProxy } from '../tsl/TSLBase.js';
import { float, Fn, log, log2, nodeImmutable, nodeProxy } from '../tsl/TSLBase.js';
import { cameraNear, cameraFar } from '../accessors/Camera.js';

@@ -89,3 +89,3 @@ import { positionView } from '../accessors/Position.js';

node = depthBase().assign( value );
node = depthBase().assign( value );

@@ -157,2 +157,14 @@ }

/**
* TSL function for converting a viewZ value to a reversed orthographic depth value.
*
* Computes `( viewZ + far ) / ( far - near )`, which maps the near plane
* (viewZ = -near) to 1 and the far plane (viewZ = -far) to 0 — the reverse
* of the standard orthographic depth mapping.
*
* @tsl
* @function
* @param {Node<float>} viewZ - The viewZ node.
* @param {Node<float>} near - The camera's near value.
* @param {Node<float>} far - The camera's far value.
* @returns {Node<float>}
*/
export const viewZToReversedOrthographicDepth = ( viewZ, near, far ) => viewZ.add( far ).div( far.sub( near ) );
/**
* TSL function for converting an orthographic depth value to a viewZ value.

@@ -167,4 +179,16 @@ *

*/
/**
 * TSL function for converting an orthographic depth value to a viewZ value.
 * Honors the renderer's reversed depth buffer setting.
 *
 * Note: the stale non-reversed-aware arrow export that previously preceded
 * this definition has been removed — two `const` declarations of the same
 * name in one module are a syntax error.
 *
 * @tsl
 * @function
 * @param {Node<float>} depth - The orthographic depth value.
 * @param {Node<float>} near - The camera's near value.
 * @param {Node<float>} far - The camera's far value.
 * @returns {Node<float>} The viewZ value.
 */
export const orthographicDepthToViewZ = /*@__PURE__*/ Fn( ( [ depth, near, far ], builder ) => {

	// With a reversed depth buffer the near/far mapping is flipped.
	if ( builder.renderer.reversedDepthBuffer === true ) {

		return far.sub( near ).mul( depth ).sub( far );

	}

	return near.sub( far ).mul( depth ).sub( near );

} );
/**

@@ -185,2 +209,14 @@ * TSL function for converting a viewZ value to a perspective depth value.

/**
* TSL function for converting a viewZ value to a reversed perspective depth value.
*
* Computes `near * ( viewZ + far ) / ( viewZ * ( near - far ) )`, which maps
* the near plane (viewZ = -near) to 1 and the far plane (viewZ = -far) to 0 —
* the reverse of the standard perspective depth mapping.
*
* @tsl
* @function
* @param {Node<float>} viewZ - The viewZ node.
* @param {Node<float>} near - The camera's near value.
* @param {Node<float>} far - The camera's far value.
* @returns {Node<float>}
*/
export const viewZToReversedPerspectiveDepth = ( viewZ, near, far ) => near.mul( viewZ.add( far ) ).div( viewZ.mul( near.sub( far ) ) );
/**
* TSL function for converting a perspective depth value to a viewZ value.

@@ -195,4 +231,16 @@ *

*/
export const perspectiveDepthToViewZ = ( depth, near, far ) => near.mul( far ).div( far.sub( near ).mul( depth ).sub( far ) );
export const perspectiveDepthToViewZ = /*@__PURE__*/ Fn( ( [ depth, near, far ], builder ) => {
if ( builder.renderer.reversedDepthBuffer === true ) {
return near.mul( far ).div( near.sub( far ).mul( depth ).sub( near ) );
} else {
return near.mul( far ).div( far.sub( near ).mul( depth ).sub( far ) );
}
} );
/**

@@ -199,0 +247,0 @@ * TSL function for converting a viewZ value to a logarithmic depth value.

@@ -83,9 +83,9 @@ import TextureNode from '../accessors/TextureNode.js';

/**
* The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders the
* scene once per frame in its {@link ViewportTextureNode#updateBefore} method.
* The `updateBeforeType` is set to `NodeUpdateType.RENDER` since the node should extract
* the current contents of the bound framebuffer for each render call.
*
* @type {string}
* @default 'frame'
* @default 'render'
*/
this.updateBeforeType = NodeUpdateType.FRAME;
this.updateBeforeType = NodeUpdateType.RENDER;

@@ -230,1 +230,18 @@ /**

export const viewportMipTexture = /*@__PURE__*/ nodeProxy( ViewportTextureNode, null, null, { generateMipmaps: true } ).setParameterLength( 0, 3 );
// Singleton instances for common usage
const _singletonOpaqueViewportTextureNode = /*@__PURE__*/ viewportMipTexture();
/**
* TSL function for creating a viewport texture node with enabled mipmap generation.
* The texture should only contain the opaque rendering objects.
*
* This should be used just in transparent or transmissive materials.
*
* @tsl
* @function
* @param {?Node} [uv=screenUV] - The uv node.
* @param {?Node} [level=null] - The level node.
* @returns {ViewportTextureNode}
*/
export const viewportOpaqueMipTexture = ( uv = screenUV, level = null ) => _singletonOpaqueViewportTextureNode.sample( uv, level ); // TODO: Use once() when sample() supports it

@@ -1,5 +0,4 @@

import { positionView } from '../accessors/Position.js';
import { positionView, positionWorld } from '../accessors/Position.js';
import { smoothstep } from '../math/MathNode.js';
import { Fn, output, vec4 } from '../tsl/TSLBase.js';
import { warn } from '../../utils.js';

@@ -67,49 +66,33 @@ /**

/**
* This class can be used to configure a fog for the scene.
* Nodes of this type are assigned to `Scene.fogNode`.
* Constructs a new height fog factor node. This fog factor requires a Y-up coordinate system.
*
* @tsl
* @function
* @param {Node} color - Defines the color of the fog.
* @param {Node} factor - Defines how the fog is factored in the scene.
* @param {Node} density - Defines the fog density.
* @param {Node} height - The height threshold in world space. Everything below this y-coordinate is affected by fog.
*/
export const fog = Fn( ( [ color, factor ] ) => {
export const exponentialHeightFogFactor = Fn( ( [ density, height ], builder ) => {
return vec4( factor.toFloat().mix( output.rgb, color.toVec3() ), output.a );
const viewZ = getViewZNode( builder );
const distance = height.sub( positionWorld.y ).max( 0 ).toConst();
const m = distance.mul( viewZ ).toConst();
return density.mul( density, m, m ).negate().exp().oneMinus();
} );
// Deprecated
/**
* @tsl
* @function
* @deprecated since r171. Use `fog( color, rangeFogFactor( near, far ) )` instead.
* This class can be used to configure a fog for the scene.
* Nodes of this type are assigned to `Scene.fogNode`.
*
* @param {Node} color
* @param {Node} near
* @param {Node} far
* @returns {Function}
*/
export function rangeFog( color, near, far ) { // @deprecated, r171
warn( 'TSL: "rangeFog( color, near, far )" is deprecated. Use "fog( color, rangeFogFactor( near, far ) )" instead.' );
return fog( color, rangeFogFactor( near, far ) );
}
/**
* @tsl
* @function
* @deprecated since r171. Use `fog( color, densityFogFactor( density ) )` instead.
*
* @param {Node} color
* @param {Node} density
* @returns {Function}
* @param {Node} color - Defines the color of the fog.
* @param {Node} factor - Defines how the fog is factored in the scene.
*/
export function densityFog( color, density ) { // @deprecated, r171
export const fog = Fn( ( [ color, factor ] ) => {
warn( 'TSL: "densityFog( color, density )" is deprecated. Use "fog( color, densityFogFactor( density ) )" instead.' );
return fog( color, densityFogFactor( density ) );
return vec4( factor.toFloat().mix( output.rgb, color.toVec3() ), output.a );
}
} );

@@ -20,3 +20,3 @@ import BRDF_Lambert from './BSDF/BRDF_Lambert.js';

import { screenSize } from '../display/ScreenNode.js';
import { viewportMipTexture } from '../display/ViewportTextureNode.js';
import { viewportMipTexture, viewportOpaqueMipTexture } from '../display/ViewportTextureNode.js';
import { textureBicubicLevel } from '../accessors/TextureBicubic.js';

@@ -73,3 +73,3 @@ import { Loop } from '../utils/LoopNode.js';

const viewportBackSideTexture = /*@__PURE__*/ viewportMipTexture();
const viewportFrontSideTexture = /*@__PURE__*/ viewportMipTexture();
const viewportFrontSideTexture = /*@__PURE__*/ viewportOpaqueMipTexture();

@@ -663,3 +663,3 @@ const getTransmissionSample = /*@__PURE__*/ Fn( ( [ fragCoord, roughness, ior ], { material } ) => {

// http://blog.selfshadow.com/publications/s2016-advances/s2016_ltc_fresnel.pdf
const fresnel = specularColorBlended.mul( t2.x ).add( specularColorBlended.oneMinus().mul( t2.y ) ).toVar();
const fresnel = specularColorBlended.mul( t2.x ).add( specularF90.sub( specularColorBlended ).mul( t2.y ) ).toVar();

@@ -670,2 +670,24 @@ reflectedLight.directSpecular.addAssign( lightColor.mul( fresnel ).mul( LTC_Evaluate( { N, V, P, mInv, p0, p1, p2, p3 } ) ) );

if ( this.clearcoat === true ) {
const Ncc = clearcoatNormalView;
const uvClearcoat = LTC_Uv( { N: Ncc, V, roughness: clearcoatRoughness } );
const t1Clearcoat = ltc_1.sample( uvClearcoat );
const t2Clearcoat = ltc_2.sample( uvClearcoat );
const mInvClearcoat = mat3(
vec3( t1Clearcoat.x, 0, t1Clearcoat.y ),
vec3( 0, 1, 0 ),
vec3( t1Clearcoat.z, 0, t1Clearcoat.w )
);
// LTC Fresnel Approximation for clearcoat
const fresnelClearcoat = clearcoatF0.mul( t2Clearcoat.x ).add( clearcoatF90.sub( clearcoatF0 ).mul( t2Clearcoat.y ) );
this.clearcoatSpecularDirect.addAssign( lightColor.mul( fresnelClearcoat ).mul( LTC_Evaluate( { N: Ncc, V, P, mInv: mInvClearcoat, p0, p1, p2, p3 } ) ) );
}
}

@@ -672,0 +694,0 @@

import Node from '../core/Node.js';
import NodeError from '../core/NodeError.js';
import { getValueType } from '../core/NodeUtils.js';

@@ -114,3 +115,3 @@ import { buffer } from '../accessors/BufferNode.js';

throw new Error( 'THREE.TSL: No "ConstNode" found in node graph.' );
throw new NodeError( 'THREE.TSL: No "ConstNode" found in node graph.', this.stackTrace );

@@ -171,4 +172,5 @@ }

const nodeType = this.getNodeType( builder );
const uniformBufferSize = object.count * 4 * 4; // count * 4 components * 4 bytes (float)
if ( object.count <= 4096 ) {
if ( uniformBufferSize <= builder.getUniformBufferLimit() ) {

@@ -175,0 +177,0 @@ output = buffer( array, 'vec4', object.count ).element( instanceIndex ).convert( nodeType );

import Node from '../core/Node.js';
import StackTrace from '../core/StackTrace.js';
import { NodeUpdateType } from '../core/constants.js';

@@ -151,3 +152,3 @@ import { addMethodChaining, nodeObject } from '../tsl/TSLCore.js';

warn( 'TSL: "label()" has been deprecated. Use "setName()" instead.' ); // @deprecated r179
warn( 'TSL: "label()" has been deprecated. Use "setName()" instead.', new StackTrace() ); // @deprecated r179

@@ -246,3 +247,3 @@ return this.setName( name );

error( 'TSL: compute() workgroupSize must have 1, 2, or 3 elements' );
error( 'TSL: compute() workgroupSize must have 1, 2, or 3 elements', new StackTrace() );

@@ -257,3 +258,3 @@ }

error( `TSL: compute() workgroupSize element at index [ ${ i } ] must be a positive integer` );
error( `TSL: compute() workgroupSize element at index [ ${ i } ] must be a positive integer`, new StackTrace() );

@@ -270,3 +271,3 @@ }

return nodeObject( new ComputeNode( nodeObject( node ), workgroupSize ) );
return new ComputeNode( nodeObject( node ), workgroupSize );

@@ -273,0 +274,0 @@ };

import ArrayElementNode from '../utils/ArrayElementNode.js';
import Node from '../core/Node.js';
import { warn } from '../../utils.js';
import StackTrace from '../core/StackTrace.js';

@@ -152,3 +153,3 @@ /**

warn( 'TSL: "label()" has been deprecated. Use "setName()" instead.' ); // @deprecated r179
warn( 'TSL: "label()" has been deprecated. Use "setName()" instead.', new StackTrace() ); // @deprecated r179

@@ -155,0 +156,0 @@ return this.setName( name );

@@ -12,3 +12,3 @@ import LightingNode from './LightingNode.js';

const _envNodeCache = new WeakMap();
const _rendererCache = new WeakMap();

@@ -59,4 +59,6 @@ /**

let cacheEnvNode = _envNodeCache.get( value );
const cache = this._getPMREMNodeCache( builder.renderer );
let cacheEnvNode = cache.get( value );
if ( cacheEnvNode === undefined ) {

@@ -66,3 +68,3 @@

_envNodeCache.set( value, cacheEnvNode );
cache.set( value, cacheEnvNode );

@@ -107,2 +109,25 @@ }

/**
* Returns the PMREM node cache of the current renderer.
*
* @private
* @param {Renderer} renderer - The current renderer.
* @return {WeakMap} The node cache.
*/
_getPMREMNodeCache( renderer ) {
let pmremCache = _rendererCache.get( renderer );
if ( pmremCache === undefined ) {
pmremCache = new WeakMap();
_rendererCache.set( renderer, pmremCache );
}
return pmremCache;
}
}

@@ -109,0 +134,0 @@

@@ -10,3 +10,3 @@ import ShadowNode from './ShadowNode.js';

import { Color } from '../../math/Color.js';
import { BasicShadowMap, LessCompare, WebGPUCoordinateSystem } from '../../constants.js';
import { BasicShadowMap, GreaterEqualCompare, LessEqualCompare, WebGPUCoordinateSystem } from '../../constants.js';
import { CubeDepthTexture } from '../../textures/CubeDepthTexture.js';

@@ -16,2 +16,3 @@ import { screenCoordinate } from '../display/ScreenNode.js';

import { abs, normalize, cross } from '../math/MathNode.js';
import { viewZToPerspectiveDepth, viewZToReversedPerspectiveDepth } from '../display/ViewportDepthNode.js';

@@ -98,11 +99,12 @@ const _clearColor = /*@__PURE__*/ new Color();

const pointShadowFilter = /*@__PURE__*/ Fn( ( { filterFn, depthTexture, shadowCoord, shadow } ) => {
const pointShadowFilter = /*@__PURE__*/ Fn( ( { filterFn, depthTexture, shadowCoord, shadow }, builder ) => {
// for point lights, the uniform @vShadowCoord is re-purposed to hold
// the vector from the light to the world-space position of the fragment.
const lightToPosition = shadowCoord.xyz.toVar();
const lightToPositionLength = lightToPosition.length();
const shadowPosition = shadowCoord.xyz.toConst();
const shadowPositionAbs = shadowPosition.abs().toConst();
const viewZ = shadowPositionAbs.x.max( shadowPositionAbs.y ).max( shadowPositionAbs.z );
const cameraNearLocal = uniform( 'float' ).setGroup( renderGroup ).onRenderUpdate( () => shadow.camera.near );
const cameraFarLocal = uniform( 'float' ).setGroup( renderGroup ).onRenderUpdate( () => shadow.camera.far );
const shadowCameraNear = uniform( 'float' ).setGroup( renderGroup ).onRenderUpdate( () => shadow.camera.near );
const shadowCameraFar = uniform( 'float' ).setGroup( renderGroup ).onRenderUpdate( () => shadow.camera.far );
const bias = reference( 'bias', 'float', shadow ).setGroup( renderGroup );

@@ -112,10 +114,20 @@

If( lightToPositionLength.sub( cameraFarLocal ).lessThanEqual( 0.0 ).and( lightToPositionLength.sub( cameraNearLocal ).greaterThanEqual( 0.0 ) ), () => {
If( viewZ.sub( shadowCameraFar ).lessThanEqual( 0.0 ).and( viewZ.sub( shadowCameraNear ).greaterThanEqual( 0.0 ) ), () => {
// dp = normalized distance from light to fragment position
const dp = lightToPositionLength.sub( cameraNearLocal ).div( cameraFarLocal.sub( cameraNearLocal ) ).toVar(); // need to clamp?
dp.addAssign( bias );
let dp;
if ( builder.renderer.reversedDepthBuffer ) {
dp = viewZToReversedPerspectiveDepth( viewZ.negate(), shadowCameraNear, shadowCameraFar );
dp.subAssign( bias );
} else {
dp = viewZToPerspectiveDepth( viewZ.negate(), shadowCameraNear, shadowCameraFar );
dp.addAssign( bias );
}
// bd3D = base direction 3D (direction from light to fragment)
const bd3D = lightToPosition.normalize();
const bd3D = shadowPosition.normalize();

@@ -212,3 +224,3 @@ // percentage-closer filtering using cube texture sampling

depthTexture.name = 'PointShadowDepthTexture';
depthTexture.compareFunction = LessCompare;
depthTexture.compareFunction = builder.renderer.reversedDepthBuffer ? GreaterEqualCompare : LessEqualCompare;

@@ -215,0 +227,0 @@ const shadowMap = builder.createCubeRenderTarget( shadow.mapSize.width );

@@ -8,6 +8,5 @@ import { float, vec2, vec4, If, Fn } from '../tsl/TSLBase.js';

import NodeMaterial from '../../materials/nodes/NodeMaterial.js';
import { objectPosition } from '../accessors/Object3DNode.js';
import { positionWorld } from '../accessors/Position.js';
import { screenCoordinate } from '../display/ScreenNode.js';
import { interleavedGradientNoise, vogelDiskSample } from '../utils/PostProcessingUtils.js';
import { NoBlending } from '../../constants.js';

@@ -178,3 +177,3 @@ const shadowMaterialLib = /*@__PURE__*/ new WeakMap();

*/
export const VSMShadowFilter = /*@__PURE__*/ Fn( ( { depthTexture, shadowCoord, depthLayer } ) => {
export const VSMShadowFilter = /*@__PURE__*/ Fn( ( { depthTexture, shadowCoord, depthLayer }, builder ) => {

@@ -194,49 +193,24 @@ let distribution = texture( depthTexture ).sample( shadowCoord.xy );

const hardShadow = step( shadowCoord.z, mean );
const hardShadow = ( builder.renderer.reversedDepthBuffer ) ? step( mean, shadowCoord.z ) : step( shadowCoord.z, mean );
// Early return if fully lit
If( hardShadow.equal( 1.0 ), () => {
const output = float( 1 ).toVar(); // default, fully lit
return float( 1.0 );
If( hardShadow.notEqual( 1.0 ), () => {
} );
// Distance from mean
const d = shadowCoord.z.sub( mean );
// Distance from mean
const d = shadowCoord.z.sub( mean );
// Chebyshev's inequality for upper bound on probability
let p_max = variance.div( variance.add( d.mul( d ) ) );
// Chebyshev's inequality for upper bound on probability
let p_max = variance.div( variance.add( d.mul( d ) ) );
// Reduce light bleeding by remapping [amount, 1] to [0, 1]
p_max = clamp( sub( p_max, 0.3 ).div( 0.65 ) );
// Reduce light bleeding by remapping [amount, 1] to [0, 1]
p_max = clamp( sub( p_max, 0.3 ).div( 0.65 ) );
output.assign( max( hardShadow, p_max ) );
return max( hardShadow, p_max );
} );
return output;
} );
//
const linearDistance = /*@__PURE__*/ Fn( ( [ position, cameraNear, cameraFar ] ) => {
let dist = positionWorld.sub( position ).length();
dist = dist.sub( cameraNear ).div( cameraFar.sub( cameraNear ) );
dist = dist.saturate(); // clamp to [ 0, 1 ]
return dist;
} );
const linearShadowDistance = ( light ) => {
const camera = light.shadow.camera;
const nearDistance = reference( 'near', 'float', camera ).setGroup( renderGroup );
const farDistance = reference( 'far', 'float', camera ).setGroup( renderGroup );
const referencePosition = objectPosition( light );
return linearDistance( referencePosition, nearDistance, farDistance );
};
/**

@@ -262,9 +236,7 @@ * Retrieves or creates a shadow material for the given light source.

const depthNode = light.isPointLight ? linearShadowDistance( light ) : null;
material = new NodeMaterial();
material.colorNode = vec4( 0, 0, 0, 1 );
material.depthNode = depthNode;
material.isShadowPassMaterial = true; // Use to avoid other overrideMaterial override material.colorNode unintentionally when using material.shadowNode
material.name = 'ShadowMaterial';
material.blending = NoBlending;
material.fog = false;

@@ -271,0 +243,0 @@

@@ -14,3 +14,3 @@ import ShadowBaseNode, { shadowPositionWorld } from './ShadowBaseNode.js';

import { screenCoordinate } from '../display/ScreenNode.js';
import { HalfFloatType, LessCompare, RGFormat, VSMShadowMap, WebGPUCoordinateSystem } from '../../constants.js';
import { GreaterEqualCompare, HalfFloatType, LessEqualCompare, LinearFilter, NearestFilter, PCFShadowMap, PCFSoftShadowMap, RGFormat, VSMShadowMap } from '../../constants.js';
import { renderGroup } from '../core/UniformGroupNode.js';

@@ -23,3 +23,2 @@ import { viewZToLogarithmicDepth } from '../display/ViewportDepthNode.js';

import ChainMap from '../../renderers/common/ChainMap.js';
import { warn } from '../../utils.js';
import { textureSize } from '../accessors/TextureSizeNode.js';

@@ -347,3 +346,3 @@ import { uv } from '../accessors/UV.js';

const bias = reference( 'bias', 'float', shadow ).setGroup( renderGroup );
const bias = shadow.biasNode || reference( 'bias', 'float', shadow ).setGroup( renderGroup );

@@ -359,8 +358,2 @@ let shadowCoord = shadowPosition;

if ( renderer.coordinateSystem === WebGPUCoordinateSystem ) {
coordZ = coordZ.mul( 2 ).sub( 1 ); // WebGPU: Conversion [ 0, 1 ] to [ - 1, 1 ]
}
} else {

@@ -384,3 +377,3 @@

shadowCoord.y.oneMinus(), // follow webgpu standards
coordZ.add( bias )
renderer.reversedDepthBuffer ? coordZ.sub( bias ) : coordZ.add( bias )
);

@@ -409,3 +402,3 @@

depthTexture.name = 'ShadowDepthTexture';
depthTexture.compareFunction = LessCompare;
depthTexture.compareFunction = builder.renderer.reversedDepthBuffer ? GreaterEqualCompare : LessEqualCompare;

@@ -433,6 +426,18 @@ const shadowMap = builder.createRenderTarget( shadow.mapSize.width, shadow.mapSize.height );

const { depthTexture, shadowMap } = this.setupRenderTarget( shadow, builder );
const shadowMapType = renderer.shadowMap.type;
const { depthTexture, shadowMap } = this.setupRenderTarget( shadow, builder );
if ( shadowMapType === PCFShadowMap || shadowMapType === PCFSoftShadowMap ) {
depthTexture.minFilter = LinearFilter;
depthTexture.magFilter = LinearFilter;
} else {
depthTexture.minFilter = NearestFilter;
depthTexture.magFilter = NearestFilter;
}
shadow.camera.coordinateSystem = camera.coordinateSystem;

@@ -529,15 +534,19 @@ shadow.camera.updateProjectionMatrix();

if ( shadowMap.texture.isCubeTexture ) {
if ( renderer.shadowMap.transmitted === true ) {
// For cube shadow maps (point lights), use cubeTexture with vec3 coordinates
shadowColor = cubeTexture( shadowMap.texture, shadowCoord.xyz );
if ( shadowMap.texture.isCubeTexture ) {
} else {
// For cube shadow maps (point lights), use cubeTexture with vec3 coordinates
shadowColor = cubeTexture( shadowMap.texture, shadowCoord.xyz );
shadowColor = texture( shadowMap.texture, shadowCoord );
} else {
if ( depthTexture.isArrayTexture ) {
shadowColor = texture( shadowMap.texture, shadowCoord );
shadowColor = shadowColor.depth( this.depthLayer );
if ( depthTexture.isArrayTexture ) {
shadowColor = shadowColor.depth( this.depthLayer );
}
}

@@ -547,4 +556,16 @@

const shadowOutput = mix( 1, shadowNode.rgb.mix( shadowColor, 1 ), shadowIntensity.mul( shadowColor.a ) ).toVar();
//
let shadowOutput;
if ( shadowColor ) {
shadowOutput = mix( 1, shadowNode.rgb.mix( shadowColor, 1 ), shadowIntensity.mul( shadowColor.a ) ).toVar();
} else {
shadowOutput = mix( 1, shadowNode, shadowIntensity ).toVar();
}
this.shadowMap = shadowMap;

@@ -557,14 +578,20 @@ this.shadow.map = shadowMap;

return shadowOutput.toInspector( `${ inspectName } / Color`, () => {
if ( shadowColor ) {
if ( this.shadowMap.texture.isCubeTexture ) {
shadowOutput.toInspector( `${ inspectName } / Color`, () => {
return cubeTexture( this.shadowMap.texture );
if ( this.shadowMap.texture.isCubeTexture ) {
}
return cubeTexture( this.shadowMap.texture );
return texture( this.shadowMap.texture );
}
} ).toInspector( `${ inspectName } / Depth`, () => {
return texture( this.shadowMap.texture );
} );
}
return shadowOutput.toInspector( `${ inspectName } / Depth`, () => {
// TODO: Use linear depth

@@ -617,8 +644,2 @@

if ( builder.material.shadowNode ) { // @deprecated, r171
warn( 'NodeMaterial: ".shadowNode" is deprecated. Use ".castShadowNode" instead.' );
}
if ( builder.material.receivedShadowNode ) {

@@ -625,0 +646,0 @@

@@ -177,3 +177,3 @@ import Node from '../core/Node.js';

warn( 'TSL: Return statement used in an inline \'Fn()\'. Define a layout struct to allow return values.' );
warn( 'TSL: Return statement used in an inline \'Fn()\'. Define a layout struct to allow return values.', this.stackTrace );

@@ -208,3 +208,3 @@ ifSnippet = '// ' + ifSnippet;

warn( 'TSL: Return statement used in an inline \'Fn()\'. Define a layout struct to allow return values.' );
warn( 'TSL: Return statement used in an inline \'Fn()\'. Define a layout struct to allow return values.', this.stackTrace );

@@ -211,0 +211,0 @@ elseSnippet = '// ' + elseSnippet;

import TempNode from '../core/TempNode.js';
import { sub, mul, div, mod, equal } from './OperatorNode.js';
import { sub, mul, div, mod } from './OperatorNode.js';
import { addMethodChaining, nodeObject, nodeProxyIntent, float, vec2, vec3, vec4, Fn } from '../tsl/TSLCore.js';

@@ -294,3 +294,3 @@ import { WebGLCoordinateSystem, WebGPUCoordinateSystem } from '../../constants.js';

warn( `TSL: '${ method }' is not supported in the ${ builder.shaderStage } stage.` );
warn( `TSL: '${ method }' is not supported in the ${ builder.shaderStage } stage.`, this.stackTrace );

@@ -789,19 +789,2 @@ method = '/*' + method + '*/';

/**
* Returns `true` if `x` equals `y`.
*
* @tsl
* @function
* @param {Node | number} x - The first parameter.
* @param {Node | number} y - The second parameter.
* @deprecated since r175. Use {@link equal} instead.
* @returns {Node<bool>}
*/
export const equals = ( x, y ) => { // @deprecated, r172
warn( 'TSL: "equals" is deprecated. Use "equal" inside a vector instead, like: "bvec*( equal( ... ) )"' );
return equal( x, y );
};
/**
* Returns the least of the given values.

@@ -986,3 +969,3 @@ *

*/
export const clamp = ( value, low = 0, high = 1 ) => nodeObject( new MathNode( MathNode.CLAMP, nodeObject( value ), nodeObject( low ), nodeObject( high ) ) );
export const clamp = ( value, low = 0, high = 1 ) => new MathNode( MathNode.CLAMP, nodeObject( value ), nodeObject( low ), nodeObject( high ) );

@@ -1087,20 +1070,2 @@ /**

/**
* Returns the arc-tangent of the quotient of its parameters.
*
* @tsl
* @function
* @deprecated since r172. Use {@link atan} instead.
*
* @param {Node | number} y - The y parameter.
* @param {Node | number} x - The x parameter.
* @returns {Node}
*/
export const atan2 = ( y, x ) => { // @deprecated, r172
warn( 'TSL: "atan2" is overloaded. Use "atan" instead.' );
return atan( y, x );
};
// GLSL alias function

@@ -1115,3 +1080,2 @@

addMethodChaining( 'any', any );
addMethodChaining( 'equals', equals );

@@ -1148,3 +1112,2 @@ addMethodChaining( 'radians', radians );

addMethodChaining( 'fwidth', fwidth );
addMethodChaining( 'atan2', atan2 );
addMethodChaining( 'min', min );

@@ -1151,0 +1114,0 @@ addMethodChaining( 'max', max );

import { WebGLCoordinateSystem } from '../../constants.js';
import TempNode from '../core/TempNode.js';
import StackTrace from '../core/StackTrace.js';
import { addMethodChaining, Fn, int, nodeProxyIntent } from '../tsl/TSLCore.js';

@@ -746,3 +747,3 @@ import { warn } from '../../utils.js';

warn( 'TSL: "modInt()" is deprecated. Use "mod( int( ... ) )" instead.' );
warn( 'TSL: "modInt()" is deprecated. Use "mod( int( ... ) )" instead.', new StackTrace() );
return mod( int( a ), int( b ) );

@@ -749,0 +750,0 @@

@@ -9,9 +9,11 @@ // constants

export { default as BypassNode } from './core/BypassNode.js';
export { default as IsolateNode } from './core/IsolateNode.js';
export { default as ConstNode } from './core/ConstNode.js';
export { default as ContextNode } from './core/ContextNode.js';
export { default as IndexNode } from './core/IndexNode.js';
export { default as InputNode } from './core/InputNode.js';
export { default as InspectorNode } from './core/InspectorNode.js';
export { default as IsolateNode } from './core/IsolateNode.js';
export { default as LightingModel } from './core/LightingModel.js';
export { default as MRTNode } from './core/MRTNode.js';
export { default as Node } from './core/Node.js';
export { default as VarNode } from './core/VarNode.js';
export { default as NodeAttribute } from './core/NodeAttribute.js';

@@ -21,2 +23,3 @@ export { default as NodeBuilder } from './core/NodeBuilder.js';

export { default as NodeCode } from './core/NodeCode.js';
export { default as NodeError } from './core/NodeError.js';
export { default as NodeFrame } from './core/NodeFrame.js';

@@ -27,14 +30,15 @@ export { default as NodeFunctionInput } from './core/NodeFunctionInput.js';

export { default as NodeVarying } from './core/NodeVarying.js';
export { default as OutputStructNode } from './core/OutputStructNode.js';
export { default as ParameterNode } from './core/ParameterNode.js';
export { default as PropertyNode } from './core/PropertyNode.js';
export { default as StackNode } from './core/StackNode.js';
export { default as StackTrace } from './core/StackTrace.js';
export { default as StructNode } from './core/StructNode.js';
export { default as StructTypeNode } from './core/StructTypeNode.js';
export { default as SubBuildNode } from './core/SubBuildNode.js';
export { default as TempNode } from './core/TempNode.js';
export { default as UniformGroupNode } from './core/UniformGroupNode.js';
export { default as UniformNode } from './core/UniformNode.js';
export { default as VarNode } from './core/VarNode.js';
export { default as VaryingNode } from './core/VaryingNode.js';
export { default as StructNode } from './core/StructNode.js';
export { default as StructTypeNode } from './core/StructTypeNode.js';
export { default as OutputStructNode } from './core/OutputStructNode.js';
export { default as MRTNode } from './core/MRTNode.js';
export { default as SubBuildNode } from './core/SubBuildNode.js';

@@ -44,50 +48,37 @@ import * as NodeUtils from './core/NodeUtils.js';

// utils
export { default as ArrayElementNode } from './utils/ArrayElementNode.js';
export { default as ConvertNode } from './utils/ConvertNode.js';
export { default as FunctionOverloadingNode } from './utils/FunctionOverloadingNode.js';
export { default as JoinNode } from './utils/JoinNode.js';
export { default as LoopNode } from './utils/LoopNode.js';
export { default as MaxMipLevelNode } from './utils/MaxMipLevelNode.js';
export { default as RemapNode } from './utils/RemapNode.js';
export { default as RotateNode } from './utils/RotateNode.js';
export { default as SetNode } from './utils/SetNode.js';
export { default as SplitNode } from './utils/SplitNode.js';
export { default as SpriteSheetUVNode } from './utils/SpriteSheetUVNode.js';
export { default as StorageArrayElementNode } from './utils/StorageArrayElementNode.js';
export { default as ReflectorNode } from './utils/ReflectorNode.js';
export { default as RTTNode } from './utils/RTTNode.js';
export { default as MemberNode } from './utils/MemberNode.js';
export { default as DebugNode } from './utils/DebugNode.js';
export { default as EventNode } from './utils/EventNode.js';
// math
export { default as BitcastNode } from './math/BitcastNode.js';
// accessors
export { default as UniformArrayNode } from './accessors/UniformArrayNode.js';
export { default as BatchNode } from './accessors/BatchNode.js';
export { default as BufferAttributeNode } from './accessors/BufferAttributeNode.js';
export { default as BufferNode } from './accessors/BufferNode.js';
export { default as VertexColorNode } from './accessors/VertexColorNode.js';
export { default as BuiltinNode } from './accessors/BuiltinNode.js';
export { default as ClippingNode } from './accessors/ClippingNode.js';
export { default as CubeTextureNode } from './accessors/CubeTextureNode.js';
export { default as InstanceNode } from './accessors/InstanceNode.js';
export { default as InstancedMeshNode } from './accessors/InstancedMeshNode.js';
export { default as BatchNode } from './accessors/BatchNode.js';
export { default as MaterialNode } from './accessors/MaterialNode.js';
export { default as MaterialReferenceNode } from './accessors/MaterialReferenceNode.js';
export { default as RendererReferenceNode } from './accessors/RendererReferenceNode.js';
export { default as ModelNode } from './accessors/ModelNode.js';
export { default as MorphNode } from './accessors/MorphNode.js';
export { default as ModelNode } from './accessors/ModelNode.js';
export { default as Object3DNode } from './accessors/Object3DNode.js';
export { default as PointUVNode } from './accessors/PointUVNode.js';
export { default as ReferenceBaseNode } from './accessors/ReferenceBaseNode.js';
export { default as ReferenceNode } from './accessors/ReferenceNode.js';
export { default as RendererReferenceNode } from './accessors/RendererReferenceNode.js';
export { default as SkinningNode } from './accessors/SkinningNode.js';
export { default as SceneNode } from './accessors/SceneNode.js';
export { default as StorageBufferNode } from './accessors/StorageBufferNode.js';
export { default as StorageTextureNode } from './accessors/StorageTextureNode.js';
export { default as Texture3DNode } from './accessors/Texture3DNode.js';
export { default as TextureNode } from './accessors/TextureNode.js';
export { default as TextureSizeNode } from './accessors/TextureSizeNode.js';
export { default as StorageTextureNode } from './accessors/StorageTextureNode.js';
export { default as Texture3DNode } from './accessors/Texture3DNode.js';
export { default as UniformArrayNode } from './accessors/UniformArrayNode.js';
export { default as UserDataNode } from './accessors/UserDataNode.js';
export { default as VelocityNode } from './accessors/VelocityNode.js';
export { default as VertexColorNode } from './accessors/VertexColorNode.js';
// code
export { default as CodeNode } from './code/CodeNode.js';
export { default as ExpressionNode } from './code/ExpressionNode.js';
export { default as FunctionCallNode } from './code/FunctionCallNode.js';
export { default as FunctionNode } from './code/FunctionNode.js';
// display

@@ -98,21 +89,12 @@ export { default as BumpMapNode } from './display/BumpMapNode.js';

export { default as NormalMapNode } from './display/NormalMapNode.js';
export { default as PosterizeNode } from './display/PosterizeNode.js';
export { default as PassNode } from './display/PassNode.js';
export { default as RenderOutputNode } from './display/RenderOutputNode.js';
export { default as ScreenNode } from './display/ScreenNode.js';
export { default as ToneMappingNode } from './display/ToneMappingNode.js';
export { default as ScreenNode } from './display/ScreenNode.js';
export { default as ToonOutlinePassNode } from './display/ToonOutlinePassNode.js';
export { default as ViewportDepthNode } from './display/ViewportDepthNode.js';
export { default as ViewportDepthTextureNode } from './display/ViewportDepthTextureNode.js';
export { default as ViewportSharedTextureNode } from './display/ViewportSharedTextureNode.js';
export { default as ViewportTextureNode } from './display/ViewportTextureNode.js';
export { default as ViewportSharedTextureNode } from './display/ViewportSharedTextureNode.js';
export { default as ViewportDepthTextureNode } from './display/ViewportDepthTextureNode.js';
export { default as ViewportDepthNode } from './display/ViewportDepthNode.js';
export { default as RenderOutputNode } from './display/RenderOutputNode.js';
export { default as PassNode } from './display/PassNode.js';
export { default as ToonOutlinePassNode } from './display/ToonOutlinePassNode.js';
// code
export { default as ExpressionNode } from './code/ExpressionNode.js';
export { default as CodeNode } from './code/CodeNode.js';
export { default as FunctionCallNode } from './code/FunctionCallNode.js';
export { default as FunctionNode } from './code/FunctionNode.js';
export { default as ScriptableNode } from './code/ScriptableNode.js';
export { default as ScriptableValueNode } from './code/ScriptableValueNode.js';
// geometry

@@ -122,27 +104,40 @@ export { default as RangeNode } from './geometry/RangeNode.js';

// gpgpu
export { default as AtomicFunctionNode } from './gpgpu/AtomicFunctionNode.js';
export { default as BarrierNode } from './gpgpu/BarrierNode.js';
export { default as ComputeBuiltinNode } from './gpgpu/ComputeBuiltinNode.js';
export { default as ComputeNode } from './gpgpu/ComputeNode.js';
export { default as SubgroupFunctionNode } from './gpgpu/SubgroupFunctionNode.js';
export { default as WorkgroupInfoNode } from './gpgpu/WorkgroupInfoNode.js';
// lighting
export { default as PointLightNode } from './lighting/PointLightNode.js';
export { default as AmbientLightNode } from './lighting/AmbientLightNode.js';
export { default as AnalyticLightNode } from './lighting/AnalyticLightNode.js';
export { default as AONode } from './lighting/AONode.js';
export { default as BasicEnvironmentNode } from './lighting/BasicEnvironmentNode.js';
export { default as BasicLightMapNode } from './lighting/BasicLightMapNode.js';
export { default as DirectionalLightNode } from './lighting/DirectionalLightNode.js';
export { default as RectAreaLightNode } from './lighting/RectAreaLightNode.js';
export { default as SpotLightNode } from './lighting/SpotLightNode.js';
export { default as EnvironmentNode } from './lighting/EnvironmentNode.js';
export { default as HemisphereLightNode } from './lighting/HemisphereLightNode.js';
export { default as IESSpotLightNode } from './lighting/IESSpotLightNode.js';
export { default as ProjectorLightNode } from './lighting/ProjectorLightNode.js';
export { default as AmbientLightNode } from './lighting/AmbientLightNode.js';
export { default as LightsNode } from './lighting/LightsNode.js';
export { default as IrradianceNode } from './lighting/IrradianceNode.js';
export { default as LightingContextNode } from './lighting/LightingContextNode.js';
export { default as LightingNode } from './lighting/LightingNode.js';
export { default as LightingContextNode } from './lighting/LightingContextNode.js';
export { default as HemisphereLightNode } from './lighting/HemisphereLightNode.js';
export { default as LightProbeNode } from './lighting/LightProbeNode.js';
export { default as EnvironmentNode } from './lighting/EnvironmentNode.js';
export { default as BasicEnvironmentNode } from './lighting/BasicEnvironmentNode.js';
export { default as IrradianceNode } from './lighting/IrradianceNode.js';
export { default as AONode } from './lighting/AONode.js';
export { default as AnalyticLightNode } from './lighting/AnalyticLightNode.js';
export { default as LightsNode } from './lighting/LightsNode.js';
export { default as PointLightNode } from './lighting/PointLightNode.js';
export { default as PointShadowNode } from './lighting/PointShadowNode.js';
export { default as ProjectorLightNode } from './lighting/ProjectorLightNode.js';
export { default as RectAreaLightNode } from './lighting/RectAreaLightNode.js';
export { default as ShadowBaseNode } from './lighting/ShadowBaseNode.js';
export { default as ShadowNode } from './lighting/ShadowNode.js';
export { default as SpotLightNode } from './lighting/SpotLightNode.js';
// pmrem
export { default as PMREMNode } from './pmrem/PMREMNode.js';
// math
export { default as BitcastNode } from './math/BitcastNode.js';
export { default as BitcountNode } from './math/BitcountNode.js';
export { default as ConditionalNode } from './math/ConditionalNode.js';
export { default as MathNode } from './math/MathNode.js';
export { default as OperatorNode } from './math/OperatorNode.js';
export { default as PackFloatNode } from './math/PackFloatNode.js';
export { default as UnpackFloatNode } from './math/UnpackFloatNode.js';

@@ -152,4 +147,28 @@ // parsers

// pmrem
export { default as PMREMNode } from './pmrem/PMREMNode.js';
// utils
export { default as ArrayElementNode } from './utils/ArrayElementNode.js';
export { default as ConvertNode } from './utils/ConvertNode.js';
export { default as CubeMapNode } from './utils/CubeMapNode.js';
export { default as DebugNode } from './utils/DebugNode.js';
export { default as EventNode } from './utils/EventNode.js';
export { default as FlipNode } from './utils/FlipNode.js';
export { default as FunctionOverloadingNode } from './utils/FunctionOverloadingNode.js';
export { default as JoinNode } from './utils/JoinNode.js';
export { default as LoopNode } from './utils/LoopNode.js';
export { default as MaxMipLevelNode } from './utils/MaxMipLevelNode.js';
export { default as MemberNode } from './utils/MemberNode.js';
export { default as ReflectorNode } from './utils/ReflectorNode.js';
export { default as RemapNode } from './utils/RemapNode.js';
export { default as RotateNode } from './utils/RotateNode.js';
export { default as RTTNode } from './utils/RTTNode.js';
export { default as SampleNode } from './utils/SampleNode.js';
export { default as SetNode } from './utils/SetNode.js';
export { default as SplitNode } from './utils/SplitNode.js';
export { default as StorageArrayElementNode } from './utils/StorageArrayElementNode.js';
// lighting models
export { default as PhongLightingModel } from './functions/PhongLightingModel.js';
export { default as PhysicalLightingModel } from './functions/PhysicalLightingModel.js';

@@ -315,26 +315,20 @@ import { Fn, int, uint, float, vec2, vec3, vec4, If } from '../tsl/TSLBase.js';

// https://jcgt.org/published/0007/04/01/
const importanceSampleGGX_VNDF = /*@__PURE__*/ Fn( ( [ Xi, V_immutable, roughness_immutable ] ) => {
const importanceSampleGGX_VNDF = /*@__PURE__*/ Fn( ( [ Xi, V, roughness ] ) => {
const V = vec3( V_immutable ).toVar();
const roughness = float( roughness_immutable );
const alpha = roughness.mul( roughness ).toVar();
const alpha = roughness.mul( roughness ).toConst();
// Section 3.2: Transform view direction to hemisphere configuration
const Vh = normalize( vec3( alpha.mul( V.x ), alpha.mul( V.y ), V.z ) ).toVar();
// Section 4.1: Orthonormal basis
const lensq = Vh.x.mul( Vh.x ).add( Vh.y.mul( Vh.y ) );
const T1 = select( lensq.greaterThan( 0.0 ), vec3( Vh.y.negate(), Vh.x, 0.0 ).div( sqrt( lensq ) ), vec3( 1.0, 0.0, 0.0 ) ).toVar();
const T2 = cross( Vh, T1 ).toVar();
const T1 = vec3( 1.0, 0.0, 0.0 ).toConst();
const T2 = cross( V, T1 ).toConst();
// Section 4.2: Parameterization of projected area
const r = sqrt( Xi.x );
const phi = mul( 2.0, 3.14159265359 ).mul( Xi.y );
const t1 = r.mul( cos( phi ) ).toVar();
const r = sqrt( Xi.x ).toConst();
const phi = mul( 2.0, 3.14159265359 ).mul( Xi.y ).toConst();
const t1 = r.mul( cos( phi ) ).toConst();
const t2 = r.mul( sin( phi ) ).toVar();
const s = mul( 0.5, Vh.z.add( 1.0 ) );
const s = mul( 0.5, V.z.add( 1.0 ) ).toConst();
t2.assign( s.oneMinus().mul( sqrt( t1.mul( t1 ).oneMinus() ) ).add( s.mul( t2 ) ) );
// Section 4.3: Reprojection onto hemisphere
const Nh = T1.mul( t1 ).add( T2.mul( t2 ) ).add( Vh.mul( sqrt( max( 0.0, t1.mul( t1 ).add( t2.mul( t2 ) ).oneMinus() ) ) ) );
const Nh = T1.mul( t1 ).add( T2.mul( t2 ) ).add( V.mul( sqrt( max( 0.0, t1.mul( t1 ).add( t2.mul( t2 ) ).oneMinus() ) ) ) );

@@ -341,0 +335,0 @@ // Section 3.4: Transform back to ellipsoid configuration

@@ -43,3 +43,3 @@ // constants

export * from './utils/RotateNode.js';
export * from './utils/SpriteSheetUVNode.js';
export * from './utils/SpriteSheetUV.js';
export * from './utils/Timer.js';

@@ -85,3 +85,3 @@ export * from './utils/TriplanarTextures.js';

export * from './accessors/SkinningNode.js';
export * from './accessors/SceneNode.js';
export * from './accessors/SceneProperties.js';
export * from './accessors/StorageBufferNode.js';

@@ -104,3 +104,2 @@ export * from './accessors/Tangent.js';

export * from './display/NormalMapNode.js';
export * from './display/PosterizeNode.js';
export * from './display/ToneMappingNode.js';

@@ -125,4 +124,2 @@ export * from './display/ScreenNode.js';

export * from './code/FunctionNode.js';
export * from './code/ScriptableNode.js';
export * from './code/ScriptableValueNode.js';

@@ -129,0 +126,0 @@ // fog

@@ -10,2 +10,3 @@ import Node from '../core/Node.js';

import MemberNode from '../utils/MemberNode.js';
import StackTrace from '../core/StackTrace.js';
import { getValueFromType, getValueType } from '../core/NodeUtils.js';

@@ -22,2 +23,4 @@ import { warn, error } from '../../utils.js';

// No require StackTrace because this is internal API
if ( NodeElements.has( name ) ) {

@@ -71,3 +74,3 @@

error( 'TSL: No stack defined for assign operation. Make sure the assign is inside a Fn().' );
error( 'TSL: No stack defined for assign operation. Make sure the assign is inside a Fn().', new StackTrace() );

@@ -380,3 +383,3 @@ }

error( `TSL: "${ tslName }" parameter length is less than minimum required.` );
error( `TSL: "${ tslName }" parameter length is less than minimum required.`, new StackTrace() );

@@ -387,3 +390,3 @@ return params.concat( new Array( minParams - params.length ).fill( 0 ) );

error( `TSL: "${ tslName }" parameter length exceeds limit.` );
error( `TSL: "${ tslName }" parameter length exceeds limit.`, new StackTrace() );

@@ -879,3 +882,3 @@ return params.slice( 0, maxParams );

error( `TSL: Invalid parameter for the type "${ type }".` );
error( `TSL: Invalid parameter for the type "${ type }".`, new StackTrace() );

@@ -969,3 +972,3 @@ return new ConstNode( 0, type );

error( 'TSL: Invalid layout type.' );
error( 'TSL: Invalid layout type.', new StackTrace() );

@@ -1054,3 +1057,3 @@ }

error( 'TSL: "Fn()" was declared but not invoked. Try calling it like "Fn()( ...params )".' );
error( 'TSL: "Fn()" was declared but not invoked. Try calling it like "Fn()( ...params )".', this.stackTrace );

@@ -1209,4 +1212,4 @@ return builder.generateConst( type );

export const element = /*@__PURE__*/ nodeProxy( ArrayElementNode ).setParameterLength( 2 );
export const convert = ( node, types ) => nodeObject( new ConvertNode( nodeObject( node ), types ) );
export const split = ( node, channels ) => nodeObject( new SplitNode( nodeObject( node ), channels ) );
export const convert = ( node, types ) => new ConvertNode( nodeObject( node ), types );
export const split = ( node, channels ) => new SplitNode( nodeObject( node ), channels );

@@ -1228,3 +1231,3 @@ addMethodChaining( 'element', element );

warn( 'TSL: append() has been renamed to Stack().' );
warn( 'TSL: append() has been renamed to Stack().', new StackTrace() );
return Stack( node );

@@ -1236,5 +1239,5 @@

warn( 'TSL: .append() has been renamed to .toStack().' );
warn( 'TSL: .append() has been renamed to .toStack().', new StackTrace() );
return Stack( node );
} );

@@ -45,17 +45,17 @@ import TempNode from '../core/TempNode.js';

const title = '--- TSL debug - ' + builder.shaderStage + ' shader ---';
const border = '-'.repeat( title.length );
let code = '';
code += '// #' + title + '#\n';
code += builder.flow.code.replace( /^\t/mg, '' ) + '\n';
code += '/* ... */ ' + snippet + ' /* ... */\n';
code += '// #' + border + '#\n';
if ( callback !== null ) {
callback( builder, code );
callback( builder, snippet );
} else {
const title = '--- TSL debug - ' + builder.shaderStage + ' shader ---';
const border = '-'.repeat( title.length );
let code = '';
code += '// #' + title + '#\n';
code += builder.flow.code.replace( /^\t/mg, '' ) + '\n';
code += '/* ... */ ' + snippet + ' /* ... */\n';
code += '// #' + border + '#\n';
log( code );

@@ -82,4 +82,4 @@

*/
export const debug = ( node, callback = null ) => nodeObject( new DebugNode( nodeObject( node ), callback ) ).toStack();
export const debug = ( node, callback = null ) => new DebugNode( nodeObject( node ), callback ).toStack();
addMethodChaining( 'debug', debug );

@@ -74,3 +74,3 @@ import TempNode from '../core/TempNode.js';

error( `TSL: Length of parameters exceeds maximum length of function '${ type }()' type.` );
error( `TSL: Length of parameters exceeds maximum length of function '${ type }()' type.`, this.stackTrace );
break;

@@ -86,3 +86,3 @@

error( `TSL: Length of '${ type }()' data exceeds maximum length of output type.` );
error( `TSL: Length of '${ type }()' data exceeds maximum length of output type.`, this.stackTrace );

@@ -89,0 +89,0 @@ inputTypeLength = maxLength - length;

@@ -268,3 +268,3 @@ import Node from '../core/Node.js';

error( 'TSL: \'Loop( { update: ... } )\' is not a function, string or number.' );
error( 'TSL: \'Loop( { update: ... } )\' is not a function, string or number.', this.stackTrace );

@@ -271,0 +271,0 @@ updateSnippet = 'break /* invalid update */';

@@ -104,3 +104,3 @@ import Node from '../core/Node.js';

warn( `TSL: Member "${ this.property }" does not exist in struct.` );
warn( `TSL: Member "${ this.property }" does not exist in struct.`, this.stackTrace );

@@ -107,0 +107,0 @@ const type = this.getNodeType( builder );

import Node from '../core/Node.js';
import TextureNode from '../accessors/TextureNode.js';
import { nodeObject } from '../tsl/TSLBase.js';
import { NodeUpdateType } from '../core/constants.js';

@@ -137,6 +136,6 @@ import { screenUV } from '../display/ScreenNode.js';

this._depthNode = nodeObject( new ReflectorNode( {
this._depthNode = new ReflectorNode( {
defaultTexture: _defaultRT.depthTexture,
reflector: this._reflectorBaseNode
} ) );
} );

@@ -143,0 +142,0 @@ }

@@ -269,3 +269,3 @@ import { nodeObject } from '../tsl/TSLCore.js';

*/
export const rtt = ( node, ...params ) => nodeObject( new RTTNode( nodeObject( node ), ...params ) );
export const rtt = ( node, ...params ) => new RTTNode( nodeObject( node ), ...params );

@@ -272,0 +272,0 @@ /**

@@ -19,3 +19,3 @@ import { Fn, vec2 } from '../tsl/TSLBase.js';

* @function
* @param {function(Node):Node<vec2>} callback - A callback that receives the texture node
* @param {function(Node):Node<vec2>|Node<vec2>} callback - A callback that receives the texture node
* and must return the new uv coordinates.

@@ -27,4 +27,6 @@ * @param {Node} [node=null] - An optional node to which the context will be applied.

return context( node, { getUV: callback } );
const getUV = typeof callback === 'function' ? callback : () => callback;
return context( node, { getUV } );
}

@@ -31,0 +33,0 @@

@@ -960,8 +960,6 @@ import { BufferAttribute } from '../core/BufferAttribute.js';

// step the next geometry points to the shifted position
this._nextIndexStart = geometry.index ? geometryInfo.indexStart + geometryInfo.reservedIndexCount : 0;
this._nextVertexStart = geometryInfo.vertexStart + geometryInfo.reservedVertexCount;
}
this._nextIndexStart = nextIndexStart;
this._nextVertexStart = nextVertexStart;
this._visibilityChanged = true;

@@ -1109,3 +1107,3 @@

* @param {number} instanceId - The ID of an instance to set the color of.
* @param {Color} color - The color to set the instance to.
* @param {Color|Vector4} color - The color to set the instance to. Use a `Vector4` to also define alpha.
* @return {BatchedMesh} A reference to this batched mesh.

@@ -1134,4 +1132,4 @@ */

* @param {number} instanceId - The ID of an instance to get the color of.
* @param {Color} color - The target object that is used to store the method's result.
* @return {Color} The instance's color.
* @param {Color|Vector4} color - The target object that is used to store the method's result.
* @return {Color|Vector4} The instance's color. Use a `Vector4` to also retrieve alpha.
*/

@@ -1521,4 +1519,16 @@ getColorAt( instanceId, color ) {

const index = geometry.getIndex();
const bytesPerElement = index === null ? 1 : index.array.BYTES_PER_ELEMENT;
let bytesPerElement = index === null ? 1 : index.array.BYTES_PER_ELEMENT;
// the "wireframe" attribute implicitly creates a line attribute in the renderer, which is double
// the vertices to draw (3 lines per triangle) so we multiply the draw counts / starts and make
// assumptions about the index buffer byte size.
let multiDrawMultiplier = 1;
if ( material.wireframe ) {
multiDrawMultiplier = 2;
bytesPerElement = geometry.attributes.position.count > 65535 ? 4 : 2;
}
const instanceInfo = this._instanceInfo;

@@ -1603,4 +1613,4 @@ const multiDrawStarts = this._multiDrawStarts;

const item = list[ i ];
multiDrawStarts[ multiDrawCount ] = item.start * bytesPerElement;
multiDrawCounts[ multiDrawCount ] = item.count;
multiDrawStarts[ multiDrawCount ] = item.start * bytesPerElement * multiDrawMultiplier;
multiDrawCounts[ multiDrawCount ] = item.count * multiDrawMultiplier;
indirectArray[ multiDrawCount ] = item.index;

@@ -1635,4 +1645,4 @@ multiDrawCount ++;

const geometryInfo = geometryInfoList[ geometryId ];
multiDrawStarts[ multiDrawCount ] = geometryInfo.start * bytesPerElement;
multiDrawCounts[ multiDrawCount ] = geometryInfo.count;
multiDrawStarts[ multiDrawCount ] = geometryInfo.start * bytesPerElement * multiDrawMultiplier;
multiDrawCounts[ multiDrawCount ] = geometryInfo.count * multiDrawMultiplier;
indirectArray[ multiDrawCount ] = i;

@@ -1639,0 +1649,0 @@ multiDrawCount ++;

@@ -60,2 +60,11 @@ import { InstancedBufferAttribute } from '../core/InstancedBufferAttribute.js';

/**
* Represents the local transformation of all instances of the previous frame.
* Required for computing velocity. Maintained in {@link InstanceNode}.
*
* @type {?InstancedBufferAttribute}
* @default null
*/
this.previousInstanceMatrix = null;
/**
* Represents the color of all instances. You have to set its

@@ -189,2 +198,4 @@ * {@link BufferAttribute#needsUpdate} flag to true if you modify instanced data

if ( source.previousInstanceMatrix !== null ) this.previousInstanceMatrix = source.previousInstanceMatrix.clone();
if ( source.morphTexture !== null ) this.morphTexture = source.morphTexture.clone();

@@ -191,0 +202,0 @@ if ( source.instanceColor !== null ) this.instanceColor = source.instanceColor.clone();

@@ -682,2 +682,23 @@ let _vector2 = null;

/**
* Checks if the backend has the given compatibility.
*
* @abstract
* @param {string} name - The compatibility.
* @return {boolean} Whether the backend has the given compatibility or not.
*/
hasCompatibility( /*name*/ ) {
return false;
}
/**
* Initializes the render target defined in the given render context.
*
* @abstract
* @param {RenderContext} renderContext - The render context.
*/
initRenderTarget( /*renderContext*/ ) {}
/**
* Sets a dictionary for the given object into the

@@ -684,0 +705,0 @@ * internal data structure.

@@ -110,5 +110,8 @@ import DataMap from './DataMap.js';

// By using a w component of 0, the skybox will not translate when the camera moves through the scene
let viewProj = cameraProjectionMatrix.mul( modelViewMatrix.mul( vec4( modifiedPosition, 0.0 ) ) );
// by using a w component of 0, the skybox will not translate when the camera moves through the scene
const viewPosition = modelViewMatrix.mul( vec4( modifiedPosition, 0.0 ) );
// we force w=1.0 here to prevent the w_clip=0 divide-by-zero error for ortho cameras.
let viewProj = cameraProjectionMatrix.mul( vec4( viewPosition.xyz, 1.0 ) );
// force background to far plane so it does not occlude objects

@@ -204,4 +207,4 @@ viewProj = viewProj.setZ( viewProj.w );

renderContext.depthClearValue = renderer._clearDepth;
renderContext.stencilClearValue = renderer._clearStencil;
renderContext.depthClearValue = renderer.getClearDepth();
renderContext.stencilClearValue = renderer.getClearStencil();

@@ -208,0 +211,0 @@ renderContext.clearColor = renderer.autoClearColor === true;

@@ -18,5 +18,4 @@ let _id = 0;

* @param {number} index - The group index.
* @param {Array<Binding>} bindingsReference - An array of reference bindings.
*/
constructor( name = '', bindings = [], index = 0, bindingsReference = [] ) {
constructor( name = '', bindings = [], index = 0 ) {

@@ -45,9 +44,2 @@ /**

/**
* An array of reference bindings.
*
* @type {Array<Binding>}
*/
this.bindingsReference = bindingsReference;
/**
* The group's ID.

@@ -54,0 +46,0 @@ *

@@ -270,5 +270,14 @@ import DataMap from './DataMap.js';

const bindingData = backend.get( binding );
this.attributes.update( attribute, attributeType );
if ( bindingData.attribute !== attribute ) {
bindingData.attribute = attribute;
needsBindingsUpdate = true;
}
}

@@ -301,3 +310,3 @@

// generation: update the bindings if a new texture has been created
// generation: update the bindings if the binding refers to a different texture object

@@ -310,6 +319,8 @@ if ( binding.generation !== texturesTextureData.generation ) {

cacheBindings = false;
}
// keep track which bind groups refer to the current texture (this is needed for dispose)
texturesTextureData.bindGroups.add( bindGroup );
}

@@ -362,4 +373,2 @@

cacheBindings = false;
}

@@ -371,2 +380,8 @@

if ( binding.isBuffer && binding.updateRanges.length > 0 ) {
binding.clearUpdateRanges();
}
}

@@ -373,0 +388,0 @@

@@ -47,3 +47,3 @@ import { Group } from '../../objects/Group.js';

* is assumed to be static and does not change. E.g. no new objects are
* added to the group
* added to the group.
*

@@ -50,0 +50,0 @@ * If a change is required, an update can still be forced by setting the

@@ -6,3 +6,3 @@ import { equirectUV } from '../../nodes/utils/EquirectUV.js';

import { WebGLCubeRenderTarget } from '../../renderers/WebGLCubeRenderTarget.js';
import { RenderTarget } from '../../core/RenderTarget.js';
import { Scene } from '../../scenes/Scene.js';

@@ -12,6 +12,5 @@ import { CubeCamera } from '../../cameras/CubeCamera.js';

import { Mesh } from '../../objects/Mesh.js';
import { CubeTexture } from '../../textures/CubeTexture.js';
import { BackSide, NoBlending, LinearFilter, LinearMipmapLinearFilter } from '../../constants.js';
// @TODO: Consider rename WebGLCubeRenderTarget to just CubeRenderTarget
/**

@@ -21,5 +20,5 @@ * This class represents a cube render target. It is a special version

*
* @augments WebGLCubeRenderTarget
* @augments RenderTarget
*/
class CubeRenderTarget extends WebGLCubeRenderTarget {
class CubeRenderTarget extends RenderTarget {

@@ -34,3 +33,3 @@ /**

super( size, options );
super( size, size, options );

@@ -46,2 +45,23 @@ /**

const image = { width: size, height: size, depth: 1 };
const images = [ image, image, image, image, image, image ];
/**
* Overwritten with a different texture type.
*
* @type {DataArrayTexture}
*/
this.texture = new CubeTexture( images );
this._setTextureOptions( options );
// By convention -- likely based on the RenderMan spec from the 1990's -- cube maps are specified by WebGL (and three.js)
// in a coordinate system in which positive-x is to the right when looking up the positive-z axis -- in other words,
// in a left-handed coordinate system. By continuing this convention, preexisting cube maps continued to render correctly.
// three.js uses a right-handed coordinate system. So environment maps used in three.js appear to have px and nx swapped
// and the flag isRenderTargetTexture controls this conversion. The flip is not required when using WebGLCubeRenderTarget.texture
// as a cube texture (this is detected when isRenderTargetTexture is set to true for cube textures).
this.texture.isRenderTargetTexture = true;
}

@@ -106,4 +126,28 @@

/**
* Clears this cube render target.
*
* @param {Renderer} renderer - The renderer.
* @param {boolean} [color=true] - Whether the color buffer should be cleared or not.
* @param {boolean} [depth=true] - Whether the depth buffer should be cleared or not.
* @param {boolean} [stencil=true] - Whether the stencil buffer should be cleared or not.
*/
clear( renderer, color = true, depth = true, stencil = true ) {
const currentRenderTarget = renderer.getRenderTarget();
for ( let i = 0; i < 6; i ++ ) {
renderer.setRenderTarget( this, i );
renderer.clear( color, depth, stencil );
}
renderer.setRenderTarget( currentRenderTarget );
}
}
export default CubeRenderTarget;
import DataMap from './DataMap.js';
import { AttributeType } from './Constants.js';
import { arrayNeedsUint32 } from '../../utils.js';

@@ -22,2 +21,16 @@ import { Uint16BufferAttribute, Uint32BufferAttribute } from '../../core/BufferAttribute.js';

/**
* Returns the wireframe ID for the given geometry.
*
* @private
* @function
* @param {BufferGeometry} geometry - The geometry.
* @return {number} The ID.
*/
function getWireframeId( geometry ) {
return ( geometry.index !== null ) ? geometry.index.id : geometry.attributes.position.id;
}
/**
* Returns a wireframe index attribute for the given geometry.

@@ -67,4 +80,5 @@ *

const attribute = new ( arrayNeedsUint32( indices ) ? Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 );
const attribute = new ( geometryPosition.count >= 65535 ? Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 );
attribute.version = getWireframeVersion( geometry );
attribute.__id = getWireframeId( geometry );

@@ -352,3 +366,3 @@ return attribute;

} else if ( wireframeAttribute.version !== getWireframeVersion( geometry ) ) {
} else if ( wireframeAttribute.version !== getWireframeVersion( geometry ) || wireframeAttribute.__id !== getWireframeId( geometry ) ) {

@@ -355,0 +369,0 @@ this.attributes.delete( wireframeAttribute );

import { LightsNode } from '../../nodes/Nodes.js';
import ChainMap from './ChainMap.js';
const _defaultLights = /*@__PURE__*/ new LightsNode();
const _chainKeys = [];
const _weakMap = /*@__PURE__*/ new WeakMap();

@@ -15,16 +14,6 @@ /**

* @private
* @augments ChainMap
*/
class Lighting extends ChainMap {
class Lighting {
/**
* Constructs a lighting management component.
*/
constructor() {
super();
}
/**
* Creates a new lights node for the given array of lights.

@@ -48,3 +37,3 @@ *

*/
getNode( scene, camera ) {
getNode( scene ) {

@@ -55,16 +44,11 @@ // ignore post-processing

_chainKeys[ 0 ] = scene;
_chainKeys[ 1 ] = camera;
let node = _weakMap.get( scene );
let node = this.get( _chainKeys );
if ( node === undefined ) {
node = this.createNode();
this.set( _chainKeys, node );
_weakMap.set( scene, node );
}
_chainKeys.length = 0;
return node;

@@ -71,0 +55,0 @@

@@ -129,3 +129,3 @@ import BindGroup from '../BindGroup.js';

const bindingsGroup = new BindGroup( instanceGroup.name, [], instanceGroup.index, instanceGroup.bindingsReference );
const bindingsGroup = new BindGroup( instanceGroup.name, [], instanceGroup.index );
bindings.push( bindingsGroup );

@@ -132,0 +132,0 @@

@@ -49,9 +49,20 @@ import StorageBuffer from '../StorageBuffer.js';

/**
* The storage buffer attribute node.
*
* @type {StorageBufferAttribute}
*/
get attribute() {
return this.nodeUniform.value;
}
/**
* The storage buffer.
*
* @type {BufferAttribute}
* @type {Float32Array}
*/
get buffer() {
return this.nodeUniform.value;
return this.nodeUniform.value.array;

@@ -58,0 +69,0 @@ }

import DataMap from './DataMap.js';
import RenderPipeline from './RenderPipeline.js';
import RenderObjectPipeline from './RenderObjectPipeline.js';
import ComputePipeline from './ComputePipeline.js';

@@ -149,3 +149,3 @@ import ProgrammableStage from './ProgrammableStage.js';

* @param {?Array<Promise>} [promises=null] - An array of compilation promises which is only relevant in context of `Renderer.compileAsync()`.
* @return {RenderPipeline} The render pipeline.
* @return {RenderObjectPipeline} The render pipeline.
*/

@@ -348,3 +348,3 @@ getForRender( renderObject, promises = null ) {

* @param {?Array<Promise>} promises - An array of compilation promises which is only relevant in context of `Renderer.compileAsync()`.
* @return {ComputePipeline} The compute pipeline.
* @return {RenderObjectPipeline} The render pipeline.
*/

@@ -361,3 +361,3 @@ _getRenderPipeline( renderObject, stageVertex, stageFragment, cacheKey, promises ) {

pipeline = new RenderPipeline( cacheKey, stageVertex, stageFragment );
pipeline = new RenderObjectPipeline( cacheKey, stageVertex, stageFragment );

@@ -364,0 +364,0 @@ this.caches.set( cacheKey, pipeline );

@@ -1,23 +0,10 @@

import NodeMaterial from '../../materials/nodes/NodeMaterial.js';
import { ColorManagement } from '../../math/ColorManagement.js';
import { vec4, renderOutput } from '../../nodes/TSL.js';
import { NoToneMapping } from '../../constants.js';
import QuadMesh from '../../renderers/common/QuadMesh.js';
import RenderPipeline from './RenderPipeline.js';
import { warnOnce } from '../../utils.js';
/**
* This module is responsible to manage the post processing setups in apps.
* You usually create a single instance of this class and use it to define
* the output of your post processing effect chain.
* ```js
* const postProcessing = new PostProcessing( renderer );
* @deprecated since r183. Use {@link RenderPipeline} instead. PostProcessing has been renamed to RenderPipeline.
*
* const scenePass = pass( scene, camera );
*
* postProcessing.outputNode = scenePass;
* ```
*
* Note: This module can only be used with `WebGPURenderer`.
* This class is a wrapper for backward compatibility and will be removed in a future version.
*/
class PostProcessing {
class PostProcessing extends RenderPipeline {

@@ -29,199 +16,14 @@ /**

* @param {Node<vec4>} outputNode - An optional output node.
* @deprecated since r183. Use {@link RenderPipeline} instead.
*/
constructor( renderer, outputNode = vec4( 0, 0, 1, 1 ) ) {
constructor( renderer, outputNode ) {
/**
* A reference to the renderer.
*
* @type {Renderer}
*/
this.renderer = renderer;
warnOnce( 'PostProcessing: "PostProcessing" has been renamed to "RenderPipeline". Please update your code to use "THREE.RenderPipeline" instead.' ); // @deprecated, r183
/**
* A node which defines the final output of the post
* processing. This is usually the last node in a chain
* of effect nodes.
*
* @type {Node<vec4>}
*/
this.outputNode = outputNode;
super( renderer, outputNode );
/**
* Whether the default output tone mapping and color
* space transformation should be enabled or not.
*
* It is enabled by default by it must be disabled when
* effects must be executed after tone mapping and color
* space conversion. A typical example is FXAA which
* requires sRGB input.
*
* When set to `false`, the app must control the output
* transformation with `RenderOutputNode`.
*
* ```js
* const outputPass = renderOutput( scenePass );
* ```
*
* @type {boolean}
*/
this.outputColorTransform = true;
/**
* Must be set to `true` when the output node changes.
*
* @type {Node<vec4>}
*/
this.needsUpdate = true;
const material = new NodeMaterial();
material.name = 'PostProcessing';
/**
* The full screen quad that is used to render
* the effects.
*
* @private
* @type {QuadMesh}
*/
this._quadMesh = new QuadMesh( material );
this._quadMesh.name = 'Post-Processing';
/**
* The context of the post processing stack.
*
* @private
* @type {?Object}
* @default null
*/
this._context = null;
}
/**
* When `PostProcessing` is used to apply post processing effects,
* the application must use this version of `render()` inside
* its animation loop (not the one from the renderer).
*/
render() {
const renderer = this.renderer;
this._update();
if ( this._context.onBeforePostProcessing !== null ) this._context.onBeforePostProcessing();
const toneMapping = renderer.toneMapping;
const outputColorSpace = renderer.outputColorSpace;
renderer.toneMapping = NoToneMapping;
renderer.outputColorSpace = ColorManagement.workingColorSpace;
//
const currentXR = renderer.xr.enabled;
renderer.xr.enabled = false;
this._quadMesh.render( renderer );
renderer.xr.enabled = currentXR;
//
renderer.toneMapping = toneMapping;
renderer.outputColorSpace = outputColorSpace;
if ( this._context.onAfterPostProcessing !== null ) this._context.onAfterPostProcessing();
}
/**
* Returns the current context of the post processing stack.
*
* @readonly
* @type {?Object}
*/
get context() {
return this._context;
}
/**
* Frees internal resources.
*/
dispose() {
this._quadMesh.material.dispose();
}
/**
* Updates the state of the module.
*
* @private
*/
_update() {
if ( this.needsUpdate === true ) {
const renderer = this.renderer;
const toneMapping = renderer.toneMapping;
const outputColorSpace = renderer.outputColorSpace;
const context = {
postProcessing: this,
onBeforePostProcessing: null,
onAfterPostProcessing: null
};
let outputNode = this.outputNode;
if ( this.outputColorTransform === true ) {
outputNode = outputNode.context( context );
outputNode = renderOutput( outputNode, toneMapping, outputColorSpace );
} else {
context.toneMapping = toneMapping;
context.outputColorSpace = outputColorSpace;
outputNode = outputNode.context( context );
}
this._context = context;
this._quadMesh.material.fragmentNode = outputNode;
this._quadMesh.material.needsUpdate = true;
this.needsUpdate = false;
}
}
/**
 * Deprecated async variant of {@link PostProcessing#render}. Initializes
 * the renderer (if necessary) and then performs a synchronous render.
 *
 * @async
 * @deprecated since r181. Use `render()` and `await renderer.init();` instead.
 * @return {Promise} A Promise that resolves when the render has been finished.
 */
async renderAsync() {
warnOnce( 'PostProcessing: "renderAsync()" has been deprecated. Use "render()" and "await renderer.init();" when creating the renderer.' ); // @deprecated r181
await this.renderer.init();
this.render();
}
}
export default PostProcessing;

@@ -50,3 +50,4 @@ import ChainMap from './ChainMap.js';

_chainKeys.length = 0;
_chainKeys[ 0 ] = null;
_chainKeys[ 1 ] = null;

@@ -53,0 +54,0 @@ return bundle;

@@ -29,2 +29,10 @@ import { Vector4 } from '../../math/Vector4.js';

/**
* The MRT configuration.
*
* @type {?MRTNode}
* @default null
*/
this.mrt = null;
/**
* Whether the current active framebuffer has a color attachment.

@@ -219,2 +227,10 @@ *

/**
* The current camera.
*
* @type {?Camera}
* @default null
*/
this.camera = null;
/**
* This flag can be used for type testing.

@@ -221,0 +237,0 @@ *

@@ -1,10 +0,3 @@

import ChainMap from './ChainMap.js';
import RenderContext from './RenderContext.js';
import { Scene } from '../../scenes/Scene.js';
import { Camera } from '../../cameras/Camera.js';
const _chainKeys = [];
const _defaultScene = /*@__PURE__*/ new Scene();
const _defaultCamera = /*@__PURE__*/ new Camera();
/**

@@ -19,13 +12,21 @@ * This module manages the render contexts of the renderer.

* Constructs a new render context management component.
*
* @param {Renderer} renderer - The renderer.
*/
constructor() {
constructor( renderer ) {
/**
* A dictionary that manages render contexts in chain maps
* for each attachment state.
* The renderer.
*
* @type {Object<string,ChainMap>}
* @type {Renderer}
*/
this.chainMaps = {};
this.renderer = renderer;
/**
* A dictionary that manages render contexts.
*
* @type {Object<string,RenderContext>}
*/
this._renderContexts = {};
}

@@ -36,19 +37,11 @@

*
* @param {Scene} scene - The scene.
* @param {Camera} camera - The camera that is used to render the scene.
* @param {?RenderTarget} [renderTarget=null] - The active render target.
* @param {?MRT} [mrt=null] - The active multiple render target.
* @param {?MRTNode} [mrt=null] - The MRT configuration
* @param {?number} [callDepth=0] - The call depth of the renderer.
* @return {RenderContext} The render context.
*/
get( scene, camera, renderTarget = null, mrt = null ) {
get( renderTarget = null, mrt = null, callDepth = 0 ) {
_chainKeys[ 0 ] = scene;
_chainKeys[ 1 ] = camera;
//
if ( mrt !== null ) {
_chainKeys[ 2 ] = mrt;
}
let attachmentState;

@@ -63,24 +56,33 @@

const format = renderTarget.texture.format;
const type = renderTarget.texture.type;
const count = renderTarget.textures.length;
attachmentState = `${ count }:${ format }:${ renderTarget.samples }:${ renderTarget.depthBuffer }:${ renderTarget.stencilBuffer }`;
attachmentState = `${ count }:${ format }:${ type }:${ renderTarget.samples }:${ renderTarget.depthBuffer }:${ renderTarget.stencilBuffer }`;
}
const chainMap = this._getChainMap( attachmentState );
//
let renderState = chainMap.get( _chainKeys );
const mrtState = ( mrt !== null ) ? mrt.id : 'default';
//
const renderStateKey = attachmentState + '-' + mrtState + '-' + callDepth;
let renderState = this._renderContexts[ renderStateKey ];
if ( renderState === undefined ) {
renderState = new RenderContext();
renderState.mrt = mrt;
chainMap.set( _chainKeys, renderState );
this._renderContexts[ renderStateKey ] = renderState;
}
_chainKeys.length = 0;
if ( renderTarget !== null ) renderState.sampleCount = renderTarget.samples === 0 ? 1 : renderTarget.samples;
renderState.clearDepthValue = this.renderer.getClearDepth();
renderState.clearStencilValue = this.renderer.getClearStencil();
return renderState;

@@ -91,27 +93,2 @@

/**
* Returns a render context intended for clear operations.
*
* @param {?RenderTarget} [renderTarget=null] - The active render target.
* @return {RenderContext} The render context.
*/
getForClear( renderTarget = null ) {
return this.get( _defaultScene, _defaultCamera, renderTarget );
}
/**
* Returns a chain map for the given attachment state.
*
* @private
* @param {string} attachmentState - The attachment state.
* @return {ChainMap} The chain map.
*/
_getChainMap( attachmentState ) {
return this.chainMaps[ attachmentState ] || ( this.chainMaps[ attachmentState ] = new ChainMap() );
}
/**
* Frees internal resources.

@@ -121,3 +98,3 @@ */

this.chainMaps = {};
this._renderContexts = {};

@@ -124,0 +101,0 @@ }

@@ -60,3 +60,4 @@ import ChainMap from './ChainMap.js';

_chainKeys.length = 0;
_chainKeys[ 0 ] = null;
_chainKeys[ 1 ] = null;

@@ -63,0 +64,0 @@ return list;

@@ -514,5 +514,4 @@ import { hash, hashString } from '../../nodes/core/NodeUtils.js';

attribute = geometry.getAttribute( nodeAttribute.name );
attributesId[ nodeAttribute.name ] = attribute.id;
attributesId[ nodeAttribute.name ] = attribute.version;
}

@@ -886,3 +885,3 @@

cacheKey = hash( cacheKey, this.camera.id, this.renderer.contextNode.id, this.renderer.contextNode.version );
cacheKey = hash( cacheKey, this.renderer.contextNode.id, this.renderer.contextNode.version );

@@ -889,0 +888,0 @@ return cacheKey;

@@ -94,3 +94,4 @@ import ChainMap from './ChainMap.js';

// reuse chainArray
// set chain keys
_chainKeys[ 0 ] = object;

@@ -101,2 +102,4 @@ _chainKeys[ 1 ] = material;

//
let renderObject = chainMap.get( _chainKeys );

@@ -112,2 +115,8 @@

// update references
renderObject.camera = camera;
//
renderObject.updateClipping( clippingContext );

@@ -139,4 +148,11 @@

_chainKeys.length = 0;
// reset chain array
_chainKeys[ 0 ] = null;
_chainKeys[ 1 ] = null;
_chainKeys[ 2 ] = null;
_chainKeys[ 3 ] = null;
//
return renderObject;

@@ -143,0 +159,0 @@

@@ -1,40 +0,226 @@

import Pipeline from './Pipeline.js';
import NodeMaterial from '../../materials/nodes/NodeMaterial.js';
import { ColorManagement } from '../../math/ColorManagement.js';
import { vec4, renderOutput } from '../../nodes/TSL.js';
import { NoToneMapping } from '../../constants.js';
import QuadMesh from '../../renderers/common/QuadMesh.js';
import { warnOnce } from '../../utils.js';
/**
* Class for representing render pipelines.
* This module is responsible to manage the rendering pipeline setups in apps.
* You usually create a single instance of this class and use it to define
* the output of your render pipeline and post processing effect chain.
* ```js
* const renderPipeline = new RenderPipeline( renderer );
*
* @private
* @augments Pipeline
* const scenePass = pass( scene, camera );
*
* renderPipeline.outputNode = scenePass;
* ```
*
* Note: This module can only be used with `WebGPURenderer`.
*/
class RenderPipeline extends Pipeline {
class RenderPipeline {
/**
* Constructs a new render pipeline.
* Constructs a new render pipeline management module.
*
* @param {string} cacheKey - The pipeline's cache key.
* @param {ProgrammableStage} vertexProgram - The pipeline's vertex shader.
* @param {ProgrammableStage} fragmentProgram - The pipeline's fragment shader.
* @param {Renderer} renderer - A reference to the renderer.
* @param {Node<vec4>} outputNode - An optional output node.
*/
constructor( cacheKey, vertexProgram, fragmentProgram ) {
constructor( renderer, outputNode = vec4( 0, 0, 1, 1 ) ) {
super( cacheKey );
/**
* A reference to the renderer.
*
* @type {Renderer}
*/
this.renderer = renderer;
/**
* The pipeline's vertex shader.
* A node which defines the final output of the rendering
* pipeline. This is usually the last node in a chain
* of effect nodes.
*
* @type {ProgrammableStage}
* @type {Node<vec4>}
*/
this.vertexProgram = vertexProgram;
this.outputNode = outputNode;
/**
* The pipeline's fragment shader.
* Whether the default output tone mapping and color
* space transformation should be enabled or not.
*
* @type {ProgrammableStage}
* It is enabled by default but it must be disabled when
* effects must be executed after tone mapping and color
* space conversion. A typical example is FXAA which
* requires sRGB input.
*
* When set to `false`, the app must control the output
* transformation with `RenderOutputNode`.
*
* ```js
* const outputPass = renderOutput( scenePass );
* ```
*
* @type {boolean}
*/
this.fragmentProgram = fragmentProgram;
this.outputColorTransform = true;
/**
* Must be set to `true` when the output node changes.
*
* @type {boolean}
*/
this.needsUpdate = true;
const material = new NodeMaterial();
material.name = 'RenderPipeline';
/**
* The full screen quad that is used to render
* the effects.
*
* @private
* @type {QuadMesh}
*/
this._quadMesh = new QuadMesh( material );
this._quadMesh.name = 'Render Pipeline';
/**
* The context of the render pipeline stack.
*
* @private
* @type {?Object}
* @default null
*/
this._context = null;
}
/**
* When `RenderPipeline` is used to apply rendering pipeline and post processing effects,
* the application must use this version of `render()` inside
* its animation loop (not the one from the renderer).
*/
render() {
const renderer = this.renderer;
this._update();
if ( this._context.onBeforeRenderPipeline !== null ) this._context.onBeforeRenderPipeline();
const toneMapping = renderer.toneMapping;
const outputColorSpace = renderer.outputColorSpace;
renderer.toneMapping = NoToneMapping;
renderer.outputColorSpace = ColorManagement.workingColorSpace;
//
const currentXR = renderer.xr.enabled;
renderer.xr.enabled = false;
this._quadMesh.render( renderer );
renderer.xr.enabled = currentXR;
//
renderer.toneMapping = toneMapping;
renderer.outputColorSpace = outputColorSpace;
if ( this._context.onAfterRenderPipeline !== null ) this._context.onAfterRenderPipeline();
}
/**
 * Returns the current context of the render pipeline stack.
 * The context is (re)built by `_update()` and exposes the
 * `onBeforeRenderPipeline`/`onAfterRenderPipeline` callbacks.
 *
 * @readonly
 * @type {?Object}
 */
get context() {
return this._context;
}
/**
* Frees internal resources.
*/
dispose() {
this._quadMesh.material.dispose();
}
/**
* Updates the state of the module.
*
* @private
*/
_update() {
if ( this.needsUpdate === true ) {
const renderer = this.renderer;
const toneMapping = renderer.toneMapping;
const outputColorSpace = renderer.outputColorSpace;
const context = {
renderPipeline: this,
onBeforeRenderPipeline: null,
onAfterRenderPipeline: null
};
let outputNode = this.outputNode;
if ( this.outputColorTransform === true ) {
outputNode = outputNode.context( context );
outputNode = renderOutput( outputNode, toneMapping, outputColorSpace );
} else {
context.toneMapping = toneMapping;
context.outputColorSpace = outputColorSpace;
outputNode = outputNode.context( context );
}
this._context = context;
this._quadMesh.material.fragmentNode = outputNode;
this._quadMesh.material.needsUpdate = true;
this.needsUpdate = false;
}
}
/**
 * Deprecated async variant of {@link RenderPipeline#render}. Initializes
 * the renderer (if necessary) and then performs a synchronous render.
 *
 * @async
 * @deprecated since r181. Use `render()` and `await renderer.init();` instead.
 * @return {Promise} A Promise that resolves when the render has been finished.
 */
async renderAsync() {
warnOnce( 'RenderPipeline: "renderAsync()" has been deprecated. Use "render()" and "await renderer.init();" when creating the renderer.' ); // @deprecated r181
await this.renderer.init();
this.render();
}
}
export default RenderPipeline;

@@ -24,5 +24,6 @@ import Buffer from './Buffer.js';

*
* @private
* @type {BufferAttribute}
*/
this.attribute = attribute;
this._attribute = attribute;

@@ -40,4 +41,15 @@ /**

/**
* The storage buffer attribute.
*
* @type {BufferAttribute}
*/
get attribute() {
return this._attribute;
}
}
export default StorageBuffer;

@@ -321,2 +321,3 @@ import DataMap from './DataMap.js';

textureData.generation = texture.version;
textureData.bindGroups = new Set();

@@ -535,2 +536,17 @@ //

// delete cached bind groups so they don't point to destroyed textures
if ( textureData.bindGroups ) {
for ( const bindGroup of textureData.bindGroups ) {
const bindingsData = this.backend.get( bindGroup );
bindingsData.groups = undefined;
bindingsData.versions = undefined;
}
}
this.delete( texture );

@@ -537,0 +553,0 @@

@@ -57,3 +57,3 @@ import { warn } from '../../utils.js';

/**
* TODO
* The total frame duration until the next update.
*

@@ -73,5 +73,7 @@ * @type {number}

/**
* TODO
* This property is used to avoid multiple concurrent resolve operations.
* The WebGL backend uses it as a boolean flag. In context of WebGPU, it holds
* the promise of the current resolve operation.
*
* @type {boolean}
* @type {boolean|Promise<number>}
* @default false

@@ -78,0 +80,0 @@ */

@@ -64,2 +64,10 @@ import { Color } from '../../math/Color.js';

/**
* This property is set by {@link UniformsGroup} and marks
* the index position in the uniform array.
*
* @type {number}
*/
this.index = - 1;
}

@@ -66,0 +74,0 @@

@@ -50,5 +50,50 @@ import UniformBuffer from './UniformBuffer.js';

/**
* A cache for the uniform update ranges.
*
* @private
* @type {Map<number, {start: number, count: number}>}
*/
this._updateRangeCache = new Map();
}
/**
* Adds a uniform's update range to this buffer.
*
* @param {Uniform} uniform - The uniform.
*/
addUniformUpdateRange( uniform ) {
const index = uniform.index;
if ( this._updateRangeCache.has( index ) !== true ) {
const updateRanges = this.updateRanges;
const start = uniform.offset;
const count = uniform.itemSize;
const range = { start, count };
updateRanges.push( range );
this._updateRangeCache.set( index, range );
}
}
/**
* Clears all update ranges of this buffer.
*/
clearUpdateRanges() {
this._updateRangeCache.clear();
super.clearUpdateRanges();
}
/**
* Adds a uniform to this group.

@@ -160,2 +205,3 @@ *

uniform.offset = offset / bytesPerElement;
uniform.index = i;

@@ -240,2 +286,4 @@ offset += itemSize;

this.addUniformUpdateRange( uniform );
}

@@ -271,2 +319,4 @@

this.addUniformUpdateRange( uniform );
}

@@ -303,2 +353,4 @@

this.addUniformUpdateRange( uniform );
}

@@ -336,2 +388,4 @@

this.addUniformUpdateRange( uniform );
}

@@ -367,2 +421,4 @@

this.addUniformUpdateRange( uniform );
}

@@ -406,2 +462,4 @@

this.addUniformUpdateRange( uniform );
}

@@ -434,2 +492,4 @@

this.addUniformUpdateRange( uniform );
}

@@ -436,0 +496,0 @@

@@ -1134,4 +1134,4 @@ import { ArrayCamera } from '../../cameras/ArrayCamera.js';

cameraXR.layers.mask = camera.layers.mask | 0b110;
cameraL.layers.mask = cameraXR.layers.mask & 0b011;
cameraR.layers.mask = cameraXR.layers.mask & 0b101;
cameraL.layers.mask = cameraXR.layers.mask & ~ 0b100;
cameraR.layers.mask = cameraXR.layers.mask & ~ 0b010;

@@ -1138,0 +1138,0 @@

@@ -38,3 +38,3 @@ export default /* glsl */`

uniform sampler2D batchingColorTexture;
vec3 getBatchingColor( const in float i ) {
vec4 getBatchingColor( const in float i ) {

@@ -45,3 +45,3 @@ int size = textureSize( batchingColorTexture, 0 ).x;

int y = j / size;
return texelFetch( batchingColorTexture, ivec2( x, y ), 0 ).rgb;
return texelFetch( batchingColorTexture, ivec2( x, y ), 0 );

@@ -48,0 +48,0 @@ }

export default /* glsl */`
#if defined( USE_COLOR_ALPHA )
#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA )
diffuseColor *= vColor;
#elif defined( USE_COLOR )
diffuseColor.rgb *= vColor;
#endif
`;
export default /* glsl */`
#if defined( USE_COLOR_ALPHA )
#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA )
varying vec4 vColor;
#elif defined( USE_COLOR )
varying vec3 vColor;
#endif
`;
export default /* glsl */`
#if defined( USE_COLOR_ALPHA )
#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )
varying vec4 vColor;
#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )
varying vec3 vColor;
#endif
`;
export default /* glsl */`
#if defined( USE_COLOR_ALPHA )
#if defined( USE_COLOR ) || defined( USE_COLOR_ALPHA ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )
vColor = vec4( 1.0 );
#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )
vColor = vec3( 1.0 );
#endif
#ifdef USE_COLOR
#ifdef USE_COLOR_ALPHA
vColor *= color;
#elif defined( USE_COLOR )
vColor.rgb *= color;
#endif

@@ -20,3 +20,3 @@

vColor.xyz *= instanceColor.xyz;
vColor.rgb *= instanceColor.rgb;

@@ -27,7 +27,5 @@ #endif

vec3 batchingColor = getBatchingColor( getIndirectIndex( gl_DrawID ) );
vColor *= getBatchingColor( getIndirectIndex( gl_DrawID ) );
vColor.xyz *= batchingColor.xyz;
#endif
`;

@@ -41,20 +41,16 @@ export default /* glsl */`

#else
#ifdef ENVMAP_BLENDING_MULTIPLY
vec4 envColor = vec4( 0.0 );
outgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );
#endif
#elif defined( ENVMAP_BLENDING_MIX )
#ifdef ENVMAP_BLENDING_MULTIPLY
outgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );
outgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );
#elif defined( ENVMAP_BLENDING_ADD )
#elif defined( ENVMAP_BLENDING_MIX )
outgoingLight += envColor.xyz * specularStrength * reflectivity;
outgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );
#endif
#elif defined( ENVMAP_BLENDING_ADD )
outgoingLight += envColor.xyz * specularStrength * reflectivity;
#endif

@@ -61,0 +57,0 @@

export default /* glsl */`
#if defined( RE_IndirectDiffuse )
#if defined( LAMBERT ) || defined( PHONG )
irradiance += iblIrradiance;
#endif
RE_IndirectDiffuse( irradiance, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );

@@ -5,0 +11,0 @@

@@ -13,6 +13,10 @@ export default /* glsl */`

#if defined( USE_ENVMAP ) && defined( STANDARD ) && defined( ENVMAP_TYPE_CUBE_UV )
#if defined( USE_ENVMAP ) && defined( ENVMAP_TYPE_CUBE_UV )
iblIrradiance += getIBLIrradiance( geometryNormal );
#if defined( STANDARD ) || defined( LAMBERT ) || defined( PHONG )
iblIrradiance += getIBLIrradiance( geometryNormal );
#endif
#endif

@@ -19,0 +23,0 @@

@@ -497,3 +497,3 @@ export default /* glsl */`

// http://blog.selfshadow.com/publications/s2016-advances/s2016_ltc_fresnel.pdf
vec3 fresnel = ( material.specularColorBlended * t2.x + ( vec3( 1.0 ) - material.specularColorBlended ) * t2.y );
vec3 fresnel = ( material.specularColorBlended * t2.x + ( material.specularF90 - material.specularColorBlended ) * t2.y );

@@ -504,2 +504,24 @@ reflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );

#ifdef USE_CLEARCOAT
vec3 Ncc = geometryClearcoatNormal;
vec2 uvClearcoat = LTC_Uv( Ncc, viewDir, material.clearcoatRoughness );
vec4 t1Clearcoat = texture2D( ltc_1, uvClearcoat );
vec4 t2Clearcoat = texture2D( ltc_2, uvClearcoat );
mat3 mInvClearcoat = mat3(
vec3( t1Clearcoat.x, 0, t1Clearcoat.y ),
vec3( 0, 1, 0 ),
vec3( t1Clearcoat.z, 0, t1Clearcoat.w )
);
// LTC Fresnel Approximation for clearcoat
vec3 fresnelClearcoat = material.clearcoatF0 * t2Clearcoat.x + ( material.clearcoatF90 - material.clearcoatF0 ) * t2Clearcoat.y;
clearcoatSpecularDirect += lightColor * fresnelClearcoat * LTC_Evaluate( Ncc, viewDir, position, mInvClearcoat, rectCoords );
#endif
}

@@ -506,0 +528,0 @@

@@ -85,4 +85,12 @@ export default /* glsl */`

float orthographicDepthToViewZ( const in float depth, const in float near, const in float far ) {
// maps orthographic depth in [ 0, 1 ] to viewZ
return depth * ( near - far ) - near;
#ifdef USE_REVERSED_DEPTH_BUFFER
return depth * ( far - near ) - far;
#else
return depth * ( near - far ) - near;
#endif
}

@@ -98,5 +106,13 @@

float perspectiveDepthToViewZ( const in float depth, const in float near, const in float far ) {
// maps perspective depth in [ 0, 1 ] to viewZ
return ( near * far ) / ( ( far - near ) * depth - far );
#ifdef USE_REVERSED_DEPTH_BUFFER
return ( near * far ) / ( ( near - far ) * depth - near );
#else
return ( near * far ) / ( ( far - near ) * depth - far );
#endif
}
`;

@@ -135,3 +135,3 @@ export default /* glsl */`

// Use IGN to rotate sampling pattern per pixel
float phi = interleavedGradientNoise( gl_FragCoord.xy ) * 6.28318530718; // 2*PI
float phi = interleavedGradientNoise( gl_FragCoord.xy ) * PI2;

@@ -159,4 +159,13 @@ shadow = (

shadowCoord.xyz /= shadowCoord.w;
shadowCoord.z += shadowBias;
#ifdef USE_REVERSED_DEPTH_BUFFER
shadowCoord.z -= shadowBias;
#else
shadowCoord.z += shadowBias;
#endif
bool inFrustum = shadowCoord.x >= 0.0 && shadowCoord.x <= 1.0 && shadowCoord.y >= 0.0 && shadowCoord.y <= 1.0;

@@ -181,3 +190,3 @@ bool frustumTest = inFrustum && shadowCoord.z <= 1.0;

#endif
// Early return if fully lit

@@ -219,4 +228,13 @@ if ( hard_shadow == 1.0 ) {

shadowCoord.xyz /= shadowCoord.w;
shadowCoord.z += shadowBias;
#ifdef USE_REVERSED_DEPTH_BUFFER
shadowCoord.z -= shadowBias;
#else
shadowCoord.z += shadowBias;
#endif
bool inFrustum = shadowCoord.x >= 0.0 && shadowCoord.x <= 1.0 && shadowCoord.y >= 0.0 && shadowCoord.y <= 1.0;

@@ -269,7 +287,16 @@ bool frustumTest = inFrustum && shadowCoord.z <= 1.0;

// Calculate perspective depth for cube shadow map
// Standard perspective depth formula: depth = (far * (z - near)) / (z * (far - near))
float dp = ( shadowCameraFar * ( viewSpaceZ - shadowCameraNear ) ) / ( viewSpaceZ * ( shadowCameraFar - shadowCameraNear ) );
dp += shadowBias;
// viewZ to perspective depth
#ifdef USE_REVERSED_DEPTH_BUFFER
float dp = ( shadowCameraNear * ( shadowCameraFar - viewSpaceZ ) ) / ( viewSpaceZ * ( shadowCameraFar - shadowCameraNear ) );
dp -= shadowBias;
#else
float dp = ( shadowCameraFar * ( viewSpaceZ - shadowCameraNear ) ) / ( viewSpaceZ * ( shadowCameraFar - shadowCameraNear ) );
dp += shadowBias;
#endif
// Hardware PCF with LinearFilter gives us 4-tap filtering per sample

@@ -286,10 +313,16 @@ // Use Vogel disk + IGN sampling for better quality

// Use IGN to rotate sampling pattern per pixel
float phi = interleavedGradientNoise( gl_FragCoord.xy ) * 6.28318530718;
float phi = interleavedGradientNoise( gl_FragCoord.xy ) * PI2;
vec2 sample0 = vogelDiskSample( 0, 5, phi );
vec2 sample1 = vogelDiskSample( 1, 5, phi );
vec2 sample2 = vogelDiskSample( 2, 5, phi );
vec2 sample3 = vogelDiskSample( 3, 5, phi );
vec2 sample4 = vogelDiskSample( 4, 5, phi );
shadow = (
texture( shadowMap, vec4( bd3D + ( tangent * vogelDiskSample( 0, 5, phi ).x + bitangent * vogelDiskSample( 0, 5, phi ).y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * vogelDiskSample( 1, 5, phi ).x + bitangent * vogelDiskSample( 1, 5, phi ).y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * vogelDiskSample( 2, 5, phi ).x + bitangent * vogelDiskSample( 2, 5, phi ).y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * vogelDiskSample( 3, 5, phi ).x + bitangent * vogelDiskSample( 3, 5, phi ).y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * vogelDiskSample( 4, 5, phi ).x + bitangent * vogelDiskSample( 4, 5, phi ).y ) * texelSize, dp ) )
texture( shadowMap, vec4( bd3D + ( tangent * sample0.x + bitangent * sample0.y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * sample1.x + bitangent * sample1.y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * sample2.x + bitangent * sample2.y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * sample3.x + bitangent * sample3.y ) * texelSize, dp ) ) +
texture( shadowMap, vec4( bd3D + ( tangent * sample4.x + bitangent * sample4.y ) * texelSize, dp ) )
) * 0.2;

@@ -313,5 +346,2 @@

// Direction from light to fragment
vec3 bd3D = normalize( lightToPosition );
// For cube shadow maps, depth is stored as distance along each face's view axis, not radial distance

@@ -324,7 +354,10 @@ // The view-space depth is the maximum component of the direction vector (which face is sampled)

// Calculate perspective depth for cube shadow map
// Standard perspective depth formula: depth = (far * (z - near)) / (z * (far - near))
// viewZ to perspective depth
float dp = ( shadowCameraFar * ( viewSpaceZ - shadowCameraNear ) ) / ( viewSpaceZ * ( shadowCameraFar - shadowCameraNear ) );
dp += shadowBias;
// Direction from light to fragment
vec3 bd3D = normalize( lightToPosition );
float depth = textureCube( shadowMap, bd3D ).r;

@@ -334,10 +367,8 @@

shadow = step( depth, dp );
depth = 1.0 - depth;
#else
#endif
shadow = step( dp, depth );
shadow = step( dp, depth );
#endif
}

@@ -344,0 +375,0 @@

@@ -42,3 +42,4 @@ import { ShaderChunk } from './ShaderChunk.js';

{
emissive: { value: /*@__PURE__*/ new Color( 0x000000 ) }
emissive: { value: /*@__PURE__*/ new Color( 0x000000 ) },
envMapIntensity: { value: 1 }
}

@@ -69,3 +70,4 @@ ] ),

specular: { value: /*@__PURE__*/ new Color( 0x111111 ) },
shininess: { value: 30 }
shininess: { value: 30 },
envMapIntensity: { value: 1 }
}

@@ -72,0 +74,0 @@ ] ),

@@ -71,4 +71,6 @@ export const vertex = /* glsl */`

#include <emissivemap_pars_fragment>
#include <cube_uv_reflection_fragment>
#include <envmap_common_pars_fragment>
#include <envmap_pars_fragment>
#include <envmap_physical_pars_fragment>
#include <fog_pars_fragment>

@@ -75,0 +77,0 @@ #include <bsdfs>

@@ -73,4 +73,6 @@ export const vertex = /* glsl */`

#include <emissivemap_pars_fragment>
#include <cube_uv_reflection_fragment>
#include <envmap_common_pars_fragment>
#include <envmap_pars_fragment>
#include <envmap_physical_pars_fragment>
#include <fog_pars_fragment>

@@ -77,0 +79,0 @@ #include <bsdfs>

@@ -55,4 +55,5 @@ export const vertex = /* glsl */`

#include <fog_fragment>
#include <premultiplied_alpha_fragment>
}
`;

@@ -138,3 +138,2 @@ import { Color } from '../../math/Color.js';

directionalShadowMap: { value: [] },
directionalShadowMatrix: { value: [] },

@@ -161,3 +160,2 @@

spotLightMap: { value: [] },
spotShadowMap: { value: [] },
spotLightMatrix: { value: [] },

@@ -182,3 +180,2 @@

pointShadowMap: { value: [] },
pointShadowMatrix: { value: [] },

@@ -185,0 +182,0 @@

@@ -72,5 +72,5 @@ import { GLSLNodeParser, NodeBuilder, TextureNode, vectorComponents, CodeNode } from '../../../nodes/Nodes.js';

precision lowp sampler2DShadow;
precision lowp sampler2DArrayShadow;
precision lowp samplerCubeShadow;
precision highp sampler2DShadow;
precision highp sampler2DArrayShadow;
precision highp samplerCubeShadow;
`;

@@ -364,3 +364,3 @@

return shaderStage.charAt( 0 ) + '_' + node.name;
return node.name;

@@ -550,7 +550,10 @@ }

* @param {string} levelSnippet - A GLSL snippet that represents the mip level, with level 0 containing a full size version of the texture.
* @param {?string} depthSnippet - A GLSL snippet that represents 0-based texture array index to sample.
* @param {?string} offsetSnippet - A GLSL snippet that represents the offset that will be applied to the unnormalized texture coordinate before sampling the texture.
* @return {string} The GLSL snippet.
*/
generateTextureLevel( texture, textureProperty, uvSnippet, levelSnippet, offsetSnippet ) {
generateTextureLevel( texture, textureProperty, uvSnippet, levelSnippet, depthSnippet, offsetSnippet ) {
if ( depthSnippet ) uvSnippet = `vec3( ${ uvSnippet }, ${ depthSnippet } )`;
if ( offsetSnippet ) {

@@ -573,7 +576,10 @@

* @param {string} biasSnippet - A GLSL snippet that represents the bias to apply to the mip level before sampling.
* @param {?string} depthSnippet - A GLSL snippet that represents 0-based texture array index to sample.
* @param {?string} offsetSnippet - A GLSL snippet that represents the offset that will be applied to the unnormalized texture coordinate before sampling the texture.
* @return {string} The GLSL snippet.
*/
generateTextureBias( texture, textureProperty, uvSnippet, biasSnippet, offsetSnippet ) {
generateTextureBias( texture, textureProperty, uvSnippet, biasSnippet, depthSnippet, offsetSnippet ) {
if ( depthSnippet ) uvSnippet = `vec3( ${ uvSnippet }, ${ depthSnippet } )`;
if ( offsetSnippet ) {

@@ -596,7 +602,10 @@

* @param {Array<string>} gradSnippet - An array holding both gradient GLSL snippets.
* @param {?string} depthSnippet - A GLSL snippet that represents 0-based texture array index to sample.
* @param {?string} offsetSnippet - A GLSL snippet that represents the offset that will be applied to the unnormalized texture coordinate before sampling the texture.
* @return {string} The GLSL snippet.
*/
generateTextureGrad( texture, textureProperty, uvSnippet, gradSnippet, offsetSnippet ) {
generateTextureGrad( texture, textureProperty, uvSnippet, gradSnippet, depthSnippet, offsetSnippet ) {
if ( depthSnippet ) uvSnippet = `vec3( ${ uvSnippet }, ${ depthSnippet } )`;
if ( offsetSnippet ) {

@@ -760,4 +769,14 @@

snippet = `samplerCubeShadow ${ uniform.name };`;
const texture = uniform.node.value;
if ( texture.compareFunction ) {
snippet = `samplerCubeShadow ${ uniform.name };`;
} else {
snippet = `samplerCube ${ uniform.name };`;
}
} else if ( uniform.type === 'buffer' ) {

@@ -774,28 +793,52 @@

const vectorType = this.getVectorType( uniform.type );
const groupName = uniform.groupNode.name;
snippet = `${ vectorType } ${ this.getPropertyName( uniform, shaderStage ) };`;
// Check if this group has already been processed
if ( uniformGroups[ groupName ] === undefined ) {
group = true;
// Get the shared uniform group that contains uniforms from all stages
const sharedUniformGroup = this.uniformGroups[ groupName ];
}
if ( sharedUniformGroup !== undefined ) {
const precision = uniform.node.precision;
// Generate snippets for ALL uniforms in this shared group
const snippets = [];
if ( precision !== null ) {
for ( const sharedUniform of sharedUniformGroup.uniforms ) {
snippet = precisionLib[ precision ] + ' ' + snippet;
const type = sharedUniform.getType();
const vectorType = this.getVectorType( type );
const precision = sharedUniform.nodeUniform.node.precision;
let uniformSnippet = `${ vectorType } ${ sharedUniform.name };`;
if ( precision !== null ) {
uniformSnippet = precisionLib[ precision ] + ' ' + uniformSnippet;
}
snippets.push( '\t' + uniformSnippet );
}
uniformGroups[ groupName ] = snippets;
}
}
group = true;
}
if ( group ) {
if ( ! group ) {
snippet = '\t' + snippet;
const precision = uniform.node.precision;
const groupName = uniform.groupNode.name;
const groupSnippets = uniformGroups[ groupName ] || ( uniformGroups[ groupName ] = [] );
if ( precision !== null ) {
groupSnippets.push( snippet );
snippet = precisionLib[ precision ] + ' ' + snippet;
} else {
}

@@ -816,3 +859,3 @@ snippet = 'uniform ' + snippet;

output += this._getGLSLUniformStruct( shaderStage + '_' + name, groupSnippets.join( '\n' ) ) + '\n';
output += this._getGLSLUniformStruct( name, groupSnippets.join( '\n' ) ) + '\n';

@@ -1285,2 +1328,14 @@ }

/**
* Returns the maximum number of bytes available for uniform buffers.
*
* @return {number} The maximum number of bytes available for uniform buffers.
*/
getUniformBufferLimit() {
const gl = this.renderer.backend.gl;
return gl.getParameter( gl.MAX_UNIFORM_BLOCK_SIZE );
}
/**
* Enables hardware clipping.

@@ -1607,15 +1662,21 @@ *

const uniformsStage = this.uniformGroups[ shaderStage ] || ( this.uniformGroups[ shaderStage ] = {} );
let uniformsGroup = this.uniformGroups[ groupName ];
let uniformsGroup = uniformsStage[ groupName ];
if ( uniformsGroup === undefined ) {
uniformsGroup = new NodeUniformsGroup( shaderStage + '_' + groupName, group );
//uniformsGroup.setVisibility( gpuShaderStageLib[ shaderStage ] );
uniformsGroup = new NodeUniformsGroup( groupName, group );
uniformsStage[ groupName ] = uniformsGroup;
this.uniformGroups[ groupName ] = uniformsGroup;
bindings.push( uniformsGroup );
} else {
// Add to bindings for this stage if not already present
if ( bindings.indexOf( uniformsGroup ) === - 1 ) {
bindings.push( uniformsGroup );
}
}

@@ -1625,4 +1686,12 @@

uniformsGroup.addUniform( uniformGPU );
// Only add uniform if not already present in the group (check by name to avoid duplicates across stages)
const uniformName = uniformGPU.name;
const alreadyExists = uniformsGroup.uniforms.some( u => u.name === uniformName );
if ( ! alreadyExists ) {
uniformsGroup.addUniform( uniformGPU );
}
}

@@ -1629,0 +1698,0 @@

@@ -7,6 +7,7 @@ import {

OneMinusSrcColorFactor, OneMinusSrcAlphaFactor, OneMinusDstColorFactor, OneMinusDstAlphaFactor,
NeverDepth, AlwaysDepth, LessDepth, LessEqualDepth, EqualDepth, GreaterEqualDepth, GreaterDepth, NotEqualDepth
NeverDepth, AlwaysDepth, LessDepth, LessEqualDepth, EqualDepth, GreaterEqualDepth, GreaterDepth, NotEqualDepth,
MaterialBlending
} from '../../../constants.js';
import { Vector4 } from '../../../math/Vector4.js';
import { error } from '../../../utils.js';
import { error, ReversedDepthFuncs, warnOnce } from '../../../utils.js';

@@ -66,2 +67,3 @@ let equationToGL, factorToGL;

this.currentColorMask = null;
this.currentDepthReversed = false;
this.currentDepthFunc = null;

@@ -275,3 +277,3 @@ this.currentDepthMask = null;

setMRTBlending( textures ) {
setMRTBlending( textures, mrt, material ) {

@@ -281,14 +283,143 @@ const gl = this.gl;

if ( ! drawBuffersIndexedExt ) return;
if ( ! drawBuffersIndexedExt ) {
for ( let i = 1; i < textures.length; i ++ ) {
warnOnce( 'WebGPURenderer: Multiple Render Targets (MRT) blending configuration is not fully supported in compatibility mode. The material blending will be used for all render targets.' );
// use opaque blending for additional render targets
drawBuffersIndexedExt.blendFuncSeparateiOES( i, gl.ONE, gl.ZERO, gl.ONE, gl.ZERO );
return;
}
for ( let i = 0; i < textures.length; i ++ ) {
const texture = textures[ i ];
let blending = null;
if ( mrt !== null ) {
const blendMode = mrt.getBlendMode( texture.name );
if ( blendMode.blending === MaterialBlending ) {
// use material blending
blending = material;
} else if ( blendMode.blending !== NoBlending ) {
blending = blendMode;
}
} else {
// use material blending
blending = material;
}
if ( blending !== null ) {
this._setMRTBlendingIndex( i, blending );
} else {
// use opaque blending (no blending)
drawBuffersIndexedExt.blendFuncSeparateiOES( i, gl.ONE, gl.ZERO, gl.ONE, gl.ZERO );
}
}
}
/**
* Applies blending configuration for a specific draw buffer index.
*
* Uses the OES_draw_buffers_indexed extension so each MRT attachment
* can have its own blend equation and factors. `blending` may be either
* a material or a BlendMode-like object; only the listed blend
* properties are read from it.
*
* @private
* @param {number} index - The draw buffer index.
* @param {Object} blending - The blending configuration (material or BlendMode).
*/
_setMRTBlendingIndex( index, blending ) {
const { gl } = this;
const drawBuffersIndexedExt = this.backend.drawBuffersIndexedExt;
const blendingType = blending.blending;
const blendSrc = blending.blendSrc;
const blendDst = blending.blendDst;
const blendEquation = blending.blendEquation;
const premultipliedAlpha = blending.premultipliedAlpha;
if ( blendingType === CustomBlending ) {
// Custom blending: honor the explicit factors/equations. The alpha-channel
// settings fall back to the RGB settings when not set (null).
const blendSrcAlpha = blending.blendSrcAlpha !== null ? blending.blendSrcAlpha : blendSrc;
const blendDstAlpha = blending.blendDstAlpha !== null ? blending.blendDstAlpha : blendDst;
const blendEquationAlpha = blending.blendEquationAlpha !== null ? blending.blendEquationAlpha : blendEquation;
drawBuffersIndexedExt.blendEquationSeparateiOES( index, equationToGL[ blendEquation ], equationToGL[ blendEquationAlpha ] );
drawBuffersIndexedExt.blendFuncSeparateiOES( index, factorToGL[ blendSrc ], factorToGL[ blendDst ], factorToGL[ blendSrcAlpha ], factorToGL[ blendDstAlpha ] );
} else {
// Built-in blending modes always use additive combination of the weighted terms.
drawBuffersIndexedExt.blendEquationSeparateiOES( index, gl.FUNC_ADD, gl.FUNC_ADD );
if ( premultipliedAlpha ) {
// Premultiplied-alpha variants: the source color already contains alpha,
// so the RGB source factor is ONE instead of SRC_ALPHA.
switch ( blendingType ) {
case NormalBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.ONE, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA );
break;
case AdditiveBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.ONE, gl.ONE, gl.ONE, gl.ONE );
break;
case SubtractiveBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.ZERO, gl.ONE_MINUS_SRC_COLOR, gl.ZERO, gl.ONE );
break;
case MultiplyBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.DST_COLOR, gl.ONE_MINUS_SRC_ALPHA, gl.ZERO, gl.ONE );
break;
default:
// Unknown mode: fall back to premultiplied normal blending.
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.ONE, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA );
break;
}
} else {
// Straight-alpha variants: weight the source color by SRC_ALPHA.
switch ( blendingType ) {
case NormalBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA );
break;
case AdditiveBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.SRC_ALPHA, gl.ONE, gl.ONE, gl.ONE );
break;
case SubtractiveBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.ZERO, gl.ONE_MINUS_SRC_COLOR, gl.ZERO, gl.ONE );
break;
case MultiplyBlending:
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.DST_COLOR, gl.ONE_MINUS_SRC_ALPHA, gl.ZERO, gl.ONE );
break;
default:
// Unknown mode: fall back to normal blending.
drawBuffersIndexedExt.blendFuncSeparateiOES( index, gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA );
break;
}
}
}
}
/**
* Defines the blending.

@@ -485,3 +616,31 @@ *

/**
 * Configures the WebGL state to use a reversed depth buffer.
 *
 * Switches the NDC depth range via the EXT_clip_control extension:
 * [0, 1] when reversed, the default [-1, 1] otherwise. The call is
 * skipped when the requested state matches the cached one.
 *
 * NOTE(review): assumes EXT_clip_control is available — `ext` is not
 * null-checked here; confirm callers guard on extension support.
 *
 * @param {boolean} reversed - Whether the depth buffer is reversed or not.
 */
setReversedDepth( reversed ) {
if ( this.currentDepthReversed !== reversed ) {
const ext = this.backend.extensions.get( 'EXT_clip_control' );
if ( reversed ) {
ext.clipControlEXT( ext.LOWER_LEFT_EXT, ext.ZERO_TO_ONE_EXT );
} else {
ext.clipControlEXT( ext.LOWER_LEFT_EXT, ext.NEGATIVE_ONE_TO_ONE_EXT );
}
// Cache the applied state to avoid redundant extension calls.
this.currentDepthReversed = reversed;
}
}
/**
* Specifies whether depth values can be written when rendering

@@ -516,2 +675,4 @@ * into a framebuffer or not.

if ( this.currentDepthReversed ) depthFunc = ReversedDepthFuncs[ depthFunc ];
if ( this.currentDepthFunc !== depthFunc ) {

@@ -518,0 +679,0 @@

@@ -17,3 +17,3 @@ import { BackSide, FrontSide, CubeUVReflectionMapping, SRGBTransfer } from '../../constants.js';

function WebGLBackground( renderer, cubemaps, cubeuvmaps, state, objects, alpha, premultipliedAlpha ) {
function WebGLBackground( renderer, environments, state, objects, alpha, premultipliedAlpha ) {

@@ -37,3 +37,3 @@ const clearColor = new Color( 0x000000 );

const usePMREM = scene.backgroundBlurriness > 0; // use PMREM if the user wants to blur the background
background = ( usePMREM ? cubeuvmaps : cubemaps ).get( background );
background = environments.get( background, usePMREM );

@@ -40,0 +40,0 @@ }

@@ -17,3 +17,3 @@ import { IntType } from '../../constants.js';

const state = getBindingState( geometry, program, material );
const state = getBindingState( object, geometry, program, material );

@@ -71,12 +71,24 @@ if ( currentState !== state ) {

function getBindingState( geometry, program, material ) {
function getBindingState( object, geometry, program, material ) {
const wireframe = ( material.wireframe === true );
let programMap = bindingStates[ geometry.id ];
let objectMap = bindingStates[ geometry.id ];
if ( objectMap === undefined ) {
objectMap = {};
bindingStates[ geometry.id ] = objectMap;
}
// Each InstancedMesh requires unique binding states because it contains instanced attributes.
const objectId = ( object.isInstancedMesh === true ) ? object.id : 0;
let programMap = objectMap[ objectId ];
if ( programMap === undefined ) {
programMap = {};
bindingStates[ geometry.id ] = programMap;
objectMap[ objectId ] = programMap;

@@ -482,18 +494,24 @@ }

const programMap = bindingStates[ geometryId ];
const objectMap = bindingStates[ geometryId ];
for ( const programId in programMap ) {
for ( const objectId in objectMap ) {
const stateMap = programMap[ programId ];
const programMap = objectMap[ objectId ];
for ( const wireframe in stateMap ) {
for ( const programId in programMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
const stateMap = programMap[ programId ];
delete stateMap[ wireframe ];
for ( const wireframe in stateMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
delete stateMap[ wireframe ];
}
delete programMap[ programId ];
}
delete programMap[ programId ];
}

@@ -511,18 +529,24 @@

const programMap = bindingStates[ geometry.id ];
const objectMap = bindingStates[ geometry.id ];
for ( const programId in programMap ) {
for ( const objectId in objectMap ) {
const stateMap = programMap[ programId ];
const programMap = objectMap[ objectId ];
for ( const wireframe in stateMap ) {
for ( const programId in programMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
const stateMap = programMap[ programId ];
delete stateMap[ wireframe ];
for ( const wireframe in stateMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
delete stateMap[ wireframe ];
}
delete programMap[ programId ];
}
delete programMap[ programId ];
}

@@ -538,18 +562,64 @@

const programMap = bindingStates[ geometryId ];
const objectMap = bindingStates[ geometryId ];
if ( programMap[ program.id ] === undefined ) continue;
for ( const objectId in objectMap ) {
const stateMap = programMap[ program.id ];
const programMap = objectMap[ objectId ];
for ( const wireframe in stateMap ) {
if ( programMap[ program.id ] === undefined ) continue;
deleteVertexArrayObject( stateMap[ wireframe ].object );
const stateMap = programMap[ program.id ];
delete stateMap[ wireframe ];
for ( const wireframe in stateMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
delete stateMap[ wireframe ];
}
delete programMap[ program.id ];
}
delete programMap[ program.id ];
}
}
function releaseStatesOfObject( object ) {
for ( const geometryId in bindingStates ) {
const objectMap = bindingStates[ geometryId ];
const objectId = ( object.isInstancedMesh === true ) ? object.id : 0;
const programMap = objectMap[ objectId ];
if ( programMap === undefined ) continue;
for ( const programId in programMap ) {
const stateMap = programMap[ programId ];
for ( const wireframe in stateMap ) {
deleteVertexArrayObject( stateMap[ wireframe ].object );
delete stateMap[ wireframe ];
}
delete programMap[ programId ];
}
delete objectMap[ objectId ];
if ( Object.keys( objectMap ).length === 0 ) {
delete bindingStates[ geometryId ];
}
}

@@ -559,2 +629,3 @@

function reset() {

@@ -589,2 +660,3 @@

releaseStatesOfGeometry: releaseStatesOfGeometry,
releaseStatesOfObject: releaseStatesOfObject,
releaseStatesOfProgram: releaseStatesOfProgram,

@@ -591,0 +663,0 @@

import { Uint16BufferAttribute, Uint32BufferAttribute } from '../../core/BufferAttribute.js';
import { arrayNeedsUint32 } from '../../utils.js';

@@ -88,2 +87,8 @@ function WebGLGeometries( gl, attributes, info, bindingStates ) {

if ( geometryPosition === undefined ) {
return;
}
if ( geometryIndex !== null ) {

@@ -104,3 +109,3 @@

} else if ( geometryPosition !== undefined ) {
} else {

@@ -120,9 +125,7 @@ const array = geometryPosition.array;

} else {
return;
}
const attribute = new ( arrayNeedsUint32( indices ) ? Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 );
// check whether a 32 bit or 16 bit buffer is required to store the indices
// account for PRIMITIVE_RESTART_FIXED_INDEX, #24565
const attribute = new ( geometryPosition.count >= 65535 ? Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 );
attribute.version = version;

@@ -129,0 +132,0 @@

@@ -50,2 +50,8 @@ import { BackSide } from '../../constants.js';

if ( material.envMap ) {
uniforms.envMapIntensity.value = material.envMapIntensity;
}
} else if ( material.isMeshToonMaterial ) {

@@ -61,2 +67,8 @@

if ( material.envMap ) {
uniforms.envMapIntensity.value = material.envMapIntensity;
}
} else if ( material.isMeshStandardMaterial ) {

@@ -63,0 +75,0 @@

@@ -1,2 +0,2 @@

function WebGLObjects( gl, geometries, attributes, info ) {
function WebGLObjects( gl, geometries, attributes, bindingStates, info ) {

@@ -76,2 +76,4 @@ let updateMap = new WeakMap();

bindingStates.releaseStatesOfObject( instancedMesh );
attributes.remove( instancedMesh.instanceMatrix );

@@ -78,0 +80,0 @@

@@ -735,4 +735,4 @@ import { WebGLUniforms } from './WebGLUniforms.js';

parameters.vertexTangents && parameters.flatShading === false ? '#define USE_TANGENT' : '',
parameters.vertexColors || parameters.instancingColor || parameters.batchingColor ? '#define USE_COLOR' : '',
parameters.vertexAlphas ? '#define USE_COLOR_ALPHA' : '',
parameters.vertexColors || parameters.instancingColor ? '#define USE_COLOR' : '',
parameters.vertexAlphas || parameters.batchingColor ? '#define USE_COLOR_ALPHA' : '',
parameters.vertexUv1s ? '#define USE_UV1' : '',

@@ -739,0 +739,0 @@ parameters.vertexUv2s ? '#define USE_UV2' : '',

@@ -10,3 +10,3 @@ import { BackSide, DoubleSide, CubeUVReflectionMapping, ObjectSpaceNormalMap, TangentSpaceNormalMap, NoToneMapping, NormalBlending, LinearSRGBColorSpace, SRGBTransfer } from '../../constants.js';

function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities, bindingStates, clipping ) {
function WebGLPrograms( renderer, environments, extensions, capabilities, bindingStates, clipping ) {

@@ -55,5 +55,6 @@ const _programLayers = new Layers();

const geometry = object.geometry;
const environment = material.isMeshStandardMaterial ? scene.environment : null;
const environment = ( material.isMeshStandardMaterial || material.isMeshLambertMaterial || material.isMeshPhongMaterial ) ? scene.environment : null;
const envMap = ( material.isMeshStandardMaterial ? cubeuvmaps : cubemaps ).get( material.envMap || environment );
const usePMREM = material.isMeshStandardMaterial || ( material.isMeshLambertMaterial && ! material.envMap ) || ( material.isMeshPhongMaterial && ! material.envMap );
const envMap = environments.get( material.envMap || environment, usePMREM );
const envMapCubeUVHeight = ( !! envMap ) && ( envMap.mapping === CubeUVReflectionMapping ) ? envMap.image.height : null;

@@ -307,3 +308,8 @@

flatShading: ( material.flatShading === true && material.wireframe === false ),
flatShading: material.wireframe === false && (
material.flatShading === true ||
( geometry.attributes.normal === undefined && HAS_NORMALMAP === false &&
( material.isMeshLambertMaterial || material.isMeshPhongMaterial || material.isMeshStandardMaterial || material.isMeshPhysicalMaterial )
)
),

@@ -310,0 +316,0 @@ sizeAttenuation: material.sizeAttenuation === true,

@@ -15,2 +15,6 @@ function painterSortStable( a, b ) {

} else if ( a.materialVariant !== b.materialVariant ) {
return a.materialVariant - b.materialVariant;
} else if ( a.z !== b.z ) {

@@ -70,2 +74,11 @@

/**
 * Computes a numeric variant key for a render item so that objects
 * needing different program variants (instanced / skinned) sort
 * together in the render list.
 *
 * @param {Object3D} object - The 3D object being queued for rendering.
 * @return {number} Bitmask: 2 when the object is an InstancedMesh, plus 1 when it is a SkinnedMesh.
 */
function materialVariant( object ) {

	const instancedBit = object.isInstancedMesh ? 2 : 0;
	const skinnedBit = object.isSkinnedMesh ? 1 : 0;

	return instancedBit + skinnedBit;

}
function getNextRenderItem( object, geometry, material, groupOrder, z, group ) {

@@ -82,2 +95,3 @@

material: material,
materialVariant: materialVariant( object ),
groupOrder: groupOrder,

@@ -97,2 +111,3 @@ renderOrder: object.renderOrder,

renderItem.material = material;
renderItem.materialVariant = materialVariant( object );
renderItem.groupOrder = groupOrder;

@@ -99,0 +114,0 @@ renderItem.renderOrder = object.renderOrder;

@@ -99,6 +99,6 @@ import { FrontSide, BackSide, DoubleSide, NearestFilter, LinearFilter, PCFShadowMap, VSMShadowMap, NoBlending, LessEqualCompare, GreaterEqualCompare, DepthFormat, UnsignedIntType, RGFormat, HalfFloatType, FloatType, PCFSoftShadowMap } from '../../constants.js';

if ( lights.type === PCFSoftShadowMap ) {
if ( this.type === PCFSoftShadowMap ) {
warn( 'WebGLShadowMap: PCFSoftShadowMap has been deprecated. Using PCFShadowMap instead.' );
lights.type = PCFShadowMap;
this.type = PCFShadowMap;

@@ -201,2 +201,5 @@ }

const reversedDepthBuffer = renderer.state.buffers.depth.getReversed();
shadow.camera._reversedDepth = reversedDepthBuffer;
if ( shadow.map === null || typeChanged === true ) {

@@ -260,4 +263,2 @@

const reversedDepthBuffer = renderer.state.buffers.depth.getReversed();
if ( this.type === PCFShadowMap ) {

@@ -264,0 +265,0 @@

import { NotEqualDepth, GreaterDepth, GreaterEqualDepth, EqualDepth, LessEqualDepth, LessDepth, AlwaysDepth, NeverDepth, CullFaceFront, CullFaceBack, CullFaceNone, DoubleSide, BackSide, CustomBlending, MultiplyBlending, SubtractiveBlending, AdditiveBlending, NoBlending, NormalBlending, AddEquation, SubtractEquation, ReverseSubtractEquation, MinEquation, MaxEquation, ZeroFactor, OneFactor, SrcColorFactor, SrcAlphaFactor, SrcAlphaSaturateFactor, DstColorFactor, DstAlphaFactor, OneMinusSrcColorFactor, OneMinusSrcAlphaFactor, OneMinusDstColorFactor, OneMinusDstAlphaFactor, ConstantColorFactor, OneMinusConstantColorFactor, ConstantAlphaFactor, OneMinusConstantAlphaFactor } from '../../constants.js';
import { Color } from '../../math/Color.js';
import { Vector4 } from '../../math/Vector4.js';
import { error } from '../../utils.js';
import { error, ReversedDepthFuncs } from '../../utils.js';
const reversedFuncs = {
[ NeverDepth ]: AlwaysDepth,
[ LessDepth ]: GreaterDepth,
[ EqualDepth ]: NotEqualDepth,
[ LessEqualDepth ]: GreaterEqualDepth,
[ AlwaysDepth ]: NeverDepth,
[ GreaterDepth ]: LessDepth,
[ NotEqualDepth ]: EqualDepth,
[ GreaterEqualDepth ]: LessEqualDepth,
};
function WebGLState( gl, extensions ) {

@@ -149,3 +137,3 @@

if ( currentReversed ) depthFunc = reversedFuncs[ depthFunc ];
if ( currentReversed ) depthFunc = ReversedDepthFuncs[ depthFunc ];

@@ -218,2 +206,4 @@ if ( currentDepthFunc !== depthFunc ) {

currentDepthClear = depth;
if ( currentReversed ) {

@@ -226,3 +216,2 @@

gl.clearDepth( depth );
currentDepthClear = depth;

@@ -881,7 +870,13 @@ }

gl.polygonOffset( factor, units );
currentPolygonOffsetFactor = factor;
currentPolygonOffsetUnits = units;
if ( depthBuffer.getReversed() ) {
factor = - factor;
}
gl.polygonOffset( factor, units );
}

@@ -888,0 +883,0 @@

@@ -6,5 +6,6 @@ import {

import { FloatType, IntType, UnsignedIntType } from '../../../constants.js';
import { FloatType, IntType, UnsignedIntType, Compatibility } from '../../../constants.js';
import { NodeAccess } from '../../../nodes/core/constants.js';
import { isTypedArray, error } from '../../../utils.js';
import { hashString } from '../../../nodes/core/NodeUtils.js';

@@ -103,3 +104,3 @@ /**

const entries = this._createLayoutEntries( bindGroup );
const bindGroupLayoutKey = JSON.stringify( entries );
const bindGroupLayoutKey = hashString( JSON.stringify( entries ) );

@@ -229,4 +230,2 @@ // try to find an existing layout in the cache

binding.clearUpdateRanges();
}

@@ -331,17 +330,6 @@

const bindingData = backend.get( binding );
const buffer = backend.get( binding.attribute ).buffer;
if ( bindingData.buffer === undefined ) {
entriesGPU.push( { binding: bindingPoint, resource: { buffer: buffer } } );
const attribute = binding.attribute;
//const usage = GPUBufferUsage.STORAGE | GPUBufferUsage.VERTEX | /*GPUBufferUsage.COPY_SRC |*/ GPUBufferUsage.COPY_DST;
//backend.attributeUtils.createAttribute( attribute, usage ); // @TODO: Move it to universal renderer
bindingData.buffer = backend.get( attribute ).buffer;
}
entriesGPU.push( { binding: bindingPoint, resource: { buffer: bindingData.buffer } } );
} else if ( binding.isSampledTexture ) {

@@ -589,8 +577,9 @@

if ( binding.texture.compareFunction !== null ) {
if ( binding.texture.compareFunction !== null && backend.hasCompatibility( Compatibility.TEXTURE_COMPARE ) ) {
sampler.type = GPUSamplerBindingType.Comparison;
} else if ( backend.compatibilityMode ) {
} else {
// Depth textures without compare must use non-filtering sampler
sampler.type = GPUSamplerBindingType.NonFiltering;

@@ -597,0 +586,0 @@

@@ -9,3 +9,3 @@ export const GPUPrimitiveTopology = {

export const GPUShaderStage = ( typeof self !== 'undefined' ) ? self.GPUShaderStage : { VERTEX: 1, FRAGMENT: 2, COMPUTE: 4 };
export const GPUShaderStage = ( typeof self !== 'undefined' && self.GPUShaderStage ) ? self.GPUShaderStage : { VERTEX: 1, FRAGMENT: 2, COMPUTE: 4 };

@@ -12,0 +12,0 @@ export const GPUCompareFunction = {

@@ -10,3 +10,3 @@ import { BlendColorFactor, OneMinusBlendColorFactor, } from '../../common/Constants.js';

NeverDepth, AlwaysDepth, LessDepth, LessEqualDepth, EqualDepth, GreaterEqualDepth, GreaterDepth, NotEqualDepth,
NoBlending, NormalBlending, AdditiveBlending, SubtractiveBlending, MultiplyBlending, CustomBlending,
NoBlending, NormalBlending, AdditiveBlending, SubtractiveBlending, MultiplyBlending, CustomBlending, MaterialBlending,
ZeroFactor, OneFactor, SrcColorFactor, OneMinusSrcColorFactor, SrcAlphaFactor, OneMinusSrcAlphaFactor, DstColorFactor,

@@ -19,3 +19,3 @@ OneMinusDstColorFactor, DstAlphaFactor, OneMinusDstAlphaFactor, SrcAlphaSaturateFactor,

import { error } from '../../../utils.js';
import { error, ReversedDepthFuncs, warnOnce } from '../../../utils.js';

@@ -121,9 +121,9 @@ /**

// blending
// material blending
let blending;
let materialBlending;
if ( material.blending !== NoBlending && ( material.blending !== NormalBlending || material.transparent !== false ) ) {
blending = this._getBlending( material );
materialBlending = this._getBlending( material );

@@ -154,24 +154,49 @@ }

const textures = renderObject.context.textures;
const mrt = renderObject.context.mrt;
for ( let i = 0; i < textures.length; i ++ ) {
const colorFormat = utils.getTextureFormatGPU( textures[ i ] );
const texture = textures[ i ];
const colorFormat = utils.getTextureFormatGPU( texture );
if ( i === 0 ) {
// mrt blending
targets.push( {
format: colorFormat,
blend: blending,
writeMask: colorWriteMask
} );
let blending;
if ( mrt !== null ) {
if ( this.backend.compatibilityMode !== true ) {
const blendMode = mrt.getBlendMode( texture.name );
if ( blendMode.blending === MaterialBlending ) {
blending = materialBlending;
} else if ( blendMode.blending !== NoBlending ) {
blending = this._getBlending( blendMode );
}
} else {
warnOnce( 'WebGPURenderer: Multiple Render Targets (MRT) blending configuration is not fully supported in compatibility mode. The material blending will be used for all render targets.' );
blending = materialBlending;
}
} else {
targets.push( {
format: colorFormat,
writeMask: colorWriteMask
} );
blending = materialBlending;
}
targets.push( {
format: colorFormat,
blend: blending,
writeMask: colorWriteMask
} );
}

@@ -185,3 +210,3 @@

format: colorFormat,
blend: blending,
blend: materialBlending,
writeMask: colorWriteMask

@@ -233,3 +258,3 @@ } );

depthStencil.stencilFront = stencilFront;
depthStencil.stencilBack = {}; // three.js does not provide an API to configure the back function (gl.stencilFuncSeparate() was never used)
depthStencil.stencilBack = stencilFront; // apply the same stencil ops to both faces, matching gl.stencilOp() which is not face-separated
depthStencil.stencilReadMask = material.stencilFuncMask;

@@ -371,13 +396,13 @@ depthStencil.stencilWriteMask = material.stencilWriteMask;

* @private
* @param {Material} material - The material.
* @param {Material|BlendMode} object - The object containing blending information.
* @return {Object} The blending state.
*/
_getBlending( material ) {
_getBlending( object ) {
let color, alpha;
const blending = material.blending;
const blendSrc = material.blendSrc;
const blendDst = material.blendDst;
const blendEquation = material.blendEquation;
const blending = object.blending;
const blendSrc = object.blendSrc;
const blendDst = object.blendDst;
const blendEquation = object.blendEquation;

@@ -387,5 +412,5 @@

const blendSrcAlpha = material.blendSrcAlpha !== null ? material.blendSrcAlpha : blendSrc;
const blendDstAlpha = material.blendDstAlpha !== null ? material.blendDstAlpha : blendDst;
const blendEquationAlpha = material.blendEquationAlpha !== null ? material.blendEquationAlpha : blendEquation;
const blendSrcAlpha = object.blendSrcAlpha !== null ? object.blendSrcAlpha : blendSrc;
const blendDstAlpha = object.blendDstAlpha !== null ? object.blendDstAlpha : blendDst;
const blendEquationAlpha = object.blendEquationAlpha !== null ? object.blendEquationAlpha : blendEquation;

@@ -406,3 +431,3 @@ color = {

const premultipliedAlpha = material.premultipliedAlpha;
const premultipliedAlpha = object.premultipliedAlpha;

@@ -460,7 +485,7 @@ const setBlend = ( srcRGB, dstRGB, srcAlpha, dstAlpha ) => {

case SubtractiveBlending:
error( 'WebGPURenderer: SubtractiveBlending requires material.premultipliedAlpha = true' );
error( `WebGPURenderer: "SubtractiveBlending" requires "${ object.isMaterial ? 'material' : 'blendMode' }.premultipliedAlpha = true".` );
break;
case MultiplyBlending:
error( 'WebGPURenderer: MultiplyBlending requires material.premultipliedAlpha = true' );
error( `WebGPURenderer: "MultiplyBlending" requires "${ object.isMaterial ? 'material' : 'blendMode' }.premultipliedAlpha = true".` );
break;

@@ -782,3 +807,3 @@

const depthFunc = material.depthFunc;
const depthFunc = ( this.backend.parameters.reversedDepthBuffer ) ? ReversedDepthFuncs[ material.depthFunc ] : material.depthFunc;

@@ -785,0 +810,0 @@ switch ( depthFunc ) {

import DataMap from '../../common/DataMap.js';
import { GPUTextureViewDimension, GPUIndexFormat, GPUFilterMode, GPUPrimitiveTopology, GPULoadOp, GPUStoreOp } from './WebGPUConstants.js';
import { GPUFilterMode, GPULoadOp, GPUStoreOp } from './WebGPUConstants.js';

@@ -27,36 +27,35 @@ /**

const mipmapVertexSource = `
const mipmapSource = `
struct VarysStruct {
@builtin( position ) Position: vec4<f32>,
@location( 0 ) vTex : vec2<f32>
@builtin( position ) Position: vec4f,
@location( 0 ) vTex : vec2f,
@location( 1 ) @interpolate(flat, either) vBaseArrayLayer: u32,
};
@group( 0 ) @binding ( 2 )
var<uniform> flipY: u32;
@vertex
fn main( @builtin( vertex_index ) vertexIndex : u32 ) -> VarysStruct {
fn mainVS(
@builtin( vertex_index ) vertexIndex : u32,
@builtin( instance_index ) instanceIndex : u32 ) -> VarysStruct {
var Varys : VarysStruct;
var pos = array< vec2<f32>, 4 >(
vec2<f32>( -1.0, 1.0 ),
vec2<f32>( 1.0, 1.0 ),
vec2<f32>( -1.0, -1.0 ),
vec2<f32>( 1.0, -1.0 )
var pos = array(
vec2f( -1, -1 ),
vec2f( -1, 3 ),
vec2f( 3, -1 ),
);
var tex = array< vec2<f32>, 4 >(
vec2<f32>( 0.0, 0.0 ),
vec2<f32>( 1.0, 0.0 ),
vec2<f32>( 0.0, 1.0 ),
vec2<f32>( 1.0, 1.0 )
);
let p = pos[ vertexIndex ];
let mult = select( vec2f( 0.5, -0.5 ), vec2f( 0.5, 0.5 ), flipY != 0 );
Varys.vTex = p * mult + vec2f( 0.5 );
Varys.Position = vec4f( p, 0, 1 );
Varys.vBaseArrayLayer = instanceIndex;
Varys.vTex = tex[ vertexIndex ];
Varys.Position = vec4<f32>( pos[ vertexIndex ], 0.0, 1.0 );
return Varys;
}
`;
const mipmapFragmentSource = `
@group( 0 ) @binding( 0 )

@@ -66,23 +65,37 @@ var imgSampler : sampler;

@group( 0 ) @binding( 1 )
var img : texture_2d<f32>;
var img2d : texture_2d<f32>;
@fragment
fn main( @location( 0 ) vTex : vec2<f32> ) -> @location( 0 ) vec4<f32> {
fn main_2d( Varys: VarysStruct ) -> @location( 0 ) vec4<f32> {
return textureSample( img, imgSampler, vTex );
return textureSample( img2d, imgSampler, Varys.vTex );
}
`;
const flipYFragmentSource = `
@group( 0 ) @binding( 0 )
var imgSampler : sampler;
@group( 0 ) @binding( 1 )
var img2dArray : texture_2d_array<f32>;
@fragment
fn main_2d_array( Varys: VarysStruct ) -> @location( 0 ) vec4<f32> {
return textureSample( img2dArray, imgSampler, Varys.vTex, Varys.vBaseArrayLayer );
}
const faceMat = array(
mat3x3f( 0, 0, -2, 0, -2, 0, 1, 1, 1 ), // pos-x
mat3x3f( 0, 0, 2, 0, -2, 0, -1, 1, -1 ), // neg-x
mat3x3f( 2, 0, 0, 0, 0, 2, -1, 1, -1 ), // pos-y
mat3x3f( 2, 0, 0, 0, 0, -2, -1, -1, 1 ), // neg-y
mat3x3f( 2, 0, 0, 0, -2, 0, -1, 1, 1 ), // pos-z
mat3x3f( -2, 0, 0, 0, -2, 0, 1, 1, -1 ), // neg-z
);
@group( 0 ) @binding( 1 )
var img : texture_2d<f32>;
var imgCube : texture_cube<f32>;
@fragment
fn main( @location( 0 ) vTex : vec2<f32> ) -> @location( 0 ) vec4<f32> {
fn main_cube( Varys: VarysStruct ) -> @location( 0 ) vec4<f32> {
return textureSample( img, imgSampler, vec2( vTex.x, 1.0 - vTex.y ) );
return textureSample( imgCube, imgSampler, faceMat[ Varys.vBaseArrayLayer ] * vec3f( fract( Varys.vTex ), 1 ) );

@@ -107,45 +120,36 @@ }

/**
* A cache for GPU render pipelines used for copy/transfer passes.
* Every texture format requires a unique pipeline.
*
* @type {Object<string,GPURenderPipeline>}
* flip uniform buffer
* @type {GPUBuffer}
*/
this.transferPipelines = {};
this.flipUniformBuffer = device.createBuffer( {
size: 4,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
} );
device.queue.writeBuffer( this.flipUniformBuffer, 0, new Uint32Array( [ 1 ] ) );
/**
* A cache for GPU render pipelines used for flipY passes.
* Every texture format requires a unique pipeline.
*
* @type {Object<string,GPURenderPipeline>}
* no flip uniform buffer
* @type {GPUBuffer}
*/
this.flipYPipelines = {};
/**
* The mipmap vertex shader module.
*
* @type {GPUShaderModule}
*/
this.mipmapVertexShaderModule = device.createShaderModule( {
label: 'mipmapVertex',
code: mipmapVertexSource
this.noFlipUniformBuffer = device.createBuffer( {
size: 4,
usage: GPUBufferUsage.UNIFORM
} );
/**
* The mipmap fragment shader module.
* A cache for GPU render pipelines used for copy/transfer passes.
* Every texture format and textureBindingViewDimension combo requires a unique pipeline.
*
* @type {GPUShaderModule}
* @type {Object<string,GPURenderPipeline>}
*/
this.mipmapFragmentShaderModule = device.createShaderModule( {
label: 'mipmapFragment',
code: mipmapFragmentSource
} );
this.transferPipelines = {};
/**
* The flipY fragment shader module.
* The mipmap shader module.
*
* @type {GPUShaderModule}
*/
this.flipYFragmentShaderModule = device.createShaderModule( {
label: 'flipYFragment',
code: flipYFragmentSource
this.mipmapShaderModule = device.createShaderModule( {
label: 'mipmap',
code: mipmapSource
} );

@@ -160,7 +164,10 @@

* @param {string} format - The GPU texture format
* @param {string?} textureBindingViewDimension - The GPU texture binding view dimension
* @return {GPURenderPipeline} The GPU render pipeline.
*/
getTransferPipeline( format ) {
getTransferPipeline( format, textureBindingViewDimension ) {
let pipeline = this.transferPipelines[ format ];
textureBindingViewDimension = textureBindingViewDimension || '2d-array';
const key = `${ format }-${ textureBindingViewDimension }`;
let pipeline = this.transferPipelines[ key ];

@@ -170,20 +177,15 @@ if ( pipeline === undefined ) {

pipeline = this.device.createRenderPipeline( {
label: `mipmap-${ format }`,
label: `mipmap-${ format }-${ textureBindingViewDimension }`,
vertex: {
module: this.mipmapVertexShaderModule,
entryPoint: 'main'
module: this.mipmapShaderModule,
},
fragment: {
module: this.mipmapFragmentShaderModule,
entryPoint: 'main',
module: this.mipmapShaderModule,
entryPoint: `main_${ textureBindingViewDimension.replace( '-', '_' ) }`,
targets: [ { format } ]
},
primitive: {
topology: GPUPrimitiveTopology.TriangleStrip,
stripIndexFormat: GPUIndexFormat.Uint32
},
layout: 'auto'
} );
this.transferPipelines[ format ] = pipeline;
this.transferPipelines[ key ] = pipeline;

@@ -197,41 +199,2 @@ }

/**
* Returns a render pipeline for the flipY render pass. The pass
* requires a unique render pipeline for each texture format.
*
* @param {string} format - The GPU texture format
* @return {GPURenderPipeline} The GPU render pipeline.
*/
getFlipYPipeline( format ) {
let pipeline = this.flipYPipelines[ format ];
if ( pipeline === undefined ) {
pipeline = this.device.createRenderPipeline( {
label: `flipY-${ format }`,
vertex: {
module: this.mipmapVertexShaderModule,
entryPoint: 'main'
},
fragment: {
module: this.flipYFragmentShaderModule,
entryPoint: 'main',
targets: [ { format } ]
},
primitive: {
topology: GPUPrimitiveTopology.TriangleStrip,
stripIndexFormat: GPUIndexFormat.Uint32
},
layout: 'auto'
} );
this.flipYPipelines[ format ] = pipeline;
}
return pipeline;
}
/**
* Flip the contents of the given GPU texture along its vertical axis.

@@ -248,7 +211,4 @@ *

const transferPipeline = this.getTransferPipeline( format );
const flipYPipeline = this.getFlipYPipeline( format );
const tempTexture = this.device.createTexture( {
size: { width, height, depthOrArrayLayers: 1 },
size: { width, height },
format,

@@ -258,19 +218,8 @@ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING

const srcView = textureGPU.createView( {
baseMipLevel: 0,
mipLevelCount: 1,
dimension: GPUTextureViewDimension.TwoD,
baseArrayLayer
} );
const copyTransferPipeline = this.getTransferPipeline( format, textureGPU.textureBindingViewDimension );
const flipTransferPipeline = this.getTransferPipeline( format, tempTexture.textureBindingViewDimension );
const dstView = tempTexture.createView( {
baseMipLevel: 0,
mipLevelCount: 1,
dimension: GPUTextureViewDimension.TwoD,
baseArrayLayer: 0
} );
const commandEncoder = this.device.createCommandEncoder( {} );
const pass = ( pipeline, sourceView, destinationView ) => {
const pass = ( pipeline, sourceTexture, sourceArrayLayer, destinationTexture, destinationArrayLayer, flipY ) => {

@@ -286,3 +235,10 @@ const bindGroupLayout = pipeline.getBindGroupLayout( 0 ); // @TODO: Consider making this static.

binding: 1,
resource: sourceView
resource: sourceTexture.createView( {
dimension: sourceTexture.textureBindingViewDimension || '2d-array',
baseMipLevel: 0,
mipLevelCount: 1,
} ),
}, {
binding: 2,
resource: { buffer: flipY ? this.flipUniformBuffer : this.noFlipUniformBuffer }
} ]

@@ -293,6 +249,11 @@ } );

colorAttachments: [ {
view: destinationView,
view: destinationTexture.createView( {
dimension: '2d',
baseMipLevel: 0,
mipLevelCount: 1,
baseArrayLayer: destinationArrayLayer,
arrayLayerCount: 1,
} ),
loadOp: GPULoadOp.Clear,
storeOp: GPUStoreOp.Store,
clearValue: [ 0, 0, 0, 0 ]
} ]

@@ -303,3 +264,3 @@ } );

passEncoder.setBindGroup( 0, bindGroup );
passEncoder.draw( 4, 1, 0, 0 );
passEncoder.draw( 3, 1, 0, sourceArrayLayer );
passEncoder.end();

@@ -309,4 +270,4 @@

pass( transferPipeline, srcView, dstView );
pass( flipYPipeline, dstView, srcView );
pass( copyTransferPipeline, textureGPU, baseArrayLayer, tempTexture, 0, false );
pass( flipTransferPipeline, tempTexture, 0, textureGPU, baseArrayLayer, true );

@@ -323,18 +284,10 @@ this.device.queue.submit( [ commandEncoder.finish() ] );

* @param {GPUTexture} textureGPU - The GPU texture object.
* @param {Object} textureGPUDescriptor - The texture descriptor.
* @param {number} [baseArrayLayer=0] - The index of the first array layer accessible to the texture view.
* @param {?GPUCommandEncoder} [encoder=null] - An optional command encoder used to generate mipmaps.
*/
generateMipmaps( textureGPU, textureGPUDescriptor, baseArrayLayer = 0, encoder = null ) {
generateMipmaps( textureGPU, encoder = null ) {
const textureData = this.get( textureGPU );
if ( textureData.layers === undefined ) {
const passes = textureData.layers || this._mipmapCreateBundles( textureGPU );
textureData.layers = [];
}
const passes = textureData.layers[ baseArrayLayer ] || this._mipmapCreateBundles( textureGPU, textureGPUDescriptor, baseArrayLayer );
const commandEncoder = encoder || this.device.createCommandEncoder( { label: 'mipmapEncoder' } );

@@ -346,3 +299,3 @@

textureData.layers[ baseArrayLayer ] = passes;
textureData.layers = passes;

@@ -356,64 +309,63 @@ }

* @param {GPUTexture} textureGPU - The GPU texture object.
* @param {Object} textureGPUDescriptor - The texture descriptor.
* @param {number} baseArrayLayer - The index of the first array layer accessible to the texture view.
* @return {Array<Object>} An array of render bundles.
*/
_mipmapCreateBundles( textureGPU, textureGPUDescriptor, baseArrayLayer ) {
_mipmapCreateBundles( textureGPU ) {
const pipeline = this.getTransferPipeline( textureGPUDescriptor.format );
const textureBindingViewDimension = textureGPU.textureBindingViewDimension || '2d-array';
const pipeline = this.getTransferPipeline( textureGPU.format, textureBindingViewDimension );
const bindGroupLayout = pipeline.getBindGroupLayout( 0 ); // @TODO: Consider making this static.
let srcView = textureGPU.createView( {
baseMipLevel: 0,
mipLevelCount: 1,
dimension: GPUTextureViewDimension.TwoD,
baseArrayLayer
} );
const passes = [];
for ( let i = 1; i < textureGPUDescriptor.mipLevelCount; i ++ ) {
for ( let baseMipLevel = 1; baseMipLevel < textureGPU.mipLevelCount; baseMipLevel ++ ) {
const bindGroup = this.device.createBindGroup( {
layout: bindGroupLayout,
entries: [ {
binding: 0,
resource: this.mipmapSampler
}, {
binding: 1,
resource: srcView
} ]
} );
for ( let baseArrayLayer = 0; baseArrayLayer < textureGPU.depthOrArrayLayers; baseArrayLayer ++ ) {
const dstView = textureGPU.createView( {
baseMipLevel: i,
mipLevelCount: 1,
dimension: GPUTextureViewDimension.TwoD,
baseArrayLayer
} );
const bindGroup = this.device.createBindGroup( {
layout: bindGroupLayout,
entries: [ {
binding: 0,
resource: this.mipmapSampler
}, {
binding: 1,
resource: textureGPU.createView( {
dimension: textureBindingViewDimension,
baseMipLevel: baseMipLevel - 1,
mipLevelCount: 1,
} ),
}, {
binding: 2,
resource: { buffer: this.noFlipUniformBuffer }
} ]
} );
const passDescriptor = {
colorAttachments: [ {
view: dstView,
loadOp: GPULoadOp.Clear,
storeOp: GPUStoreOp.Store,
clearValue: [ 0, 0, 0, 0 ]
} ]
};
const passDescriptor = {
colorAttachments: [ {
view: textureGPU.createView( {
dimension: '2d',
baseMipLevel,
mipLevelCount: 1,
baseArrayLayer,
arrayLayerCount: 1,
} ),
loadOp: GPULoadOp.Clear,
storeOp: GPUStoreOp.Store,
} ]
};
const passEncoder = this.device.createRenderBundleEncoder( {
colorFormats: [ textureGPUDescriptor.format ]
} );
const passEncoder = this.device.createRenderBundleEncoder( {
colorFormats: [ textureGPU.format ]
} );
passEncoder.setPipeline( pipeline );
passEncoder.setBindGroup( 0, bindGroup );
passEncoder.draw( 4, 1, 0, 0 );
passEncoder.setPipeline( pipeline );
passEncoder.setBindGroup( 0, bindGroup );
passEncoder.draw( 3, 1, 0, baseArrayLayer );
passes.push( {
renderBundles: [ passEncoder.finish() ],
passDescriptor
} );
passes.push( {
renderBundles: [ passEncoder.finish() ],
passDescriptor
} );
srcView = dstView;
}

@@ -420,0 +372,0 @@ }

@@ -19,2 +19,3 @@ import {

R11_EAC_Format, SIGNED_R11_EAC_Format, RG11_EAC_Format, SIGNED_RG11_EAC_Format,
Compatibility
} from '../../../constants.js';

@@ -128,2 +129,11 @@ import { CubeTexture } from '../../../textures/CubeTexture.js';

// Depth textures without compare function must use non-filtering (nearest) sampling
if ( texture.isDepthTexture && texture.compareFunction === null ) {
samplerDescriptorGPU.magFilter = GPUFilterMode.Nearest;
samplerDescriptorGPU.minFilter = GPUFilterMode.Nearest;
samplerDescriptorGPU.mipmapFilter = GPUFilterMode.Nearest;
}
// anisotropy can only be used when all filter modes are set to linear.

@@ -137,3 +147,3 @@

if ( texture.isDepthTexture && texture.compareFunction !== null ) {
if ( texture.isDepthTexture && texture.compareFunction !== null && backend.hasCompatibility( Compatibility.TEXTURE_COMPARE ) ) {

@@ -308,4 +318,14 @@ samplerDescriptorGPU.compare = _compareToWebGPU[ texture.compareFunction ];

textureData.texture = backend.device.createTexture( textureDescriptorGPU );
try {
textureData.texture = backend.device.createTexture( textureDescriptorGPU );
} catch ( e ) {
warn( 'WebGPURenderer: Failed to create texture with descriptor:', textureDescriptorGPU );
this.createDefaultTexture( texture );
return;
}
if ( isMSAA ) {

@@ -358,22 +378,4 @@

if ( texture.isCubeTexture ) {
this._generateMipmaps( textureData.texture, encoder );
for ( let i = 0; i < 6; i ++ ) {
this._generateMipmaps( textureData.texture, textureData.textureDescriptorGPU, i, encoder );
}
} else {
const depth = texture.image.depth || 1;
for ( let i = 0; i < depth; i ++ ) {
this._generateMipmaps( textureData.texture, textureData.textureDescriptorGPU, i, encoder );
}
}
}

@@ -828,9 +830,7 @@

* @param {GPUTexture} textureGPU - The GPU texture object.
* @param {Object} textureDescriptorGPU - The texture descriptor.
* @param {number} [baseArrayLayer=0] - The index of the first array layer accessible to the texture view.
* @param {?GPUCommandEncoder} [encoder=null] - An optional command encoder used to generate mipmaps.
*/
_generateMipmaps( textureGPU, textureDescriptorGPU, baseArrayLayer = 0, encoder = null ) {
_generateMipmaps( textureGPU, encoder = null ) {
this._getPassUtils().generateMipmaps( textureGPU, textureDescriptorGPU, baseArrayLayer, encoder );
this._getPassUtils().generateMipmaps( textureGPU, encoder );

@@ -837,0 +837,0 @@ }

@@ -37,14 +37,18 @@ import { HalfFloatType, UnsignedByteType } from '../../../constants.js';

if ( renderContext.depthTexture !== null ) {
if ( renderContext.depth ) {
format = this.getTextureFormatGPU( renderContext.depthTexture );
if ( renderContext.depthTexture !== null ) {
} else if ( renderContext.depth && renderContext.stencil ) {
format = this.getTextureFormatGPU( renderContext.depthTexture );
format = GPUTextureFormat.Depth24PlusStencil8;
} else if ( renderContext.stencil ) {
} else if ( renderContext.depth ) {
format = GPUTextureFormat.Depth24PlusStencil8;
format = GPUTextureFormat.Depth24Plus;
} else {
format = GPUTextureFormat.Depth24Plus;
}
}

@@ -51,0 +55,0 @@

@@ -35,2 +35,3 @@ import Renderer from '../common/Renderer.js';

* @property {boolean} [logarithmicDepthBuffer=false] - Whether logarithmic depth buffer is enabled or not.
* @property {boolean} [reversedDepthBuffer=false] - Whether reversed depth buffer is enabled or not.
* @property {boolean} [alpha=true] - Whether the default framebuffer (which represents the final contents of the canvas) should be transparent or opaque.

@@ -37,0 +38,0 @@ * @property {boolean} [depth=true] - Whether the default framebuffer should have a depth buffer or not.

@@ -752,4 +752,4 @@ import { ArrayCamera } from '../../cameras/ArrayCamera.js';

cameraXR.layers.mask = camera.layers.mask | 0b110;
cameraL.layers.mask = cameraXR.layers.mask & 0b011;
cameraR.layers.mask = cameraXR.layers.mask & 0b101;
cameraL.layers.mask = cameraXR.layers.mask & ~ 0b100;
cameraR.layers.mask = cameraXR.layers.mask & ~ 0b010;

@@ -756,0 +756,0 @@ const parent = camera.parent;

@@ -71,3 +71,3 @@ import { EventDispatcher } from '../core/EventDispatcher.js';

/**
* The UUID of the material.
* The UUID of the texture.
*

@@ -80,3 +80,3 @@ * @type {string}

/**
* The name of the material.
* The name of the texture.
*

@@ -83,0 +83,0 @@ * @type {string}

@@ -6,3 +6,2 @@ import { REVISION } from './constants.js';

export { WebGL3DRenderTarget } from './renderers/WebGL3DRenderTarget.js';
export { WebGLCubeRenderTarget } from './renderers/WebGLCubeRenderTarget.js';
export { WebGLRenderTarget } from './renderers/WebGLRenderTarget.js';

@@ -115,2 +114,3 @@ export { WebXRController } from './renderers/webxr/WebXRController.js';

export { CubicInterpolant } from './math/interpolants/CubicInterpolant.js';
export { BezierInterpolant } from './math/interpolants/BezierInterpolant.js';
export { Interpolant } from './math/Interpolant.js';

@@ -117,0 +117,0 @@ export { Triangle } from './math/Triangle.js';

export * from './Three.Core.js';
export { WebGLRenderer } from './renderers/WebGLRenderer.js';
export { WebGLCubeRenderTarget } from './renderers/WebGLCubeRenderTarget.js';
export { ShaderLib } from './renderers/shaders/ShaderLib.js';

@@ -5,0 +6,0 @@ export { UniformsLib } from './renderers/shaders/UniformsLib.js';

@@ -32,3 +32,2 @@ import { TSL } from 'three/webgpu';

export const Schlick_to_F0 = TSL.Schlick_to_F0;
export const ScriptableNodeResources = TSL.ScriptableNodeResources;
export const ShaderNode = TSL.ShaderNode;

@@ -62,3 +61,2 @@ export const Stack = TSL.Stack;

export const atan = TSL.atan;
export const atan2 = TSL.atan2;
export const atomicAdd = TSL.atomicAdd;

@@ -103,3 +101,2 @@ export const atomicAnd = TSL.atomicAnd;

export const bumpMap = TSL.bumpMap;
export const burn = TSL.burn;
export const builtin = TSL.builtin;

@@ -133,2 +130,3 @@ export const builtinAOContext = TSL.builtinAOContext;

export const clearcoatRoughness = TSL.clearcoatRoughness;
export const clipSpace = TSL.clipSpace;
export const code = TSL.code;

@@ -176,3 +174,2 @@ export const color = TSL.color;

export const div = TSL.div;
export const dodge = TSL.dodge;
export const dot = TSL.dot;

@@ -184,6 +181,6 @@ export const drawIndex = TSL.drawIndex;

export const equal = TSL.equal;
export const equals = TSL.equals;
export const equirectUV = TSL.equirectUV;
export const exp = TSL.exp;
export const exp2 = TSL.exp2;
export const exponentialHeightFogFactor = TSL.exponentialHeightFogFactor;
export const expression = TSL.expression;

@@ -426,3 +423,2 @@ export const faceDirection = TSL.faceDirection;

export const outputStruct = TSL.outputStruct;
export const overlay = TSL.overlay;
export const overloadingFn = TSL.overloadingFn;

@@ -497,4 +493,2 @@ export const packHalf2x16 = TSL.packHalf2x16;

export const screenUV = TSL.screenUV;
export const scriptable = TSL.scriptable;
export const scriptableValue = TSL.scriptableValue;
export const select = TSL.select;

@@ -530,3 +524,2 @@ export const setCurrentStack = TSL.setCurrentStack;

export const storageBarrier = TSL.storageBarrier;
export const storageObject = TSL.storageObject;
export const storageTexture = TSL.storageTexture;

@@ -623,2 +616,4 @@ export const string = TSL.string;

export const viewZToPerspectiveDepth = TSL.viewZToPerspectiveDepth;
export const viewZToReversedOrthographicDepth = TSL.viewZToReversedOrthographicDepth;
export const viewZToReversedPerspectiveDepth = TSL.viewZToReversedPerspectiveDepth;
export const viewport = TSL.viewport;

@@ -629,2 +624,3 @@ export const viewportCoordinate = TSL.viewportCoordinate;

export const viewportMipTexture = TSL.viewportMipTexture;
export const viewportOpaqueMipTexture = TSL.viewportOpaqueMipTexture;
export const viewportResolution = TSL.viewportResolution;

@@ -631,0 +627,0 @@ export const viewportSafeUV = TSL.viewportSafeUV;

@@ -9,5 +9,7 @@ export * from './Three.Core.js';

export { default as PMREMGenerator } from './renderers/common/extras/PMREMGenerator.js';
export { default as RenderPipeline } from './renderers/common/RenderPipeline.js';
export { default as PostProcessing } from './renderers/common/PostProcessing.js';
import * as RendererUtils from './renderers/common/RendererUtils.js';
export { RendererUtils };
export { default as CubeRenderTarget } from './renderers/common/CubeRenderTarget.js';
export { default as StorageTexture } from './renderers/common/StorageTexture.js';

@@ -26,2 +28,3 @@ export { default as Storage3DTexture } from './renderers/common/Storage3DTexture.js';

export { default as CanvasTarget } from './renderers/common/CanvasTarget.js';
export { default as BlendMode } from './renderers/common/BlendMode.js';
export { ClippingGroup } from './objects/ClippingGroup.js';

@@ -28,0 +31,0 @@ export * from './nodes/Nodes.js';

@@ -9,2 +9,3 @@ export * from './Three.Core.js';

export { default as PMREMGenerator } from './renderers/common/extras/PMREMGenerator.js';
export { default as RenderPipeline } from './renderers/common/RenderPipeline.js';
export { default as PostProcessing } from './renderers/common/PostProcessing.js';

@@ -24,2 +25,3 @@ import * as RendererUtils from './renderers/common/RendererUtils.js';

export { default as CanvasTarget } from './renderers/common/CanvasTarget.js';
export { default as BlendMode } from './renderers/common/BlendMode.js';
export { ClippingGroup } from './objects/ClippingGroup.js';

@@ -26,0 +28,0 @@ export * from './nodes/Nodes.js';

@@ -0,1 +1,10 @@

import { AlwaysDepth, EqualDepth, GreaterDepth, GreaterEqualDepth, LessDepth, LessEqualDepth, NeverDepth, NotEqualDepth } from './constants.js';
/**
* Finds the minimum value in an array.
*
* @private
* @param {Array<number>} array - The array to search for the minimum value.
* @return {number} The minimum value in the array, or Infinity if the array is empty.
*/
function arrayMin( array ) {

@@ -17,2 +26,9 @@

/**
* Finds the maximum value in an array.
*
* @private
* @param {Array<number>} array - The array to search for the maximum value.
* @return {number} The maximum value in the array, or -Infinity if the array is empty.
*/
function arrayMax( array ) {

@@ -34,2 +50,14 @@

/**
* Checks if an array contains values that require Uint32 representation.
*
* This function determines whether the array contains any values >= 65535,
* which would require a Uint32Array rather than a Uint16Array for proper storage.
* The function iterates from the end of the array, assuming larger values are
* typically located at the end.
*
* @private
* @param {Array<number>} array - The array to check.
* @return {boolean} True if the array contains values >= 65535, false otherwise.
*/
function arrayNeedsUint32( array ) {

@@ -49,2 +77,10 @@

/**
* Map of typed array constructor names to their constructors.
* This mapping enables dynamic creation of typed arrays based on string type names.
*
* @private
* @constant
* @type {Object<string, TypedArrayConstructor>}
*/
const TYPED_ARRAYS = {

@@ -62,2 +98,10 @@ Int8Array: Int8Array,

/**
* Creates a typed array of the specified type from the given buffer.
*
* @private
* @param {string} type - The name of the typed array type (e.g., 'Float32Array', 'Uint16Array').
* @param {ArrayBuffer} buffer - The buffer to create the typed array from.
* @return {TypedArray} A new typed array of the specified type.
*/
function getTypedArray( type, buffer ) {

@@ -81,2 +125,12 @@

/**
* Creates an XHTML element with the specified tag name.
*
* This function uses the XHTML namespace to create DOM elements,
* ensuring proper element creation in XML-based contexts.
*
* @private
* @param {string} name - The tag name of the element to create (e.g., 'canvas', 'div').
* @return {HTMLElement} The created XHTML element.
*/
function createElementNS( name ) {

@@ -88,2 +142,11 @@

/**
* Creates a canvas element configured for block display.
*
* This is a convenience function that creates a canvas element with
* display style set to 'block', which is commonly used in three.js
* rendering contexts to avoid inline element spacing issues.
*
* @return {HTMLCanvasElement} A canvas element with display set to 'block'.
*/
function createCanvasElement() {

@@ -97,6 +160,28 @@

/**
* Internal cache for tracking warning messages to prevent duplicate warnings.
*
* @private
* @type {Object<string, boolean>}
*/
const _cache = {};
/**
* Custom console function handler for intercepting log, warn, and error calls.
*
* @private
* @type {Function|null}
*/
let _setConsoleFunction = null;
/**
* Sets a custom function to handle console output.
*
* This allows external code to intercept and handle console.log, console.warn,
* and console.error calls made by three.js, which is useful for custom logging,
* testing, or debugging workflows.
*
* @param {Function} fn - The function to handle console output. Should accept
* (type, message, ...params) where type is 'log', 'warn', or 'error'.
*/
function setConsoleFunction( fn ) {

@@ -108,2 +193,7 @@

/**
* Gets the currently set custom console function.
*
* @return {Function|null} The custom console function, or null if not set.
*/
function getConsoleFunction() {

@@ -115,2 +205,12 @@

/**
* Logs an informational message with the 'THREE.' prefix.
*
* If a custom console function is set via setConsoleFunction(), it will be used
* instead of the native console.log. The first parameter is treated as the
* method name and is automatically prefixed with 'THREE.'.
*
* @param {...any} params - The message components. The first param is used as
* the method name and prefixed with 'THREE.'.
*/
function log( ...params ) {

@@ -132,4 +232,46 @@

/**
* Enhances log/warn/error messages related to TSL.
*
* @param {Array<any>} params - The original message parameters.
* @returns {Array<any>} The filtered and enhanced message parameters.
*/
/**
 * Enhances log/warn/error message parameters for TSL-related output.
 *
 * When the first parameter is a string starting with 'TSL:' and the second
 * parameter is a captured stack trace object, the trace location is appended
 * to the message. When no trace was captured, a hint about enabling
 * "THREE.Node.captureStackTrace" is inserted instead. The array is modified
 * in place.
 *
 * @param {Array<any>} params - The original message parameters.
 * @returns {Array<any>} The same (possibly modified) parameter array.
 */
function enhanceLogMessage( params ) {

	const [ head, trace ] = params;

	if ( typeof head === 'string' && head.startsWith( 'TSL:' ) ) {

		if ( trace && trace.isStackTrace ) {

			params[ 0 ] = head + ' ' + trace.getLocation();

		} else {

			params[ 1 ] = 'Stack trace not available. Enable "THREE.Node.captureStackTrace" to capture stack traces.';

		}

	}

	return params;

}
/**
* Logs a warning message with the 'THREE.' prefix.
*
* If a custom console function is set via setConsoleFunction(), it will be used
* instead of the native console.warn. The first parameter is treated as the
* method name and is automatically prefixed with 'THREE.'.
*
* @param {...any} params - The message components. The first param is used as
* the method name and prefixed with 'THREE.'.
*/
function warn( ...params ) {
params = enhanceLogMessage( params );
const message = 'THREE.' + params.shift();

@@ -143,4 +285,14 @@

console.warn( message, ...params );
const stackTrace = params[ 0 ];
if ( stackTrace && stackTrace.isStackTrace ) {
console.warn( stackTrace.getError( message ) );
} else {
console.warn( message, ...params );
}
}

@@ -150,4 +302,16 @@

/**
* Logs an error message with the 'THREE.' prefix.
*
* If a custom console function is set via setConsoleFunction(), it will be used
* instead of the native console.error. The first parameter is treated as the
* method name and is automatically prefixed with 'THREE.'.
*
* @param {...any} params - The message components. The first param is used as
* the method name and prefixed with 'THREE.'.
*/
function error( ...params ) {
params = enhanceLogMessage( params );
const message = 'THREE.' + params.shift();

@@ -161,4 +325,14 @@

console.error( message, ...params );
const stackTrace = params[ 0 ];
if ( stackTrace && stackTrace.isStackTrace ) {
console.error( stackTrace.getError( message ) );
} else {
console.error( message, ...params );
}
}

@@ -168,2 +342,11 @@

/**
* Logs a warning message only once, preventing duplicate warnings.
*
* This function maintains an internal cache of warning messages and will only
* output each unique warning message once. Useful for warnings that may be
* triggered repeatedly but should only be shown to the user once.
*
* @param {...any} params - The warning message components.
*/
function warnOnce( ...params ) {

@@ -181,2 +364,16 @@

/**
* Asynchronously probes for WebGL sync object completion.
*
* This function creates a promise that resolves when the WebGL sync object
* signals completion or rejects if the sync operation fails. It uses polling
* at the specified interval to check the sync status without blocking the
* main thread. This is useful for GPU-CPU synchronization in WebGL contexts.
*
* @private
* @param {WebGL2RenderingContext} gl - The WebGL rendering context.
* @param {WebGLSync} sync - The WebGL sync object to wait for.
* @param {number} interval - The polling interval in milliseconds.
* @return {Promise<void>} A promise that resolves when the sync completes or rejects if it fails.
*/
function probeAsync( gl, sync, interval ) {

@@ -211,2 +408,14 @@

/**
* Converts a projection matrix from normalized device coordinates (NDC)
* range [-1, 1] to [0, 1].
*
* This conversion is commonly needed when working with depth textures or
* render targets that expect depth values in the [0, 1] range rather than
* the standard OpenGL NDC range of [-1, 1]. The function modifies the
* projection matrix in place.
*
* @private
* @param {Matrix4} projectionMatrix - The projection matrix to convert (modified in place).
*/
function toNormalizedProjectionMatrix( projectionMatrix ) {

@@ -224,2 +433,17 @@

/**
* Reverses the depth range of a projection matrix.
*
* This function inverts the depth mapping of a projection matrix, which is
* useful for reversed-Z depth buffer techniques that can improve depth
* precision. The function handles both perspective and orthographic projection
* matrices differently and modifies the matrix in place.
*
* For perspective matrices (where m[11] === -1), the depth mapping is
* reversed with an offset. For orthographic matrices, a simpler reversal
* is applied.
*
* @private
* @param {Matrix4} projectionMatrix - The projection matrix to reverse (modified in place).
*/
function toReversedProjectionMatrix( projectionMatrix ) {

@@ -245,2 +469,21 @@

export { arrayMin, arrayMax, arrayNeedsUint32, getTypedArray, createElementNS, createCanvasElement, setConsoleFunction, getConsoleFunction, log, warn, error, warnOnce, probeAsync, toNormalizedProjectionMatrix, toReversedProjectionMatrix, isTypedArray };
/**
* Used to select the correct depth functions
* when reversed depth buffer is used.
*
* @private
* @type {Object}
*/
const ReversedDepthFuncs = {
// Each standard depth comparison is mapped to its opposite
// (Never<->Always, Less<->Greater, Equal<->NotEqual, LessEqual<->GreaterEqual)
// so a material's depth test keeps its intended meaning when the
// depth buffer is reversed.
[ NeverDepth ]: AlwaysDepth,
[ LessDepth ]: GreaterDepth,
[ EqualDepth ]: NotEqualDepth,
[ LessEqualDepth ]: GreaterEqualDepth,
[ AlwaysDepth ]: NeverDepth,
[ GreaterDepth ]: LessDepth,
[ NotEqualDepth ]: EqualDepth,
[ GreaterEqualDepth ]: LessEqualDepth,
};
export { arrayMin, arrayMax, arrayNeedsUint32, getTypedArray, createElementNS, createCanvasElement, setConsoleFunction, getConsoleFunction, log, warn, error, warnOnce, probeAsync, toNormalizedProjectionMatrix, toReversedProjectionMatrix, isTypedArray, ReversedDepthFuncs };
/**
* MeshGouraudMaterial
*
* Lambert illumination model with Gouraud (per-vertex) shading
*
*/
import { UniformsUtils, UniformsLib, ShaderMaterial, Color, MultiplyOperation } from 'three';
// Shader definition backing MeshGouraudMaterial: Lambert illumination
// evaluated per-vertex (Gouraud shading) instead of per-fragment.
const GouraudShader = {
name: 'GouraudShader',
// Reuses the stock uniform groups so the material works with the built-in
// map/specular/env/ao/light/fog pipeline; only `emissive` is added on top.
uniforms: UniformsUtils.merge( [
UniformsLib.common,
UniformsLib.specularmap,
UniformsLib.envmap,
UniformsLib.aomap,
UniformsLib.lightmap,
UniformsLib.emissivemap,
UniformsLib.fog,
UniformsLib.lights,
{
emissive: { value: new Color( 0x000000 ) }
}
] ),
// Vertex stage: accumulates direct (vLightFront/vLightBack) and indirect
// (vIndirectFront/vIndirectBack) diffuse irradiance per vertex, inlining
// the legacy <lights_lambert_vertex> chunk.
vertexShader: /* glsl */`
#define GOURAUD
varying vec3 vLightFront;
varying vec3 vIndirectFront;
#ifdef DOUBLE_SIDED
varying vec3 vLightBack;
varying vec3 vIndirectBack;
#endif
#include <common>
#include <uv_pars_vertex>
#include <envmap_pars_vertex>
#include <bsdfs>
#include <lights_pars_begin>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <shadowmap_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <color_vertex>
#include <morphcolor_vertex>
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinbase_vertex>
#include <skinnormal_vertex>
#include <defaultnormal_vertex>
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
#include <worldpos_vertex>
#include <envmap_vertex>
// inlining legacy <lights_lambert_vertex>
vec3 diffuse = vec3( 1.0 );
vec3 geometryPosition = mvPosition.xyz;
vec3 geometryNormal = normalize( transformedNormal );
vec3 geometryViewDir = ( isOrthographic ) ? vec3( 0, 0, 1 ) : normalize( -mvPosition.xyz );
vec3 backGeometryNormal = - geometryNormal;
vLightFront = vec3( 0.0 );
vIndirectFront = vec3( 0.0 );
#ifdef DOUBLE_SIDED
vLightBack = vec3( 0.0 );
vIndirectBack = vec3( 0.0 );
#endif
IncidentLight directLight;
float dotNL;
vec3 directLightColor_Diffuse;
vIndirectFront += getAmbientLightIrradiance( ambientLightColor );
#if defined( USE_LIGHT_PROBES )
vIndirectFront += getLightProbeIrradiance( lightProbe, geometryNormal );
#endif
#ifdef DOUBLE_SIDED
vIndirectBack += getAmbientLightIrradiance( ambientLightColor );
#if defined( USE_LIGHT_PROBES )
vIndirectBack += getLightProbeIrradiance( lightProbe, backGeometryNormal );
#endif
#endif
#if NUM_POINT_LIGHTS > 0
#pragma unroll_loop_start
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
getPointLightInfo( pointLights[ i ], geometryPosition, directLight );
dotNL = dot( geometryNormal, directLight.direction );
directLightColor_Diffuse = directLight.color;
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
#ifdef DOUBLE_SIDED
vLightBack += saturate( - dotNL ) * directLightColor_Diffuse;
#endif
}
#pragma unroll_loop_end
#endif
#if NUM_SPOT_LIGHTS > 0
#pragma unroll_loop_start
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
getSpotLightInfo( spotLights[ i ], geometryPosition, directLight );
dotNL = dot( geometryNormal, directLight.direction );
directLightColor_Diffuse = directLight.color;
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
#ifdef DOUBLE_SIDED
vLightBack += saturate( - dotNL ) * directLightColor_Diffuse;
#endif
}
#pragma unroll_loop_end
#endif
#if NUM_DIR_LIGHTS > 0
#pragma unroll_loop_start
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
getDirectionalLightInfo( directionalLights[ i ], directLight );
dotNL = dot( geometryNormal, directLight.direction );
directLightColor_Diffuse = directLight.color;
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
#ifdef DOUBLE_SIDED
vLightBack += saturate( - dotNL ) * directLightColor_Diffuse;
#endif
}
#pragma unroll_loop_end
#endif
#if NUM_HEMI_LIGHTS > 0
#pragma unroll_loop_start
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
vIndirectFront += getHemisphereLightIrradiance( hemisphereLights[ i ], geometryNormal );
#ifdef DOUBLE_SIDED
vIndirectBack += getHemisphereLightIrradiance( hemisphereLights[ i ], backGeometryNormal );
#endif
}
#pragma unroll_loop_end
#endif
#include <shadowmap_vertex>
#include <fog_vertex>
}`,
// Fragment stage: combines the interpolated per-vertex lighting with maps,
// shadows, env mapping and fog; picks front/back varyings for double-sided
// rendering.
fragmentShader: /* glsl */`
#define GOURAUD
uniform vec3 diffuse;
uniform vec3 emissive;
uniform float opacity;
varying vec3 vLightFront;
varying vec3 vIndirectFront;
#ifdef DOUBLE_SIDED
varying vec3 vLightBack;
varying vec3 vIndirectBack;
#endif
#include <common>
#include <dithering_pars_fragment>
#include <color_pars_fragment>
#include <uv_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <alphatest_pars_fragment>
#include <aomap_pars_fragment>
#include <lightmap_pars_fragment>
#include <emissivemap_pars_fragment>
#include <envmap_common_pars_fragment>
#include <envmap_pars_fragment>
#include <bsdfs>
#include <lights_pars_begin>
#include <fog_pars_fragment>
#include <shadowmap_pars_fragment>
#include <shadowmask_pars_fragment>
#include <specularmap_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( diffuse, opacity );
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
vec3 totalEmissiveRadiance = emissive;
#include <logdepthbuf_fragment>
#include <map_fragment>
#include <color_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
#include <specularmap_fragment>
#include <emissivemap_fragment>
// accumulation
#ifdef DOUBLE_SIDED
reflectedLight.indirectDiffuse += ( gl_FrontFacing ) ? vIndirectFront : vIndirectBack;
#else
reflectedLight.indirectDiffuse += vIndirectFront;
#endif
#ifdef USE_LIGHTMAP
vec4 lightMapTexel = texture2D( lightMap, vLightMapUv );
vec3 lightMapIrradiance = lightMapTexel.rgb * lightMapIntensity;
reflectedLight.indirectDiffuse += lightMapIrradiance;
#endif
reflectedLight.indirectDiffuse *= BRDF_Lambert( diffuseColor.rgb );
#ifdef DOUBLE_SIDED
reflectedLight.directDiffuse = ( gl_FrontFacing ) ? vLightFront : vLightBack;
#else
reflectedLight.directDiffuse = vLightFront;
#endif
reflectedLight.directDiffuse *= BRDF_Lambert( diffuseColor.rgb ) * getShadowMask();
// modulation
#include <aomap_fragment>
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;
#include <envmap_fragment>
#include <opaque_fragment>
#include <tonemapping_fragment>
#include <colorspace_fragment>
#include <fog_fragment>
#include <premultiplied_alpha_fragment>
#include <dithering_fragment>
}`
};
//
class MeshGouraudMaterial extends ShaderMaterial {
/**
 * Constructs a new MeshGouraudMaterial.
 *
 * @param {Object} [parameters] - Material properties, forwarded to
 * ShaderMaterial's `setValues()` at the end of construction.
 */
constructor( parameters ) {
super();
console.warn( 'THREE.MeshGouraudMaterial: MeshGouraudMaterial has been deprecated and will be removed with r183. Use THREE.MeshLambertMaterial instead.' ); // @deprecated r173
this.isMeshGouraudMaterial = true;
this.type = 'MeshGouraudMaterial';
// The commented-out properties below are instead exposed as uniform-backed
// accessors further down (see exposePropertyNames).
//this.color = new THREE.Color( 0xffffff ); // diffuse
//this.map = null;
//this.lightMap = null;
//this.lightMapIntensity = 1.0;
//this.aoMap = null;
//this.aoMapIntensity = 1.0;
//this.emissive = new THREE.Color( 0x000000 );
//this.emissiveIntensity = 1.0;
//this.emissiveMap = null;
//this.specularMap = null;
//this.alphaMap = null;
//this.envMap = null;
this.combine = MultiplyOperation; // combine has no uniform
//this.reflectivity = 1;
//this.refractionRatio = 0.98;
this.fog = false; // set to use scene fog
this.lights = true; // set to use scene lights
this.clipping = false; // set to use user-defined clipping planes
// Wire the Gouraud shader program into the ShaderMaterial base; uniforms
// are cloned so each material instance owns its own values.
const shader = GouraudShader;
this.defines = Object.assign( {}, shader.defines );
this.uniforms = UniformsUtils.clone( shader.uniforms );
this.vertexShader = shader.vertexShader;
this.fragmentShader = shader.fragmentShader;
// Each listed property becomes a getter/setter that reads and writes the
// matching uniform's `.value`, so the material can be used like a
// built-in material (e.g. `material.map = texture`).
const exposePropertyNames = [
'map', 'lightMap', 'lightMapIntensity', 'aoMap', 'aoMapIntensity',
'emissive', 'emissiveIntensity', 'emissiveMap', 'specularMap', 'alphaMap',
'envMap', 'reflectivity', 'refractionRatio', 'opacity', 'diffuse'
];
for ( const propertyName of exposePropertyNames ) {
Object.defineProperty( this, propertyName, {
get: function () {
return this.uniforms[ propertyName ].value;
},
set: function ( value ) {
this.uniforms[ propertyName ].value = value;
}
} );
}
// `color` aliases the `diffuse` uniform accessor defined just above.
Object.defineProperty( this, 'color', Object.getOwnPropertyDescriptor( this, 'diffuse' ) );
this.setValues( parameters );
}
copy( source ) {
super.copy( source );
this.color.copy( source.color );
this.map = source.map;
this.lightMap = source.lightMap;
this.lightMapIntensity = source.lightMapIntensity;
this.aoMap = source.aoMap;
this.aoMapIntensity = source.aoMapIntensity;
this.emissive.copy( source.emissive );
this.emissiveMap = source.emissiveMap;
this.emissiveIntensity = source.emissiveIntensity;
this.specularMap = source.specularMap;
this.alphaMap = source.alphaMap;
this.envMap = source.envMap;
this.combine = source.combine;
this.reflectivity = source.reflectivity;
this.refractionRatio = source.refractionRatio;
this.wireframe = source.wireframe;
this.wireframeLinewidth = source.wireframeLinewidth;
this.wireframeLinecap = source.wireframeLinecap;
this.wireframeLinejoin = source.wireframeLinejoin;
this.fog = source.fog;
return this;
}
}
export { MeshGouraudMaterial };
import { MeshPhysicalMaterial } from 'three';
/**
* The aim of this mesh material is to use information from a post processing pass in the diffuse color pass.
* This material is based on the MeshPhysicalMaterial.
*
* In the current state, only the information of a screen space AO pass can be used in the material.
* Actually, the output of any screen space AO (SSAO, GTAO) can be used,
* as it is only necessary to provide the AO in one color channel of a texture,
* however the AO pass must be rendered prior to the color pass,
* which makes the post-processing pass somewhat of a pre-processing pass.
* For this purpose a new map (`aoPassMap`) is added to the material.
* The value of the map is used the same way as the `aoMap` value.
*
* Motivation for using the output of the AO pass directly in the material:
* The incident light of a fragment is composed of ambient light, direct light and indirect light
* Ambient Occlusion only occludes ambient light and environment light, but not direct light.
* Direct light is only occluded by geometry that casts shadows.
* And of course the emitted light should not be darkened by ambient occlusion either.
* This cannot be achieved if the AO post processing pass is simply blended with the diffuse render pass.
*
* Further extension work might be to use the output of an SSR pass or an HBIL pass from a previous frame.
* This would then create the possibility of SSR and IR depending on material properties such as `roughness`, `metalness` and `reflectivity`.
*
* @augments MeshPhysicalMaterial
* @three_import import { MeshPostProcessingMaterial } from 'three/addons/materials/MeshPostProcessingMaterial.js';
*/
class MeshPostProcessingMaterial extends MeshPhysicalMaterial {

	/**
	 * Constructs a new post processing material.
	 *
	 * @param {Object} [parameters={}] - An object with one or more properties
	 * defining the material's appearance. Any property of the material
	 * (including any property from inherited materials) can be passed
	 * in here. Color values can be passed any type of value accepted
	 * by {@link Color#set}. The additional `aoPassMap` and `aoPassMapScale`
	 * entries are consumed by this class and not forwarded to the superclass.
	 */
	constructor( parameters = {} ) {

		// Extract the custom entries without mutating the caller's object.
		// The previous implementation deleted the keys from `parameters`
		// (an observable side effect on the argument) and crashed when the
		// argument was omitted, even though it is documented as optional.
		const { aoPassMap, aoPassMapScale, ...params } = parameters;

		super( params );

		this.onBeforeCompile = this._onBeforeCompile;
		this.customProgramCacheKey = this._customProgramCacheKey;

		this._aoPassMap = aoPassMap;

		/**
		 * The scale of the AO pass.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.aoPassMapScale = aoPassMapScale || 1.0; // `||` kept so a (meaningless) scale of 0 also falls back to 1

		this._shader = null; // set once the renderer invokes onBeforeCompile

	}

	/**
	 * A texture representing the AO pass.
	 *
	 * @type {Texture}
	 */
	get aoPassMap() {

		return this._aoPassMap;

	}

	set aoPassMap( aoPassMap ) {

		this._aoPassMap = aoPassMap;
		this.needsUpdate = true; // forces a recompile so the shader chunks get patched in/out
		this._setUniforms();

	}

	/**
	 * Returns a cache key component reflecting whether an AO pass map is active,
	 * so patched and unpatched programs are cached separately.
	 *
	 * @private
	 * @return {string} The cache key component.
	 */
	_customProgramCacheKey() {

		return this._aoPassMap !== undefined && this._aoPassMap !== null ? 'aoPassMap' : '';

	}

	/**
	 * Patches the AO related shader chunks when an AO pass map is set and
	 * stores the shader object for later uniform updates.
	 *
	 * @private
	 * @param {Object} shader - The shader object provided by the renderer.
	 */
	_onBeforeCompile( shader ) {

		this._shader = shader;

		if ( this._aoPassMap !== undefined && this._aoPassMap !== null ) {

			shader.fragmentShader = shader.fragmentShader.replace(
				'#include <aomap_pars_fragment>',
				aomap_pars_fragment_replacement
			);
			shader.fragmentShader = shader.fragmentShader.replace(
				'#include <aomap_fragment>',
				aomap_fragment_replacement
			);

		}

		this._setUniforms();

	}

	/**
	 * Writes the current AO pass map and scale into the shader uniforms.
	 * A no-op until the shader has been compiled once.
	 *
	 * @private
	 */
	_setUniforms() {

		if ( this._shader ) {

			this._shader.uniforms.tAoPassMap = { value: this._aoPassMap };
			this._shader.uniforms.aoPassMapScale = { value: this.aoPassMapScale };

		}

	}

}
// Drop-in replacement for the built-in `aomap_pars_fragment` shader chunk:
// keeps the regular aoMap uniforms and additionally declares the uniforms
// for sampling the screen space AO pass texture (see MeshPostProcessingMaterial).
const aomap_pars_fragment_replacement = /* glsl */`
#ifdef USE_AOMAP
uniform sampler2D aoMap;
uniform float aoMapIntensity;
#endif
uniform sampler2D tAoPassMap;
uniform float aoPassMapScale;
`;
// Drop-in replacement for the built-in `aomap_fragment` shader chunk: fetches
// the AO from the screen space AO pass texture at the current fragment
// coordinate (scaled by `aoPassMapScale`), optionally combines it with the
// regular aoMap, and attenuates only the indirect lighting terms.
// `AOPASSMAP_SWIZZLE` selects the texture channel carrying the AO (defaults to red).
const aomap_fragment_replacement = /* glsl */`
#ifndef AOPASSMAP_SWIZZLE
#define AOPASSMAP_SWIZZLE r
#endif
float ambientOcclusion = texelFetch( tAoPassMap, ivec2( gl_FragCoord.xy * aoPassMapScale ), 0 ).AOPASSMAP_SWIZZLE;
#ifdef USE_AOMAP
// reads channel R, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
ambientOcclusion = min( ambientOcclusion, texture2D( aoMap, vAoMapUv ).r );
ambientOcclusion *= ( ambientOcclusion - 1.0 ) * aoMapIntensity + 1.0;
#endif
reflectedLight.indirectDiffuse *= ambientOcclusion;
#if defined( USE_CLEARCOAT )
clearcoatSpecularIndirect *= ambientOcclusion;
#endif
#if defined( USE_SHEEN )
sheenSpecularIndirect *= ambientOcclusion;
#endif
#if defined( USE_ENVMAP ) && defined( STANDARD )
float dotNV = saturate( dot( geometryNormal, geometryViewDir ) );
reflectedLight.indirectSpecular *= computeSpecularOcclusion( dotNV, ambientOcclusion, material.roughness );
#endif
`;
export { MeshPostProcessingMaterial };
import {
Color,
Vector3
} from 'three';
/**
* @module GodRaysShader
* @three_import import * as GodRaysShader from 'three/addons/shaders/GodRaysShader.js';
*/
/**
* God-rays (crepuscular rays)
*
* Similar implementation to the one used by Crytek for CryEngine 2 [Sousa2008].
* Blurs a mask generated from the depth map along radial lines emanating from the light
* source. The blur repeatedly applies a blur filter of increasing support but constant
* sample count to produce a blur filter with large support.
*
* My implementation performs 3 passes, similar to the implementation from Sousa. I found
* just 6 samples per pass produced acceptable results. The blur is applied three times,
* with decreasing filter support. The result is equivalent to a single pass with
* 6*6*6 = 216 samples.
*
* References:
* - [Sousa2008, Crysis Next Gen Effects, GDC2008](http://www.crytek.com/sites/default/files/GDC08_SousaT_CrysisEffects.ppt).
*
* @constant
* @type {ShaderMaterial~Shader}
*/
const GodRaysDepthMaskShader = {
name: 'GodRaysDepthMaskShader',
uniforms: {
// input texture; the fragment shader outputs its inverse to build the mask
tInput: {
value: null
}
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
varying vec2 vUv;
uniform sampler2D tInput;
void main() {
gl_FragColor = vec4( 1.0 ) - texture2D( tInput, vUv );
}`
};
/**
* The god-ray generation shader.
*
* First pass:
*
* The depth map is blurred along radial lines towards the "sun". The
* output is written to a temporary render target (I used a 1/4 sized
* target).
*
* Pass two & three:
*
* The results of the previous pass are re-blurred, each time with a
* decreased distance between samples.
*
* @constant
* @type {ShaderMaterial~Shader}
*/
const GodRaysGenerateShader = {
name: 'GodRaysGenerateShader',
uniforms: {
// result of the previous pass (or the initial depth mask)
tInput: {
value: null
},
// distance between samples in uv space; decreased in successive blur passes
fStepSize: {
value: 1.0
},
// light position in screen space; z is used to fade the rays out
vSunPositionScreenSpace: {
value: new Vector3()
}
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
#define TAPS_PER_PASS 6.0
varying vec2 vUv;
uniform sampler2D tInput;
uniform vec3 vSunPositionScreenSpace;
uniform float fStepSize; // filter step size
void main() {
// delta from current pixel to "sun" position
vec2 delta = vSunPositionScreenSpace.xy - vUv;
float dist = length( delta );
// Step vector (uv space)
vec2 stepv = fStepSize * delta / dist;
// Number of iterations between pixel and sun
float iters = dist/fStepSize;
vec2 uv = vUv.xy;
float col = 0.0;
// This breaks ANGLE in Chrome 22
// - see http://code.google.com/p/chromium/issues/detail?id=153105
/*
// Unrolling didn't do much on my hardware (ATI Mobility Radeon 3450),
// so i've just left the loop
"for ( float i = 0.0; i < TAPS_PER_PASS; i += 1.0 ) {",
// Accumulate samples, making sure we don't walk past the light source.
// The check for uv.y < 1 would not be necessary with "border" UV wrap
// mode, with a black border color. I don't think this is currently
// exposed by three.js. As a result there might be artifacts when the
// sun is to the left, right or bottom of screen as these cases are
// not specifically handled.
" col += ( i <= iters && uv.y < 1.0 ? texture2D( tInput, uv ).r : 0.0 );",
" uv += stepv;",
"}",
*/
// Unrolling loop manually makes it work in ANGLE
float f = min( 1.0, max( vSunPositionScreenSpace.z / 1000.0, 0.0 ) ); // used to fade out godrays
if ( 0.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
uv += stepv;
if ( 1.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
uv += stepv;
if ( 2.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
uv += stepv;
if ( 3.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
uv += stepv;
if ( 4.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
uv += stepv;
if ( 5.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
uv += stepv;
// Should technically be dividing by 'iters but 'TAPS_PER_PASS' smooths out
// objectionable artifacts, in particular near the sun position. The side
// effect is that the result is darker than it should be around the sun, as
// TAPS_PER_PASS is greater than the number of samples actually accumulated.
// When the result is inverted (in the shader 'godrays_combine this produces
// a slight bright spot at the position of the sun, even when it is occluded.
gl_FragColor = vec4( col/TAPS_PER_PASS );
gl_FragColor.a = 1.0;
}`
};
/**
* Additively applies god rays from texture tGodRays to a background (tColors).
* fGodRayIntensity attenuates the god rays.
*
* @constant
* @type {ShaderMaterial~Shader}
*/
const GodRaysCombineShader = {
name: 'GodRaysCombineShader',
uniforms: {
// base scene colors the rays are added onto
tColors: {
value: null
},
// output of the god-ray generation passes
tGodRays: {
value: null
},
// attenuation factor for the god rays
fGodRayIntensity: {
value: 0.69
}
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
varying vec2 vUv;
uniform sampler2D tColors;
uniform sampler2D tGodRays;
uniform float fGodRayIntensity;
void main() {
// Since THREE.MeshDepthMaterial renders foreground objects white and background
// objects black, the god-rays will be white streaks. Therefore value is inverted
// before being combined with tColors
gl_FragColor = texture2D( tColors, vUv ) + fGodRayIntensity * vec4( 1.0 - texture2D( tGodRays, vUv ).r );
gl_FragColor.a = 1.0;
}`
};
/**
* A dodgy sun/sky shader. Makes a bright spot at the sun location. Would be
* cheaper/faster/simpler to implement this as a simple sun sprite.
*
* @constant
* @type {Object}
*/
const GodRaysFakeSunShader = {
name: 'GodRaysFakeSunShader',
uniforms: {
// sun position in screen space; a non-positive z hides the sun spot
vSunPositionScreenSpace: {
value: new Vector3()
},
// viewport aspect ratio, used to keep the glow circular
fAspect: {
value: 1.0
},
// color of the sun spot
sunColor: {
value: new Color( 0xffee00 )
},
// background color the spot fades into
bgColor: {
value: new Color( 0x000000 )
}
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
varying vec2 vUv;
uniform vec3 vSunPositionScreenSpace;
uniform float fAspect;
uniform vec3 sunColor;
uniform vec3 bgColor;
void main() {
vec2 diff = vUv - vSunPositionScreenSpace.xy;
// Correct for aspect ratio
diff.x *= fAspect;
float prop = clamp( length( diff ) / 0.5, 0.0, 1.0 );
prop = 0.35 * pow( 1.0 - prop, 3.0 );
gl_FragColor.xyz = ( vSunPositionScreenSpace.z > 0.0 ) ? mix( sunColor, bgColor, 1.0 - prop ) : bgColor;
gl_FragColor.w = 1.0;
}`
};
export { GodRaysDepthMaskShader, GodRaysGenerateShader, GodRaysCombineShader, GodRaysFakeSunShader };
import { UVMapping } from '../../constants.js';
import { Euler } from '../../math/Euler.js';
import { Matrix4 } from '../../math/Matrix4.js';
import Node from '../core/Node.js';
import { renderGroup } from '../core/UniformGroupNode.js';
import { nodeImmutable, uniform } from '../tsl/TSLBase.js';
import { reference } from './ReferenceNode.js';
import { error } from '../../utils.js';
// Scratch objects reused by SceneNode to compute the background rotation
// matrix without allocating on every render update.
const _e1 = /*@__PURE__*/ new Euler();
const _m1 = /*@__PURE__*/ new Matrix4();
/**
* This module allows access to a collection of scene properties. The following predefined TSL objects
* are available for easier use:
*
* - `backgroundBlurriness`: A node that represents the scene's background blurriness.
* - `backgroundIntensity`: A node that represents the scene's background intensity.
* - `backgroundRotation`: A node that represents the scene's background rotation.
*
* @augments Node
*/
class SceneNode extends Node {

	static get type() {

		return 'SceneNode';

	}

	/**
	 * Constructs a new scene node.
	 *
	 * @param {('backgroundBlurriness'|'backgroundIntensity'|'backgroundRotation')} scope - The scope defines the type of scene property that is accessed.
	 * @param {?Scene} [scene=null] - A reference to the scene.
	 */
	constructor( scope = SceneNode.BACKGROUND_BLURRINESS, scene = null ) {

		super();

		/**
		 * The scope defines the type of scene property that is accessed.
		 *
		 * @type {('backgroundBlurriness'|'backgroundIntensity'|'backgroundRotation')}
		 */
		this.scope = scope;

		/**
		 * A reference to the scene that is going to be accessed.
		 * When `null`, the builder's scene is used instead.
		 *
		 * @type {?Scene}
		 * @default null
		 */
		this.scene = scene;

	}

	/**
	 * Depending on the scope, the method returns a different type of node that represents
	 * the respective scene property.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {Node} The output node.
	 */
	setup( builder ) {

		const { scope } = this;
		const targetScene = this.scene !== null ? this.scene : builder.scene;

		let resultNode;

		switch ( scope ) {

			case SceneNode.BACKGROUND_BLURRINESS:

				resultNode = reference( 'backgroundBlurriness', 'float', targetScene );
				break;

			case SceneNode.BACKGROUND_INTENSITY:

				resultNode = reference( 'backgroundIntensity', 'float', targetScene );
				break;

			case SceneNode.BACKGROUND_ROTATION:

				// The rotation matrix is recomputed from the scene on every render update.
				resultNode = uniform( 'mat4' ).setName( 'backgroundRotation' ).setGroup( renderGroup ).onRenderUpdate( () => {

					const background = targetScene.background;

					if ( background !== null && background.isTexture && background.mapping !== UVMapping ) {

						_e1.copy( targetScene.backgroundRotation );

						// accommodate left-handed frame
						_e1.x *= - 1; _e1.y *= - 1; _e1.z *= - 1;

						_m1.makeRotationFromEuler( _e1 );

					} else {

						_m1.identity();

					}

					return _m1;

				} );
				break;

			default:

				error( 'SceneNode: Unknown scope:', scope );

		}

		return resultNode;

	}

}

SceneNode.BACKGROUND_BLURRINESS = 'backgroundBlurriness';
SceneNode.BACKGROUND_INTENSITY = 'backgroundIntensity';
SceneNode.BACKGROUND_ROTATION = 'backgroundRotation';
export default SceneNode;
/**
 * TSL object that represents the scene's background blurriness
 * (see {@link Scene#backgroundBlurriness}).
 *
 * @tsl
 * @type {SceneNode}
 */
export const backgroundBlurriness = /*@__PURE__*/ nodeImmutable( SceneNode, SceneNode.BACKGROUND_BLURRINESS );
/**
 * TSL object that represents the scene's background intensity
 * (see {@link Scene#backgroundIntensity}).
 *
 * @tsl
 * @type {SceneNode}
 */
export const backgroundIntensity = /*@__PURE__*/ nodeImmutable( SceneNode, SceneNode.BACKGROUND_INTENSITY );
/**
 * TSL object that represents the scene's background rotation
 * (see {@link Scene#backgroundRotation}).
 *
 * @tsl
 * @type {SceneNode}
 */
export const backgroundRotation = /*@__PURE__*/ nodeImmutable( SceneNode, SceneNode.BACKGROUND_ROTATION );
import Node from '../core/Node.js';
import { scriptableValue } from './ScriptableValueNode.js';
import { nodeProxy, float } from '../tsl/TSLBase.js';
import { hashArray, hashString } from '../core/NodeUtils.js';
/**
 * A Map-like data structure for managing resources of scriptable nodes.
 * In addition to the default `Map` behavior, `get()` can lazily create
 * missing entries via a factory callback.
 *
 * @augments Map
 */
class Resources extends Map {

	/**
	 * Returns the resource for the given key. When the key is unknown and a
	 * callback is provided, the callback is invoked with the given parameters,
	 * its result is stored under the key and returned.
	 *
	 * @param {any} key - The resource key.
	 * @param {?Function} [callback=null] - An optional factory for missing entries.
	 * @param {...any} params - Arguments forwarded to the factory callback.
	 * @return {any} The resource, or `undefined` when missing and no callback was given.
	 */
	get( key, callback = null, ...params ) {

		if ( this.has( key ) === false ) {

			if ( callback === null ) return undefined;

			const resource = callback( ...params );
			this.set( key, resource );

			return resource;

		}

		return super.get( key );

	}

}
/**
 * A small proxy object handed to scriptable node scripts, giving them
 * read access to the node's parameters and layout.
 */
class Parameters {

	/**
	 * Constructs a new parameters proxy.
	 *
	 * @param {ScriptableNode} scriptableNode - The scriptable node to proxy.
	 */
	constructor( scriptableNode ) {

		this.scriptableNode = scriptableNode;

	}

	/**
	 * The parameters of the scriptable node.
	 *
	 * @type {Object}
	 */
	get parameters() {

		return this.scriptableNode.parameters;

	}

	/**
	 * The layout of the scriptable node.
	 *
	 * @type {Object}
	 */
	get layout() {

		return this.scriptableNode.getLayout();

	}

	/**
	 * Returns the input layout entry for the given id.
	 *
	 * @param {string} id - The id of the input.
	 * @return {Object} The input layout entry.
	 */
	getInputLayout( id ) {

		return this.scriptableNode.getInputLayout( id );

	}

	/**
	 * Returns the value of the parameter with the given name, or `null`
	 * when no such parameter exists.
	 *
	 * @param {string} name - The parameter name.
	 * @return {?any} The parameter value.
	 */
	get( name ) {

		const entry = this.parameters[ name ];

		if ( ! entry ) return null;

		return entry.getValue();

	}

}
/**
 * The global registry of resources (e.g. namespaces such as 'THREE' or 'TSL')
 * that are made available to the scripts of scriptable nodes.
 *
 * @type {Resources}
 */
export const ScriptableNodeResources = new Resources();
/**
* This type of node allows to implement nodes with custom scripts. The script
* section is represented as an instance of `CodeNode` written with JavaScript.
* The script itself must adhere to a specific structure.
*
* - main(): Executed once by default and every time `node.needsUpdate` is set.
* - layout: The layout object defines the script's interface (inputs and outputs).
*
* ```js
* ScriptableNodeResources.set( 'TSL', TSL );
*
* const scriptableNode = scriptable( js( `
* layout = {
* outputType: 'node',
* elements: [
* { name: 'source', inputType: 'node' },
* ]
* };
*
* const { mul, oscSine } = TSL;
*
* function main() {
* const source = parameters.get( 'source' ) || float();
* return mul( source, oscSine() );
* }
*
* ` ) );
*
* scriptableNode.setParameter( 'source', color( 1, 0, 0 ) );
*
* const material = new THREE.MeshBasicNodeMaterial();
* material.colorNode = scriptableNode;
* ```
*
* @augments Node
*/
class ScriptableNode extends Node {
static get type() {
return 'ScriptableNode';
}
/**
* Constructs a new scriptable node.
*
* @param {?CodeNode} [codeNode=null] - The code node.
* @param {Object} [parameters={}] - The parameters definition.
*/
constructor( codeNode = null, parameters = {} ) {
super();
/**
* The code node.
*
* @type {?CodeNode}
* @default null
*/
this.codeNode = codeNode;
/**
* The parameters definition.
*
* @type {Object}
* @default {}
*/
this.parameters = parameters;
// Local resources of the script; cleared when the script's layout sets `cache: false` (see getObject()).
this._local = new Resources();
// The default output of the script, updated by _exec().
this._output = scriptableValue( null );
// Named outputs defined via setOutput().
this._outputs = {};
// Snapshot of the source code, used to detect changes (see the needsUpdate getter).
this._source = this.source;
// The compiled script function (see getMethod()).
this._method = null;
// The object returned by executing the compiled script (see getObject()).
this._object = null;
// Cached result of the script's main() function.
this._value = null;
// Whether main() must be re-executed on the next _exec().
this._needsOutputUpdate = true;
// Bound once so the listener can be added and removed with a stable reference.
this.onRefresh = this.onRefresh.bind( this );
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isScriptableNode = true;
}
/**
* The source code of the scriptable node.
*
* @type {string}
*/
get source() {
return this.codeNode ? this.codeNode.code : '';
}
/**
* Sets the reference of a local script variable.
*
* @param {string} name - The variable name.
* @param {Object} value - The reference to set.
* @return {Resources} The resource map
*/
setLocal( name, value ) {
return this._local.set( name, value );
}
/**
* Gets the value of a local script variable.
*
* @param {string} name - The variable name.
* @return {Object} The value.
*/
getLocal( name ) {
return this._local.get( name );
}
/**
* Event listener for the `refresh` event of parameter value nodes.
*/
onRefresh() {
this._refresh();
}
/**
* Returns an input from the layout with the given id/name,
* or `undefined` when no matching input exists.
*
* @param {string} id - The id/name of the input.
* @return {Object} The element entry.
*/
getInputLayout( id ) {
for ( const element of this.getLayout() ) {
if ( element.inputType && ( element.id === id || element.name === id ) ) {
return element;
}
}
}
/**
* Returns an output from the layout with the given id/name,
* or `undefined` when no matching output exists.
*
* @param {string} id - The id/name of the output.
* @return {Object} The element entry.
*/
getOutputLayout( id ) {
for ( const element of this.getLayout() ) {
if ( element.outputType && ( element.id === id || element.name === id ) ) {
return element;
}
}
}
/**
* Defines a script output for the given name and value.
*
* @param {string} name - The name of the output.
* @param {Node} value - The node value.
* @return {ScriptableNode} A reference to this node.
*/
setOutput( name, value ) {
const outputs = this._outputs;
if ( outputs[ name ] === undefined ) {
outputs[ name ] = scriptableValue( value );
} else {
outputs[ name ].value = value;
}
return this;
}
/**
* Returns a script output for the given name.
*
* @param {string} name - The name of the output.
* @return {ScriptableValueNode} The node value.
*/
getOutput( name ) {
return this._outputs[ name ];
}
/**
* Returns a parameter for the given name.
*
* @param {string} name - The name of the parameter.
* @return {ScriptableValueNode} The node value.
*/
getParameter( name ) {
return this.parameters[ name ];
}
/**
* Sets a value for the given parameter name. Scriptable (value) nodes are
* stored as-is with a `refresh` listener attached; plain values are wrapped
* in a new ScriptableValueNode on first assignment.
*
* @param {string} name - The parameter name.
* @param {any} value - The parameter value.
* @return {ScriptableNode} A reference to this node.
*/
setParameter( name, value ) {
const parameters = this.parameters;
if ( value && value.isScriptableNode ) {
this.deleteParameter( name );
parameters[ name ] = value;
parameters[ name ].getDefaultOutput().events.addEventListener( 'refresh', this.onRefresh );
} else if ( value && value.isScriptableValueNode ) {
this.deleteParameter( name );
parameters[ name ] = value;
parameters[ name ].events.addEventListener( 'refresh', this.onRefresh );
} else if ( parameters[ name ] === undefined ) {
parameters[ name ] = scriptableValue( value );
parameters[ name ].events.addEventListener( 'refresh', this.onRefresh );
} else {
parameters[ name ].value = value;
}
return this;
}
/**
* Returns the value of this node which is the value of
* the default output.
*
* @return {Node} The value.
*/
getValue() {
return this.getDefaultOutput().getValue();
}
/**
* Deletes a parameter from the script.
*
* Note: this only detaches the `refresh` listener — the entry itself remains
* in {@link ScriptableNode#parameters} until it is overwritten via setParameter().
*
* @param {string} name - The parameter to remove.
* @return {ScriptableNode} A reference to this node.
*/
deleteParameter( name ) {
let valueNode = this.parameters[ name ];
if ( valueNode ) {
if ( valueNode.isScriptableNode ) valueNode = valueNode.getDefaultOutput();
valueNode.events.removeEventListener( 'refresh', this.onRefresh );
}
return this;
}
/**
* Deletes all parameters from the script and flags the script
* for recompilation.
*
* @return {ScriptableNode} A reference to this node.
*/
clearParameters() {
for ( const name of Object.keys( this.parameters ) ) {
this.deleteParameter( name );
}
this.needsUpdate = true;
return this;
}
/**
* Calls a function from the script. A no-op when the script does not
* define a function with the given name.
*
* @param {string} name - The function name.
* @param {...any} params - A list of parameters.
* @return {any} The result of the function call.
*/
call( name, ...params ) {
const object = this.getObject();
const method = object[ name ];
if ( typeof method === 'function' ) {
return method( ...params );
}
}
/**
* Asynchronously calls a function from the script.
*
* @param {string} name - The function name.
* @param {...any} params - A list of parameters.
* @return {Promise<any>} The result of the function call.
*/
async callAsync( name, ...params ) {
const object = this.getObject();
const method = object[ name ];
if ( typeof method === 'function' ) {
return method.constructor.name === 'AsyncFunction' ? await method( ...params ) : method( ...params );
}
}
/**
* Overwritten since the node type is inferred from the script's output.
*
* @param {NodeBuilder} builder - The current node builder
* @return {string} The node type.
*/
getNodeType( builder ) {
return this.getDefaultOutputNode().getNodeType( builder );
}
/**
* Refreshes the script node.
*
* @param {?string} [output=null] - An optional output.
*/
refresh( output = null ) {
if ( output !== null ) {
this.getOutput( output ).refresh();
} else {
this._refresh();
}
}
/**
* Returns an object representation of the script. The result is cached
* until the script needs to be recompiled.
*
* @return {Object} The result object.
*/
getObject() {
if ( this.needsUpdate ) this.dispose();
if ( this._object !== null ) return this._object;
//
const refresh = () => this.refresh();
const setOutput = ( id, value ) => this.setOutput( id, value );
const parameters = new Parameters( this );
const THREE = ScriptableNodeResources.get( 'THREE' );
const TSL = ScriptableNodeResources.get( 'TSL' );
const method = this.getMethod();
// The argument order must match the parameter names declared in getMethod().
const params = [ parameters, this._local, ScriptableNodeResources, refresh, setOutput, THREE, TSL ];
this._object = method( ...params );
const layout = this._object.layout;
if ( layout ) {
if ( layout.cache === false ) {
this._local.clear();
}
// default output
this._output.outputType = layout.outputType || null;
// Apply the declared input/output types to the corresponding value nodes,
// creating missing parameters/outputs on the fly.
if ( Array.isArray( layout.elements ) ) {
for ( const element of layout.elements ) {
const id = element.id || element.name;
if ( element.inputType ) {
if ( this.getParameter( id ) === undefined ) this.setParameter( id, null );
this.getParameter( id ).inputType = element.inputType;
}
if ( element.outputType ) {
if ( this.getOutput( id ) === undefined ) this.setOutput( id, null );
this.getOutput( id ).outputType = element.outputType;
}
}
}
}
return this._object;
}
/**
* Overwritten to re-attach the `refresh` event listeners to the
* deserialized parameters.
*
* @param {Object} data - The data to deserialize.
*/
deserialize( data ) {
super.deserialize( data );
for ( const name in this.parameters ) {
let valueNode = this.parameters[ name ];
if ( valueNode.isScriptableNode ) valueNode = valueNode.getDefaultOutput();
valueNode.events.addEventListener( 'refresh', this.onRefresh );
}
}
/**
* Returns the layout of the script.
*
* @return {Object} The script's layout.
*/
getLayout() {
return this.getObject().layout;
}
/**
* Returns default node output of the script. Falls back to a float node
* when the default output holds no node value.
*
* @return {Node} The default node output.
*/
getDefaultOutputNode() {
const output = this.getDefaultOutput().value;
if ( output && output.isNode ) {
return output;
}
return float();
}
/**
* Returns default output of the script.
*
* @return {ScriptableValueNode} The default output.
*/
getDefaultOutput() {
return this._exec()._output;
}
/**
* Returns a function created from the node's script. The result is cached
* until the script needs to be recompiled.
*
* @return {Function} The function representing the node's code.
*/
getMethod() {
if ( this.needsUpdate ) this.dispose();
if ( this._method !== null ) return this._method;
//
// The parameter names the script can access; must match the argument order in getObject().
const parametersProps = [ 'parameters', 'local', 'global', 'refresh', 'setOutput', 'THREE', 'TSL' ];
const interfaceProps = [ 'layout', 'init', 'main', 'dispose' ];
const properties = interfaceProps.join( ', ' );
const declarations = 'var ' + properties + '; var output = {};\n';
const returns = '\nreturn { ...output, ' + properties + ' };';
const code = declarations + this.codeNode.code + returns;
//
// NOTE: `new Function()` compiles and runs the script as arbitrary JavaScript —
// only use scriptable nodes with trusted script sources.
this._method = new Function( ...parametersProps, code );
return this._method;
}
/**
* Frees all internal resources. Invokes the script's own `dispose()`
* function when it defines one.
*/
dispose() {
if ( this._method === null ) return;
if ( this._object && typeof this._object.dispose === 'function' ) {
this._object.dispose();
}
this._method = null;
this._object = null;
this._source = null;
this._value = null;
this._needsOutputUpdate = true;
this._output.value = null;
this._outputs = {};
}
/**
* Overwritten to make the node resolve to the script's default output node.
*
* @return {Node} The default output node.
*/
setup() {
return this.getDefaultOutputNode();
}
/**
* Overwritten so the cache key reflects the script source, the default
* output and all parameters.
*
* @param {boolean} [force] - Whether to force a cache key computation (passed through to the child nodes).
* @return {number} The cache key.
*/
getCacheKey( force ) {
const values = [ hashString( this.source ), this.getDefaultOutputNode().getCacheKey( force ) ];
for ( const param in this.parameters ) {
values.push( this.parameters[ param ].getCacheKey( force ) );
}
return hashArray( values );
}
/**
* Setting this property to `true` disposes the compiled script so it is
* rebuilt on next access; setting `false` has no effect.
*
* @type {boolean}
*/
set needsUpdate( value ) {
if ( value === true ) this.dispose();
}
/**
* Whether the script needs to be (re)compiled.
*
* NOTE(review): dispose() sets `_source` to null without it ever being
* re-snapshotted, so once a script was disposed this getter stays `true`
* and the script is recompiled on each access — confirm this is intended.
*
* @type {boolean}
*/
get needsUpdate() {
return this.source !== this._source;
}
/**
* Executes the `main` function of the script.
*
* @private
* @return {ScriptableNode} A reference to this node.
*/
_exec() {
if ( this.codeNode === null ) return this;
if ( this._needsOutputUpdate === true ) {
this._value = this.call( 'main' );
this._needsOutputUpdate = false;
}
this._output.value = this._value;
return this;
}
/**
* Executes the refresh.
*
* @private
*/
_refresh() {
this.needsUpdate = true;
this._exec();
this._output.refresh();
}
}
export default ScriptableNode;
/**
 * TSL function for creating a scriptable node.
 *
 * @tsl
 * @function
 * @param {CodeNode} [codeNode] - The code node.
 * @param {?Object} [parameters={}] - The parameters definition.
 * @returns {ScriptableNode}
 */
export const scriptable = /*@__PURE__*/ nodeProxy( ScriptableNode ).setParameterLength( 1, 2 ); // callable with one or two arguments
import Node from '../core/Node.js';
import { arrayBufferToBase64, base64ToArrayBuffer } from '../core/NodeUtils.js';
import { nodeProxy, float } from '../tsl/TSLBase.js';
import { EventDispatcher } from '../../core/EventDispatcher.js';
/**
* `ScriptableNode` uses this class to manage script inputs and outputs.
*
* @augments Node
*/
class ScriptableValueNode extends Node {
static get type() {
return 'ScriptableValueNode';
}
/**
* Constructs a new scriptable node.
*
* @param {any} [value=null] - The value.
*/
constructor( value = null ) {
super();
/**
* A reference to the value.
*
* @type {any}
* @private
* @default null
*/
this._value = value;
/**
* Depending on the type of `_value`, this property might cache parsed data,
* e.g. an object URL for ArrayBuffer-backed 'URL' inputs (see getValue()).
*
* @private
* @default null
*/
this._cache = null;
/**
* If this node represents an input, this property represents the input type.
*
* @type {?string}
* @default null
*/
this.inputType = null;
/**
* If this node represents an output, this property represents the output type.
*
* @type {?string}
* @default null
*/
this.outputType = null;
/**
* An event dispatcher for managing events.
*
* @type {EventDispatcher}
*/
this.events = new EventDispatcher();
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isScriptableValueNode = true;
}
/**
* Whether this node represents an output or not.
*
* @type {boolean}
* @readonly
* @default true
*/
get isScriptableOutputNode() {
return this.outputType !== null;
}
/**
* Sets the node's value, releases a stale object URL if one was cached,
* dispatches a `change` event and triggers a refresh.
*
* @param {any} val - The value to set.
*/
set value( val ) {
if ( this._value === val ) return;
// `_cache` only exists when the previous value wrapped an ArrayBuffer (see
// getValue()), so the stale object URL is revoked before replacing the value.
if ( this._cache && this.inputType === 'URL' && this.value.value instanceof ArrayBuffer ) {
URL.revokeObjectURL( this._cache );
this._cache = null;
}
this._value = val;
this.events.dispatchEvent( { type: 'change' } );
this.refresh();
}
/**
* The node's value.
*
* @type {any}
*/
get value() {
return this._value;
}
/**
* Dispatches the `refresh` event.
*/
refresh() {
this.events.dispatchEvent( { type: 'refresh' } );
}
/**
* The `value` property usually represents a node or even binary data in form of array buffers.
* In this case, this method tries to return the actual value behind the complex type.
*
* @return {any} The value.
*/
getValue() {
const value = this.value;
if ( value && this._cache === null && this.inputType === 'URL' && value.value instanceof ArrayBuffer ) {
// Lazily create (and cache) an object URL for ArrayBuffer-backed URL inputs.
this._cache = URL.createObjectURL( new Blob( [ value.value ] ) );
} else if ( value && value.value !== null && value.value !== undefined && (
( ( this.inputType === 'URL' || this.inputType === 'String' ) && typeof value.value === 'string' ) ||
( this.inputType === 'Number' && typeof value.value === 'number' ) ||
( this.inputType === 'Vector2' && value.value.isVector2 ) ||
( this.inputType === 'Vector3' && value.value.isVector3 ) ||
( this.inputType === 'Vector4' && value.value.isVector4 ) ||
( this.inputType === 'Color' && value.value.isColor ) ||
( this.inputType === 'Matrix3' && value.value.isMatrix3 ) ||
( this.inputType === 'Matrix4' && value.value.isMatrix4 )
) ) {
// For simple typed inputs, unwrap and return the raw value.
return value.value;
}
return this._cache || value;
}
/**
* Overwritten since the node type is inferred from the value.
*
* @param {NodeBuilder} builder - The current node builder.
* @return {string} The node type.
*/
getNodeType( builder ) {
return this.value && this.value.isNode ? this.value.getNodeType( builder ) : 'float';
}
/**
* Overwritten to resolve to the node value, or to a float fallback when
* the value is not a node.
*
* @return {Node} The output node.
*/
setup() {
return this.value && this.value.isNode ? this.value : float();
}
/**
* Serializes the value, input type and output type into the given data object.
* ArrayBuffer values are stored as Base64 strings, node values by their UUID.
*
* @param {Object} data - The target data object.
*/
serialize( data ) {
super.serialize( data );
if ( this.value !== null ) {
if ( this.inputType === 'ArrayBuffer' ) {
// NOTE(review): this encodes `this.value` directly, while getValue() expects
// the buffer under `value.value` — confirm which shape 'ArrayBuffer' inputs use.
data.value = arrayBufferToBase64( this.value );
} else {
data.value = this.value ? this.value.toJSON( data.meta ).uuid : null;
}
} else {
data.value = null;
}
data.inputType = this.inputType;
data.outputType = this.outputType;
}
/**
* Restores the value, input type and output type from the given data object.
* Values are resolved from Base64 ('ArrayBuffer'), the texture registry
* ('Texture') or the node registry (everything else).
*
* @param {Object} data - The data object to deserialize from.
*/
deserialize( data ) {
super.deserialize( data );
let value = null;
if ( data.value !== null ) {
if ( data.inputType === 'ArrayBuffer' ) {
value = base64ToArrayBuffer( data.value );
} else if ( data.inputType === 'Texture' ) {
value = data.meta.textures[ data.value ];
} else {
value = data.meta.nodes[ data.value ] || null;
}
}
this.value = value;
this.inputType = data.inputType;
this.outputType = data.outputType;
}
}
export default ScriptableValueNode;
/**
 * TSL function for creating a scriptable value node.
 *
 * @tsl
 * @function
 * @param {any} [value=null] - The value. May be a node, an array buffer or any other script input/output.
 * @returns {ScriptableValueNode}
 */
export const scriptableValue = /*@__PURE__*/ nodeProxy( ScriptableValueNode ).setParameterLength( 1 );
import TempNode from '../core/TempNode.js';
import { nodeProxy } from '../tsl/TSLBase.js';
/**
 * Represents a posterize effect which reduces the number of colors
 * in an image, resulting in a more blocky and stylized appearance.
 *
 * @augments TempNode
 */
class PosterizeNode extends TempNode {

	static get type() {

		return 'PosterizeNode';

	}

	/**
	 * Constructs a new posterize node.
	 *
	 * @param {Node} sourceNode - The input color.
	 * @param {Node} stepsNode - Controls the intensity of the posterization effect. A lower number results in a more blocky appearance.
	 */
	constructor( sourceNode, stepsNode ) {

		super();

		/**
		 * The input color.
		 *
		 * @type {Node}
		 */
		this.sourceNode = sourceNode;

		/**
		 * Controls the intensity of the posterization effect. A lower number results in a more blocky appearance.
		 *
		 * @type {Node}
		 */
		this.stepsNode = stepsNode;

	}

	setup() {

		// Quantize the input: scale up by the step count, snap to whole
		// numbers and scale back down again.
		return this.sourceNode.mul( this.stepsNode ).floor().div( this.stepsNode );

	}

}

export default PosterizeNode;
/**
 * TSL function for creating a posterize node.
 *
 * @tsl
 * @function
 * @param {Node} sourceNode - The input color.
 * @param {Node} stepsNode - Controls the intensity of the posterization effect. A lower number results in a more blocky appearance.
 * @returns {PosterizeNode}
 * @see PosterizeNode
 */
export const posterize = /*@__PURE__*/ nodeProxy( PosterizeNode ).setParameterLength( 2 );
import Node from '../core/Node.js';
import { uv } from '../accessors/UV.js';
import { nodeProxy, float, vec2 } from '../tsl/TSLBase.js';
/**
 * Can be used to compute texture coordinates for animated sprite sheets.
 *
 * ```js
 * const uvNode = spritesheetUV( vec2( 6, 6 ), uv(), time.mul( animationSpeed ) );
 *
 * material.colorNode = texture( spriteSheet, uvNode );
 * ```
 *
 * @augments Node
 */
class SpriteSheetUVNode extends Node {

	static get type() {

		return 'SpriteSheetUVNode';

	}

	/**
	 * Constructs a new sprite sheet uv node.
	 *
	 * @param {Node<vec2>} countNode - The node that defines the number of sprites in the x and y direction (e.g 6x6).
	 * @param {Node<vec2>} [uvNode=uv()] - The uv node.
	 * @param {Node<float>} [frameNode=float()] - The node that defines the current frame/sprite.
	 */
	constructor( countNode, uvNode = uv(), frameNode = float( 0 ) ) {

		super( 'vec2' );

		/**
		 * The node that defines the number of sprites in the x and y direction (e.g 6x6).
		 *
		 * @type {Node<vec2>}
		 */
		this.countNode = countNode;

		/**
		 * The uv node.
		 *
		 * @type {Node<vec2>}
		 */
		this.uvNode = uvNode;

		/**
		 * The node that defines the current frame/sprite.
		 *
		 * @type {Node<float>}
		 */
		this.frameNode = frameNode;

	}

	setup() {

		const columns = this.countNode.width;
		const rows = this.countNode.height;

		// Wrap the frame counter so the animation loops over all sprites.
		const spriteIndex = this.frameNode.mod( columns.mul( rows ) ).floor();

		// Convert the linear sprite index into a column/row cell position.
		const column = spriteIndex.mod( columns );
		const row = rows.sub( spriteIndex.add( 1 ).div( columns ).ceil() );

		// Offset the uv coordinates into the cell and scale them down to its size.
		return this.uvNode.add( vec2( column, row ) ).mul( this.countNode.reciprocal() );

	}

}

export default SpriteSheetUVNode;
/**
 * TSL function for creating a sprite sheet uv node.
 *
 * @tsl
 * @function
 * @param {Node<vec2>} countNode - The node that defines the number of sprites in the x and y direction (e.g 6x6).
 * @param {?Node<vec2>} [uvNode=uv()] - The uv node.
 * @param {?Node<float>} [frameNode=float()] - The node that defines the current frame/sprite.
 * @returns {SpriteSheetUVNode}
 * @see SpriteSheetUVNode
 */
export const spritesheetUV = /*@__PURE__*/ nodeProxy( SpriteSheetUVNode ).setParameterLength( 3 );
import DataMap from '../DataMap.js';
import ChainMap from '../ChainMap.js';
import NodeBuilderState from './NodeBuilderState.js';
import NodeMaterial from '../../../materials/nodes/NodeMaterial.js';
import { cubeMapNode } from '../../../nodes/utils/CubeMapNode.js';
import { NodeFrame } from '../../../nodes/Nodes.js';
import { objectGroup, renderGroup, frameGroup, cubeTexture, texture, texture3D, vec3, fog, rangeFogFactor, densityFogFactor, reference, pmremTexture, screenUV } from '../../../nodes/TSL.js';
import { builtin } from '../../../nodes/accessors/BuiltinNode.js';
import { CubeUVReflectionMapping, EquirectangularReflectionMapping, EquirectangularRefractionMapping } from '../../../constants.js';
import { hashArray } from '../../../nodes/core/NodeUtils.js';
import { error } from '../../../utils.js';
// Maps an output target texture to the output cache key it was last built with.
const _outputNodeMap = new WeakMap();

// Module-level scratch arrays, reused across calls to avoid per-call
// allocations. They are always reset (length = 0) before a method returns.
const _chainKeys = [];
const _cacheKeyValues = [];
/**
 * This renderer module manages node-related objects and is the
 * primary interface between the renderer and the node system.
 *
 * @private
 * @augments DataMap
 */
class Nodes extends DataMap {

	/**
	 * Constructs a new nodes management component.
	 *
	 * @param {Renderer} renderer - The renderer.
	 * @param {Backend} backend - The renderer's backend.
	 */
	constructor( renderer, backend ) {

		super();

		/**
		 * The renderer.
		 *
		 * @type {Renderer}
		 */
		this.renderer = renderer;

		/**
		 * The renderer's backend.
		 *
		 * @type {Backend}
		 */
		this.backend = backend;

		/**
		 * The node frame, holding the per-frame state (renderer, scene,
		 * object, camera, material) that nodes read during updates.
		 *
		 * @type {NodeFrame}
		 */
		this.nodeFrame = new NodeFrame();

		/**
		 * A cache for managing node builder states.
		 *
		 * @type {Map<number,NodeBuilderState>}
		 */
		this.nodeBuilderCache = new Map();

		/**
		 * A cache for managing data cache key data.
		 *
		 * @type {ChainMap}
		 */
		this.callHashCache = new ChainMap();

		/**
		 * A cache for managing node uniforms group data.
		 *
		 * @type {ChainMap}
		 */
		this.groupsData = new ChainMap();

		/**
		 * A cache for managing node objects of
		 * scene properties like fog or environments.
		 *
		 * @type {Object<string,WeakMap>}
		 */
		this.cacheLib = {};

	}

	/**
	 * Returns `true` if the given node uniforms group must be updated or not.
	 *
	 * @param {NodeUniformsGroup} nodeUniformsGroup - The node uniforms group.
	 * @return {boolean} Whether the node uniforms group requires an update or not.
	 */
	updateGroup( nodeUniformsGroup ) {

		const groupNode = nodeUniformsGroup.groupNode;
		const name = groupNode.name;

		// objectGroup is always updated

		if ( name === objectGroup.name ) return true;

		// renderGroup is updated once per render/compute call

		if ( name === renderGroup.name ) {

			const uniformsGroupData = this.get( nodeUniformsGroup );
			const renderId = this.nodeFrame.renderId;

			if ( uniformsGroupData.renderId !== renderId ) {

				uniformsGroupData.renderId = renderId;

				return true;

			}

			return false;

		}

		// frameGroup is updated once per frame

		if ( name === frameGroup.name ) {

			const uniformsGroupData = this.get( nodeUniformsGroup );
			const frameId = this.nodeFrame.frameId;

			if ( uniformsGroupData.frameId !== frameId ) {

				uniformsGroupData.frameId = frameId;

				return true;

			}

			return false;

		}

		// other groups are updated just when groupNode.needsUpdate is true

		// use the shared scratch array as a composite ChainMap key
		_chainKeys[ 0 ] = groupNode;
		_chainKeys[ 1 ] = nodeUniformsGroup;

		let groupData = this.groupsData.get( _chainKeys );
		if ( groupData === undefined ) this.groupsData.set( _chainKeys, groupData = {} );

		_chainKeys.length = 0;

		if ( groupData.version !== groupNode.version ) {

			groupData.version = groupNode.version;

			return true;

		}

		return false;

	}

	/**
	 * Returns the cache key for the given render object.
	 *
	 * @param {RenderObject} renderObject - The render object.
	 * @return {number} The cache key.
	 */
	getForRenderCacheKey( renderObject ) {

		return renderObject.initialCacheKey;

	}

	/**
	 * Returns a node builder state for the given render object.
	 *
	 * @param {RenderObject} renderObject - The render object.
	 * @return {NodeBuilderState} The node builder state.
	 */
	getForRender( renderObject ) {

		const renderObjectData = this.get( renderObject );

		let nodeBuilderState = renderObjectData.nodeBuilderState;

		if ( nodeBuilderState === undefined ) {

			const { nodeBuilderCache } = this;

			const cacheKey = this.getForRenderCacheKey( renderObject );

			nodeBuilderState = nodeBuilderCache.get( cacheKey );

			if ( nodeBuilderState === undefined ) {

				const createNodeBuilder = ( material ) => {

					const nodeBuilder = this.backend.createNodeBuilder( renderObject.object, this.renderer );
					nodeBuilder.scene = renderObject.scene;
					nodeBuilder.material = material;
					nodeBuilder.camera = renderObject.camera;
					nodeBuilder.context.material = material;
					nodeBuilder.lightsNode = renderObject.lightsNode;
					nodeBuilder.environmentNode = this.getEnvironmentNode( renderObject.scene );
					nodeBuilder.fogNode = this.getFogNode( renderObject.scene );
					nodeBuilder.clippingContext = renderObject.clippingContext;
					if ( this.renderer.getOutputRenderTarget() ? this.renderer.getOutputRenderTarget().multiview : false ) {

						nodeBuilder.enableMultiview();

					}

					return nodeBuilder;

				};

				let nodeBuilder = createNodeBuilder( renderObject.material );

				try {

					nodeBuilder.build();

				} catch ( e ) {

					// the material failed to build; fall back to a default
					// NodeMaterial so rendering can continue, and report the error
					nodeBuilder = createNodeBuilder( new NodeMaterial() );
					nodeBuilder.build();

					error( 'TSL: ' + e );

				}

				nodeBuilderState = this._createNodeBuilderState( nodeBuilder );

				nodeBuilderCache.set( cacheKey, nodeBuilderState );

			}

			nodeBuilderState.usedTimes ++;

			renderObjectData.nodeBuilderState = nodeBuilderState;

		}

		return nodeBuilderState;

	}

	/**
	 * Deletes the given object from the internal data map
	 *
	 * @param {any} object - The object to delete.
	 * @return {?Object} The deleted dictionary.
	 */
	delete( object ) {

		if ( object.isRenderObject ) {

			const nodeBuilderState = this.get( object ).nodeBuilderState;
			nodeBuilderState.usedTimes --;

			// evict the builder state once no render object references it anymore
			if ( nodeBuilderState.usedTimes === 0 ) {

				this.nodeBuilderCache.delete( this.getForRenderCacheKey( object ) );

			}

		}

		return super.delete( object );

	}

	/**
	 * Returns a node builder state for the given compute node.
	 *
	 * @param {Node} computeNode - The compute node.
	 * @return {NodeBuilderState} The node builder state.
	 */
	getForCompute( computeNode ) {

		const computeData = this.get( computeNode );

		let nodeBuilderState = computeData.nodeBuilderState;

		if ( nodeBuilderState === undefined ) {

			const nodeBuilder = this.backend.createNodeBuilder( computeNode, this.renderer );
			nodeBuilder.build();

			nodeBuilderState = this._createNodeBuilderState( nodeBuilder );

			computeData.nodeBuilderState = nodeBuilderState;

		}

		return nodeBuilderState;

	}

	/**
	 * Creates a node builder state for the given node builder.
	 *
	 * @private
	 * @param {NodeBuilder} nodeBuilder - The node builder.
	 * @return {NodeBuilderState} The node builder state.
	 */
	_createNodeBuilderState( nodeBuilder ) {

		return new NodeBuilderState(
			nodeBuilder.vertexShader,
			nodeBuilder.fragmentShader,
			nodeBuilder.computeShader,
			nodeBuilder.getAttributesArray(),
			nodeBuilder.getBindings(),
			nodeBuilder.updateNodes,
			nodeBuilder.updateBeforeNodes,
			nodeBuilder.updateAfterNodes,
			nodeBuilder.observer,
			nodeBuilder.transforms
		);

	}

	/**
	 * Returns an environment node for the current configured
	 * scene environment.
	 *
	 * @param {Scene} scene - The scene.
	 * @return {Node} A node representing the current scene environment.
	 */
	getEnvironmentNode( scene ) {

		this.updateEnvironment( scene );

		let environmentNode = null;

		// an explicit scene.environmentNode takes precedence over the cached one
		if ( scene.environmentNode && scene.environmentNode.isNode ) {

			environmentNode = scene.environmentNode;

		} else {

			const sceneData = this.get( scene );

			if ( sceneData.environmentNode ) {

				environmentNode = sceneData.environmentNode;

			}

		}

		return environmentNode;

	}

	/**
	 * Returns a background node for the current configured
	 * scene background.
	 *
	 * @param {Scene} scene - The scene.
	 * @return {Node} A node representing the current scene background.
	 */
	getBackgroundNode( scene ) {

		this.updateBackground( scene );

		let backgroundNode = null;

		// an explicit scene.backgroundNode takes precedence over the cached one
		if ( scene.backgroundNode && scene.backgroundNode.isNode ) {

			backgroundNode = scene.backgroundNode;

		} else {

			const sceneData = this.get( scene );

			if ( sceneData.backgroundNode ) {

				backgroundNode = sceneData.backgroundNode;

			}

		}

		return backgroundNode;

	}

	/**
	 * Returns a fog node for the current configured scene fog.
	 *
	 * @param {Scene} scene - The scene.
	 * @return {Node} A node representing the current scene fog.
	 */
	getFogNode( scene ) {

		this.updateFog( scene );

		return scene.fogNode || this.get( scene ).fogNode || null;

	}

	/**
	 * Returns a cache key for the given scene and lights node.
	 * This key is used by `RenderObject` as a part of the dynamic
	 * cache key (a key that must be checked every time the render
	 * objects is drawn).
	 *
	 * @param {Scene} scene - The scene.
	 * @param {LightsNode} lightsNode - The lights node.
	 * @return {number} The cache key.
	 */
	getCacheKey( scene, lightsNode ) {

		_chainKeys[ 0 ] = scene;
		_chainKeys[ 1 ] = lightsNode;

		const callId = this.renderer.info.calls;

		const cacheKeyData = this.callHashCache.get( _chainKeys ) || {};

		// recompute the hash at most once per render/compute call
		if ( cacheKeyData.callId !== callId ) {

			const environmentNode = this.getEnvironmentNode( scene );
			const fogNode = this.getFogNode( scene );

			if ( lightsNode ) _cacheKeyValues.push( lightsNode.getCacheKey( true ) );
			if ( environmentNode ) _cacheKeyValues.push( environmentNode.getCacheKey() );
			if ( fogNode ) _cacheKeyValues.push( fogNode.getCacheKey() );

			_cacheKeyValues.push( this.renderer.getOutputRenderTarget() && this.renderer.getOutputRenderTarget().multiview ? 1 : 0 );
			_cacheKeyValues.push( this.renderer.shadowMap.enabled ? 1 : 0 );
			_cacheKeyValues.push( this.renderer.shadowMap.type );

			cacheKeyData.callId = callId;
			cacheKeyData.cacheKey = hashArray( _cacheKeyValues );

			this.callHashCache.set( _chainKeys, cacheKeyData );

			_cacheKeyValues.length = 0;

		}

		_chainKeys.length = 0;

		return cacheKeyData.cacheKey;

	}

	/**
	 * A boolean that indicates whether tone mapping should be enabled
	 * or not.
	 *
	 * @type {boolean}
	 */
	get isToneMappingState() {

		return this.renderer.getRenderTarget() ? false : true;

	}

	/**
	 * If a scene background is configured, this method makes sure to
	 * represent the background with a corresponding node-based implementation.
	 *
	 * @param {Scene} scene - The scene.
	 */
	updateBackground( scene ) {

		const sceneData = this.get( scene );
		const background = scene.background;

		if ( background ) {

			// rebuild when the blurriness toggles between zero and non-zero,
			// since that switches between the plain and the PMREM representation
			const forceUpdate = ( scene.backgroundBlurriness === 0 && sceneData.backgroundBlurriness > 0 ) || ( scene.backgroundBlurriness > 0 && sceneData.backgroundBlurriness === 0 );

			if ( sceneData.background !== background || forceUpdate ) {

				const backgroundNode = this.getCacheNode( 'background', background, () => {

					if ( background.isCubeTexture === true || ( background.mapping === EquirectangularReflectionMapping || background.mapping === EquirectangularRefractionMapping || background.mapping === CubeUVReflectionMapping ) ) {

						if ( scene.backgroundBlurriness > 0 || background.mapping === CubeUVReflectionMapping ) {

							return pmremTexture( background );

						} else {

							let envMap;

							if ( background.isCubeTexture === true ) {

								envMap = cubeTexture( background );

							} else {

								envMap = texture( background );

							}

							return cubeMapNode( envMap );

						}

					} else if ( background.isTexture === true ) {

						return texture( background, screenUV.flipY() ).setUpdateMatrix( true );

					} else if ( background.isColor !== true ) {

						error( 'WebGPUNodes: Unsupported background configuration.', background );

					}

				}, forceUpdate );

				sceneData.backgroundNode = backgroundNode;
				sceneData.background = background;
				sceneData.backgroundBlurriness = scene.backgroundBlurriness;

			}

		} else if ( sceneData.backgroundNode ) {

			delete sceneData.backgroundNode;
			delete sceneData.background;

		}

	}

	/**
	 * This method is part of the caching of nodes which are used to represents the
	 * scene's background, fog or environment.
	 *
	 * @param {string} type - The type of object to cache.
	 * @param {Object} object - The object.
	 * @param {Function} callback - A callback that produces a node representation for the given object.
	 * @param {boolean} [forceUpdate=false] - Whether an update should be enforced or not.
	 * @return {Node} The node representation.
	 */
	getCacheNode( type, object, callback, forceUpdate = false ) {

		const nodeCache = this.cacheLib[ type ] || ( this.cacheLib[ type ] = new WeakMap() );

		let node = nodeCache.get( object );

		if ( node === undefined || forceUpdate ) {

			node = callback();
			nodeCache.set( object, node );

		}

		return node;

	}

	/**
	 * If a scene fog is configured, this method makes sure to
	 * represent the fog with a corresponding node-based implementation.
	 *
	 * @param {Scene} scene - The scene.
	 */
	updateFog( scene ) {

		const sceneData = this.get( scene );
		const sceneFog = scene.fog;

		if ( sceneFog ) {

			if ( sceneData.fog !== sceneFog ) {

				const fogNode = this.getCacheNode( 'fog', sceneFog, () => {

					if ( sceneFog.isFogExp2 ) {

						const color = reference( 'color', 'color', sceneFog ).setGroup( renderGroup );
						const density = reference( 'density', 'float', sceneFog ).setGroup( renderGroup );

						return fog( color, densityFogFactor( density ) );

					} else if ( sceneFog.isFog ) {

						const color = reference( 'color', 'color', sceneFog ).setGroup( renderGroup );
						const near = reference( 'near', 'float', sceneFog ).setGroup( renderGroup );
						const far = reference( 'far', 'float', sceneFog ).setGroup( renderGroup );

						return fog( color, rangeFogFactor( near, far ) );

					} else {

						error( 'Renderer: Unsupported fog configuration.', sceneFog );

					}

				} );

				sceneData.fogNode = fogNode;
				sceneData.fog = sceneFog;

			}

		} else {

			delete sceneData.fogNode;
			delete sceneData.fog;

		}

	}

	/**
	 * If a scene environment is configured, this method makes sure to
	 * represent the environment with a corresponding node-based implementation.
	 *
	 * @param {Scene} scene - The scene.
	 */
	updateEnvironment( scene ) {

		const sceneData = this.get( scene );
		const environment = scene.environment;

		if ( environment ) {

			if ( sceneData.environment !== environment ) {

				const environmentNode = this.getCacheNode( 'environment', environment, () => {

					if ( environment.isCubeTexture === true ) {

						return cubeTexture( environment );

					} else if ( environment.isTexture === true ) {

						return texture( environment );

					} else {

						error( 'Nodes: Unsupported environment configuration.', environment );

					}

				} );

				sceneData.environmentNode = environmentNode;
				sceneData.environment = environment;

			}

		} else if ( sceneData.environmentNode ) {

			delete sceneData.environmentNode;
			delete sceneData.environment;

		}

	}

	/**
	 * Updates the shared node frame with the given state and returns it.
	 *
	 * @param {Renderer} [renderer=this.renderer] - The renderer.
	 * @param {?Scene} [scene=null] - The scene.
	 * @param {?Object3D} [object=null] - The 3D object.
	 * @param {?Camera} [camera=null] - The camera.
	 * @param {?Material} [material=null] - The material.
	 * @return {NodeFrame} The updated node frame.
	 */
	getNodeFrame( renderer = this.renderer, scene = null, object = null, camera = null, material = null ) {

		const nodeFrame = this.nodeFrame;
		nodeFrame.renderer = renderer;
		nodeFrame.scene = scene;
		nodeFrame.object = object;
		nodeFrame.camera = camera;
		nodeFrame.material = material;

		return nodeFrame;

	}

	/**
	 * Updates the shared node frame with the state of the given render
	 * object and returns it.
	 *
	 * @param {RenderObject} renderObject - The render object.
	 * @return {NodeFrame} The updated node frame.
	 */
	getNodeFrameForRender( renderObject ) {

		return this.getNodeFrame( renderObject.renderer, renderObject.scene, renderObject.object, renderObject.camera, renderObject.material );

	}

	/**
	 * Returns the current output cache key.
	 *
	 * @return {string} The output cache key.
	 */
	getOutputCacheKey() {

		const renderer = this.renderer;

		return renderer.toneMapping + ',' + renderer.currentColorSpace + ',' + renderer.xr.isPresenting;

	}

	/**
	 * Checks if the output configuration (tone mapping and color space) for
	 * the given target has changed.
	 *
	 * @param {Texture} outputTarget - The output target.
	 * @return {boolean} Whether the output configuration has changed or not.
	 */
	hasOutputChange( outputTarget ) {

		const cacheKey = _outputNodeMap.get( outputTarget );

		return cacheKey !== this.getOutputCacheKey();

	}

	/**
	 * Returns a node that represents the output configuration (tone mapping and
	 * color space) for the current target.
	 *
	 * @param {Texture} outputTarget - The output target.
	 * @return {Node} The output node.
	 */
	getOutputNode( outputTarget ) {

		const renderer = this.renderer;
		const cacheKey = this.getOutputCacheKey();

		const output = outputTarget.isArrayTexture ?
			texture3D( outputTarget, vec3( screenUV, builtin( 'gl_ViewID_OVR' ) ) ).renderOutput( renderer.toneMapping, renderer.currentColorSpace ) :
			texture( outputTarget, screenUV ).renderOutput( renderer.toneMapping, renderer.currentColorSpace );

		_outputNodeMap.set( outputTarget, cacheKey );

		return output;

	}

	/**
	 * Triggers the call of `updateBefore()` methods
	 * for all nodes of the given render object.
	 *
	 * @param {RenderObject} renderObject - The render object.
	 */
	updateBefore( renderObject ) {

		const nodeBuilder = renderObject.getNodeBuilderState();

		for ( const node of nodeBuilder.updateBeforeNodes ) {

			// update frame state for each node

			this.getNodeFrameForRender( renderObject ).updateBeforeNode( node );

		}

	}

	/**
	 * Triggers the call of `updateAfter()` methods
	 * for all nodes of the given render object.
	 *
	 * @param {RenderObject} renderObject - The render object.
	 */
	updateAfter( renderObject ) {

		const nodeBuilder = renderObject.getNodeBuilderState();

		for ( const node of nodeBuilder.updateAfterNodes ) {

			// update frame state for each node

			this.getNodeFrameForRender( renderObject ).updateAfterNode( node );

		}

	}

	/**
	 * Triggers the call of `update()` methods
	 * for all nodes of the given compute node.
	 *
	 * @param {Node} computeNode - The compute node.
	 */
	updateForCompute( computeNode ) {

		const nodeFrame = this.getNodeFrame();
		const nodeBuilder = this.getForCompute( computeNode );

		for ( const node of nodeBuilder.updateNodes ) {

			nodeFrame.updateNode( node );

		}

	}

	/**
	 * Triggers the call of `update()` methods
	 * for all nodes of the given render object.
	 *
	 * @param {RenderObject} renderObject - The render object.
	 */
	updateForRender( renderObject ) {

		const nodeFrame = this.getNodeFrameForRender( renderObject );
		const nodeBuilder = renderObject.getNodeBuilderState();

		for ( const node of nodeBuilder.updateNodes ) {

			nodeFrame.updateNode( node );

		}

	}

	/**
	 * Returns `true` if the given render object requires a refresh.
	 *
	 * @param {RenderObject} renderObject - The render object.
	 * @return {boolean} Whether the given render object requires a refresh or not.
	 */
	needsRefresh( renderObject ) {

		const nodeFrame = this.getNodeFrameForRender( renderObject );
		const monitor = renderObject.getMonitor();

		return monitor.needsRefresh( renderObject, nodeFrame );

	}

	/**
	 * Frees the internal resources.
	 */
	dispose() {

		super.dispose();

		this.nodeFrame = new NodeFrame();
		this.nodeBuilderCache = new Map();
		this.cacheLib = {};

	}

}

export default Nodes;
import { CubeReflectionMapping, CubeRefractionMapping, EquirectangularReflectionMapping, EquirectangularRefractionMapping } from '../../constants.js';
import { WebGLCubeRenderTarget } from '../WebGLCubeRenderTarget.js';
/**
 * Converts equirectangular textures into cube map render targets and caches
 * the result per source texture. Cached targets are released automatically
 * when the source texture is disposed.
 *
 * @param {WebGLRenderer} renderer - The renderer used for the conversion.
 * @return {{get: Function, dispose: Function}} The module interface.
 */
function WebGLCubeMaps( renderer ) {

	let registry = new WeakMap();

	// Rewrites an equirectangular mapping into the matching cube mapping
	// on the given texture and returns it.
	function remapToCube( target, sourceMapping ) {

		if ( sourceMapping === EquirectangularReflectionMapping ) {

			target.mapping = CubeReflectionMapping;

		} else if ( sourceMapping === EquirectangularRefractionMapping ) {

			target.mapping = CubeRefractionMapping;

		}

		return target;

	}

	function get( texture ) {

		if ( ! ( texture && texture.isTexture ) ) return texture;

		const mapping = texture.mapping;

		if ( mapping !== EquirectangularReflectionMapping && mapping !== EquirectangularRefractionMapping ) return texture;

		const cached = registry.get( texture );

		if ( cached !== undefined ) {

			return remapToCube( cached.texture, texture.mapping );

		}

		const image = texture.image;

		if ( image && image.height > 0 ) {

			const renderTarget = new WebGLCubeRenderTarget( image.height );
			renderTarget.fromEquirectangularTexture( renderer, texture );
			registry.set( texture, renderTarget );

			texture.addEventListener( 'dispose', onTextureDispose );

			return remapToCube( renderTarget.texture, texture.mapping );

		}

		// image not yet ready. try the conversion next frame
		return null;

	}

	function onTextureDispose( event ) {

		const texture = event.target;

		texture.removeEventListener( 'dispose', onTextureDispose );

		const cached = registry.get( texture );

		if ( cached !== undefined ) {

			registry.delete( texture );
			cached.dispose();

		}

	}

	function dispose() {

		registry = new WeakMap();

	}

	return {
		get: get,
		dispose: dispose
	};

}

export { WebGLCubeMaps };
import { CubeReflectionMapping, CubeRefractionMapping, EquirectangularReflectionMapping, EquirectangularRefractionMapping } from '../../constants.js';
import { PMREMGenerator } from '../../extras/PMREMGenerator.js';
/**
 * Converts equirectangular and cube textures into cubeUV (PMREM) render
 * targets and caches the result per source texture. Render target textures
 * are regenerated when their `pmremVersion` changes; cached targets are
 * released automatically when the source texture is disposed.
 *
 * @param {WebGLRenderer} renderer - The renderer used for the conversion.
 * @return {{get: Function, dispose: Function}} The module interface.
 */
function WebGLCubeUVMaps( renderer ) {

	let registry = new WeakMap();
	let pmremGenerator = null;

	// Lazily creates the PMREM generator on first use.
	function ensureGenerator() {

		if ( pmremGenerator === null ) pmremGenerator = new PMREMGenerator( renderer );

		return pmremGenerator;

	}

	function get( texture ) {

		if ( ! ( texture && texture.isTexture ) ) return texture;

		const mapping = texture.mapping;

		const isEquirectMap = ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping );
		const isCubeMap = ( mapping === CubeReflectionMapping || mapping === CubeRefractionMapping );

		if ( isEquirectMap === false && isCubeMap === false ) return texture;

		// equirect/cube map to cubeUV conversion

		let renderTarget = registry.get( texture );

		const currentPMREMVersion = renderTarget !== undefined ? renderTarget.texture.pmremVersion : 0;

		if ( texture.isRenderTargetTexture && texture.pmremVersion !== currentPMREMVersion ) {

			// the source render target texture changed: regenerate the PMREM,
			// reusing the existing render target when available
			const generator = ensureGenerator();

			renderTarget = isEquirectMap ? generator.fromEquirectangular( texture, renderTarget ) : generator.fromCubemap( texture, renderTarget );
			renderTarget.texture.pmremVersion = texture.pmremVersion;

			registry.set( texture, renderTarget );

			return renderTarget.texture;

		}

		if ( renderTarget !== undefined ) return renderTarget.texture;

		const image = texture.image;

		const isReady = ( isEquirectMap && image && image.height > 0 ) || ( isCubeMap && image && isCubeTextureComplete( image ) );

		if ( isReady ) {

			const generator = ensureGenerator();

			renderTarget = isEquirectMap ? generator.fromEquirectangular( texture ) : generator.fromCubemap( texture );
			renderTarget.texture.pmremVersion = texture.pmremVersion;

			registry.set( texture, renderTarget );

			texture.addEventListener( 'dispose', onTextureDispose );

			return renderTarget.texture;

		}

		// image not yet ready. try the conversion next frame
		return null;

	}

	// A cube texture is complete once all six faces are present.
	function isCubeTextureComplete( image ) {

		for ( let face = 0; face < 6; face ++ ) {

			if ( image[ face ] === undefined ) return false;

		}

		return true;

	}

	function onTextureDispose( event ) {

		const texture = event.target;

		texture.removeEventListener( 'dispose', onTextureDispose );

		const cached = registry.get( texture );

		if ( cached !== undefined ) {

			registry.delete( texture );
			cached.dispose();

		}

	}

	function dispose() {

		registry = new WeakMap();

		if ( pmremGenerator !== null ) {

			pmremGenerator.dispose();
			pmremGenerator = null;

		}

	}

	return {
		get: get,
		dispose: dispose
	};

}

export { WebGLCubeUVMaps };

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display