@pmndrs/vanilla - npm Package Compare versions

Comparing version 1.9.8 to 1.10.0

core/Outlines.cjs.js


core/index.cjs.js

@@ -1,1 +0,1 @@

"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("./pcss.cjs.js"),s=require("./Caustics.cjs.js"),r=require("./shaderMaterial.cjs.js"),a=require("./AccumulativeShadows.cjs.js"),i=require("./useFBO.cjs.js"),t=require("./Grid.cjs.js");require("three"),require("three/examples/jsm/postprocessing/Pass"),require("../materials/MeshDiscardMaterial.cjs.js"),exports.pcss=e.pcss,exports.Caustics=s.Caustics,exports.shaderMaterial=r.shaderMaterial,exports.ProgressiveLightMap=a.ProgressiveLightMap,exports.SoftShadowMaterial=a.SoftShadowMaterial,exports.useFBO=i.useFBO,exports.Grid=t.Grid;
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("./pcss.cjs.js"),s=require("./Caustics.cjs.js"),r=require("./shaderMaterial.cjs.js"),i=require("./AccumulativeShadows.cjs.js"),t=require("./useFBO.cjs.js"),a=require("./Grid.cjs.js"),u=require("./Outlines.cjs.js");require("three"),require("three/examples/jsm/postprocessing/Pass"),require("../materials/MeshDiscardMaterial.cjs.js"),require("three-stdlib"),require("lodash-es"),exports.pcss=e.pcss,exports.Caustics=s.Caustics,exports.shaderMaterial=r.shaderMaterial,exports.ProgressiveLightMap=i.ProgressiveLightMap,exports.SoftShadowMaterial=i.SoftShadowMaterial,exports.useFBO=t.useFBO,exports.Grid=a.Grid,exports.Outlines=u.Outlines;

@@ -7,1 +7,2 @@ export * from './pcss';

export * from './Grid';
export * from './Outlines';
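
This hunk and the entry below re-export the new Outlines helper from the ESM builds. A usage sketch: the color/thickness/opacity options are taken from the OutlinesMaterial uniforms visible later in this diff, while the returned group/generate shape is assumed from the pattern of the package's other factories (Grid, Caustics) and is not confirmed here:

import * as THREE from 'three';
import { Outlines } from '@pmndrs/vanilla';

const mesh = new THREE.Mesh(new THREE.BoxGeometry(), new THREE.MeshStandardMaterial());

// Options mirror the OutlinesMaterial uniforms added in 1.10.0 (color, thickness, opacity).
const outlines = Outlines({ color: new THREE.Color('black'), thickness: 0.05, opacity: 1 });

// Assumed API, following the other vanilla factories: attach the returned group to the
// mesh and build the outline geometry from it.
mesh.add(outlines.group);
outlines.generate();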

@@ -7,4 +7,7 @@ export { pcss } from './pcss.js';

export { Grid } from './Grid.js';
export { Outlines } from './Outlines.js';
import 'three';
import 'three/examples/jsm/postprocessing/Pass';
import '../materials/MeshDiscardMaterial.js';
import 'three-stdlib';
import 'lodash-es';
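
The flat ESM entry now carries side-effect imports of three-stdlib and lodash-es, so bundlers that pick up 1.10.0 will resolve and include both packages. Builds that already externalize three may want to treat the new imports the same way; a hypothetical Rollup fragment (entry point and output path are illustrative, not taken from this package):

// rollup.config.js (consumer-side sketch; externalizing is a choice, not a requirement)
export default {
  input: 'src/main.js',
  external: ['three', 'three-stdlib', 'lodash-es'],
  output: { file: 'dist/bundle.mjs', format: 'esm' },
};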

@@ -1,1 +0,1 @@

"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("three"),n=require("three/examples/jsm/postprocessing/Pass");function t(e){if(e&&e.__esModule)return e;var n=Object.create(null);return e&&Object.keys(e).forEach((function(t){if("default"!==t){var r=Object.getOwnPropertyDescriptor(e,t);Object.defineProperty(n,t,r.get?r:{enumerable:!0,get:function(){return e[t]}})}})),n.default=e,Object.freeze(n)}var r=t(e);function o(e,n,t,o){const i=Object.entries(e),a=Object.fromEntries(i.map((([e,n])=>[e,{value:n}])));class s extends r.ShaderMaterial{constructor(e){super({...e,uniforms:a,vertexShader:n,fragmentShader:t});for(const[e]of i)Object.defineProperty(this,e,{get:()=>this.uniforms[e].value,set:n=>this.uniforms[e].value=n});Object.assign(this,e),null==o||o(this)}}return s.key=r.MathUtils.generateUUID(),s}function i(e=1024,n=1024,t={samples:0,depth:!1}){var o=e,i=n,a=t,s=a.samples||0,l=a.depth,c=Object.assign({},a);delete c.samples,delete c.depth;var u=new r.WebGLRenderTarget(o,i,Object.assign({minFilter:r.LinearFilter,magFilter:r.LinearFilter,type:r.HalfFloatType},c));return l&&(u.depthTexture=new r.DepthTexture(o,i,r.FloatType)),u.samples=s,u}function a(e=r.FrontSide){const n={value:new r.Matrix4};return Object.assign(new r.MeshNormalMaterial({side:e}),{viewMatrix:n,onBeforeCompile:e=>{e.uniforms.viewMatrix=n,e.fragmentShader="vec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {\n return normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );\n }\n"+e.fragmentShader.replace("#include <normal_fragment_maps>","#include <normal_fragment_maps>\n normal = inverseTransformDirection( normal, viewMatrix );\n")}})}const s=o({causticsTexture:null,causticsTextureB:null,color:new r.Color,lightProjMatrix:new r.Matrix4,lightViewMatrix:new r.Matrix4},"varying vec3 vWorldPosition; \n void main() {\n gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(position, 1.);\n vec4 worldPosition = modelMatrix * vec4(position, 1.);\n vWorldPosition = worldPosition.xyz;\n }","varying vec3 vWorldPosition;\n uniform vec3 color;\n uniform sampler2D causticsTexture; \n uniform sampler2D causticsTextureB; \n uniform mat4 lightProjMatrix;\n uniform mat4 lightViewMatrix;\n void main() {\n // Apply caustics \n vec4 lightSpacePos = lightProjMatrix * lightViewMatrix * vec4(vWorldPosition, 1.0);\n lightSpacePos.xyz /= lightSpacePos.w;\n lightSpacePos.xyz = lightSpacePos.xyz * 0.5 + 0.5; \n vec3 front = texture2D(causticsTexture, lightSpacePos.xy).rgb;\n vec3 back = texture2D(causticsTextureB, lightSpacePos.xy).rgb;\n gl_FragColor = vec4((front + back) * color, 1.0);\n #include <tonemapping_fragment>\n #include <encodings_fragment>\n }"),l=o({cameraMatrixWorld:new r.Matrix4,cameraProjectionMatrixInv:new r.Matrix4,normalTexture:null,depthTexture:null,lightDir:new r.Vector3(0,1,0),lightPlaneNormal:new r.Vector3(0,1,0),lightPlaneConstant:0,near:.1,far:100,modelMatrix:new r.Matrix4,worldRadius:1/40,ior:1.1,bounces:0,resolution:1024,size:10,intensity:.5},"\n varying vec2 vUv;\n void main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n }"," \n uniform mat4 cameraMatrixWorld;\n uniform mat4 cameraProjectionMatrixInv;\n uniform vec3 lightDir;\n uniform vec3 lightPlaneNormal;\n uniform float lightPlaneConstant;\n uniform float near;\n uniform float far;\n uniform float time;\n uniform float worldRadius;\n uniform float resolution;\n uniform float size;\n uniform float intensity;\n uniform float ior;\n precision highp isampler2D;\n precision highp 
usampler2D;\n uniform sampler2D normalTexture;\n uniform sampler2D depthTexture;\n uniform float bounces;\n varying vec2 vUv;\n vec3 WorldPosFromDepth(float depth, vec2 coord) {\n float z = depth * 2.0 - 1.0;\n vec4 clipSpacePosition = vec4(coord * 2.0 - 1.0, z, 1.0);\n vec4 viewSpacePosition = cameraProjectionMatrixInv * clipSpacePosition;\n // Perspective division\n viewSpacePosition /= viewSpacePosition.w;\n vec4 worldSpacePosition = cameraMatrixWorld * viewSpacePosition;\n return worldSpacePosition.xyz;\n } \n float sdPlane( vec3 p, vec3 n, float h ) {\n // n must be normalized\n return dot(p,n) + h;\n }\n float planeIntersect( vec3 ro, vec3 rd, vec4 p ) {\n return -(dot(ro,p.xyz)+p.w)/dot(rd,p.xyz);\n }\n vec3 totalInternalReflection(vec3 ro, vec3 rd, vec3 pos, vec3 normal, float ior, out vec3 rayOrigin, out vec3 rayDirection) {\n rayOrigin = ro;\n rayDirection = rd;\n rayDirection = refract(rayDirection, normal, 1.0 / ior);\n rayOrigin = pos + rayDirection * 0.1;\n return rayDirection;\n }\n void main() {\n // Each sample consists of random offset in the x and y direction\n float caustic = 0.0;\n float causticTexelSize = (1.0 / resolution) * size * 2.0;\n float texelsNeeded = worldRadius / causticTexelSize;\n float sampleRadius = texelsNeeded / resolution;\n float sum = 0.0;\n if (texture2D(depthTexture, vUv).x == 1.0) {\n gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);\n return;\n }\n vec2 offset1 = vec2(-0.5, -0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 offset2 = vec2(-0.5, 0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 offset3 = vec2(0.5, 0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 offset4 = vec2(0.5, -0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 uv1 = vUv + offset1 * sampleRadius;\n vec2 uv2 = vUv + offset2 * sampleRadius;\n vec2 uv3 = vUv + offset3 * sampleRadius;\n vec2 uv4 = vUv + offset4 * sampleRadius;\n vec3 normal1 = texture2D(normalTexture, uv1, -10.0).rgb * 2.0 - 1.0;\n vec3 normal2 = texture2D(normalTexture, uv2, -10.0).rgb * 2.0 - 1.0;\n vec3 normal3 = texture2D(normalTexture, uv3, -10.0).rgb * 2.0 - 1.0;\n vec3 normal4 = texture2D(normalTexture, uv4, -10.0).rgb * 2.0 - 1.0;\n float depth1 = texture2D(depthTexture, uv1, -10.0).x;\n float depth2 = texture2D(depthTexture, uv2, -10.0).x;\n float depth3 = texture2D(depthTexture, uv3, -10.0).x;\n float depth4 = texture2D(depthTexture, uv4, -10.0).x;\n // Sanity check the depths\n if (depth1 == 1.0 || depth2 == 1.0 || depth3 == 1.0 || depth4 == 1.0) {\n gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);\n return;\n }\n vec3 pos1 = WorldPosFromDepth(depth1, uv1);\n vec3 pos2 = WorldPosFromDepth(depth2, uv2);\n vec3 pos3 = WorldPosFromDepth(depth3, uv3);\n vec3 pos4 = WorldPosFromDepth(depth4, uv4);\n vec3 originPos1 = WorldPosFromDepth(0.0, uv1);\n vec3 originPos2 = WorldPosFromDepth(0.0, uv2);\n vec3 originPos3 = WorldPosFromDepth(0.0, uv3);\n vec3 originPos4 = WorldPosFromDepth(0.0, uv4);\n vec3 endPos1, endPos2, endPos3, endPos4;\n vec3 endDir1, endDir2, endDir3, endDir4;\n totalInternalReflection(originPos1, lightDir, pos1, normal1, ior, endPos1, endDir1);\n totalInternalReflection(originPos2, lightDir, pos2, normal2, ior, endPos2, endDir2);\n totalInternalReflection(originPos3, lightDir, pos3, normal3, ior, endPos3, endDir3);\n totalInternalReflection(originPos4, lightDir, pos4, normal4, ior, endPos4, endDir4);\n float lightPosArea = length(cross(originPos2 - originPos1, originPos3 - originPos1)) + length(cross(originPos3 - originPos1, originPos4 - originPos1));\n float t1 = planeIntersect(endPos1, endDir1, 
vec4(lightPlaneNormal, lightPlaneConstant));\n float t2 = planeIntersect(endPos2, endDir2, vec4(lightPlaneNormal, lightPlaneConstant));\n float t3 = planeIntersect(endPos3, endDir3, vec4(lightPlaneNormal, lightPlaneConstant));\n float t4 = planeIntersect(endPos4, endDir4, vec4(lightPlaneNormal, lightPlaneConstant));\n vec3 finalPos1 = endPos1 + endDir1 * t1;\n vec3 finalPos2 = endPos2 + endDir2 * t2;\n vec3 finalPos3 = endPos3 + endDir3 * t3;\n vec3 finalPos4 = endPos4 + endDir4 * t4;\n float finalArea = length(cross(finalPos2 - finalPos1, finalPos3 - finalPos1)) + length(cross(finalPos3 - finalPos1, finalPos4 - finalPos1));\n caustic += intensity * (lightPosArea / finalArea);\n // Calculate the area of the triangle in light spaces\n gl_FragColor = vec4(vec3(max(caustic, 0.0)), 1.0);\n }"),c={depth:!0,minFilter:r.LinearFilter,magFilter:r.LinearFilter,type:r.UnsignedByteType},u={minFilter:r.LinearMipmapLinearFilter,magFilter:r.LinearFilter,type:r.FloatType,generateMipmaps:!0},m=o({},"void main() { }","void main() { gl_FragColor = vec4(0.0, 0.0, 0.0, 0.0); discard; }");const d=o({color:new r.Color(0),blend:2,alphaTest:.75,opacity:0,map:null},"varying vec2 vUv;\n void main() {\n gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(position, 1.);\n vUv = uv;\n }","varying vec2 vUv;\n uniform sampler2D map;\n uniform vec3 color;\n uniform float opacity;\n uniform float alphaTest;\n uniform float blend;\n void main() {\n vec4 sampledDiffuseColor = texture2D(map, vUv);\n gl_FragColor = vec4(color * sampledDiffuseColor.r * blend, max(0.0, (1.0 - (sampledDiffuseColor.r + sampledDiffuseColor.g + sampledDiffuseColor.b) / alphaTest)) * opacity);\n #include <tonemapping_fragment>\n #include <encodings_fragment>\n }");const f=o({cellSize:.5,sectionSize:1,fadeDistance:100,fadeStrength:1,cellThickness:.5,sectionThickness:1,cellColor:new r.Color,sectionColor:new r.Color,infiniteGrid:!1,followCamera:!1,worldCamProjPosition:new r.Vector3,worldPlanePosition:new r.Vector3},"\n varying vec3 localPosition;\n varying vec4 worldPosition;\n \n uniform vec3 worldCamProjPosition;\n uniform vec3 worldPlanePosition;\n uniform float fadeDistance;\n uniform bool infiniteGrid;\n uniform bool followCamera;\n \n void main() {\n localPosition = position.xzy;\n if (infiniteGrid) localPosition *= 1.0 + fadeDistance;\n \n worldPosition = modelMatrix * vec4(localPosition, 1.0);\n if (followCamera) {\n worldPosition.xyz += (worldCamProjPosition - worldPlanePosition);\n localPosition = (inverse(modelMatrix) * worldPosition).xyz;\n }\n \n gl_Position = projectionMatrix * viewMatrix * worldPosition;\n }\n ",`\n varying vec3 localPosition;\n varying vec4 worldPosition;\n \n uniform vec3 worldCamProjPosition;\n uniform float cellSize;\n uniform float sectionSize;\n uniform vec3 cellColor;\n uniform vec3 sectionColor;\n uniform float fadeDistance;\n uniform float fadeStrength;\n uniform float cellThickness;\n uniform float sectionThickness;\n \n float getGrid(float size, float thickness) {\n vec2 r = localPosition.xz / size;\n vec2 grid = abs(fract(r - 0.5) - 0.5) / fwidth(r);\n float line = min(grid.x, grid.y) + 1.0 - thickness;\n return 1.0 - min(line, 1.0);\n }\n \n void main() {\n float g1 = getGrid(cellSize, cellThickness);\n float g2 = getGrid(sectionSize, sectionThickness);\n \n float dist = distance(worldCamProjPosition, worldPosition.xyz);\n float d = 1.0 - min(dist / fadeDistance, 1.0);\n vec3 color = mix(cellColor, sectionColor, min(1.0, sectionThickness * g2));\n \n gl_FragColor = vec4(color, (g1 + g2) * pow(d, 
fadeStrength));\n gl_FragColor.a = mix(0.75 * gl_FragColor.a, gl_FragColor.a, g2);\n if (gl_FragColor.a <= 0.0) discard;\n \n #include <tonemapping_fragment>\n #include <${parseInt(r.REVISION.replace(/\D+/g,""))>=154?"colorspace_fragment":"encodings_fragment"}>\n }\n `);class h extends r.MeshPhysicalMaterial{constructor({samples:e=6,transmissionSampler:n=!1,chromaticAberration:t=.05,transmission:o=0,_transmission:i=1,transmissionMap:a=null,roughness:s=0,thickness:l=0,thicknessMap:c=null,attenuationDistance:u=1/0,attenuationColor:m=new r.Color("white"),anisotropicBlur:d=.1,time:f=0,distortion:h=0,distortionScale:v=.5,temporalDistortion:p=0,buffer:g=null}={}){super(),this.uniforms={chromaticAberration:{value:t},transmission:{value:o},_transmission:{value:i},transmissionMap:{value:a},roughness:{value:s},thickness:{value:l},thicknessMap:{value:c},attenuationDistance:{value:u},attenuationColor:{value:m},anisotropicBlur:{value:d},time:{value:f},distortion:{value:h},distortionScale:{value:v},temporalDistortion:{value:p},buffer:{value:g}},this.onBeforeCompile=t=>{t.uniforms={...t.uniforms,...this.uniforms},n?t.defines.USE_SAMPLER="":t.defines.USE_TRANSMISSION="",t.fragmentShader="\n uniform float chromaticAberration; \n uniform float anisotropicBlur; \n uniform float time;\n uniform float distortion;\n uniform float distortionScale;\n uniform float temporalDistortion;\n uniform sampler2D buffer;\n\n vec3 random3(vec3 c) {\n float j = 4096.0*sin(dot(c,vec3(17.0, 59.4, 15.0)));\n vec3 r;\n r.z = fract(512.0*j);\n j *= .125;\n r.x = fract(512.0*j);\n j *= .125;\n r.y = fract(512.0*j);\n return r-0.5;\n }\n\n float seed = 0.0;\n uint hash( uint x ) {\n x += ( x << 10u );\n x ^= ( x >> 6u );\n x += ( x << 3u );\n x ^= ( x >> 11u );\n x += ( x << 15u );\n return x;\n }\n\n // Compound versions of the hashing algorithm I whipped together.\n uint hash( uvec2 v ) { return hash( v.x ^ hash(v.y) ); }\n uint hash( uvec3 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ); }\n uint hash( uvec4 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ^ hash(v.w) ); }\n\n // Construct a float with half-open range [0:1] using low 23 bits.\n // All zeroes yields 0.0, all ones yields the next smallest representable value below 1.0.\n float floatConstruct( uint m ) {\n const uint ieeeMantissa = 0x007FFFFFu; // binary32 mantissa bitmask\n const uint ieeeOne = 0x3F800000u; // 1.0 in IEEE binary32\n m &= ieeeMantissa; // Keep only mantissa bits (fractional part)\n m |= ieeeOne; // Add fractional part to 1.0\n float f = uintBitsToFloat( m ); // Range [1:2]\n return f - 1.0; // Range [0:1]\n }\n\n // Pseudo-random value in half-open range [0:1].\n float random( float x ) { return floatConstruct(hash(floatBitsToUint(x))); }\n float random( vec2 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n float random( vec3 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n float random( vec4 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n\n float rand() {\n float result = random(vec3(gl_FragCoord.xy, seed));\n seed += 1.0;\n return result;\n }\n\n const float F3 = 0.3333333;\n const float G3 = 0.1666667;\n\n float snoise(vec3 p) {\n vec3 s = floor(p + dot(p, vec3(F3)));\n vec3 x = p - s + dot(s, vec3(G3));\n vec3 e = step(vec3(0.0), x - x.yzx);\n vec3 i1 = e*(1.0 - e.zxy);\n vec3 i2 = 1.0 - e.zxy*(1.0 - e);\n vec3 x1 = x - i1 + G3;\n vec3 x2 = x - i2 + 2.0*G3;\n vec3 x3 = x - 1.0 + 3.0*G3;\n vec4 w, d;\n w.x = dot(x, x);\n w.y = dot(x1, x1);\n w.z = dot(x2, x2);\n w.w = dot(x3, x3);\n w = max(0.6 - w, 0.0);\n d.x = 
dot(random3(s), x);\n d.y = dot(random3(s + i1), x1);\n d.z = dot(random3(s + i2), x2);\n d.w = dot(random3(s + 1.0), x3);\n w *= w;\n w *= w;\n d *= w;\n return dot(d, vec4(52.0));\n }\n\n float snoiseFractal(vec3 m) {\n return 0.5333333* snoise(m)\n +0.2666667* snoise(2.0*m)\n +0.1333333* snoise(4.0*m)\n +0.0666667* snoise(8.0*m);\n }\n"+t.fragmentShader,t.fragmentShader=t.fragmentShader.replace("#include <transmission_pars_fragment>","\n #ifdef USE_TRANSMISSION\n // Transmission code is based on glTF-Sampler-Viewer\n // https://github.com/KhronosGroup/glTF-Sample-Viewer\n uniform float _transmission;\n uniform float thickness;\n uniform float attenuationDistance;\n uniform vec3 attenuationColor;\n #ifdef USE_TRANSMISSIONMAP\n uniform sampler2D transmissionMap;\n #endif\n #ifdef USE_THICKNESSMAP\n uniform sampler2D thicknessMap;\n #endif\n uniform vec2 transmissionSamplerSize;\n uniform sampler2D transmissionSamplerMap;\n uniform mat4 modelMatrix;\n uniform mat4 projectionMatrix;\n varying vec3 vWorldPosition;\n vec3 getVolumeTransmissionRay( const in vec3 n, const in vec3 v, const in float thickness, const in float ior, const in mat4 modelMatrix ) {\n // Direction of refracted light.\n vec3 refractionVector = refract( - v, normalize( n ), 1.0 / ior );\n // Compute rotation-independant scaling of the model matrix.\n vec3 modelScale;\n modelScale.x = length( vec3( modelMatrix[ 0 ].xyz ) );\n modelScale.y = length( vec3( modelMatrix[ 1 ].xyz ) );\n modelScale.z = length( vec3( modelMatrix[ 2 ].xyz ) );\n // The thickness is specified in local space.\n return normalize( refractionVector ) * thickness * modelScale;\n }\n float applyIorToRoughness( const in float roughness, const in float ior ) {\n // Scale roughness with IOR so that an IOR of 1.0 results in no microfacet refraction and\n // an IOR of 1.5 results in the default amount of microfacet refraction.\n return roughness * clamp( ior * 2.0 - 2.0, 0.0, 1.0 );\n }\n vec4 getTransmissionSample( const in vec2 fragCoord, const in float roughness, const in float ior ) {\n float framebufferLod = log2( transmissionSamplerSize.x ) * applyIorToRoughness( roughness, ior ); \n #ifdef USE_SAMPLER\n #ifdef texture2DLodEXT\n return texture2DLodEXT(transmissionSamplerMap, fragCoord.xy, framebufferLod);\n #else\n return texture2D(transmissionSamplerMap, fragCoord.xy, framebufferLod);\n #endif\n #else\n return texture2D(buffer, fragCoord.xy);\n #endif\n }\n vec3 applyVolumeAttenuation( const in vec3 radiance, const in float transmissionDistance, const in vec3 attenuationColor, const in float attenuationDistance ) {\n if ( isinf( attenuationDistance ) ) {\n // Attenuation distance is +∞, i.e. 
the transmitted color is not attenuated at all.\n return radiance;\n } else {\n // Compute light attenuation using Beer's law.\n vec3 attenuationCoefficient = -log( attenuationColor ) / attenuationDistance;\n vec3 transmittance = exp( - attenuationCoefficient * transmissionDistance ); // Beer's law\n return transmittance * radiance;\n }\n }\n vec4 getIBLVolumeRefraction( const in vec3 n, const in vec3 v, const in float roughness, const in vec3 diffuseColor,\n const in vec3 specularColor, const in float specularF90, const in vec3 position, const in mat4 modelMatrix,\n const in mat4 viewMatrix, const in mat4 projMatrix, const in float ior, const in float thickness,\n const in vec3 attenuationColor, const in float attenuationDistance ) {\n vec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, ior, modelMatrix );\n vec3 refractedRayExit = position + transmissionRay;\n // Project refracted vector on the framebuffer, while mapping to normalized device coordinates.\n vec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );\n vec2 refractionCoords = ndcPos.xy / ndcPos.w;\n refractionCoords += 1.0;\n refractionCoords /= 2.0;\n // Sample framebuffer to get pixel the refracted ray hits.\n vec4 transmittedLight = getTransmissionSample( refractionCoords, roughness, ior );\n vec3 attenuatedColor = applyVolumeAttenuation( transmittedLight.rgb, length( transmissionRay ), attenuationColor, attenuationDistance );\n // Get the specular component.\n vec3 F = EnvironmentBRDF( n, v, specularColor, specularF90, roughness );\n return vec4( ( 1.0 - F ) * attenuatedColor * diffuseColor, transmittedLight.a );\n }\n #endif\n"),t.fragmentShader=t.fragmentShader.replace("#include <transmission_fragment>",` \n // Improve the refraction to use the world pos\n material.transmission = _transmission;\n material.transmissionAlpha = 1.0;\n material.thickness = thickness;\n material.attenuationDistance = attenuationDistance;\n material.attenuationColor = attenuationColor;\n #ifdef USE_TRANSMISSIONMAP\n material.transmission *= texture2D( transmissionMap, vUv ).r;\n #endif\n #ifdef USE_THICKNESSMAP\n material.thickness *= texture2D( thicknessMap, vUv ).g;\n #endif\n \n vec3 pos = vWorldPosition;\n vec3 v = normalize( cameraPosition - pos );\n vec3 n = inverseTransformDirection( normal, viewMatrix );\n vec3 transmission = vec3(0.0);\n float transmissionR, transmissionB, transmissionG;\n float randomCoords = rand();\n float thickness_smear = thickness * max(pow(roughnessFactor, 0.33), anisotropicBlur);\n vec3 distortionNormal = vec3(0.0);\n vec3 temporalOffset = vec3(time, -time, -time) * temporalDistortion;\n if (distortion > 0.0) {\n distortionNormal = distortion * vec3(snoiseFractal(vec3((pos * distortionScale + temporalOffset))), snoiseFractal(vec3(pos.zxy * distortionScale - temporalOffset)), snoiseFractal(vec3(pos.yxz * distortionScale + temporalOffset)));\n }\n for (float i = 0.0; i < ${e}.0; i ++) {\n vec3 sampleNorm = normalize(n + roughnessFactor * roughnessFactor * 2.0 * normalize(vec3(rand() - 0.5, rand() - 0.5, rand() - 0.5)) * pow(rand(), 0.33) + distortionNormal);\n transmissionR = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior, material.thickness + thickness_smear * (i + randomCoords) / float(${e}),\n material.attenuationColor, material.attenuationDistance\n ).r;\n transmissionG = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, 
material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + chromaticAberration * (i + randomCoords) / float(${e})) , material.thickness + thickness_smear * (i + randomCoords) / float(${e}),\n material.attenuationColor, material.attenuationDistance\n ).g;\n transmissionB = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + 2.0 * chromaticAberration * (i + randomCoords) / float(${e})), material.thickness + thickness_smear * (i + randomCoords) / float(${e}),\n material.attenuationColor, material.attenuationDistance\n ).b;\n transmission.r += transmissionR;\n transmission.g += transmissionG;\n transmission.b += transmissionB;\n }\n transmission /= ${e}.0;\n totalDiffuse = mix( totalDiffuse, transmission.rgb, material.transmission );\n`)},Object.keys(this.uniforms).forEach((e=>Object.defineProperty(this,e,{get:()=>this.uniforms[e].value,set:n=>this.uniforms[e].value=n})))}}const v=o({depth:null,opacity:1,attenuation:2.5,anglePower:12,spotPosition:new e.Vector3(0,0,0),lightColor:new e.Color("white"),cameraNear:0,cameraFar:1,resolution:new e.Vector2(0,0),transparent:!0,depthWrite:!1},"\n varying vec3 vNormal;\n varying vec3 vWorldPosition;\n varying float vViewZ;\n varying float vIntensity;\n uniform vec3 spotPosition;\n uniform float attenuation;\n\n void main() {\n // compute intensity\n vNormal = normalize( normalMatrix * normal );\n vec4 worldPosition\t= modelMatrix * vec4( position, 1.0 );\n vWorldPosition = worldPosition.xyz;\n vec4 viewPosition = viewMatrix * worldPosition;\n vViewZ = viewPosition.z;\n float intensity\t= distance(worldPosition.xyz, spotPosition) / attenuation;\n intensity\t= 1.0 - clamp(intensity, 0.0, 1.0);\n vIntensity = intensity;\n // set gl_Position\n gl_Position\t= projectionMatrix * viewPosition;\n\n }","\n #include <packing>\n\n varying vec3 vNormal;\n varying vec3 vWorldPosition;\n uniform vec3 lightColor;\n uniform vec3 spotPosition;\n uniform float attenuation;\n uniform float anglePower;\n uniform sampler2D depth;\n uniform vec2 resolution;\n uniform float cameraNear;\n uniform float cameraFar;\n varying float vViewZ;\n varying float vIntensity;\n uniform float opacity;\n\n float readDepth( sampler2D depthSampler, vec2 coord ) {\n float fragCoordZ = texture2D( depthSampler, coord ).x;\n float viewZ = perspectiveDepthToViewZ(fragCoordZ, cameraNear, cameraFar);\n return viewZ;\n }\n\n void main() {\n float d = 1.0;\n bool isSoft = resolution[0] > 0.0 && resolution[1] > 0.0;\n if (isSoft) {\n vec2 sUv = gl_FragCoord.xy / resolution;\n d = readDepth(depth, sUv);\n }\n float intensity = vIntensity;\n vec3 normal\t= vec3(vNormal.x, vNormal.y, abs(vNormal.z));\n float angleIntensity\t= pow( dot(normal, vec3(0.0, 0.0, 1.0)), anglePower );\n intensity\t*= angleIntensity;\n // fades when z is close to sampled depth, meaning the cone is intersecting existing geometry\n if (isSoft) {\n intensity\t*= smoothstep(0., 1., vViewZ - d);\n }\n gl_FragColor = vec4(lightColor, intensity * opacity);\n\n #include <tonemapping_fragment>\n #include <encodings_fragment>\n }");class p extends e.ShaderMaterial{constructor(n=new e.Vector2){super({uniforms:{inputBuffer:new e.Uniform(null),depthBuffer:new e.Uniform(null),resolution:new e.Uniform(new e.Vector2),texelSize:new e.Uniform(new e.Vector2),halfTexelSize:new e.Uniform(new e.Vector2),kernel:new 
e.Uniform(0),scale:new e.Uniform(1),cameraNear:new e.Uniform(0),cameraFar:new e.Uniform(1),minDepthThreshold:new e.Uniform(0),maxDepthThreshold:new e.Uniform(1),depthScale:new e.Uniform(0),depthToBlurRatioBias:new e.Uniform(.25)},fragmentShader:"#include <common>\n #include <dithering_pars_fragment> \n uniform sampler2D inputBuffer;\n uniform sampler2D depthBuffer;\n uniform float cameraNear;\n uniform float cameraFar;\n uniform float minDepthThreshold;\n uniform float maxDepthThreshold;\n uniform float depthScale;\n uniform float depthToBlurRatioBias;\n varying vec2 vUv;\n varying vec2 vUv0;\n varying vec2 vUv1;\n varying vec2 vUv2;\n varying vec2 vUv3;\n\n void main() {\n float depthFactor = 0.0;\n \n #ifdef USE_DEPTH\n vec4 depth = texture2D(depthBuffer, vUv);\n depthFactor = smoothstep(minDepthThreshold, maxDepthThreshold, 1.0-(depth.r * depth.a));\n depthFactor *= depthScale;\n depthFactor = max(0.0, min(1.0, depthFactor + 0.25));\n #endif\n \n vec4 sum = texture2D(inputBuffer, mix(vUv0, vUv, depthFactor));\n sum += texture2D(inputBuffer, mix(vUv1, vUv, depthFactor));\n sum += texture2D(inputBuffer, mix(vUv2, vUv, depthFactor));\n sum += texture2D(inputBuffer, mix(vUv3, vUv, depthFactor));\n gl_FragColor = sum * 0.25 ;\n\n #include <dithering_fragment>\n #include <tonemapping_fragment>\n #include <encodings_fragment>\n }",vertexShader:"uniform vec2 texelSize;\n uniform vec2 halfTexelSize;\n uniform float kernel;\n uniform float scale;\n varying vec2 vUv;\n varying vec2 vUv0;\n varying vec2 vUv1;\n varying vec2 vUv2;\n varying vec2 vUv3;\n\n void main() {\n vec2 uv = position.xy * 0.5 + 0.5;\n vUv = uv;\n\n vec2 dUv = (texelSize * vec2(kernel) + halfTexelSize) * scale;\n vUv0 = vec2(uv.x - dUv.x, uv.y + dUv.y);\n vUv1 = vec2(uv.x + dUv.x, uv.y + dUv.y);\n vUv2 = vec2(uv.x + dUv.x, uv.y - dUv.y);\n vUv3 = vec2(uv.x - dUv.x, uv.y - dUv.y);\n\n gl_Position = vec4(position.xy, 1.0, 1.0);\n }",blending:e.NoBlending,depthWrite:!1,depthTest:!1}),this.toneMapped=!1,this.setTexelSize(n.x,n.y),this.kernel=new Float32Array([0,1,2,2,3])}setTexelSize(e,n){this.uniforms.texelSize.value.set(e,n),this.uniforms.halfTexelSize.value.set(e,n).multiplyScalar(.5)}setResolution(e){this.uniforms.resolution.value.copy(e)}}class g extends e.MeshStandardMaterial{constructor(e={}){super(),this._tDepth={value:null},this._distortionMap={value:null},this._tDiffuse={value:null},this._tDiffuseBlur={value:null},this._textureMatrix={value:null},this._hasBlur={value:!1},this._mirror={value:0},this._mixBlur={value:0},this._blurStrength={value:.5},this._minDepthThreshold={value:.9},this._maxDepthThreshold={value:1},this._depthScale={value:0},this._depthToBlurRatioBias={value:.25},this._distortion={value:1},this._mixContrast={value:1},this._tDepth={value:null},this._distortionMap={value:null},this._tDiffuse={value:null},this._tDiffuseBlur={value:null},this._textureMatrix={value:null},this._hasBlur={value:!1},this._mirror={value:0},this._mixBlur={value:0},this._blurStrength={value:.5},this._minDepthThreshold={value:.9},this._maxDepthThreshold={value:1},this._depthScale={value:0},this._depthToBlurRatioBias={value:.25},this._distortion={value:1},this._mixContrast={value:1},this.setValues(e)}onBeforeCompile(e){var 
n;null!=(n=e.defines)&&n.USE_UV||(e.defines.USE_UV=""),e.uniforms.hasBlur=this._hasBlur,e.uniforms.tDiffuse=this._tDiffuse,e.uniforms.tDepth=this._tDepth,e.uniforms.distortionMap=this._distortionMap,e.uniforms.tDiffuseBlur=this._tDiffuseBlur,e.uniforms.textureMatrix=this._textureMatrix,e.uniforms.mirror=this._mirror,e.uniforms.mixBlur=this._mixBlur,e.uniforms.mixStrength=this._blurStrength,e.uniforms.minDepthThreshold=this._minDepthThreshold,e.uniforms.maxDepthThreshold=this._maxDepthThreshold,e.uniforms.depthScale=this._depthScale,e.uniforms.depthToBlurRatioBias=this._depthToBlurRatioBias,e.uniforms.distortion=this._distortion,e.uniforms.mixContrast=this._mixContrast,e.vertexShader=`\n uniform mat4 textureMatrix;\n varying vec4 my_vUv;\n ${e.vertexShader}`,e.vertexShader=e.vertexShader.replace("#include <project_vertex>","#include <project_vertex>\n my_vUv = textureMatrix * vec4( position, 1.0 );\n gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );"),e.fragmentShader=`\n uniform sampler2D tDiffuse;\n uniform sampler2D tDiffuseBlur;\n uniform sampler2D tDepth;\n uniform sampler2D distortionMap;\n uniform float distortion;\n uniform float cameraNear;\n\t\t\t uniform float cameraFar;\n uniform bool hasBlur;\n uniform float mixBlur;\n uniform float mirror;\n uniform float mixStrength;\n uniform float minDepthThreshold;\n uniform float maxDepthThreshold;\n uniform float mixContrast;\n uniform float depthScale;\n uniform float depthToBlurRatioBias;\n varying vec4 my_vUv;\n ${e.fragmentShader}`,e.fragmentShader=e.fragmentShader.replace("#include <emissivemap_fragment>","#include <emissivemap_fragment>\n\n float distortionFactor = 0.0;\n #ifdef USE_DISTORTION\n distortionFactor = texture2D(distortionMap, vUv).r * distortion;\n #endif\n\n vec4 new_vUv = my_vUv;\n new_vUv.x += distortionFactor;\n new_vUv.y += distortionFactor;\n\n vec4 base = texture2DProj(tDiffuse, new_vUv);\n vec4 blur = texture2DProj(tDiffuseBlur, new_vUv);\n\n vec4 merge = base;\n\n #ifdef USE_NORMALMAP\n vec2 normal_uv = vec2(0.0);\n vec4 normalColor = texture2D(normalMap, vUv * normalScale);\n vec3 my_normal = normalize( vec3( normalColor.r * 2.0 - 1.0, normalColor.b, normalColor.g * 2.0 - 1.0 ) );\n vec3 coord = new_vUv.xyz / new_vUv.w;\n normal_uv = coord.xy + coord.z * my_normal.xz * 0.05;\n vec4 base_normal = texture2D(tDiffuse, normal_uv);\n vec4 blur_normal = texture2D(tDiffuseBlur, normal_uv);\n merge = base_normal;\n blur = blur_normal;\n #endif\n\n float depthFactor = 0.0001;\n float blurFactor = 0.0;\n\n #ifdef USE_DEPTH\n vec4 depth = texture2DProj(tDepth, new_vUv);\n depthFactor = smoothstep(minDepthThreshold, maxDepthThreshold, 1.0-(depth.r * depth.a));\n depthFactor *= depthScale;\n depthFactor = max(0.0001, min(1.0, depthFactor));\n\n #ifdef USE_BLUR\n blur = blur * min(1.0, depthFactor + depthToBlurRatioBias);\n merge = merge * min(1.0, depthFactor + 0.5);\n #else\n merge = merge * depthFactor;\n #endif\n\n #endif\n\n float reflectorRoughnessFactor = roughness;\n #ifdef USE_ROUGHNESSMAP\n vec4 reflectorTexelRoughness = texture2D( roughnessMap, vUv );\n reflectorRoughnessFactor *= reflectorTexelRoughness.g;\n #endif\n\n #ifdef USE_BLUR\n blurFactor = min(1.0, mixBlur * reflectorRoughnessFactor);\n merge = mix(merge, blur, blurFactor);\n #endif\n\n vec4 newMerge = vec4(0.0, 0.0, 0.0, 1.0);\n newMerge.r = (merge.r - 0.5) * mixContrast + 0.5;\n newMerge.g = (merge.g - 0.5) * mixContrast + 0.5;\n newMerge.b = (merge.b - 0.5) * mixContrast + 0.5;\n\n diffuseColor.rgb = diffuseColor.rgb * ((1.0 
- min(1.0, mirror)) + newMerge.rgb * mixStrength);\n ")}get tDiffuse(){return this._tDiffuse.value}set tDiffuse(e){this._tDiffuse.value=e}get tDepth(){return this._tDepth.value}set tDepth(e){this._tDepth.value=e}get distortionMap(){return this._distortionMap.value}set distortionMap(e){this._distortionMap.value=e}get tDiffuseBlur(){return this._tDiffuseBlur.value}set tDiffuseBlur(e){this._tDiffuseBlur.value=e}get textureMatrix(){return this._textureMatrix.value}set textureMatrix(e){this._textureMatrix.value=e}get hasBlur(){return this._hasBlur.value}set hasBlur(e){this._hasBlur.value=e}get mirror(){return this._mirror.value}set mirror(e){this._mirror.value=e}get mixBlur(){return this._mixBlur.value}set mixBlur(e){this._mixBlur.value=e}get mixStrength(){return this._blurStrength.value}set mixStrength(e){this._blurStrength.value=e}get minDepthThreshold(){return this._minDepthThreshold.value}set minDepthThreshold(e){this._minDepthThreshold.value=e}get maxDepthThreshold(){return this._maxDepthThreshold.value}set maxDepthThreshold(e){this._maxDepthThreshold.value=e}get depthScale(){return this._depthScale.value}set depthScale(e){this._depthScale.value=e}get depthToBlurRatioBias(){return this._depthToBlurRatioBias.value}set depthToBlurRatioBias(e){this._depthToBlurRatioBias.value=e}get distortion(){return this._distortion.value}set distortion(e){this._distortion.value=e}get mixContrast(){return this._mixContrast.value}set mixContrast(e){this._mixContrast.value=e}}exports.BlurPass=class{constructor({gl:n,resolution:t,width:r=500,height:o=500,minDepthThreshold:i=0,maxDepthThreshold:a=1,depthScale:s=0,depthToBlurRatioBias:l=.25}){this.renderToScreen=!1,this.renderTargetA=new e.WebGLRenderTarget(t,t,{minFilter:e.LinearFilter,magFilter:e.LinearFilter,stencilBuffer:!1,depthBuffer:!1,type:e.HalfFloatType}),this.renderTargetB=this.renderTargetA.clone(),this.convolutionMaterial=new p,this.convolutionMaterial.setTexelSize(1/r,1/o),this.convolutionMaterial.setResolution(new e.Vector2(r,o)),this.scene=new e.Scene,this.camera=new e.Camera,this.convolutionMaterial.uniforms.minDepthThreshold.value=i,this.convolutionMaterial.uniforms.maxDepthThreshold.value=a,this.convolutionMaterial.uniforms.depthScale.value=s,this.convolutionMaterial.uniforms.depthToBlurRatioBias.value=l,this.convolutionMaterial.defines.USE_DEPTH=s>0;const c=new Float32Array([-1,-1,0,3,-1,0,-1,3,0]),u=new Float32Array([0,0,2,0,0,2]),m=new e.BufferGeometry;m.setAttribute("position",new e.BufferAttribute(c,3)),m.setAttribute("uv",new e.BufferAttribute(u,2)),this.screen=new e.Mesh(m,this.convolutionMaterial),this.screen.frustumCulled=!1,this.scene.add(this.screen)}render(e,n,t){const r=this.scene,o=this.camera,i=this.renderTargetA,a=this.renderTargetB,s=this.convolutionMaterial,l=s.uniforms;l.depthBuffer.value=n.depthTexture;const c=s.kernel;let u,m,d,f=n;for(m=0,d=c.length-1;m<d;++m)u=0==(1&m)?i:a,l.kernel.value=c[m],l.inputBuffer.value=f.texture,e.setRenderTarget(u),e.render(r,o),f=u;l.kernel.value=c[m],l.inputBuffer.value=f.texture,e.setRenderTarget(this.renderToScreen?null:t),e.render(r,o)}},exports.Caustics=(e,{frames:t=1,causticsOnly:o=!1,ior:m=1.1,backside:d=!1,backsideIOR:f=1.1,worldRadius:h=.3125,color:v=new r.Color("white"),intensity:p=.05,resolution:g=2024,lightSource:x=new r.Vector3(1,1,1),near:w=.1,far:S=0}={})=>{const _={frames:t,ior:m,color:v,causticsOnly:o,backside:d,backsideIOR:f,worldRadius:h,intensity:p,resolution:g,lightSource:x,near:w,far:S},y=new r.Group;y.name="caustics_group";const D=y,P=new r.OrthographicCamera,M=new 
r.Scene;M.name="caustics_scene";const T=e,C=new r.CameraHelper(P);C.name="caustics_helper";const b=_.resolution,B=i(b,b,c),F=i(b,b,c),R=i(b,b,u),U=i(b,b,u),z=a(),k=a(r.BackSide),I=new l,N=new n.FullScreenQuad(I),j=new r.Mesh(new r.PlaneGeometry(1,1),new s({transparent:!0,color:_.color,causticsTexture:R.texture,causticsTextureB:U.texture,blending:r.CustomBlending,blendSrc:r.OneFactor,blendDst:r.SrcAlphaFactor,depthWrite:!1}));j.name="caustics_plane",j.rotation.x=-Math.PI/2,j.renderOrder=2,y.add(M,j),y.updateWorldMatrix(!1,!0);let O=0;const A=new r.Vector3,E=new r.Frustum,V=new r.Matrix4,W=new r.Plane,G=new r.Vector3,L=new r.Vector3,$=new r.Box3,H=new r.Vector3,Z=[],q=[],K=[],X=[],Y=new r.Vector3;for(let e=0;e<8;e++)Z.push(new r.Vector3),q.push(new r.Vector3),K.push(new r.Vector3),X.push(new r.Vector3);return{scene:M,group:y,helper:C,params:_,update:()=>{if(_.frames===1/0||O++<_.frames){var e;(null==(n=x)?void 0:n.isVector3)?G.copy(x).normalize():G.copy(D.worldToLocal(x.getWorldPosition(A)).normalize()),L.copy(G).multiplyScalar(-1),null==(e=M.parent)||e.matrixWorld.identity(),$.setFromObject(M,!0),Z[0].set($.min.x,$.min.y,$.min.z),Z[1].set($.min.x,$.min.y,$.max.z),Z[2].set($.min.x,$.max.y,$.min.z),Z[3].set($.min.x,$.max.y,$.max.z),Z[4].set($.max.x,$.min.y,$.min.z),Z[5].set($.max.x,$.min.y,$.max.z),Z[6].set($.max.x,$.max.y,$.min.z),Z[7].set($.max.x,$.max.y,$.max.z);for(let e=0;e<8;e++)q[e].copy(Z[e]);$.getCenter(H),Z.map((e=>e.sub(H)));const t=W.set(L,0);Z.map(((e,n)=>t.projectPoint(e,K[n])));const r=K.reduce(((e,n)=>e.add(n)),A.set(0,0,0)).divideScalar(K.length),o=K.map((e=>e.distanceTo(r))).reduce(((e,n)=>Math.max(e,n))),i=Z.map((e=>e.dot(G))).reduce(((e,n)=>Math.max(e,n)));P.position.copy(Y.copy(G).multiplyScalar(i).add(H)),P.lookAt(M.localToWorld(H));const a=V.lookAt(P.position,H,A.set(0,1,0));if(P.left=-o,P.right=o,P.top=o,P.bottom=-o,P.near=_.near,_.far)P.far=_.far;else{const e=A.set(0,o,0).applyMatrix4(a),n=(P.position.y+e.y)/G.y;P.far=n}P.updateProjectionMatrix(),P.updateMatrixWorld();const s=q.map(((e,n)=>e.add(X[n].copy(G).multiplyScalar(-e.y/G.y)))),l=s.reduce(((e,n)=>e.add(n)),A.set(0,0,0)).divideScalar(s.length),c=2*s.map((e=>Math.hypot(e.x-l.x,e.z-l.z))).reduce(((e,n)=>Math.max(e,n)));j.scale.setScalar(c),j.position.copy(l),C.parent&&C.update(),k.viewMatrix.value=z.viewMatrix.value=P.matrixWorldInverse;const u=E.setFromProjectionMatrix(V.multiplyMatrices(P.projectionMatrix,P.matrixWorldInverse)).planes[4];I.cameraMatrixWorld=P.matrixWorld,I.cameraProjectionMatrixInv=P.projectionMatrixInverse,I.lightDir=L,I.lightPlaneNormal=u.normal,I.lightPlaneConstant=u.constant,I.near=P.near,I.far=P.far,I.resolution=_.resolution,I.size=o,I.intensity=_.intensity,I.worldRadius=_.worldRadius,M.visible=!0,T.setRenderTarget(B),T.clear(),M.overrideMaterial=z,T.render(M,P),T.setRenderTarget(F),T.clear(),_.backside&&(M.overrideMaterial=k,T.render(M,P)),M.overrideMaterial=null,I.ior=_.ior,j.material.lightProjMatrix=P.projectionMatrix,j.material.lightViewMatrix=P.matrixWorldInverse,I.normalTexture=B.texture,I.depthTexture=B.depthTexture,T.setRenderTarget(R),T.clear(),N.render(T),I.ior=_.backsideIOR,I.normalTexture=F.texture,I.depthTexture=F.depthTexture,T.setRenderTarget(U),T.clear(),_.backside&&N.render(T),T.setRenderTarget(null),_.causticsOnly&&(M.visible=!1)}var n},normalTarget:B,normalTargetB:F,causticsTarget:R,causticsTargetB:U}},exports.ConvolutionMaterial=p,exports.Grid=({args:e=[1,1],cellColor:n=new r.Color("#000000"),sectionColor:t=new 
r.Color("#2080ff"),cellSize:o=.5,sectionSize:i=1,followCamera:a=!1,infiniteGrid:s=!1,fadeDistance:l=100,fadeStrength:c=1,cellThickness:u=.5,sectionThickness:m=1,side:d=r.BackSide}={})=>{const h=new f({transparent:!0,side:d,...{cellSize:o,sectionSize:i,cellColor:n,sectionColor:t,cellThickness:u,sectionThickness:m},...{fadeDistance:l,fadeStrength:c,infiniteGrid:s,followCamera:a}}),v=new r.PlaneGeometry(e[0],e[1]),p=new r.Mesh(v,h);p.frustumCulled=!1;const g=new r.Plane,x=new r.Vector3(0,1,0),w=new r.Vector3(0,0,0);return{mesh:p,update:e=>{if(!p.parent)return;g.setFromNormalAndCoplanarPoint(x,w).applyMatrix4(p.matrixWorld);const n=p.material,t=n.uniforms.worldCamProjPosition,r=n.uniforms.worldPlanePosition;g.projectPoint(e.position,t.value),r.value.set(0,0,0).applyMatrix4(p.matrixWorld)}}},exports.MeshDiscardMaterial=m,exports.MeshReflectorMaterial=g,exports.MeshTransmissionMaterial=h,exports.ProgressiveLightMap=class{constructor(e,n,t=1024){this.renderer=e,this.res=t,this.scene=n,this.buffer1Active=!1,this.lights=[],this.meshes=[],this.object=null,this.clearColor=new r.Color,this.clearAlpha=0;const o=/(Android|iPad|iPhone|iPod)/g.test(navigator.userAgent)?r.HalfFloatType:r.FloatType;this.progressiveLightMap1=new r.WebGLRenderTarget(this.res,this.res,{type:o}),this.progressiveLightMap2=new r.WebGLRenderTarget(this.res,this.res,{type:o}),this.discardMat=new m,this.targetMat=new r.MeshLambertMaterial({fog:!1}),this.previousShadowMap={value:this.progressiveLightMap1.texture},this.averagingWindow={value:100},this.targetMat.onBeforeCompile=e=>{e.vertexShader="varying vec2 vUv;\n"+e.vertexShader.slice(0,-1)+"vUv = uv; gl_Position = vec4((uv - 0.5) * 2.0, 1.0, 1.0); }";const n=e.fragmentShader.indexOf("void main() {");e.fragmentShader="varying vec2 vUv;\n"+e.fragmentShader.slice(0,n)+"uniform sampler2D previousShadowMap;\n\tuniform float averagingWindow;\n"+e.fragmentShader.slice(n-1,-1)+"\nvec3 texelOld = texture2D(previousShadowMap, vUv).rgb;\n gl_FragColor.rgb = mix(texelOld, gl_FragColor.rgb, 1.0/ averagingWindow);\n }",e.uniforms.previousShadowMap=this.previousShadowMap,e.uniforms.averagingWindow=this.averagingWindow}}clear(){this.renderer.getClearColor(this.clearColor),this.clearAlpha=this.renderer.getClearAlpha(),this.renderer.setClearColor("black",1),this.renderer.setRenderTarget(this.progressiveLightMap1),this.renderer.clear(),this.renderer.setRenderTarget(this.progressiveLightMap2),this.renderer.clear(),this.renderer.setRenderTarget(null),this.renderer.setClearColor(this.clearColor,this.clearAlpha),this.lights=[],this.meshes=[],this.scene.traverse((e=>{!function(e){return!!e.geometry}(e)?function(e){return e.isLight}(e)&&this.lights.push({object:e,intensity:e.intensity}):this.meshes.push({object:e,material:e.material})}))}prepare(){this.lights.forEach((e=>e.object.intensity=0)),this.meshes.forEach((e=>e.object.material=this.discardMat))}finish(){this.lights.forEach((e=>e.object.intensity=e.intensity)),this.meshes.forEach((e=>e.object.material=e.material))}configure(e){this.object=e}update(e,n=100){if(!this.object)return;this.averagingWindow.value=n,this.object.material=this.targetMat;const 
t=this.buffer1Active?this.progressiveLightMap1:this.progressiveLightMap2,r=this.buffer1Active?this.progressiveLightMap2:this.progressiveLightMap1,o=this.scene.background;this.scene.background=null,this.renderer.setRenderTarget(t),this.previousShadowMap.value=r.texture,this.buffer1Active=!this.buffer1Active,this.renderer.render(this.scene,e),this.renderer.setRenderTarget(null),this.scene.background=o}},exports.SoftShadowMaterial=d,exports.SpotLightMaterial=v,exports.pcss=({focus:e=0,size:n=25,samples:t=10}={})=>{const o=r.ShaderChunk.shadowmap_pars_fragment;return r.ShaderChunk.shadowmap_pars_fragment=r.ShaderChunk.shadowmap_pars_fragment.replace("#ifdef USE_SHADOWMAP",`#ifdef USE_SHADOWMAP\n\n #define PENUMBRA_FILTER_SIZE float(${n})\n #define RGB_NOISE_FUNCTION(uv) (randRGB(uv))\n vec3 randRGB(vec2 uv) {\n return vec3(\n fract(sin(dot(uv, vec2(12.75613, 38.12123))) * 13234.76575),\n fract(sin(dot(uv, vec2(19.45531, 58.46547))) * 43678.23431),\n fract(sin(dot(uv, vec2(23.67817, 78.23121))) * 93567.23423)\n );\n }\n \n vec3 lowPassRandRGB(vec2 uv) {\n // 3x3 convolution (average)\n // can be implemented as separable with an extra buffer for a total of 6 samples instead of 9\n vec3 result = vec3(0);\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, +1.0));\n result *= 0.111111111; // 1.0 / 9.0\n return result;\n }\n vec3 highPassRandRGB(vec2 uv) {\n // by subtracting the low-pass signal from the original signal, we're being left with the high-pass signal\n // hp(x) = x - lp(x)\n return RGB_NOISE_FUNCTION(uv) - lowPassRandRGB(uv) + 0.5;\n }\n \n \n vec2 vogelDiskSample(int sampleIndex, int sampleCount, float angle) {\n const float goldenAngle = 2.399963f; // radians\n float r = sqrt(float(sampleIndex) + 0.5f) / sqrt(float(sampleCount));\n float theta = float(sampleIndex) * goldenAngle + angle;\n float sine = sin(theta);\n float cosine = cos(theta);\n return vec2(cosine, sine) * r;\n }\n float penumbraSize( const in float zReceiver, const in float zBlocker ) { // Parallel plane estimation\n return (zReceiver - zBlocker) / zBlocker;\n }\n float findBlocker(sampler2D shadowMap, vec2 uv, float compare, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float blockerDepthSum = float(${e});\n float blockers = 0.0;\n \n int j = 0;\n vec2 offset = vec2(0.);\n float depth = 0.;\n \n #pragma unroll_loop_start\n for(int i = 0; i < ${t}; i ++) {\n offset = (vogelDiskSample(j, ${t}, angle) * texelSize) * 2.0 * PENUMBRA_FILTER_SIZE;\n depth = unpackRGBAToDepth( texture2D( shadowMap, uv + offset));\n if (depth < compare) {\n blockerDepthSum += depth;\n blockers++;\n }\n j++;\n }\n #pragma unroll_loop_end\n \n if (blockers > 0.0) {\n return blockerDepthSum / blockers;\n }\n return -1.0;\n }\n \n float vogelFilter(sampler2D shadowMap, vec2 uv, float zReceiver, float filterRadius, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float shadow = 0.0f;\n int j = 0;\n vec2 vogelSample = vec2(0.0);\n vec2 offset = vec2(0.0);\n #pragma unroll_loop_start\n for (int i = 0; i < ${t}; i++) {\n vogelSample = vogelDiskSample(j, ${t}, 
angle) * texelSize;\n offset = vogelSample * (1.0 + filterRadius * float(${n}));\n shadow += step( zReceiver, unpackRGBAToDepth( texture2D( shadowMap, uv + offset ) ) );\n j++;\n }\n #pragma unroll_loop_end\n return shadow * 1.0 / ${t}.0;\n }\n \n float PCSS (sampler2D shadowMap, vec4 coords) {\n vec2 uv = coords.xy;\n float zReceiver = coords.z; // Assumed to be eye-space z in this code\n float angle = highPassRandRGB(gl_FragCoord.xy).r * PI2;\n float avgBlockerDepth = findBlocker(shadowMap, uv, zReceiver, angle);\n if (avgBlockerDepth == -1.0) {\n return 1.0;\n }\n float penumbraRatio = penumbraSize(zReceiver, avgBlockerDepth);\n return vogelFilter(shadowMap, uv, zReceiver, 1.25 * penumbraRatio, angle);\n }`).replace("#if defined( SHADOWMAP_TYPE_PCF )","\nreturn PCSS(shadowMap, shadowCoord);\n#if defined( SHADOWMAP_TYPE_PCF )"),(e,n,t)=>{r.ShaderChunk.shadowmap_pars_fragment=o,function(e,n,t){n.traverse((n=>{n.material&&(e.properties.remove(n.material),n.material.dispose())})),e.info.programs.length=0,e.compile(n,t)}(e,n,t)}},exports.shaderMaterial=o,exports.useFBO=i;
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("three"),n=require("three/examples/jsm/postprocessing/Pass"),t=require("three-stdlib"),r=require("lodash-es");function o(e){if(e&&e.__esModule)return e;var n=Object.create(null);return e&&Object.keys(e).forEach((function(t){if("default"!==t){var r=Object.getOwnPropertyDescriptor(e,t);Object.defineProperty(n,t,r.get?r:{enumerable:!0,get:function(){return e[t]}})}})),n.default=e,Object.freeze(n)}var i=o(e);function a(e,n,t,r){const o=Object.entries(e),a=Object.fromEntries(o.map((([e,n])=>[e,{value:n}])));class s extends i.ShaderMaterial{constructor(e){super({...e,uniforms:a,vertexShader:n,fragmentShader:t});for(const[e]of o)Object.defineProperty(this,e,{get:()=>this.uniforms[e].value,set:n=>this.uniforms[e].value=n});Object.assign(this,e),null==r||r(this)}}return s.key=i.MathUtils.generateUUID(),s}function s(e=1024,n=1024,t={samples:0,depth:!1}){var r=e,o=n,a=t,s=a.samples||0,l=a.depth,c=Object.assign({},a);delete c.samples,delete c.depth;var u=new i.WebGLRenderTarget(r,o,Object.assign({minFilter:i.LinearFilter,magFilter:i.LinearFilter,type:i.HalfFloatType},c));return l&&(u.depthTexture=new i.DepthTexture(r,o,i.FloatType)),u.samples=s,u}function l(e=i.FrontSide){const n={value:new i.Matrix4};return Object.assign(new i.MeshNormalMaterial({side:e}),{viewMatrix:n,onBeforeCompile:e=>{e.uniforms.viewMatrix=n,e.fragmentShader="vec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {\n return normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );\n }\n"+e.fragmentShader.replace("#include <normal_fragment_maps>","#include <normal_fragment_maps>\n normal = inverseTransformDirection( normal, viewMatrix );\n")}})}const c=a({causticsTexture:null,causticsTextureB:null,color:new i.Color,lightProjMatrix:new i.Matrix4,lightViewMatrix:new i.Matrix4},"varying vec3 vWorldPosition; \n void main() {\n gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(position, 1.);\n vec4 worldPosition = modelMatrix * vec4(position, 1.);\n vWorldPosition = worldPosition.xyz;\n }","varying vec3 vWorldPosition;\n uniform vec3 color;\n uniform sampler2D causticsTexture; \n uniform sampler2D causticsTextureB; \n uniform mat4 lightProjMatrix;\n uniform mat4 lightViewMatrix;\n void main() {\n // Apply caustics \n vec4 lightSpacePos = lightProjMatrix * lightViewMatrix * vec4(vWorldPosition, 1.0);\n lightSpacePos.xyz /= lightSpacePos.w;\n lightSpacePos.xyz = lightSpacePos.xyz * 0.5 + 0.5; \n vec3 front = texture2D(causticsTexture, lightSpacePos.xy).rgb;\n vec3 back = texture2D(causticsTextureB, lightSpacePos.xy).rgb;\n gl_FragColor = vec4((front + back) * color, 1.0);\n #include <tonemapping_fragment>\n #include <encodings_fragment>\n }"),u=a({cameraMatrixWorld:new i.Matrix4,cameraProjectionMatrixInv:new i.Matrix4,normalTexture:null,depthTexture:null,lightDir:new i.Vector3(0,1,0),lightPlaneNormal:new i.Vector3(0,1,0),lightPlaneConstant:0,near:.1,far:100,modelMatrix:new i.Matrix4,worldRadius:1/40,ior:1.1,bounces:0,resolution:1024,size:10,intensity:.5},"\n varying vec2 vUv;\n void main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n }"," \n uniform mat4 cameraMatrixWorld;\n uniform mat4 cameraProjectionMatrixInv;\n uniform vec3 lightDir;\n uniform vec3 lightPlaneNormal;\n uniform float lightPlaneConstant;\n uniform float near;\n uniform float far;\n uniform float time;\n uniform float worldRadius;\n uniform float resolution;\n uniform float size;\n uniform float intensity;\n uniform float ior;\n 
precision highp isampler2D;\n precision highp usampler2D;\n uniform sampler2D normalTexture;\n uniform sampler2D depthTexture;\n uniform float bounces;\n varying vec2 vUv;\n vec3 WorldPosFromDepth(float depth, vec2 coord) {\n float z = depth * 2.0 - 1.0;\n vec4 clipSpacePosition = vec4(coord * 2.0 - 1.0, z, 1.0);\n vec4 viewSpacePosition = cameraProjectionMatrixInv * clipSpacePosition;\n // Perspective division\n viewSpacePosition /= viewSpacePosition.w;\n vec4 worldSpacePosition = cameraMatrixWorld * viewSpacePosition;\n return worldSpacePosition.xyz;\n } \n float sdPlane( vec3 p, vec3 n, float h ) {\n // n must be normalized\n return dot(p,n) + h;\n }\n float planeIntersect( vec3 ro, vec3 rd, vec4 p ) {\n return -(dot(ro,p.xyz)+p.w)/dot(rd,p.xyz);\n }\n vec3 totalInternalReflection(vec3 ro, vec3 rd, vec3 pos, vec3 normal, float ior, out vec3 rayOrigin, out vec3 rayDirection) {\n rayOrigin = ro;\n rayDirection = rd;\n rayDirection = refract(rayDirection, normal, 1.0 / ior);\n rayOrigin = pos + rayDirection * 0.1;\n return rayDirection;\n }\n void main() {\n // Each sample consists of random offset in the x and y direction\n float caustic = 0.0;\n float causticTexelSize = (1.0 / resolution) * size * 2.0;\n float texelsNeeded = worldRadius / causticTexelSize;\n float sampleRadius = texelsNeeded / resolution;\n float sum = 0.0;\n if (texture2D(depthTexture, vUv).x == 1.0) {\n gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);\n return;\n }\n vec2 offset1 = vec2(-0.5, -0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 offset2 = vec2(-0.5, 0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 offset3 = vec2(0.5, 0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 offset4 = vec2(0.5, -0.5);//vec2(rand() - 0.5, rand() - 0.5);\n vec2 uv1 = vUv + offset1 * sampleRadius;\n vec2 uv2 = vUv + offset2 * sampleRadius;\n vec2 uv3 = vUv + offset3 * sampleRadius;\n vec2 uv4 = vUv + offset4 * sampleRadius;\n vec3 normal1 = texture2D(normalTexture, uv1, -10.0).rgb * 2.0 - 1.0;\n vec3 normal2 = texture2D(normalTexture, uv2, -10.0).rgb * 2.0 - 1.0;\n vec3 normal3 = texture2D(normalTexture, uv3, -10.0).rgb * 2.0 - 1.0;\n vec3 normal4 = texture2D(normalTexture, uv4, -10.0).rgb * 2.0 - 1.0;\n float depth1 = texture2D(depthTexture, uv1, -10.0).x;\n float depth2 = texture2D(depthTexture, uv2, -10.0).x;\n float depth3 = texture2D(depthTexture, uv3, -10.0).x;\n float depth4 = texture2D(depthTexture, uv4, -10.0).x;\n // Sanity check the depths\n if (depth1 == 1.0 || depth2 == 1.0 || depth3 == 1.0 || depth4 == 1.0) {\n gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);\n return;\n }\n vec3 pos1 = WorldPosFromDepth(depth1, uv1);\n vec3 pos2 = WorldPosFromDepth(depth2, uv2);\n vec3 pos3 = WorldPosFromDepth(depth3, uv3);\n vec3 pos4 = WorldPosFromDepth(depth4, uv4);\n vec3 originPos1 = WorldPosFromDepth(0.0, uv1);\n vec3 originPos2 = WorldPosFromDepth(0.0, uv2);\n vec3 originPos3 = WorldPosFromDepth(0.0, uv3);\n vec3 originPos4 = WorldPosFromDepth(0.0, uv4);\n vec3 endPos1, endPos2, endPos3, endPos4;\n vec3 endDir1, endDir2, endDir3, endDir4;\n totalInternalReflection(originPos1, lightDir, pos1, normal1, ior, endPos1, endDir1);\n totalInternalReflection(originPos2, lightDir, pos2, normal2, ior, endPos2, endDir2);\n totalInternalReflection(originPos3, lightDir, pos3, normal3, ior, endPos3, endDir3);\n totalInternalReflection(originPos4, lightDir, pos4, normal4, ior, endPos4, endDir4);\n float lightPosArea = length(cross(originPos2 - originPos1, originPos3 - originPos1)) + length(cross(originPos3 - originPos1, originPos4 - originPos1));\n float t1 = 
planeIntersect(endPos1, endDir1, vec4(lightPlaneNormal, lightPlaneConstant));\n float t2 = planeIntersect(endPos2, endDir2, vec4(lightPlaneNormal, lightPlaneConstant));\n float t3 = planeIntersect(endPos3, endDir3, vec4(lightPlaneNormal, lightPlaneConstant));\n float t4 = planeIntersect(endPos4, endDir4, vec4(lightPlaneNormal, lightPlaneConstant));\n vec3 finalPos1 = endPos1 + endDir1 * t1;\n vec3 finalPos2 = endPos2 + endDir2 * t2;\n vec3 finalPos3 = endPos3 + endDir3 * t3;\n vec3 finalPos4 = endPos4 + endDir4 * t4;\n float finalArea = length(cross(finalPos2 - finalPos1, finalPos3 - finalPos1)) + length(cross(finalPos3 - finalPos1, finalPos4 - finalPos1));\n caustic += intensity * (lightPosArea / finalArea);\n // Calculate the area of the triangle in light spaces\n gl_FragColor = vec4(vec3(max(caustic, 0.0)), 1.0);\n }"),m={depth:!0,minFilter:i.LinearFilter,magFilter:i.LinearFilter,type:i.UnsignedByteType},d={minFilter:i.LinearMipmapLinearFilter,magFilter:i.LinearFilter,type:i.FloatType,generateMipmaps:!0},f=a({},"void main() { }","void main() { gl_FragColor = vec4(0.0, 0.0, 0.0, 0.0); discard; }");const v=a({color:new i.Color(0),blend:2,alphaTest:.75,opacity:0,map:null},"varying vec2 vUv;\n void main() {\n gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(position, 1.);\n vUv = uv;\n }","varying vec2 vUv;\n uniform sampler2D map;\n uniform vec3 color;\n uniform float opacity;\n uniform float alphaTest;\n uniform float blend;\n void main() {\n vec4 sampledDiffuseColor = texture2D(map, vUv);\n gl_FragColor = vec4(color * sampledDiffuseColor.r * blend, max(0.0, (1.0 - (sampledDiffuseColor.r + sampledDiffuseColor.g + sampledDiffuseColor.b) / alphaTest)) * opacity);\n #include <tonemapping_fragment>\n #include <encodings_fragment>\n }");const h=a({cellSize:.5,sectionSize:1,fadeDistance:100,fadeStrength:1,cellThickness:.5,sectionThickness:1,cellColor:new i.Color,sectionColor:new i.Color,infiniteGrid:!1,followCamera:!1,worldCamProjPosition:new i.Vector3,worldPlanePosition:new i.Vector3},"\n varying vec3 localPosition;\n varying vec4 worldPosition;\n \n uniform vec3 worldCamProjPosition;\n uniform vec3 worldPlanePosition;\n uniform float fadeDistance;\n uniform bool infiniteGrid;\n uniform bool followCamera;\n \n void main() {\n localPosition = position.xzy;\n if (infiniteGrid) localPosition *= 1.0 + fadeDistance;\n \n worldPosition = modelMatrix * vec4(localPosition, 1.0);\n if (followCamera) {\n worldPosition.xyz += (worldCamProjPosition - worldPlanePosition);\n localPosition = (inverse(modelMatrix) * worldPosition).xyz;\n }\n \n gl_Position = projectionMatrix * viewMatrix * worldPosition;\n }\n ",`\n varying vec3 localPosition;\n varying vec4 worldPosition;\n \n uniform vec3 worldCamProjPosition;\n uniform float cellSize;\n uniform float sectionSize;\n uniform vec3 cellColor;\n uniform vec3 sectionColor;\n uniform float fadeDistance;\n uniform float fadeStrength;\n uniform float cellThickness;\n uniform float sectionThickness;\n \n float getGrid(float size, float thickness) {\n vec2 r = localPosition.xz / size;\n vec2 grid = abs(fract(r - 0.5) - 0.5) / fwidth(r);\n float line = min(grid.x, grid.y) + 1.0 - thickness;\n return 1.0 - min(line, 1.0);\n }\n \n void main() {\n float g1 = getGrid(cellSize, cellThickness);\n float g2 = getGrid(sectionSize, sectionThickness);\n \n float dist = distance(worldCamProjPosition, worldPosition.xyz);\n float d = 1.0 - min(dist / fadeDistance, 1.0);\n vec3 color = mix(cellColor, sectionColor, min(1.0, sectionThickness * g2));\n \n gl_FragColor 
= vec4(color, (g1 + g2) * pow(d, fadeStrength));\n gl_FragColor.a = mix(0.75 * gl_FragColor.a, gl_FragColor.a, g2);\n if (gl_FragColor.a <= 0.0) discard;\n \n #include <tonemapping_fragment>\n #include <${parseInt(i.REVISION.replace(/\D+/g,""))>=154?"colorspace_fragment":"encodings_fragment"}>\n }\n `),p=a({color:new i.Color("black"),opacity:1,thickness:.05},"\n #include <common>\n #include <morphtarget_pars_vertex>\n #include <skinning_pars_vertex>\n uniform float thickness;\n void main() {\n #if defined (USE_SKINNING)\n\t #include <beginnormal_vertex>\n #include <morphnormal_vertex>\n #include <skinbase_vertex>\n #include <skinnormal_vertex>\n #include <defaultnormal_vertex>\n #endif\n #include <begin_vertex>\n\t #include <morphtarget_vertex>\n\t #include <skinning_vertex>\n #include <project_vertex>\n vec4 transformedNormal = vec4(normal, 0.0);\n vec4 transformedPosition = vec4(transformed, 1.0);\n #ifdef USE_INSTANCING\n transformedNormal = instanceMatrix * transformedNormal;\n transformedPosition = instanceMatrix * transformedPosition;\n #endif\n vec3 newPosition = transformedPosition.xyz + transformedNormal.xyz * thickness;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(newPosition, 1.0); \n }",`\n uniform vec3 color;\n uniform float opacity;\n void main(){\n gl_FragColor = vec4(color, opacity);\n #include <tonemapping_fragment>\n #include <${parseInt(i.REVISION.replace(/\D+/g,""))>=154?"colorspace_fragment":"encodings_fragment"}>\n }`);class g extends i.MeshPhysicalMaterial{constructor({samples:e=6,transmissionSampler:n=!1,chromaticAberration:t=.05,transmission:r=0,_transmission:o=1,transmissionMap:a=null,roughness:s=0,thickness:l=0,thicknessMap:c=null,attenuationDistance:u=1/0,attenuationColor:m=new i.Color("white"),anisotropicBlur:d=.1,time:f=0,distortion:v=0,distortionScale:h=.5,temporalDistortion:p=0,buffer:g=null}={}){super(),this.uniforms={chromaticAberration:{value:t},transmission:{value:r},_transmission:{value:o},transmissionMap:{value:a},roughness:{value:s},thickness:{value:l},thicknessMap:{value:c},attenuationDistance:{value:u},attenuationColor:{value:m},anisotropicBlur:{value:d},time:{value:f},distortion:{value:v},distortionScale:{value:h},temporalDistortion:{value:p},buffer:{value:g}},this.onBeforeCompile=t=>{t.uniforms={...t.uniforms,...this.uniforms},n?t.defines.USE_SAMPLER="":t.defines.USE_TRANSMISSION="",t.fragmentShader="\n uniform float chromaticAberration; \n uniform float anisotropicBlur; \n uniform float time;\n uniform float distortion;\n uniform float distortionScale;\n uniform float temporalDistortion;\n uniform sampler2D buffer;\n\n vec3 random3(vec3 c) {\n float j = 4096.0*sin(dot(c,vec3(17.0, 59.4, 15.0)));\n vec3 r;\n r.z = fract(512.0*j);\n j *= .125;\n r.x = fract(512.0*j);\n j *= .125;\n r.y = fract(512.0*j);\n return r-0.5;\n }\n\n float seed = 0.0;\n uint hash( uint x ) {\n x += ( x << 10u );\n x ^= ( x >> 6u );\n x += ( x << 3u );\n x ^= ( x >> 11u );\n x += ( x << 15u );\n return x;\n }\n\n // Compound versions of the hashing algorithm I whipped together.\n uint hash( uvec2 v ) { return hash( v.x ^ hash(v.y) ); }\n uint hash( uvec3 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ); }\n uint hash( uvec4 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ^ hash(v.w) ); }\n\n // Construct a float with half-open range [0:1] using low 23 bits.\n // All zeroes yields 0.0, all ones yields the next smallest representable value below 1.0.\n float floatConstruct( uint m ) {\n const uint ieeeMantissa = 0x007FFFFFu; // binary32 mantissa bitmask\n const 
uint ieeeOne = 0x3F800000u; // 1.0 in IEEE binary32\n m &= ieeeMantissa; // Keep only mantissa bits (fractional part)\n m |= ieeeOne; // Add fractional part to 1.0\n float f = uintBitsToFloat( m ); // Range [1:2]\n return f - 1.0; // Range [0:1]\n }\n\n // Pseudo-random value in half-open range [0:1].\n float random( float x ) { return floatConstruct(hash(floatBitsToUint(x))); }\n float random( vec2 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n float random( vec3 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n float random( vec4 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n\n float rand() {\n float result = random(vec3(gl_FragCoord.xy, seed));\n seed += 1.0;\n return result;\n }\n\n const float F3 = 0.3333333;\n const float G3 = 0.1666667;\n\n float snoise(vec3 p) {\n vec3 s = floor(p + dot(p, vec3(F3)));\n vec3 x = p - s + dot(s, vec3(G3));\n vec3 e = step(vec3(0.0), x - x.yzx);\n vec3 i1 = e*(1.0 - e.zxy);\n vec3 i2 = 1.0 - e.zxy*(1.0 - e);\n vec3 x1 = x - i1 + G3;\n vec3 x2 = x - i2 + 2.0*G3;\n vec3 x3 = x - 1.0 + 3.0*G3;\n vec4 w, d;\n w.x = dot(x, x);\n w.y = dot(x1, x1);\n w.z = dot(x2, x2);\n w.w = dot(x3, x3);\n w = max(0.6 - w, 0.0);\n d.x = dot(random3(s), x);\n d.y = dot(random3(s + i1), x1);\n d.z = dot(random3(s + i2), x2);\n d.w = dot(random3(s + 1.0), x3);\n w *= w;\n w *= w;\n d *= w;\n return dot(d, vec4(52.0));\n }\n\n float snoiseFractal(vec3 m) {\n return 0.5333333* snoise(m)\n +0.2666667* snoise(2.0*m)\n +0.1333333* snoise(4.0*m)\n +0.0666667* snoise(8.0*m);\n }\n"+t.fragmentShader,t.fragmentShader=t.fragmentShader.replace("#include <transmission_pars_fragment>","\n #ifdef USE_TRANSMISSION\n // Transmission code is based on glTF-Sampler-Viewer\n // https://github.com/KhronosGroup/glTF-Sample-Viewer\n uniform float _transmission;\n uniform float thickness;\n uniform float attenuationDistance;\n uniform vec3 attenuationColor;\n #ifdef USE_TRANSMISSIONMAP\n uniform sampler2D transmissionMap;\n #endif\n #ifdef USE_THICKNESSMAP\n uniform sampler2D thicknessMap;\n #endif\n uniform vec2 transmissionSamplerSize;\n uniform sampler2D transmissionSamplerMap;\n uniform mat4 modelMatrix;\n uniform mat4 projectionMatrix;\n varying vec3 vWorldPosition;\n vec3 getVolumeTransmissionRay( const in vec3 n, const in vec3 v, const in float thickness, const in float ior, const in mat4 modelMatrix ) {\n // Direction of refracted light.\n vec3 refractionVector = refract( - v, normalize( n ), 1.0 / ior );\n // Compute rotation-independant scaling of the model matrix.\n vec3 modelScale;\n modelScale.x = length( vec3( modelMatrix[ 0 ].xyz ) );\n modelScale.y = length( vec3( modelMatrix[ 1 ].xyz ) );\n modelScale.z = length( vec3( modelMatrix[ 2 ].xyz ) );\n // The thickness is specified in local space.\n return normalize( refractionVector ) * thickness * modelScale;\n }\n float applyIorToRoughness( const in float roughness, const in float ior ) {\n // Scale roughness with IOR so that an IOR of 1.0 results in no microfacet refraction and\n // an IOR of 1.5 results in the default amount of microfacet refraction.\n return roughness * clamp( ior * 2.0 - 2.0, 0.0, 1.0 );\n }\n vec4 getTransmissionSample( const in vec2 fragCoord, const in float roughness, const in float ior ) {\n float framebufferLod = log2( transmissionSamplerSize.x ) * applyIorToRoughness( roughness, ior ); \n #ifdef USE_SAMPLER\n #ifdef texture2DLodEXT\n return texture2DLodEXT(transmissionSamplerMap, fragCoord.xy, framebufferLod);\n #else\n return texture2D(transmissionSamplerMap, fragCoord.xy, 
framebufferLod);\n #endif\n #else\n return texture2D(buffer, fragCoord.xy);\n #endif\n }\n vec3 applyVolumeAttenuation( const in vec3 radiance, const in float transmissionDistance, const in vec3 attenuationColor, const in float attenuationDistance ) {\n if ( isinf( attenuationDistance ) ) {\n // Attenuation distance is +∞, i.e. the transmitted color is not attenuated at all.\n return radiance;\n } else {\n // Compute light attenuation using Beer's law.\n vec3 attenuationCoefficient = -log( attenuationColor ) / attenuationDistance;\n vec3 transmittance = exp( - attenuationCoefficient * transmissionDistance ); // Beer's law\n return transmittance * radiance;\n }\n }\n vec4 getIBLVolumeRefraction( const in vec3 n, const in vec3 v, const in float roughness, const in vec3 diffuseColor,\n const in vec3 specularColor, const in float specularF90, const in vec3 position, const in mat4 modelMatrix,\n const in mat4 viewMatrix, const in mat4 projMatrix, const in float ior, const in float thickness,\n const in vec3 attenuationColor, const in float attenuationDistance ) {\n vec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, ior, modelMatrix );\n vec3 refractedRayExit = position + transmissionRay;\n // Project refracted vector on the framebuffer, while mapping to normalized device coordinates.\n vec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );\n vec2 refractionCoords = ndcPos.xy / ndcPos.w;\n refractionCoords += 1.0;\n refractionCoords /= 2.0;\n // Sample framebuffer to get pixel the refracted ray hits.\n vec4 transmittedLight = getTransmissionSample( refractionCoords, roughness, ior );\n vec3 attenuatedColor = applyVolumeAttenuation( transmittedLight.rgb, length( transmissionRay ), attenuationColor, attenuationDistance );\n // Get the specular component.\n vec3 F = EnvironmentBRDF( n, v, specularColor, specularF90, roughness );\n return vec4( ( 1.0 - F ) * attenuatedColor * diffuseColor, transmittedLight.a );\n }\n #endif\n"),t.fragmentShader=t.fragmentShader.replace("#include <transmission_fragment>",` \n // Improve the refraction to use the world pos\n material.transmission = _transmission;\n material.transmissionAlpha = 1.0;\n material.thickness = thickness;\n material.attenuationDistance = attenuationDistance;\n material.attenuationColor = attenuationColor;\n #ifdef USE_TRANSMISSIONMAP\n material.transmission *= texture2D( transmissionMap, vUv ).r;\n #endif\n #ifdef USE_THICKNESSMAP\n material.thickness *= texture2D( thicknessMap, vUv ).g;\n #endif\n \n vec3 pos = vWorldPosition;\n vec3 v = normalize( cameraPosition - pos );\n vec3 n = inverseTransformDirection( normal, viewMatrix );\n vec3 transmission = vec3(0.0);\n float transmissionR, transmissionB, transmissionG;\n float randomCoords = rand();\n float thickness_smear = thickness * max(pow(roughnessFactor, 0.33), anisotropicBlur);\n vec3 distortionNormal = vec3(0.0);\n vec3 temporalOffset = vec3(time, -time, -time) * temporalDistortion;\n if (distortion > 0.0) {\n distortionNormal = distortion * vec3(snoiseFractal(vec3((pos * distortionScale + temporalOffset))), snoiseFractal(vec3(pos.zxy * distortionScale - temporalOffset)), snoiseFractal(vec3(pos.yxz * distortionScale + temporalOffset)));\n }\n for (float i = 0.0; i < ${e}.0; i ++) {\n vec3 sampleNorm = normalize(n + roughnessFactor * roughnessFactor * 2.0 * normalize(vec3(rand() - 0.5, rand() - 0.5, rand() - 0.5)) * pow(rand(), 0.33) + distortionNormal);\n transmissionR = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, material.diffuseColor, 
material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior, material.thickness + thickness_smear * (i + randomCoords) / float(${e}),\n material.attenuationColor, material.attenuationDistance\n ).r;\n transmissionG = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + chromaticAberration * (i + randomCoords) / float(${e})) , material.thickness + thickness_smear * (i + randomCoords) / float(${e}),\n material.attenuationColor, material.attenuationDistance\n ).g;\n transmissionB = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + 2.0 * chromaticAberration * (i + randomCoords) / float(${e})), material.thickness + thickness_smear * (i + randomCoords) / float(${e}),\n material.attenuationColor, material.attenuationDistance\n ).b;\n transmission.r += transmissionR;\n transmission.g += transmissionG;\n transmission.b += transmissionB;\n }\n transmission /= ${e}.0;\n totalDiffuse = mix( totalDiffuse, transmission.rgb, material.transmission );\n`)},Object.keys(this.uniforms).forEach((e=>Object.defineProperty(this,e,{get:()=>this.uniforms[e].value,set:n=>this.uniforms[e].value=n})))}}const x=a({depth:null,opacity:1,attenuation:2.5,anglePower:12,spotPosition:new e.Vector3(0,0,0),lightColor:new e.Color("white"),cameraNear:0,cameraFar:1,resolution:new e.Vector2(0,0),transparent:!0,depthWrite:!1},"\n varying vec3 vNormal;\n varying vec3 vWorldPosition;\n varying float vViewZ;\n varying float vIntensity;\n uniform vec3 spotPosition;\n uniform float attenuation;\n\n void main() {\n // compute intensity\n vNormal = normalize( normalMatrix * normal );\n vec4 worldPosition\t= modelMatrix * vec4( position, 1.0 );\n vWorldPosition = worldPosition.xyz;\n vec4 viewPosition = viewMatrix * worldPosition;\n vViewZ = viewPosition.z;\n float intensity\t= distance(worldPosition.xyz, spotPosition) / attenuation;\n intensity\t= 1.0 - clamp(intensity, 0.0, 1.0);\n vIntensity = intensity;\n // set gl_Position\n gl_Position\t= projectionMatrix * viewPosition;\n\n }","\n #include <packing>\n\n varying vec3 vNormal;\n varying vec3 vWorldPosition;\n uniform vec3 lightColor;\n uniform vec3 spotPosition;\n uniform float attenuation;\n uniform float anglePower;\n uniform sampler2D depth;\n uniform vec2 resolution;\n uniform float cameraNear;\n uniform float cameraFar;\n varying float vViewZ;\n varying float vIntensity;\n uniform float opacity;\n\n float readDepth( sampler2D depthSampler, vec2 coord ) {\n float fragCoordZ = texture2D( depthSampler, coord ).x;\n float viewZ = perspectiveDepthToViewZ(fragCoordZ, cameraNear, cameraFar);\n return viewZ;\n }\n\n void main() {\n float d = 1.0;\n bool isSoft = resolution[0] > 0.0 && resolution[1] > 0.0;\n if (isSoft) {\n vec2 sUv = gl_FragCoord.xy / resolution;\n d = readDepth(depth, sUv);\n }\n float intensity = vIntensity;\n vec3 normal\t= vec3(vNormal.x, vNormal.y, abs(vNormal.z));\n float angleIntensity\t= pow( dot(normal, vec3(0.0, 0.0, 1.0)), anglePower );\n intensity\t*= angleIntensity;\n // fades when z is close to sampled depth, meaning the cone is intersecting existing geometry\n if (isSoft) {\n intensity\t*= smoothstep(0., 1., vViewZ - d);\n }\n gl_FragColor = vec4(lightColor, intensity * opacity);\n\n #include 
<tonemapping_fragment>\n #include <encodings_fragment>\n }");class w extends e.ShaderMaterial{constructor(n=new e.Vector2){super({uniforms:{inputBuffer:new e.Uniform(null),depthBuffer:new e.Uniform(null),resolution:new e.Uniform(new e.Vector2),texelSize:new e.Uniform(new e.Vector2),halfTexelSize:new e.Uniform(new e.Vector2),kernel:new e.Uniform(0),scale:new e.Uniform(1),cameraNear:new e.Uniform(0),cameraFar:new e.Uniform(1),minDepthThreshold:new e.Uniform(0),maxDepthThreshold:new e.Uniform(1),depthScale:new e.Uniform(0),depthToBlurRatioBias:new e.Uniform(.25)},fragmentShader:"#include <common>\n #include <dithering_pars_fragment> \n uniform sampler2D inputBuffer;\n uniform sampler2D depthBuffer;\n uniform float cameraNear;\n uniform float cameraFar;\n uniform float minDepthThreshold;\n uniform float maxDepthThreshold;\n uniform float depthScale;\n uniform float depthToBlurRatioBias;\n varying vec2 vUv;\n varying vec2 vUv0;\n varying vec2 vUv1;\n varying vec2 vUv2;\n varying vec2 vUv3;\n\n void main() {\n float depthFactor = 0.0;\n \n #ifdef USE_DEPTH\n vec4 depth = texture2D(depthBuffer, vUv);\n depthFactor = smoothstep(minDepthThreshold, maxDepthThreshold, 1.0-(depth.r * depth.a));\n depthFactor *= depthScale;\n depthFactor = max(0.0, min(1.0, depthFactor + 0.25));\n #endif\n \n vec4 sum = texture2D(inputBuffer, mix(vUv0, vUv, depthFactor));\n sum += texture2D(inputBuffer, mix(vUv1, vUv, depthFactor));\n sum += texture2D(inputBuffer, mix(vUv2, vUv, depthFactor));\n sum += texture2D(inputBuffer, mix(vUv3, vUv, depthFactor));\n gl_FragColor = sum * 0.25 ;\n\n #include <dithering_fragment>\n #include <tonemapping_fragment>\n #include <encodings_fragment>\n }",vertexShader:"uniform vec2 texelSize;\n uniform vec2 halfTexelSize;\n uniform float kernel;\n uniform float scale;\n varying vec2 vUv;\n varying vec2 vUv0;\n varying vec2 vUv1;\n varying vec2 vUv2;\n varying vec2 vUv3;\n\n void main() {\n vec2 uv = position.xy * 0.5 + 0.5;\n vUv = uv;\n\n vec2 dUv = (texelSize * vec2(kernel) + halfTexelSize) * scale;\n vUv0 = vec2(uv.x - dUv.x, uv.y + dUv.y);\n vUv1 = vec2(uv.x + dUv.x, uv.y + dUv.y);\n vUv2 = vec2(uv.x + dUv.x, uv.y - dUv.y);\n vUv3 = vec2(uv.x - dUv.x, uv.y - dUv.y);\n\n gl_Position = vec4(position.xy, 1.0, 1.0);\n }",blending:e.NoBlending,depthWrite:!1,depthTest:!1}),this.toneMapped=!1,this.setTexelSize(n.x,n.y),this.kernel=new Float32Array([0,1,2,2,3])}setTexelSize(e,n){this.uniforms.texelSize.value.set(e,n),this.uniforms.halfTexelSize.value.set(e,n).multiplyScalar(.5)}setResolution(e){this.uniforms.resolution.value.copy(e)}}class _ extends 
e.MeshStandardMaterial{constructor(e={}){super(),this._tDepth={value:null},this._distortionMap={value:null},this._tDiffuse={value:null},this._tDiffuseBlur={value:null},this._textureMatrix={value:null},this._hasBlur={value:!1},this._mirror={value:0},this._mixBlur={value:0},this._blurStrength={value:.5},this._minDepthThreshold={value:.9},this._maxDepthThreshold={value:1},this._depthScale={value:0},this._depthToBlurRatioBias={value:.25},this._distortion={value:1},this._mixContrast={value:1},this._tDepth={value:null},this._distortionMap={value:null},this._tDiffuse={value:null},this._tDiffuseBlur={value:null},this._textureMatrix={value:null},this._hasBlur={value:!1},this._mirror={value:0},this._mixBlur={value:0},this._blurStrength={value:.5},this._minDepthThreshold={value:.9},this._maxDepthThreshold={value:1},this._depthScale={value:0},this._depthToBlurRatioBias={value:.25},this._distortion={value:1},this._mixContrast={value:1},this.setValues(e)}onBeforeCompile(e){var n;null!=(n=e.defines)&&n.USE_UV||(e.defines.USE_UV=""),e.uniforms.hasBlur=this._hasBlur,e.uniforms.tDiffuse=this._tDiffuse,e.uniforms.tDepth=this._tDepth,e.uniforms.distortionMap=this._distortionMap,e.uniforms.tDiffuseBlur=this._tDiffuseBlur,e.uniforms.textureMatrix=this._textureMatrix,e.uniforms.mirror=this._mirror,e.uniforms.mixBlur=this._mixBlur,e.uniforms.mixStrength=this._blurStrength,e.uniforms.minDepthThreshold=this._minDepthThreshold,e.uniforms.maxDepthThreshold=this._maxDepthThreshold,e.uniforms.depthScale=this._depthScale,e.uniforms.depthToBlurRatioBias=this._depthToBlurRatioBias,e.uniforms.distortion=this._distortion,e.uniforms.mixContrast=this._mixContrast,e.vertexShader=`\n uniform mat4 textureMatrix;\n varying vec4 my_vUv;\n ${e.vertexShader}`,e.vertexShader=e.vertexShader.replace("#include <project_vertex>","#include <project_vertex>\n my_vUv = textureMatrix * vec4( position, 1.0 );\n gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );"),e.fragmentShader=`\n uniform sampler2D tDiffuse;\n uniform sampler2D tDiffuseBlur;\n uniform sampler2D tDepth;\n uniform sampler2D distortionMap;\n uniform float distortion;\n uniform float cameraNear;\n\t\t\t uniform float cameraFar;\n uniform bool hasBlur;\n uniform float mixBlur;\n uniform float mirror;\n uniform float mixStrength;\n uniform float minDepthThreshold;\n uniform float maxDepthThreshold;\n uniform float mixContrast;\n uniform float depthScale;\n uniform float depthToBlurRatioBias;\n varying vec4 my_vUv;\n ${e.fragmentShader}`,e.fragmentShader=e.fragmentShader.replace("#include <emissivemap_fragment>","#include <emissivemap_fragment>\n\n float distortionFactor = 0.0;\n #ifdef USE_DISTORTION\n distortionFactor = texture2D(distortionMap, vUv).r * distortion;\n #endif\n\n vec4 new_vUv = my_vUv;\n new_vUv.x += distortionFactor;\n new_vUv.y += distortionFactor;\n\n vec4 base = texture2DProj(tDiffuse, new_vUv);\n vec4 blur = texture2DProj(tDiffuseBlur, new_vUv);\n\n vec4 merge = base;\n\n #ifdef USE_NORMALMAP\n vec2 normal_uv = vec2(0.0);\n vec4 normalColor = texture2D(normalMap, vUv * normalScale);\n vec3 my_normal = normalize( vec3( normalColor.r * 2.0 - 1.0, normalColor.b, normalColor.g * 2.0 - 1.0 ) );\n vec3 coord = new_vUv.xyz / new_vUv.w;\n normal_uv = coord.xy + coord.z * my_normal.xz * 0.05;\n vec4 base_normal = texture2D(tDiffuse, normal_uv);\n vec4 blur_normal = texture2D(tDiffuseBlur, normal_uv);\n merge = base_normal;\n blur = blur_normal;\n #endif\n\n float depthFactor = 0.0001;\n float blurFactor = 0.0;\n\n #ifdef USE_DEPTH\n vec4 depth 
= texture2DProj(tDepth, new_vUv);\n depthFactor = smoothstep(minDepthThreshold, maxDepthThreshold, 1.0-(depth.r * depth.a));\n depthFactor *= depthScale;\n depthFactor = max(0.0001, min(1.0, depthFactor));\n\n #ifdef USE_BLUR\n blur = blur * min(1.0, depthFactor + depthToBlurRatioBias);\n merge = merge * min(1.0, depthFactor + 0.5);\n #else\n merge = merge * depthFactor;\n #endif\n\n #endif\n\n float reflectorRoughnessFactor = roughness;\n #ifdef USE_ROUGHNESSMAP\n vec4 reflectorTexelRoughness = texture2D( roughnessMap, vUv );\n reflectorRoughnessFactor *= reflectorTexelRoughness.g;\n #endif\n\n #ifdef USE_BLUR\n blurFactor = min(1.0, mixBlur * reflectorRoughnessFactor);\n merge = mix(merge, blur, blurFactor);\n #endif\n\n vec4 newMerge = vec4(0.0, 0.0, 0.0, 1.0);\n newMerge.r = (merge.r - 0.5) * mixContrast + 0.5;\n newMerge.g = (merge.g - 0.5) * mixContrast + 0.5;\n newMerge.b = (merge.b - 0.5) * mixContrast + 0.5;\n\n diffuseColor.rgb = diffuseColor.rgb * ((1.0 - min(1.0, mirror)) + newMerge.rgb * mixStrength);\n ")}get tDiffuse(){return this._tDiffuse.value}set tDiffuse(e){this._tDiffuse.value=e}get tDepth(){return this._tDepth.value}set tDepth(e){this._tDepth.value=e}get distortionMap(){return this._distortionMap.value}set distortionMap(e){this._distortionMap.value=e}get tDiffuseBlur(){return this._tDiffuseBlur.value}set tDiffuseBlur(e){this._tDiffuseBlur.value=e}get textureMatrix(){return this._textureMatrix.value}set textureMatrix(e){this._textureMatrix.value=e}get hasBlur(){return this._hasBlur.value}set hasBlur(e){this._hasBlur.value=e}get mirror(){return this._mirror.value}set mirror(e){this._mirror.value=e}get mixBlur(){return this._mixBlur.value}set mixBlur(e){this._mixBlur.value=e}get mixStrength(){return this._blurStrength.value}set mixStrength(e){this._blurStrength.value=e}get minDepthThreshold(){return this._minDepthThreshold.value}set minDepthThreshold(e){this._minDepthThreshold.value=e}get maxDepthThreshold(){return this._maxDepthThreshold.value}set maxDepthThreshold(e){this._maxDepthThreshold.value=e}get depthScale(){return this._depthScale.value}set depthScale(e){this._depthScale.value=e}get depthToBlurRatioBias(){return this._depthToBlurRatioBias.value}set depthToBlurRatioBias(e){this._depthToBlurRatioBias.value=e}get distortion(){return this._distortion.value}set distortion(e){this._distortion.value=e}get mixContrast(){return this._mixContrast.value}set mixContrast(e){this._mixContrast.value=e}}exports.BlurPass=class{constructor({gl:n,resolution:t,width:r=500,height:o=500,minDepthThreshold:i=0,maxDepthThreshold:a=1,depthScale:s=0,depthToBlurRatioBias:l=.25}){this.renderToScreen=!1,this.renderTargetA=new e.WebGLRenderTarget(t,t,{minFilter:e.LinearFilter,magFilter:e.LinearFilter,stencilBuffer:!1,depthBuffer:!1,type:e.HalfFloatType}),this.renderTargetB=this.renderTargetA.clone(),this.convolutionMaterial=new w,this.convolutionMaterial.setTexelSize(1/r,1/o),this.convolutionMaterial.setResolution(new e.Vector2(r,o)),this.scene=new e.Scene,this.camera=new e.Camera,this.convolutionMaterial.uniforms.minDepthThreshold.value=i,this.convolutionMaterial.uniforms.maxDepthThreshold.value=a,this.convolutionMaterial.uniforms.depthScale.value=s,this.convolutionMaterial.uniforms.depthToBlurRatioBias.value=l,this.convolutionMaterial.defines.USE_DEPTH=s>0;const c=new Float32Array([-1,-1,0,3,-1,0,-1,3,0]),u=new Float32Array([0,0,2,0,0,2]),m=new e.BufferGeometry;m.setAttribute("position",new e.BufferAttribute(c,3)),m.setAttribute("uv",new e.BufferAttribute(u,2)),this.screen=new 
e.Mesh(m,this.convolutionMaterial),this.screen.frustumCulled=!1,this.scene.add(this.screen)}render(e,n,t){const r=this.scene,o=this.camera,i=this.renderTargetA,a=this.renderTargetB,s=this.convolutionMaterial,l=s.uniforms;l.depthBuffer.value=n.depthTexture;const c=s.kernel;let u,m,d,f=n;for(m=0,d=c.length-1;m<d;++m)u=0==(1&m)?i:a,l.kernel.value=c[m],l.inputBuffer.value=f.texture,e.setRenderTarget(u),e.render(r,o),f=u;l.kernel.value=c[m],l.inputBuffer.value=f.texture,e.setRenderTarget(this.renderToScreen?null:t),e.render(r,o)}},exports.Caustics=(e,{frames:t=1,causticsOnly:r=!1,ior:o=1.1,backside:a=!1,backsideIOR:f=1.1,worldRadius:v=.3125,color:h=new i.Color("white"),intensity:p=.05,resolution:g=2024,lightSource:x=new i.Vector3(1,1,1),near:w=.1,far:_=0}={})=>{const S={frames:t,ior:o,color:h,causticsOnly:r,backside:a,backsideIOR:f,worldRadius:v,intensity:p,resolution:g,lightSource:x,near:w,far:_},y=new i.Group;y.name="caustics_group";const P=y,M=new i.OrthographicCamera,D=new i.Scene;D.name="caustics_scene";const T=e,C=new i.CameraHelper(M);C.name="caustics_helper";const b=S.resolution,B=s(b,b,m),F=s(b,b,m),R=s(b,b,d),U=s(b,b,d),z=l(),k=l(i.BackSide),I=new u,N=new n.FullScreenQuad(I),j=new i.Mesh(new i.PlaneGeometry(1,1),new c({transparent:!0,color:S.color,causticsTexture:R.texture,causticsTextureB:U.texture,blending:i.CustomBlending,blendSrc:i.OneFactor,blendDst:i.SrcAlphaFactor,depthWrite:!1}));j.name="caustics_plane",j.rotation.x=-Math.PI/2,j.renderOrder=2,y.add(D,j),y.updateWorldMatrix(!1,!0);let O=0;const A=new i.Vector3,E=new i.Frustum,V=new i.Matrix4,G=new i.Plane,W=new i.Vector3,L=new i.Vector3,$=new i.Box3,H=new i.Vector3,Z=[],q=[],K=[],X=[],Y=new i.Vector3;for(let e=0;e<8;e++)Z.push(new i.Vector3),q.push(new i.Vector3),K.push(new i.Vector3),X.push(new i.Vector3);return{scene:D,group:y,helper:C,params:S,update:()=>{if(S.frames===1/0||O++<S.frames){var e;(null==(n=x)?void 0:n.isVector3)?W.copy(x).normalize():W.copy(P.worldToLocal(x.getWorldPosition(A)).normalize()),L.copy(W).multiplyScalar(-1),null==(e=D.parent)||e.matrixWorld.identity(),$.setFromObject(D,!0),Z[0].set($.min.x,$.min.y,$.min.z),Z[1].set($.min.x,$.min.y,$.max.z),Z[2].set($.min.x,$.max.y,$.min.z),Z[3].set($.min.x,$.max.y,$.max.z),Z[4].set($.max.x,$.min.y,$.min.z),Z[5].set($.max.x,$.min.y,$.max.z),Z[6].set($.max.x,$.max.y,$.min.z),Z[7].set($.max.x,$.max.y,$.max.z);for(let e=0;e<8;e++)q[e].copy(Z[e]);$.getCenter(H),Z.map((e=>e.sub(H)));const t=G.set(L,0);Z.map(((e,n)=>t.projectPoint(e,K[n])));const r=K.reduce(((e,n)=>e.add(n)),A.set(0,0,0)).divideScalar(K.length),o=K.map((e=>e.distanceTo(r))).reduce(((e,n)=>Math.max(e,n))),i=Z.map((e=>e.dot(W))).reduce(((e,n)=>Math.max(e,n)));M.position.copy(Y.copy(W).multiplyScalar(i).add(H)),M.lookAt(D.localToWorld(H));const a=V.lookAt(M.position,H,A.set(0,1,0));if(M.left=-o,M.right=o,M.top=o,M.bottom=-o,M.near=S.near,S.far)M.far=S.far;else{const e=A.set(0,o,0).applyMatrix4(a),n=(M.position.y+e.y)/W.y;M.far=n}M.updateProjectionMatrix(),M.updateMatrixWorld();const s=q.map(((e,n)=>e.add(X[n].copy(W).multiplyScalar(-e.y/W.y)))),l=s.reduce(((e,n)=>e.add(n)),A.set(0,0,0)).divideScalar(s.length),c=2*s.map((e=>Math.hypot(e.x-l.x,e.z-l.z))).reduce(((e,n)=>Math.max(e,n)));j.scale.setScalar(c),j.position.copy(l),C.parent&&C.update(),k.viewMatrix.value=z.viewMatrix.value=M.matrixWorldInverse;const 
u=E.setFromProjectionMatrix(V.multiplyMatrices(M.projectionMatrix,M.matrixWorldInverse)).planes[4];I.cameraMatrixWorld=M.matrixWorld,I.cameraProjectionMatrixInv=M.projectionMatrixInverse,I.lightDir=L,I.lightPlaneNormal=u.normal,I.lightPlaneConstant=u.constant,I.near=M.near,I.far=M.far,I.resolution=S.resolution,I.size=o,I.intensity=S.intensity,I.worldRadius=S.worldRadius,D.visible=!0,T.setRenderTarget(B),T.clear(),D.overrideMaterial=z,T.render(D,M),T.setRenderTarget(F),T.clear(),S.backside&&(D.overrideMaterial=k,T.render(D,M)),D.overrideMaterial=null,I.ior=S.ior,j.material.lightProjMatrix=M.projectionMatrix,j.material.lightViewMatrix=M.matrixWorldInverse,I.normalTexture=B.texture,I.depthTexture=B.depthTexture,T.setRenderTarget(R),T.clear(),N.render(T),I.ior=S.backsideIOR,I.normalTexture=F.texture,I.depthTexture=F.depthTexture,T.setRenderTarget(U),T.clear(),S.backside&&N.render(T),T.setRenderTarget(null),S.causticsOnly&&(D.visible=!1)}var n},normalTarget:B,normalTargetB:F,causticsTarget:R,causticsTargetB:U}},exports.ConvolutionMaterial=w,exports.Grid=({args:e=[1,1],cellColor:n=new i.Color("#000000"),sectionColor:t=new i.Color("#2080ff"),cellSize:r=.5,sectionSize:o=1,followCamera:a=!1,infiniteGrid:s=!1,fadeDistance:l=100,fadeStrength:c=1,cellThickness:u=.5,sectionThickness:m=1,side:d=i.BackSide}={})=>{const f=new h({transparent:!0,side:d,...{cellSize:r,sectionSize:o,cellColor:n,sectionColor:t,cellThickness:u,sectionThickness:m},...{fadeDistance:l,fadeStrength:c,infiniteGrid:s,followCamera:a}}),v=new i.PlaneGeometry(e[0],e[1]),p=new i.Mesh(v,f);p.frustumCulled=!1;const g=new i.Plane,x=new i.Vector3(0,1,0),w=new i.Vector3(0,0,0);return{mesh:p,update:e=>{if(!p.parent)return;g.setFromNormalAndCoplanarPoint(x,w).applyMatrix4(p.matrixWorld);const n=p.material,t=n.uniforms.worldCamProjPosition,r=n.uniforms.worldPlanePosition;g.projectPoint(e.position,t.value),r.value.set(0,0,0).applyMatrix4(p.matrixWorld)}}},exports.MeshDiscardMaterial=f,exports.MeshReflectorMaterial=_,exports.MeshTransmissionMaterial=g,exports.Outlines=function({color:e=new i.Color("black"),opacity:n=1,transparent:o=!1,thickness:a=.05,angle:s=Math.PI}){const l=new i.Group;let c={color:e,opacity:n,transparent:o,thickness:a,angle:s};function u(e){const n=l.parent;if(l.clear(),n&&n.geometry){let r;n.skeleton?(r=new i.SkinnedMesh,r.material=new p({side:i.BackSide}),r.bind(n.skeleton,n.bindMatrix),l.add(r)):n.isInstancedMesh?(r=new i.InstancedMesh(n.geometry,new p({side:i.BackSide}),n.count),r.instanceMatrix=n.instanceMatrix,l.add(r)):(r=new i.Mesh,r.material=new p({side:i.BackSide}),l.add(r)),r.geometry=e?t.toCreasedNormals(n.geometry,e):n.geometry}}function m(e){c={...c,...e};const n=l.children[0];if(n){const{transparent:e,thickness:t,color:o,opacity:i}=c;r.assign(n.material,{transparent:e,thickness:t,color:o,opacity:i})}}return{group:l,updateProps(e){var n;const t=null!==(n=e.angle)&&void 0!==n?n:c.angle;t!==c.angle&&u(t),m(e)},render(){u(c.angle),m(c)}}},exports.ProgressiveLightMap=class{constructor(e,n,t=1024){this.renderer=e,this.res=t,this.scene=n,this.buffer1Active=!1,this.lights=[],this.meshes=[],this.object=null,this.clearColor=new i.Color,this.clearAlpha=0;const r=/(Android|iPad|iPhone|iPod)/g.test(navigator.userAgent)?i.HalfFloatType:i.FloatType;this.progressiveLightMap1=new i.WebGLRenderTarget(this.res,this.res,{type:r}),this.progressiveLightMap2=new i.WebGLRenderTarget(this.res,this.res,{type:r}),this.discardMat=new f,this.targetMat=new 
i.MeshLambertMaterial({fog:!1}),this.previousShadowMap={value:this.progressiveLightMap1.texture},this.averagingWindow={value:100},this.targetMat.onBeforeCompile=e=>{e.vertexShader="varying vec2 vUv;\n"+e.vertexShader.slice(0,-1)+"vUv = uv; gl_Position = vec4((uv - 0.5) * 2.0, 1.0, 1.0); }";const n=e.fragmentShader.indexOf("void main() {");e.fragmentShader="varying vec2 vUv;\n"+e.fragmentShader.slice(0,n)+"uniform sampler2D previousShadowMap;\n\tuniform float averagingWindow;\n"+e.fragmentShader.slice(n-1,-1)+"\nvec3 texelOld = texture2D(previousShadowMap, vUv).rgb;\n gl_FragColor.rgb = mix(texelOld, gl_FragColor.rgb, 1.0/ averagingWindow);\n }",e.uniforms.previousShadowMap=this.previousShadowMap,e.uniforms.averagingWindow=this.averagingWindow}}clear(){this.renderer.getClearColor(this.clearColor),this.clearAlpha=this.renderer.getClearAlpha(),this.renderer.setClearColor("black",1),this.renderer.setRenderTarget(this.progressiveLightMap1),this.renderer.clear(),this.renderer.setRenderTarget(this.progressiveLightMap2),this.renderer.clear(),this.renderer.setRenderTarget(null),this.renderer.setClearColor(this.clearColor,this.clearAlpha),this.lights=[],this.meshes=[],this.scene.traverse((e=>{!function(e){return!!e.geometry}(e)?function(e){return e.isLight}(e)&&this.lights.push({object:e,intensity:e.intensity}):this.meshes.push({object:e,material:e.material})}))}prepare(){this.lights.forEach((e=>e.object.intensity=0)),this.meshes.forEach((e=>e.object.material=this.discardMat))}finish(){this.lights.forEach((e=>e.object.intensity=e.intensity)),this.meshes.forEach((e=>e.object.material=e.material))}configure(e){this.object=e}update(e,n=100){if(!this.object)return;this.averagingWindow.value=n,this.object.material=this.targetMat;const t=this.buffer1Active?this.progressiveLightMap1:this.progressiveLightMap2,r=this.buffer1Active?this.progressiveLightMap2:this.progressiveLightMap1,o=this.scene.background;this.scene.background=null,this.renderer.setRenderTarget(t),this.previousShadowMap.value=r.texture,this.buffer1Active=!this.buffer1Active,this.renderer.render(this.scene,e),this.renderer.setRenderTarget(null),this.scene.background=o}},exports.SoftShadowMaterial=v,exports.SpotLightMaterial=x,exports.pcss=({focus:e=0,size:n=25,samples:t=10}={})=>{const r=i.ShaderChunk.shadowmap_pars_fragment;return i.ShaderChunk.shadowmap_pars_fragment=i.ShaderChunk.shadowmap_pars_fragment.replace("#ifdef USE_SHADOWMAP",`#ifdef USE_SHADOWMAP\n\n #define PENUMBRA_FILTER_SIZE float(${n})\n #define RGB_NOISE_FUNCTION(uv) (randRGB(uv))\n vec3 randRGB(vec2 uv) {\n return vec3(\n fract(sin(dot(uv, vec2(12.75613, 38.12123))) * 13234.76575),\n fract(sin(dot(uv, vec2(19.45531, 58.46547))) * 43678.23431),\n fract(sin(dot(uv, vec2(23.67817, 78.23121))) * 93567.23423)\n );\n }\n \n vec3 lowPassRandRGB(vec2 uv) {\n // 3x3 convolution (average)\n // can be implemented as separable with an extra buffer for a total of 6 samples instead of 9\n vec3 result = vec3(0);\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, +1.0));\n result *= 0.111111111; // 1.0 / 9.0\n return result;\n }\n vec3 highPassRandRGB(vec2 uv) {\n 
// by subtracting the low-pass signal from the original signal, we're being left with the high-pass signal\n // hp(x) = x - lp(x)\n return RGB_NOISE_FUNCTION(uv) - lowPassRandRGB(uv) + 0.5;\n }\n \n \n vec2 vogelDiskSample(int sampleIndex, int sampleCount, float angle) {\n const float goldenAngle = 2.399963f; // radians\n float r = sqrt(float(sampleIndex) + 0.5f) / sqrt(float(sampleCount));\n float theta = float(sampleIndex) * goldenAngle + angle;\n float sine = sin(theta);\n float cosine = cos(theta);\n return vec2(cosine, sine) * r;\n }\n float penumbraSize( const in float zReceiver, const in float zBlocker ) { // Parallel plane estimation\n return (zReceiver - zBlocker) / zBlocker;\n }\n float findBlocker(sampler2D shadowMap, vec2 uv, float compare, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float blockerDepthSum = float(${e});\n float blockers = 0.0;\n \n int j = 0;\n vec2 offset = vec2(0.);\n float depth = 0.;\n \n #pragma unroll_loop_start\n for(int i = 0; i < ${t}; i ++) {\n offset = (vogelDiskSample(j, ${t}, angle) * texelSize) * 2.0 * PENUMBRA_FILTER_SIZE;\n depth = unpackRGBAToDepth( texture2D( shadowMap, uv + offset));\n if (depth < compare) {\n blockerDepthSum += depth;\n blockers++;\n }\n j++;\n }\n #pragma unroll_loop_end\n \n if (blockers > 0.0) {\n return blockerDepthSum / blockers;\n }\n return -1.0;\n }\n \n float vogelFilter(sampler2D shadowMap, vec2 uv, float zReceiver, float filterRadius, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float shadow = 0.0f;\n int j = 0;\n vec2 vogelSample = vec2(0.0);\n vec2 offset = vec2(0.0);\n #pragma unroll_loop_start\n for (int i = 0; i < ${t}; i++) {\n vogelSample = vogelDiskSample(j, ${t}, angle) * texelSize;\n offset = vogelSample * (1.0 + filterRadius * float(${n}));\n shadow += step( zReceiver, unpackRGBAToDepth( texture2D( shadowMap, uv + offset ) ) );\n j++;\n }\n #pragma unroll_loop_end\n return shadow * 1.0 / ${t}.0;\n }\n \n float PCSS (sampler2D shadowMap, vec4 coords) {\n vec2 uv = coords.xy;\n float zReceiver = coords.z; // Assumed to be eye-space z in this code\n float angle = highPassRandRGB(gl_FragCoord.xy).r * PI2;\n float avgBlockerDepth = findBlocker(shadowMap, uv, zReceiver, angle);\n if (avgBlockerDepth == -1.0) {\n return 1.0;\n }\n float penumbraRatio = penumbraSize(zReceiver, avgBlockerDepth);\n return vogelFilter(shadowMap, uv, zReceiver, 1.25 * penumbraRatio, angle);\n }`).replace("#if defined( SHADOWMAP_TYPE_PCF )","\nreturn PCSS(shadowMap, shadowCoord);\n#if defined( SHADOWMAP_TYPE_PCF )"),(e,n,t)=>{i.ShaderChunk.shadowmap_pars_fragment=r,function(e,n,t){n.traverse((n=>{n.material&&(e.properties.remove(n.material),n.material.dispose())})),e.info.programs.length=0,e.compile(n,t)}(e,n,t)}},exports.shaderMaterial=a,exports.useFBO=s;

@@ -7,2 +7,3 @@ export { pcss } from './core/pcss.js';

export { Grid } from './core/Grid.js';
export { Outlines } from './core/Outlines.js';
export { MeshDiscardMaterial } from './materials/MeshDiscardMaterial.js';

@@ -9,0 +10,0 @@ export { MeshTransmissionMaterial } from './materials/MeshTransmissionMaterial.js';

{
"name": "@pmndrs/vanilla",
"version": "1.9.8",
"version": "1.10.0",
"private": false,

@@ -5,0 +5,0 @@ "publishConfig": {

@@ -385,3 +385,5 @@ ![logo](logo.jpg)

```
Usage
```jsx

@@ -418,1 +420,47 @@ grid = Grid({

```
#### Outlines
[![storybook](https://img.shields.io/badge/-storybook-%23ff69b4)](https://pmndrs.github.io/drei-vanilla/?path=/story/gizmos-outlines--outlines-story)
[drei counterpart](https://github.com/pmndrs/drei#outlines)
An ornamental component that extracts the geometry from its parent and displays an inverted-hull outline. Supported parents are `THREE.Mesh`, `THREE.SkinnedMesh` and `THREE.InstancedMesh`.
```tsx
export type OutlinesProps = {
/** Outline color, default: black */
color: THREE.Color
/** Outline opacity, default: 1 */
opacity: number
/** Outline transparency, default: false */
transparent: boolean
  /** Outline thickness, default: 0.05 */
thickness: number
/** Geometry crease angle (0 === no crease), default: Math.PI */
angle: number
}
```
Usage
```jsx
const outlines = Outlines({})
const mesh = new THREE.Mesh(geometry, material)
mesh.add(outlines.group)
// render() must be called after the group has been added so the outline geometry is built from the parent mesh
outlines.render()
scene.add(mesh)
```
The Outlines function returns the following
```jsx
export type OutlinesType = {
group: THREE.Group
updateProps: (props: Partial<OutlinesProps>) => void
render: () => void
}
```
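The outline can also be adjusted after creation through `updateProps`. A minimal sketch, assuming the `mesh` and `outlines` from the usage snippet above; per the shipped source, changing `angle` regenerates the outline geometry, while the remaining props only update the outline material:
```jsx
// Tweak the outline at runtime. Changing `angle` rebuilds the outline
// geometry; the other props are applied directly to the outline material.
outlines.updateProps({ thickness: 0.1, color: new THREE.Color('hotpink') })

// Detach the outline again when it is no longer needed.
mesh.remove(outlines.group)
```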