@pmndrs/vanilla - npm Package Compare versions

Comparing version 1.1.0 to 1.2.0

core/shaderMaterial.cjs.js

core/index.cjs.js

@@ -1,1 +0,1 @@

"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("./pcss.cjs.js");require("three"),exports.pcss=e.pcss;
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("./pcss.cjs.js"),r=require("./shaderMaterial.cjs.js");require("three"),exports.pcss=e.pcss,exports.shaderMaterial=r.shaderMaterial;
export * from './pcss';
export * from './shaderMaterial';
export { pcss } from './pcss.js';
export { shaderMaterial } from './shaderMaterial.js';
import 'three';
import * as THREE from 'three';
// Inspired by: https://github.com/mrdoob/three.js/blob/dev/examples/webgl_shadowmap_pcss.html
/* Integration and compilation: @N8Programs
Inspired by:
https://github.com/mrdoob/three.js/blob/dev/examples/webgl_shadowmap_pcss.html
https://developer.nvidia.com/gpugems/gpugems2/part-ii-shading-lighting-and-shadows/chapter-17-efficient-soft-edged-shadows-using
https://developer.download.nvidia.com/whitepapers/2008/PCSS_Integration.pdf
https://github.com/mrdoob/three.js/blob/master/examples/webgl_shadowmap_pcss.html [spidersharma03]
https://spline.design/
Concept:
https://www.gamedev.net/tutorials/programming/graphics/contact-hardening-soft-shadows-made-fast-r4906/
Vogel Disk Implementation:
https://www.shadertoy.com/view/4l3yRM [ashalah]
High-Frequency Noise Implementation:
https://www.shadertoy.com/view/tt3fDH [spawner64]
*/

@@ -5,0 +18,0 @@ function reset(gl, scene, camera) {

@@ -1,1 +0,1 @@

"use strict";function e(e){if(e&&e.__esModule)return e;var n=Object.create(null);return e&&Object.keys(e).forEach((function(a){if("default"!==a){var t=Object.getOwnPropertyDescriptor(e,a);Object.defineProperty(n,a,t.get?t:{enumerable:!0,get:function(){return e[a]}})}})),n.default=e,Object.freeze(n)}Object.defineProperty(exports,"__esModule",{value:!0});var n=e(require("three"));exports.pcss=({focus:e=0,size:a=25,samples:t=10}={})=>{const r=n.ShaderChunk.shadowmap_pars_fragment;return n.ShaderChunk.shadowmap_pars_fragment=n.ShaderChunk.shadowmap_pars_fragment.replace("#ifdef USE_SHADOWMAP",`#ifdef USE_SHADOWMAP\n\n #define PENUMBRA_FILTER_SIZE float(${a})\n #define RGB_NOISE_FUNCTION(uv) (randRGB(uv))\n vec3 randRGB(vec2 uv) {\n return vec3(\n fract(sin(dot(uv, vec2(12.75613, 38.12123))) * 13234.76575),\n fract(sin(dot(uv, vec2(19.45531, 58.46547))) * 43678.23431),\n fract(sin(dot(uv, vec2(23.67817, 78.23121))) * 93567.23423)\n );\n }\n \n vec3 lowPassRandRGB(vec2 uv) {\n // 3x3 convolution (average)\n // can be implemented as separable with an extra buffer for a total of 6 samples instead of 9\n vec3 result = vec3(0);\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, +1.0));\n result *= 0.111111111; // 1.0 / 9.0\n return result;\n }\n vec3 highPassRandRGB(vec2 uv) {\n // by subtracting the low-pass signal from the original signal, we're being left with the high-pass signal\n // hp(x) = x - lp(x)\n return RGB_NOISE_FUNCTION(uv) - lowPassRandRGB(uv) + 0.5;\n }\n \n \n vec2 vogelDiskSample(int sampleIndex, int sampleCount, float angle) {\n const float goldenAngle = 2.399963f; // radians\n float r = sqrt(float(sampleIndex) + 0.5f) / sqrt(float(sampleCount));\n float theta = float(sampleIndex) * goldenAngle + angle;\n float sine = sin(theta);\n float cosine = cos(theta);\n return vec2(cosine, sine) * r;\n }\n float penumbraSize( const in float zReceiver, const in float zBlocker ) { // Parallel plane estimation\n return (zReceiver - zBlocker) / zBlocker;\n }\n float findBlocker(sampler2D shadowMap, vec2 uv, float compare, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float blockerDepthSum = float(${e});\n float blockers = 0.0;\n \n int j = 0;\n vec2 offset = vec2(0.);\n float depth = 0.;\n \n #pragma unroll_loop_start\n for(int i = 0; i < ${t}; i ++) {\n offset = (vogelDiskSample(j, ${t}, angle) * texelSize) * 2.0 * PENUMBRA_FILTER_SIZE;\n depth = unpackRGBAToDepth( texture2D( shadowMap, uv + offset));\n if (depth < compare) {\n blockerDepthSum += depth;\n blockers++;\n }\n j++;\n }\n #pragma unroll_loop_end\n \n if (blockers > 0.0) {\n return blockerDepthSum / blockers;\n }\n return -1.0;\n }\n \n float vogelFilter(sampler2D shadowMap, vec2 uv, float zReceiver, float filterRadius, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float shadow = 0.0f;\n int j = 0;\n vec2 vogelSample = vec2(0.0);\n vec2 offset = vec2(0.0);\n #pragma unroll_loop_start\n for (int i = 0; i < ${t}; i++) {\n vogelSample = vogelDiskSample(j, ${t}, angle) * texelSize;\n offset = vogelSample * (1.0 + filterRadius * float(${a}));\n 
shadow += step( zReceiver, unpackRGBAToDepth( texture2D( shadowMap, uv + offset ) ) );\n j++;\n }\n #pragma unroll_loop_end\n return shadow * 1.0 / ${t}.0;\n }\n \n float PCSS (sampler2D shadowMap, vec4 coords) {\n vec2 uv = coords.xy;\n float zReceiver = coords.z; // Assumed to be eye-space z in this code\n float angle = highPassRandRGB(gl_FragCoord.xy).r * PI2;\n float avgBlockerDepth = findBlocker(shadowMap, uv, zReceiver, angle);\n if (avgBlockerDepth == -1.0) {\n return 1.0;\n }\n float penumbraRatio = penumbraSize(zReceiver, avgBlockerDepth);\n return vogelFilter(shadowMap, uv, zReceiver, 1.25 * penumbraRatio, angle);\n }`).replace("#if defined( SHADOWMAP_TYPE_PCF )","\nreturn PCSS(shadowMap, shadowCoord);\n#if defined( SHADOWMAP_TYPE_PCF )"),(e,a,t)=>{n.ShaderChunk.shadowmap_pars_fragment=r,function(e,n,a){n.traverse((n=>{n.material&&(e.properties.remove(n.material),n.material.dispose())})),e.info.programs.length=0,e.compile(n,a)}(e,a,t)}};
"use strict";function n(n){if(n&&n.__esModule)return n;var e=Object.create(null);return n&&Object.keys(n).forEach((function(t){if("default"!==t){var a=Object.getOwnPropertyDescriptor(n,t);Object.defineProperty(e,t,a.get?a:{enumerable:!0,get:function(){return n[t]}})}})),e.default=n,Object.freeze(e)}Object.defineProperty(exports,"__esModule",{value:!0});var e=n(require("three"));function t(n,t,a,o){const r=class extends e.ShaderMaterial{constructor(r={}){const i=Object.entries(n);super({uniforms:i.reduce(((n,[t,a])=>({...n,...e.UniformsUtils.clone({[t]:{value:a}})})),{}),vertexShader:t,fragmentShader:a}),this.key="",i.forEach((([n])=>Object.defineProperty(this,n,{get:()=>this.uniforms[n].value,set:e=>this.uniforms[n].value=e}))),Object.assign(this,r),o&&o(this)}};return r.key=e.MathUtils.generateUUID(),r}const a=t({},"void main() { }","void main() { gl_FragColor = vec4(0.0, 0.0, 0.0, 0.0); discard; }");class o extends e.MeshPhysicalMaterial{constructor(n=6,t=!1){super(),this.uniforms={chromaticAberration:{value:.05},transmission:{value:0},_transmission:{value:1},transmissionMap:{value:null},roughness:{value:0},thickness:{value:0},thicknessMap:{value:null},attenuationDistance:{value:1/0},attenuationColor:{value:new e.Color("white")},anisotropy:{value:.1},time:{value:0},distortion:{value:0},distortionScale:{value:.5},temporalDistortion:{value:0},buffer:{value:null}},this.onBeforeCompile=e=>{e.uniforms={...e.uniforms,...this.uniforms},t?e.defines.USE_SAMPLER="":e.defines.USE_TRANSMISSION="",e.fragmentShader="\n uniform float chromaticAberration; \n uniform float anisotropy; \n uniform float time;\n uniform float distortion;\n uniform float distortionScale;\n uniform float temporalDistortion;\n uniform sampler2D buffer;\n\n vec3 random3(vec3 c) {\n float j = 4096.0*sin(dot(c,vec3(17.0, 59.4, 15.0)));\n vec3 r;\n r.z = fract(512.0*j);\n j *= .125;\n r.x = fract(512.0*j);\n j *= .125;\n r.y = fract(512.0*j);\n return r-0.5;\n }\n\n float seed = 0.0;\n uint hash( uint x ) {\n x += ( x << 10u );\n x ^= ( x >> 6u );\n x += ( x << 3u );\n x ^= ( x >> 11u );\n x += ( x << 15u );\n return x;\n }\n\n // Compound versions of the hashing algorithm I whipped together.\n uint hash( uvec2 v ) { return hash( v.x ^ hash(v.y) ); }\n uint hash( uvec3 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ); }\n uint hash( uvec4 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ^ hash(v.w) ); }\n\n // Construct a float with half-open range [0:1] using low 23 bits.\n // All zeroes yields 0.0, all ones yields the next smallest representable value below 1.0.\n float floatConstruct( uint m ) {\n const uint ieeeMantissa = 0x007FFFFFu; // binary32 mantissa bitmask\n const uint ieeeOne = 0x3F800000u; // 1.0 in IEEE binary32\n m &= ieeeMantissa; // Keep only mantissa bits (fractional part)\n m |= ieeeOne; // Add fractional part to 1.0\n float f = uintBitsToFloat( m ); // Range [1:2]\n return f - 1.0; // Range [0:1]\n }\n\n // Pseudo-random value in half-open range [0:1].\n float random( float x ) { return floatConstruct(hash(floatBitsToUint(x))); }\n float random( vec2 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n float random( vec3 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n float random( vec4 v ) { return floatConstruct(hash(floatBitsToUint(v))); }\n\n float rand() {\n float result = random(vec3(gl_FragCoord.xy, seed));\n seed += 1.0;\n return result;\n }\n\n const float F3 = 0.3333333;\n const float G3 = 0.1666667;\n\n float snoise(vec3 p) {\n vec3 s = floor(p + dot(p, vec3(F3)));\n vec3 x = p - s + 
dot(s, vec3(G3));\n vec3 e = step(vec3(0.0), x - x.yzx);\n vec3 i1 = e*(1.0 - e.zxy);\n vec3 i2 = 1.0 - e.zxy*(1.0 - e);\n vec3 x1 = x - i1 + G3;\n vec3 x2 = x - i2 + 2.0*G3;\n vec3 x3 = x - 1.0 + 3.0*G3;\n vec4 w, d;\n w.x = dot(x, x);\n w.y = dot(x1, x1);\n w.z = dot(x2, x2);\n w.w = dot(x3, x3);\n w = max(0.6 - w, 0.0);\n d.x = dot(random3(s), x);\n d.y = dot(random3(s + i1), x1);\n d.z = dot(random3(s + i2), x2);\n d.w = dot(random3(s + 1.0), x3);\n w *= w;\n w *= w;\n d *= w;\n return dot(d, vec4(52.0));\n }\n\n float snoiseFractal(vec3 m) {\n return 0.5333333* snoise(m)\n +0.2666667* snoise(2.0*m)\n +0.1333333* snoise(4.0*m)\n +0.0666667* snoise(8.0*m);\n }\n"+e.fragmentShader,e.fragmentShader=e.fragmentShader.replace("#include <transmission_pars_fragment>","\n #ifdef USE_TRANSMISSION\n // Transmission code is based on glTF-Sampler-Viewer\n // https://github.com/KhronosGroup/glTF-Sample-Viewer\n uniform float _transmission;\n uniform float thickness;\n uniform float attenuationDistance;\n uniform vec3 attenuationColor;\n #ifdef USE_TRANSMISSIONMAP\n uniform sampler2D transmissionMap;\n #endif\n #ifdef USE_THICKNESSMAP\n uniform sampler2D thicknessMap;\n #endif\n uniform vec2 transmissionSamplerSize;\n uniform sampler2D transmissionSamplerMap;\n uniform mat4 modelMatrix;\n uniform mat4 projectionMatrix;\n varying vec3 vWorldPosition;\n vec3 getVolumeTransmissionRay( const in vec3 n, const in vec3 v, const in float thickness, const in float ior, const in mat4 modelMatrix ) {\n // Direction of refracted light.\n vec3 refractionVector = refract( - v, normalize( n ), 1.0 / ior );\n // Compute rotation-independant scaling of the model matrix.\n vec3 modelScale;\n modelScale.x = length( vec3( modelMatrix[ 0 ].xyz ) );\n modelScale.y = length( vec3( modelMatrix[ 1 ].xyz ) );\n modelScale.z = length( vec3( modelMatrix[ 2 ].xyz ) );\n // The thickness is specified in local space.\n return normalize( refractionVector ) * thickness * modelScale;\n }\n float applyIorToRoughness( const in float roughness, const in float ior ) {\n // Scale roughness with IOR so that an IOR of 1.0 results in no microfacet refraction and\n // an IOR of 1.5 results in the default amount of microfacet refraction.\n return roughness * clamp( ior * 2.0 - 2.0, 0.0, 1.0 );\n }\n vec4 getTransmissionSample( const in vec2 fragCoord, const in float roughness, const in float ior ) {\n float framebufferLod = log2( transmissionSamplerSize.x ) * applyIorToRoughness( roughness, ior ); \n #ifdef USE_SAMPLER\n #ifdef texture2DLodEXT\n return texture2DLodEXT(transmissionSamplerMap, fragCoord.xy, framebufferLod);\n #else\n return texture2D(transmissionSamplerMap, fragCoord.xy, framebufferLod);\n #endif\n #else\n return texture2D(buffer, fragCoord.xy);\n #endif\n }\n vec3 applyVolumeAttenuation( const in vec3 radiance, const in float transmissionDistance, const in vec3 attenuationColor, const in float attenuationDistance ) {\n if ( isinf( attenuationDistance ) ) {\n // Attenuation distance is +∞, i.e. 
the transmitted color is not attenuated at all.\n return radiance;\n } else {\n // Compute light attenuation using Beer's law.\n vec3 attenuationCoefficient = -log( attenuationColor ) / attenuationDistance;\n vec3 transmittance = exp( - attenuationCoefficient * transmissionDistance ); // Beer's law\n return transmittance * radiance;\n }\n }\n vec4 getIBLVolumeRefraction( const in vec3 n, const in vec3 v, const in float roughness, const in vec3 diffuseColor,\n const in vec3 specularColor, const in float specularF90, const in vec3 position, const in mat4 modelMatrix,\n const in mat4 viewMatrix, const in mat4 projMatrix, const in float ior, const in float thickness,\n const in vec3 attenuationColor, const in float attenuationDistance ) {\n vec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, ior, modelMatrix );\n vec3 refractedRayExit = position + transmissionRay;\n // Project refracted vector on the framebuffer, while mapping to normalized device coordinates.\n vec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );\n vec2 refractionCoords = ndcPos.xy / ndcPos.w;\n refractionCoords += 1.0;\n refractionCoords /= 2.0;\n // Sample framebuffer to get pixel the refracted ray hits.\n vec4 transmittedLight = getTransmissionSample( refractionCoords, roughness, ior );\n vec3 attenuatedColor = applyVolumeAttenuation( transmittedLight.rgb, length( transmissionRay ), attenuationColor, attenuationDistance );\n // Get the specular component.\n vec3 F = EnvironmentBRDF( n, v, specularColor, specularF90, roughness );\n return vec4( ( 1.0 - F ) * attenuatedColor * diffuseColor, transmittedLight.a );\n }\n #endif\n"),e.fragmentShader=e.fragmentShader.replace("#include <transmission_fragment>",` \n // Improve the refraction to use the world pos\n material.transmission = _transmission;\n material.transmissionAlpha = 1.0;\n material.thickness = thickness;\n material.attenuationDistance = attenuationDistance;\n material.attenuationColor = attenuationColor;\n #ifdef USE_TRANSMISSIONMAP\n material.transmission *= texture2D( transmissionMap, vUv ).r;\n #endif\n #ifdef USE_THICKNESSMAP\n material.thickness *= texture2D( thicknessMap, vUv ).g;\n #endif\n \n vec3 pos = vWorldPosition;\n vec3 v = normalize( cameraPosition - pos );\n vec3 n = inverseTransformDirection( normal, viewMatrix );\n vec3 transmission = vec3(0.0);\n float transmissionR, transmissionB, transmissionG;\n float randomCoords = rand();\n float thickness_smear = thickness * max(pow(roughnessFactor, 0.33), anisotropy);\n vec3 distortionNormal = vec3(0.0);\n vec3 temporalOffset = vec3(time, -time, -time) * temporalDistortion;\n if (distortion > 0.0) {\n distortionNormal = distortion * vec3(snoiseFractal(vec3((pos * distortionScale + temporalOffset))), snoiseFractal(vec3(pos.zxy * distortionScale - temporalOffset)), snoiseFractal(vec3(pos.yxz * distortionScale + temporalOffset)));\n }\n for (float i = 0.0; i < ${n}.0; i ++) {\n vec3 sampleNorm = normalize(n + roughnessFactor * roughnessFactor * 2.0 * normalize(vec3(rand() - 0.5, rand() - 0.5, rand() - 0.5)) * pow(rand(), 0.33) + distortionNormal);\n transmissionR = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior, material.thickness + thickness_smear * (i + randomCoords) / float(${n}),\n material.attenuationColor, material.attenuationDistance\n ).r;\n transmissionG = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, 
material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + chromaticAberration * (i + randomCoords) / float(${n})) , material.thickness + thickness_smear * (i + randomCoords) / float(${n}),\n material.attenuationColor, material.attenuationDistance\n ).g;\n transmissionB = getIBLVolumeRefraction(\n sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + 2.0 * chromaticAberration * (i + randomCoords) / float(${n})), material.thickness + thickness_smear * (i + randomCoords) / float(${n}),\n material.attenuationColor, material.attenuationDistance\n ).b;\n transmission.r += transmissionR;\n transmission.g += transmissionG;\n transmission.b += transmissionB;\n }\n transmission /= ${n}.0;\n totalDiffuse = mix( totalDiffuse, transmission.rgb, material.transmission );\n`)},Object.keys(this.uniforms).forEach((n=>Object.defineProperty(this,n,{get:()=>this.uniforms[n].value,set:e=>this.uniforms[n].value=e})))}}exports.MeshDiscardMaterial=a,exports.MeshTransmissionMaterial=o,exports.pcss=({focus:n=0,size:t=25,samples:a=10}={})=>{const o=e.ShaderChunk.shadowmap_pars_fragment;return e.ShaderChunk.shadowmap_pars_fragment=e.ShaderChunk.shadowmap_pars_fragment.replace("#ifdef USE_SHADOWMAP",`#ifdef USE_SHADOWMAP\n\n #define PENUMBRA_FILTER_SIZE float(${t})\n #define RGB_NOISE_FUNCTION(uv) (randRGB(uv))\n vec3 randRGB(vec2 uv) {\n return vec3(\n fract(sin(dot(uv, vec2(12.75613, 38.12123))) * 13234.76575),\n fract(sin(dot(uv, vec2(19.45531, 58.46547))) * 43678.23431),\n fract(sin(dot(uv, vec2(23.67817, 78.23121))) * 93567.23423)\n );\n }\n \n vec3 lowPassRandRGB(vec2 uv) {\n // 3x3 convolution (average)\n // can be implemented as separable with an extra buffer for a total of 6 samples instead of 9\n vec3 result = vec3(0);\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(-1.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2( 0.0, +1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, -1.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, 0.0));\n result += RGB_NOISE_FUNCTION(uv + vec2(+1.0, +1.0));\n result *= 0.111111111; // 1.0 / 9.0\n return result;\n }\n vec3 highPassRandRGB(vec2 uv) {\n // by subtracting the low-pass signal from the original signal, we're being left with the high-pass signal\n // hp(x) = x - lp(x)\n return RGB_NOISE_FUNCTION(uv) - lowPassRandRGB(uv) + 0.5;\n }\n \n \n vec2 vogelDiskSample(int sampleIndex, int sampleCount, float angle) {\n const float goldenAngle = 2.399963f; // radians\n float r = sqrt(float(sampleIndex) + 0.5f) / sqrt(float(sampleCount));\n float theta = float(sampleIndex) * goldenAngle + angle;\n float sine = sin(theta);\n float cosine = cos(theta);\n return vec2(cosine, sine) * r;\n }\n float penumbraSize( const in float zReceiver, const in float zBlocker ) { // Parallel plane estimation\n return (zReceiver - zBlocker) / zBlocker;\n }\n float findBlocker(sampler2D shadowMap, vec2 uv, float compare, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float blockerDepthSum = float(${n});\n float blockers = 0.0;\n \n int j = 0;\n vec2 offset = vec2(0.);\n float depth = 0.;\n \n #pragma unroll_loop_start\n for(int i = 0; i < ${a}; i ++) {\n 
offset = (vogelDiskSample(j, ${a}, angle) * texelSize) * 2.0 * PENUMBRA_FILTER_SIZE;\n depth = unpackRGBAToDepth( texture2D( shadowMap, uv + offset));\n if (depth < compare) {\n blockerDepthSum += depth;\n blockers++;\n }\n j++;\n }\n #pragma unroll_loop_end\n \n if (blockers > 0.0) {\n return blockerDepthSum / blockers;\n }\n return -1.0;\n }\n \n float vogelFilter(sampler2D shadowMap, vec2 uv, float zReceiver, float filterRadius, float angle) {\n float texelSize = 1.0 / float(textureSize(shadowMap, 0).x);\n float shadow = 0.0f;\n int j = 0;\n vec2 vogelSample = vec2(0.0);\n vec2 offset = vec2(0.0);\n #pragma unroll_loop_start\n for (int i = 0; i < ${a}; i++) {\n vogelSample = vogelDiskSample(j, ${a}, angle) * texelSize;\n offset = vogelSample * (1.0 + filterRadius * float(${t}));\n shadow += step( zReceiver, unpackRGBAToDepth( texture2D( shadowMap, uv + offset ) ) );\n j++;\n }\n #pragma unroll_loop_end\n return shadow * 1.0 / ${a}.0;\n }\n \n float PCSS (sampler2D shadowMap, vec4 coords) {\n vec2 uv = coords.xy;\n float zReceiver = coords.z; // Assumed to be eye-space z in this code\n float angle = highPassRandRGB(gl_FragCoord.xy).r * PI2;\n float avgBlockerDepth = findBlocker(shadowMap, uv, zReceiver, angle);\n if (avgBlockerDepth == -1.0) {\n return 1.0;\n }\n float penumbraRatio = penumbraSize(zReceiver, avgBlockerDepth);\n return vogelFilter(shadowMap, uv, zReceiver, 1.25 * penumbraRatio, angle);\n }`).replace("#if defined( SHADOWMAP_TYPE_PCF )","\nreturn PCSS(shadowMap, shadowCoord);\n#if defined( SHADOWMAP_TYPE_PCF )"),(n,t,a)=>{e.ShaderChunk.shadowmap_pars_fragment=o,function(n,e,t){e.traverse((e=>{e.material&&(n.properties.remove(e.material),e.material.dispose())})),n.info.programs.length=0,n.compile(e,t)}(n,t,a)}},exports.shaderMaterial=t;
export * from './core';
export * from './materials';
export { pcss } from './core/pcss.js';
export { shaderMaterial } from './core/shaderMaterial.js';
export { MeshDiscardMaterial } from './materials/MeshDiscardMaterial.js';
export { MeshTransmissionMaterial } from './materials/MeshTransmissionMaterial.js';
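Version 1.2.0 adds a shaderMaterial export alongside pcss. Based on the minified source above, it takes a uniforms object, a vertex shader, a fragment shader, and an optional init callback, and returns a material class whose uniforms are exposed as getters/setters on instances. A minimal sketch; the uniform names and GLSL bodies here are illustrative assumptions, not part of the package:

```javascript
import * as THREE from 'three'
import { shaderMaterial } from '@pmndrs/vanilla'

// Hypothetical uniforms and shaders, purely for illustration.
const ColorShiftMaterial = shaderMaterial(
  { time: 0, color: new THREE.Color(0.2, 0.0, 0.1) },
  /* glsl */ `
    varying vec2 vUv;
    void main() {
      vUv = uv;
      gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
    }
  `,
  /* glsl */ `
    uniform float time;
    uniform vec3 color;
    varying vec2 vUv;
    void main() {
      gl_FragColor = vec4(0.5 + 0.3 * sin(vUv.yxx + time) + color, 1.0);
    }
  `
)

const mesh = new THREE.Mesh(new THREE.BoxGeometry(), new ColorShiftMaterial())
// Each uniform becomes a plain property on the instance.
mesh.material.time = performance.now() / 1000
```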
{
"name": "@pmndrs/vanilla",
"version": "1.1.0",
"version": "1.2.0",
"private": false,

@@ -5,0 +5,0 @@ "publishConfig": {

@@ -32,5 +32,16 @@ ![logo](logo.jpg)

</tr>
<tr>
<td valign="top">
<ul>
<li><a href="#materials">Materials</a></li>
<ul>
<li><a href="#discardmaterial">MeshDiscardMaterial</a></li>
<li><a href="#meshtransmissionmaterial">MeshTransmissionMaterial</a></li>
</ul>
</ul>
</td>
</tr>
</table>
# Cameras
# Shaders

@@ -68,1 +79,43 @@ #### pcss

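For reference, a minimal usage sketch of the pcss export based on the source above; the renderer, scene, and camera setup is assumed:

```javascript
import * as THREE from 'three'
import { pcss } from '@pmndrs/vanilla'

const renderer = new THREE.WebGLRenderer()
const scene = new THREE.Scene()
const camera = new THREE.PerspectiveCamera(75, 1, 0.1, 100)

// Patches three's shadowmap_pars_fragment chunk with the PCSS filter.
// focus, size, and samples default to 0, 25, and 10 in the source above.
const reset = pcss({ focus: 0, size: 25, samples: 10 })

// ...render the scene with shadow-casting lights as usual...

// pcss returns a callback that restores the original shader chunk and
// recompiles the scene's materials when the effect is no longer needed.
reset(renderer, scene, camera)
```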
# Materials
#### MeshDiscardMaterial
A material that discards fragments. It can be used to render nothing efficiently while still keeping a mesh in the scene graph that casts shadows and can be raycast.
```javascript
const mesh = new THREE.Mesh(geometry, new MeshDiscardMaterial())
```
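Because the mesh stays in the scene graph, the usual three.js shadow and raycasting APIs still apply to it; a small sketch, with the geometry and ray chosen arbitrarily:

```javascript
import * as THREE from 'three'
import { MeshDiscardMaterial } from '@pmndrs/vanilla'

const mesh = new THREE.Mesh(new THREE.BoxGeometry(), new MeshDiscardMaterial())
mesh.castShadow = true // nothing is rendered, but the mesh still casts a shadow

// Raycasting works against the geometry, independent of the material.
const raycaster = new THREE.Raycaster(new THREE.Vector3(0, 0, 5), new THREE.Vector3(0, 0, -1))
const hits = raycaster.intersectObject(mesh)
```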
#### MeshTransmissionMaterial
<p>
<a href="https://codesandbox.io/s/hmgdjq"><img width="20%" src="https://codesandbox.io/api/v1/sandboxes/hmgdjq/screenshot.png" alt="Demo"/></a>
</p>
An improved THREE.MeshPhysicalMaterial. It behaves like a regular MeshPhysicalMaterial with respect to transmission, thickness, ior, roughness, etc., but adds chromatic aberration, noise-based roughness blur, and (primitive) anisotropy support. Unlike the original, it can "see" other transmissive or transparent objects, which leads to improved visuals.
Although it should be faster than MeshPhysicalMaterial, keep in mind that it can still be expensive, as it triggers an additional render pass of the scene. Lower sample counts and a lower buffer resolution make it faster. If you use roughness, consider a tiny resolution, for instance 32x32 pixels; it will still look good but perform much faster.
For performance and visual reasons, the host mesh is temporarily removed from the render stack. If you have other objects that you don't want to see reflected in the material, just add them to the parent mesh as children.
```typescript
/* Transmission, default: 1 */
transmission?: number
/* Thickness (refraction), default: 0 */
thickness?: number
/* Roughness (blur), default: 0 */
roughness?: number
/* Chromatic aberration, default: 0.03 */
chromaticAberration?: number
/* Anisotropy, default: 0.1 */
anisotropy?: number
/* Distortion, default: 0 */
distortion?: number
/* Distortion scale, default: 0.5 */
distortionScale: number
/* Temporal distortion (speed of movement), default: 0.0 */
temporalDistortion: number
}
```
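A minimal construction sketch based on the options listed above; the values are illustrative assumptions, and the extra render pass that feeds the material's buffer is omitted because that wiring is application specific:

```javascript
import * as THREE from 'three'
import { MeshTransmissionMaterial } from '@pmndrs/vanilla'

// Constructor arguments follow the source above: (samples, transmissionSampler).
const material = new MeshTransmissionMaterial(6, false)

// The options listed above are exposed as uniform-backed properties.
material.thickness = 0.5
material.roughness = 0.1
material.chromaticAberration = 0.03
material.anisotropy = 0.1

const mesh = new THREE.Mesh(new THREE.SphereGeometry(1, 64, 64), material)
```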