screen-space-reflections
Comparing version 2.1.1 to 2.1.2
dist/index.js
import { Pass, RenderPass, DepthPass, Effect, Selection } from 'postprocessing'; | ||
import { ShaderChunk, Matrix4, WebGLRenderTarget, NearestFilter, HalfFloatType, ShaderMaterial, UniformsUtils, FrontSide, VideoTexture, Vector2, LinearFilter, Uniform, FramebufferTexture, RGBAFormat, Matrix3, TangentSpaceNormalMap, GLSL3, WebGLMultipleRenderTargets, Vector3, Quaternion } from 'three'; | ||
import { ShaderMaterial, Uniform, Vector2, Matrix3, TangentSpaceNormalMap, GLSL3, Matrix4, WebGLRenderTarget, LinearFilter, WebGLMultipleRenderTargets, ShaderChunk, Color, Vector3, Quaternion, HalfFloatType, VideoTexture, DataTexture, RGBAFormat, FloatType, FramebufferTexture } from 'three'; | ||
@@ -21,339 +21,12 @@ function _extends() { | ||
var id = 0; | ||
var accumulatedCompose = "#define GLSLIFY 1\nalpha=samples<2.||movement<FLOAT_EPSILON ?(0.05+alpha): 0.;alpha=clamp(alpha,0.,1.);if(samples>512.&&alpha==1.){outputColor=accumulatedColor;}else{float samplesMultiplier=pow(samples/32.,4.)+1.;if(samples>1.&&alpha>1.-FLOAT_EPSILON){outputColor=accumulatedColor*(1.-1./(samples*samplesMultiplier))+inputColor/(samples*samplesMultiplier);}else{outputColor=inputColor;}}"; // eslint-disable-line | ||
function _classPrivateFieldLooseKey(name) { | ||
return "__private_" + id++ + "_" + name; | ||
} | ||
var boxBlur = "#define GLSLIFY 1\nuniform float blurMix;uniform float blurSharpness;uniform int blurKernelSize;vec3 denoise(vec3 center,sampler2D tex,vec2 uv,vec2 texSize){vec3 color;float total;vec3 col;float weight;for(int x=-blurKernelSize;x<=blurKernelSize;x++){for(int y=-blurKernelSize;y<=blurKernelSize;y++){col=textureLod(tex,uv+vec2(x,y)/texSize,0.).rgb;weight=1.0-abs(dot(col-center,vec3(0.25)));weight=pow(weight,blurSharpness);color+=col*weight;total+=weight;}}return color/total;}"; // eslint-disable-line | ||
function _classPrivateFieldLooseBase(receiver, privateKey) { | ||
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { | ||
throw new TypeError("attempted to use private field on non-instance"); | ||
} | ||
var finalSSRShader = "#define GLSLIFY 1\n#define MODE_DEFAULT 0\n#define MODE_REFLECTIONS 1\n#define MODE_RAW_REFLECTION 2\n#define MODE_BLURRED_REFLECTIONS 3\n#define MODE_INPUT 4\n#define MODE_BLUR_MIX 5\n#define FLOAT_EPSILON 0.00001\nuniform sampler2D inputTexture;uniform sampler2D reflectionsTexture;uniform float samples;\n#include <boxBlur>\nvoid mainImage(const in vec4 inputColor,const in vec2 uv,out vec4 outputColor){vec4 reflectionsTexel=texture2D(reflectionsTexture,vUv);vec3 reflectionClr=reflectionsTexel.xyz;if(blurMix>FLOAT_EPSILON){ivec2 size=textureSize(reflectionsTexture,0);vec2 pxSize=vec2(float(size.x),float(size.y));vec3 blurredReflectionsColor=denoise(reflectionsTexel.rgb,reflectionsTexture,vUv,pxSize);reflectionClr=mix(reflectionClr,blurredReflectionsColor.rgb,blurMix);}\n#if RENDER_MODE == MODE_DEFAULT\noutputColor=vec4(inputColor.rgb+reflectionClr,1.);\n#endif\n#if RENDER_MODE == MODE_REFLECTIONS\noutputColor=vec4(reflectionClr,1.);\n#endif\n#if RENDER_MODE == MODE_RAW_REFLECTION\noutputColor=vec4(reflectionsTexel.xyz,1.);\n#endif\n#if RENDER_MODE == MODE_BLURRED_REFLECTIONS\noutputColor=vec4(blurredReflectionsTexel.xyz,1.);\n#endif\n#if RENDER_MODE == MODE_INPUT\noutputColor=vec4(inputColor.xyz,1.);\n#endif\n#if RENDER_MODE == MODE_BLUR_MIX\noutputColor=vec4(vec3(blurMix),1.);\n#endif\n}"; // eslint-disable-line | ||
return receiver; | ||
} | ||
var helperFunctions = "#define GLSLIFY 1\nvec3 getViewPosition(const float depth){float clipW=_projectionMatrix[2][3]*depth+_projectionMatrix[3][3];vec4 clipPosition=vec4((vec3(vUv,depth)-0.5)*2.0,1.0);clipPosition*=clipW;return(_inverseProjectionMatrix*clipPosition).xyz;}float getViewZ(const in float depth){\n#ifdef PERSPECTIVE_CAMERA\nreturn perspectiveDepthToViewZ(depth,cameraNear,cameraFar);\n#else\nreturn orthographicDepthToViewZ(depth,cameraNear,cameraFar);\n#endif\n}vec3 screenSpaceToWorldSpace(const vec2 uv,const float depth){vec4 ndc=vec4((uv.x-0.5)*2.0,(uv.y-0.5)*2.0,(depth-0.5)*2.0,1.0);vec4 clip=_inverseProjectionMatrix*ndc;vec4 view=cameraMatrixWorld*(clip/clip.w);return view.xyz;}\n#define Scale (vec3(0.8, 0.8, 0.8))\n#define K (19.19)\nvec3 hash(vec3 a){a=fract(a*Scale);a+=dot(a,a.yxz+K);return fract((a.xxy+a.yxx)*a.zyx);}float fresnel_dielectric_cos(float cosi,float eta){float c=abs(cosi);float g=eta*eta-1.0+c*c;float result;if(g>0.0){g=sqrt(g);float A=(g-c)/(g+c);float B=(c*(g+c)-1.0)/(c*(g-c)+1.0);result=0.5*A*A*(1.0+B*B);}else{result=1.0;}return result;}float fresnel_dielectric(vec3 Incoming,vec3 Normal,float eta){float cosine=dot(Incoming,Normal);return min(1.0,5.0*fresnel_dielectric_cos(cosine,eta));}float czm_luminance(vec3 rgb){const vec3 W=vec3(0.2125,0.7154,0.0721);return dot(rgb,W);}"; // eslint-disable-line | ||
var vertexShader = "#define GLSLIFY 1\nvarying vec2 vUv;void main(){vUv=position.xy*0.5+0.5;gl_Position=vec4(position.xy,1.0,1.0);}"; // eslint-disable-line | ||
var trCompose = "#define GLSLIFY 1\nalpha=(velocityDisocclusion<FLOAT_EPSILON)?(alpha+0.05):(alpha-0.25);alpha=clamp(alpha,0.,1.);if(!canReproject||(samples>512.&&alpha==1.)||(length(accumulatedColor)>FLOAT_EPSILON&&length(inputColor)==0.)){accumulatedColor=undoColorTransform(accumulatedColor);float alphaVal=canReproject ? alpha : 0.;gl_FragColor=vec4(accumulatedColor,alpha);return;}if(alpha<1.){outputColor=mix(accumulatedColor,inputColor,(1.-alpha*alpha)*temporalResolveCorrection);}else if(1./samples>=1.-temporalResolveMix){outputColor=mix(inputColor,accumulatedColor,temporalResolveMix);}else{float mixVal=(1./samples)/EULER;if(alpha<FLOAT_EPSILON&&samples<15.)mixVal+=0.3;outputColor=mix(accumulatedColor,inputColor,mixVal);}"; // eslint-disable-line | ||
var temporalResolve = "#define GLSLIFY 1\nuniform sampler2D inputTexture;uniform sampler2D accumulatedTexture;uniform sampler2D velocityTexture;uniform sampler2D lastVelocityTexture;uniform sampler2D depthTexture;uniform float temporalResolveCorrectionMix;varying vec2 vUv;\n#include <packing>\n#define min3(a, b, c) min(a, min(b, c))\n#define min4(a, b, c, d) min(a, min3(b, c, d))\n#define min5(a, b, c, d, e) min(a, min4(b, c, d, e))\n#define min6(a, b, c, d, e, f) min(a, min5(b, c, d, e, f))\n#define min7(a, b, c, d, e, f, g) min(a, min6(b, c, d, e, f, g))\n#define min8(a, b, c, d, e, f, g, h) min(a, min7(b, c, d, e, f, g, h))\n#define min9(a, b, c, d, e, f, g, h, i) min(a, min8(b, c, d, e, f, g, h, i))\n#define max3(a, b, c) max(a, max(b, c))\n#define max4(a, b, c, d) max(a, max3(b, c, d))\n#define max5(a, b, c, d, e) max(a, max4(b, c, d, e))\n#define max6(a, b, c, d, e, f) max(a, max5(b, c, d, e, f))\n#define max7(a, b, c, d, e, f, g) max(a, max6(b, c, d, e, f, g))\n#define max8(a, b, c, d, e, f, g, h) max(a, max7(b, c, d, e, f, g, h))\n#define max9(a, b, c, d, e, f, g, h, i) max(a, max8(b, c, d, e, f, g, h, i))\nvec2 getVelocity(sampler2D tex,vec2 uv,vec2 texSize){float closestDepth=100.0;vec2 closestUVOffset;for(int j=-1;j<=1;++j){for(int i=-1;i<=1;++i){vec2 uvOffset=vec2(i,j)/texSize;float neighborDepth=unpackRGBAToDepth(textureLod(depthTexture,vUv+uvOffset,0.));if(neighborDepth<closestDepth){closestUVOffset=uvOffset;closestDepth=neighborDepth;}}}return textureLod(velocityTexture,vUv+closestUVOffset,0.).xy;}void main(){vec4 inputTexel=texture2D(inputTexture,vUv);vec4 accumulatedTexel;vec3 outputColor;ivec2 size=textureSize(inputTexture,0);vec2 pxSize=vec2(float(size.x),float(size.y));vec2 velUv=texture2D(velocityTexture,vUv).xy;vec2 reprojectedUv=vUv-velUv;vec2 lastVelUv=texture2D(lastVelocityTexture,reprojectedUv).xy;float velocityLength=length(lastVelUv-velUv);float velocityDisocclusion=(velocityLength-0.000005)*10.;velocityDisocclusion*=velocityDisocclusion;\n#ifdef DILATION\nvelUv=getVelocity(velocityTexture,vUv,pxSize);reprojectedUv=vUv-velUv;\n#endif\nvec3 averageNeighborColor;bool didReproject=true;float movement=length(velUv)*100.;if(movement>0.){vec2 px=1./pxSize;vec3 c02=texture2D(inputTexture,vUv+vec2(-px.x,px.y)).rgb;vec3 c12=texture2D(inputTexture,vUv+vec2(0.,px.y)).rgb;vec3 c22=texture2D(inputTexture,vUv+vec2(px.x,px.y)).rgb;vec3 c01=texture2D(inputTexture,vUv+vec2(-px.x,0.)).rgb;vec3 c11=inputTexel.rgb;vec3 c21=texture2D(inputTexture,vUv+vec2(px.x,0.)).rgb;vec3 c00=texture2D(inputTexture,vUv+vec2(-px.x,-px.y)).rgb;vec3 c10=texture2D(inputTexture,vUv+vec2(0.,-px.y)).rgb;vec3 c20=texture2D(inputTexture,vUv+vec2(px.x,-px.y)).rgb;averageNeighborColor=c02+c12+c22+c01+c11+c21+c00+c10+c20;averageNeighborColor/=9.;if(reprojectedUv.x>=0.&&reprojectedUv.x<=1.&&reprojectedUv.y>=0.&&reprojectedUv.y<=1.){accumulatedTexel=texture2D(accumulatedTexture,reprojectedUv);vec3 minNeighborColor=min9(c02,c12,c22,c01,c11,c21,c00,c10,c20);vec3 maxNeighborColor=max9(c02,c12,c22,c01,c11,c21,c00,c10,c20);vec3 clampedColor=clamp(accumulatedTexel.rgb,minNeighborColor,maxNeighborColor);float mixFactor=temporalResolveCorrectionMix*(1.+movement);mixFactor=min(mixFactor,1.);accumulatedTexel.rgb=mix(accumulatedTexel.rgb,clampedColor,mixFactor);}else{accumulatedTexel.rgb=inputTexel.rgb;didReproject=false;}}else{accumulatedTexel=texture2D(accumulatedTexture,vUv);}\n#include <custom_compose_shader>\ngl_FragColor=vec4(vec3(outputColor),alpha);}"; // eslint-disable-line | ||
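// The temporal resolve shader above reprojects the previous frame's accumulated color
// using the velocity buffer, clamps that history color to the min/max of the current
// input's 3x3 neighborhood to reject stale samples (neighborhood clamping), and then
// defers the final blend to whatever compose shader is injected via
// "#include <custom_compose_shader>".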
// this shader is from: https://github.com/gkjohnson/threejs-sandbox
// a second set of bone information from the previous frame
const prev_skinning_pars_vertex = | ||
/* glsl */ | ||
` | ||
#ifdef USE_SKINNING | ||
#ifdef BONE_TEXTURE | ||
uniform sampler2D prevBoneTexture; | ||
mat4 getPrevBoneMatrix( const in float i ) { | ||
float j = i * 4.0; | ||
float x = mod( j, float( boneTextureSize ) ); | ||
float y = floor( j / float( boneTextureSize ) ); | ||
float dx = 1.0 / float( boneTextureSize ); | ||
float dy = 1.0 / float( boneTextureSize ); | ||
y = dy * ( y + 0.5 ); | ||
vec4 v1 = texture2D( prevBoneTexture, vec2( dx * ( x + 0.5 ), y ) ); | ||
vec4 v2 = texture2D( prevBoneTexture, vec2( dx * ( x + 1.5 ), y ) ); | ||
vec4 v3 = texture2D( prevBoneTexture, vec2( dx * ( x + 2.5 ), y ) ); | ||
vec4 v4 = texture2D( prevBoneTexture, vec2( dx * ( x + 3.5 ), y ) ); | ||
mat4 bone = mat4( v1, v2, v3, v4 ); | ||
return bone; | ||
} | ||
#else | ||
uniform mat4 prevBoneMatrices[ MAX_BONES ]; | ||
mat4 getPrevBoneMatrix( const in float i ) { | ||
mat4 bone = prevBoneMatrices[ int(i) ]; | ||
return bone; | ||
} | ||
#endif | ||
#endif | ||
`; // Returns the body of the vertex shader for the velocity buffer and | ||
// outputs the current and last frame positions
const velocity_vertex = | ||
/* glsl */ | ||
` | ||
vec3 transformed; | ||
// Get the normal | ||
${ShaderChunk.skinbase_vertex} | ||
${ShaderChunk.beginnormal_vertex} | ||
${ShaderChunk.skinnormal_vertex} | ||
${ShaderChunk.defaultnormal_vertex} | ||
// Get the current vertex position | ||
transformed = vec3( position ); | ||
${ShaderChunk.skinning_vertex} | ||
newPosition = velocityMatrix * vec4( transformed, 1.0 ); | ||
// Get the previous vertex position | ||
transformed = vec3( position ); | ||
${ShaderChunk.skinbase_vertex.replace(/mat4 /g, "").replace(/getBoneMatrix/g, "getPrevBoneMatrix")} | ||
${ShaderChunk.skinning_vertex.replace(/vec4 /g, "")} | ||
prevPosition = prevVelocityMatrix * vec4( transformed, 1.0 ); | ||
gl_Position = newPosition; | ||
`; | ||
const VelocityShader = { | ||
uniforms: { | ||
prevVelocityMatrix: { | ||
value: new Matrix4() | ||
}, | ||
velocityMatrix: { | ||
value: new Matrix4() | ||
}, | ||
prevBoneTexture: { | ||
value: null | ||
}, | ||
interpolateGeometry: { | ||
value: 0 | ||
}, | ||
intensity: { | ||
value: 1 | ||
}, | ||
boneTexture: { | ||
value: null | ||
}, | ||
alphaTest: { | ||
value: 0.0 | ||
}, | ||
map: { | ||
value: null | ||
}, | ||
alphaMap: { | ||
value: null | ||
}, | ||
opacity: { | ||
value: 1.0 | ||
} | ||
}, | ||
vertexShader: | ||
/* glsl */ | ||
` | ||
${ShaderChunk.skinning_pars_vertex} | ||
${prev_skinning_pars_vertex} | ||
uniform mat4 velocityMatrix; | ||
uniform mat4 prevVelocityMatrix; | ||
uniform float interpolateGeometry; | ||
varying vec4 prevPosition; | ||
varying vec4 newPosition; | ||
void main() { | ||
${velocity_vertex} | ||
} | ||
`, | ||
fragmentShader: | ||
/* glsl */ | ||
` | ||
uniform float intensity; | ||
varying vec4 prevPosition; | ||
varying vec4 newPosition; | ||
void main() { | ||
#ifdef NEEDS_FULL_MOVEMENT | ||
gl_FragColor = vec4(1., 1., 1., 1. ); | ||
return; | ||
#endif | ||
vec2 pos0 = (prevPosition.xy / prevPosition.w) * 0.5 + 0.5; | ||
vec2 pos1 = (newPosition.xy / newPosition.w) * 0.5 + 0.5; | ||
vec2 vel = pos1 - pos0; | ||
gl_FragColor = vec4( vel, 0., 1. ); | ||
} | ||
`, | ||
defines: { | ||
MAX_BONES: 256 | ||
} | ||
}; | ||
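// In short: the velocity fragment shader above projects each fragment with both the
// current and the previous frame's matrices and writes their screen-space difference
// (pos1 - pos0) as the per-pixel velocity; the NEEDS_FULL_MOVEMENT define short-circuits
// this to full movement for materials that must always be treated as animated
// (e.g. materials with a VideoTexture map).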
var _cachedMaterials$1 = /*#__PURE__*/_classPrivateFieldLooseKey("cachedMaterials"); | ||
var _setVelocityMaterialInScene = /*#__PURE__*/_classPrivateFieldLooseKey("setVelocityMaterialInScene"); | ||
var _unsetVelocityMaterialInScene = /*#__PURE__*/_classPrivateFieldLooseKey("unsetVelocityMaterialInScene"); | ||
class VelocityPass extends Pass { | ||
constructor(scene, camera) { | ||
super("VelocityPass"); | ||
Object.defineProperty(this, _unsetVelocityMaterialInScene, { | ||
value: _unsetVelocityMaterialInScene2 | ||
}); | ||
Object.defineProperty(this, _setVelocityMaterialInScene, { | ||
value: _setVelocityMaterialInScene2 | ||
}); | ||
Object.defineProperty(this, _cachedMaterials$1, { | ||
writable: true, | ||
value: new WeakMap() | ||
}); | ||
this._scene = scene; | ||
this._camera = camera; | ||
this.renderTarget = new WebGLRenderTarget(typeof window !== "undefined" ? window.innerWidth : 2000, typeof window !== "undefined" ? window.innerHeight : 1000, { | ||
minFilter: NearestFilter, | ||
magFilter: NearestFilter, | ||
type: HalfFloatType | ||
}); | ||
} | ||
setSize(width, height) { | ||
this.renderTarget.setSize(width, height); | ||
} | ||
render(renderer) { | ||
_classPrivateFieldLooseBase(this, _setVelocityMaterialInScene)[_setVelocityMaterialInScene](); | ||
renderer.setRenderTarget(this.renderTarget); | ||
renderer.clear(); | ||
renderer.render(this._scene, this._camera); | ||
_classPrivateFieldLooseBase(this, _unsetVelocityMaterialInScene)[_unsetVelocityMaterialInScene](); | ||
} | ||
} | ||
function _setVelocityMaterialInScene2() { | ||
this._scene.traverse(c => { | ||
if (c.material) { | ||
const originalMaterial = c.material; | ||
let [cachedOriginalMaterial, velocityMaterial] = _classPrivateFieldLooseBase(this, _cachedMaterials$1)[_cachedMaterials$1].get(c) || []; | ||
if (!_classPrivateFieldLooseBase(this, _cachedMaterials$1)[_cachedMaterials$1].has(c) || originalMaterial !== cachedOriginalMaterial) { | ||
velocityMaterial = new ShaderMaterial({ | ||
uniforms: UniformsUtils.clone(VelocityShader.uniforms), | ||
vertexShader: VelocityShader.vertexShader, | ||
fragmentShader: VelocityShader.fragmentShader, | ||
side: FrontSide | ||
}); | ||
_classPrivateFieldLooseBase(this, _cachedMaterials$1)[_cachedMaterials$1].set(c, [originalMaterial, velocityMaterial]); | ||
} | ||
const needsUpdatedReflections = c.material.userData.needsUpdatedReflections || c.material.map instanceof VideoTexture; // mark the material as "ANIMATED" so that, when using temporal resolve, we get updated reflections | ||
if (needsUpdatedReflections && !Object.keys(velocityMaterial.defines).includes("NEEDS_FULL_MOVEMENT")) { | ||
velocityMaterial.defines.NEEDS_FULL_MOVEMENT = ""; | ||
velocityMaterial.needsUpdate = true; | ||
} else if (!needsUpdatedReflections && Object.keys(velocityMaterial.defines).includes("NEEDS_FULL_MOVEMENT")) { | ||
delete velocityMaterial.defines.NEEDS_FULL_MOVEMENT; | ||
velocityMaterial.needsUpdate = true; | ||
} | ||
velocityMaterial.uniforms.velocityMatrix.value.multiplyMatrices(this._camera.projectionMatrix, c.modelViewMatrix); | ||
c.material = velocityMaterial; | ||
} | ||
}); | ||
} | ||
function _unsetVelocityMaterialInScene2() { | ||
this._scene.traverse(c => { | ||
if (c.material) { | ||
c.material.uniforms.prevVelocityMatrix.value.multiplyMatrices(this._camera.projectionMatrix, c.modelViewMatrix); | ||
const [originalMaterial] = _classPrivateFieldLooseBase(this, _cachedMaterials$1)[_cachedMaterials$1].get(c); | ||
c.material = originalMaterial; | ||
} | ||
}); | ||
} | ||
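// Matrix bookkeeping for the velocity pass above: before rendering, velocityMatrix is
// set to projectionMatrix * modelViewMatrix for the current frame; right after
// rendering, prevVelocityMatrix is set from the same matrices so that on the next
// frame it still holds this frame's (by then previous) transform. A minimal per-object
// sketch of the same idea, with illustrative names (camera, mesh, velocityMaterial
// and renderer are assumed to exist):
//
//   velocityMaterial.uniforms.velocityMatrix.value
//     .multiplyMatrices(camera.projectionMatrix, mesh.modelViewMatrix); // current frame
//   renderer.render(scene, camera);                                     // draw velocities
//   velocityMaterial.uniforms.prevVelocityMatrix.value
//     .multiplyMatrices(camera.projectionMatrix, mesh.modelViewMatrix); // becomes "previous" next frame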
const zeroVec2 = new Vector2(); | ||
var _velocityPass = /*#__PURE__*/_classPrivateFieldLooseKey("velocityPass"); | ||
class TemporalResolvePass extends Pass { | ||
constructor(scene, camera, customComposeShader, options = {}) { | ||
super("TemporalResolvePass"); | ||
Object.defineProperty(this, _velocityPass, { | ||
writable: true, | ||
value: null | ||
}); | ||
const width = options.width || typeof window !== "undefined" ? window.innerWidth : 2000; | ||
const height = options.height || typeof window !== "undefined" ? window.innerHeight : 1000; | ||
this.renderTarget = new WebGLRenderTarget(width, height, { | ||
minFilter: LinearFilter, | ||
magFilter: LinearFilter, | ||
type: HalfFloatType, | ||
depthBuffer: false | ||
}); | ||
_classPrivateFieldLooseBase(this, _velocityPass)[_velocityPass] = new VelocityPass(scene, camera); | ||
const fragmentShader = temporalResolve.replace("#include <custom_compose_shader>", customComposeShader); | ||
this.fullscreenMaterial = new ShaderMaterial({ | ||
type: "TemporalResolveMaterial", | ||
uniforms: { | ||
inputTexture: new Uniform(null), | ||
accumulatedTexture: new Uniform(null), | ||
velocityTexture: new Uniform(_classPrivateFieldLooseBase(this, _velocityPass)[_velocityPass].renderTarget.texture), | ||
lastVelocityTexture: new Uniform(null), | ||
depthTexture: new Uniform(null), | ||
temporalResolveMix: new Uniform(0), | ||
temporalResolveCorrectionMix: new Uniform(0) | ||
}, | ||
vertexShader, | ||
fragmentShader | ||
}); // this.fullscreenMaterial.defines.DILATION = "" | ||
this.setupAccumulatedTexture(width, height); | ||
} | ||
dispose() { | ||
this.renderTarget.dispose(); | ||
this.accumulatedTexture.dispose(); | ||
this.fullscreenMaterial.dispose(); | ||
_classPrivateFieldLooseBase(this, _velocityPass)[_velocityPass].dispose(); | ||
} | ||
setSize(width, height) { | ||
this.renderTarget.setSize(width, height); | ||
_classPrivateFieldLooseBase(this, _velocityPass)[_velocityPass].setSize(width, height); | ||
this.setupAccumulatedTexture(width, height); | ||
} | ||
setupAccumulatedTexture(width, height) { | ||
if (this.accumulatedTexture) this.accumulatedTexture.dispose(); | ||
this.accumulatedTexture = new FramebufferTexture(width, height, RGBAFormat); | ||
this.accumulatedTexture.minFilter = LinearFilter; | ||
this.accumulatedTexture.magFilter = LinearFilter; | ||
this.accumulatedTexture.type = HalfFloatType; | ||
this.lastVelocityTexture = new FramebufferTexture(width, height, RGBAFormat); | ||
this.lastVelocityTexture.minFilter = NearestFilter; | ||
this.lastVelocityTexture.magFilter = NearestFilter; | ||
this.lastVelocityTexture.type = HalfFloatType; | ||
this.fullscreenMaterial.uniforms.accumulatedTexture.value = this.accumulatedTexture; | ||
this.fullscreenMaterial.uniforms.lastVelocityTexture.value = this.lastVelocityTexture; | ||
this.fullscreenMaterial.needsUpdate = true; | ||
} | ||
render(renderer) { | ||
_classPrivateFieldLooseBase(this, _velocityPass)[_velocityPass].render(renderer); | ||
renderer.setRenderTarget(this.renderTarget); | ||
renderer.render(this.scene, this.camera); // save the render target's texture for use in the next frame
renderer.copyFramebufferToTexture(zeroVec2, this.accumulatedTexture); | ||
renderer.setRenderTarget(_classPrivateFieldLooseBase(this, _velocityPass)[_velocityPass].renderTarget); | ||
renderer.copyFramebufferToTexture(zeroVec2, this.lastVelocityTexture); | ||
} | ||
} | ||
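// Accumulation scheme of the pass above: each frame the velocity pass is rendered
// first, the resolve material is drawn into renderTarget, and the result is copied
// back into accumulatedTexture via copyFramebufferToTexture (the velocity buffer is
// copied into lastVelocityTexture the same way). On the next frame the shader can then
// reproject accumulatedTexture with the stored velocities and blend it with the new
// input, which is what produces the temporal denoising.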
var bilateralBlur = "#define GLSLIFY 1\nconst float KERNEL_RADIUS=5.;uniform float g_Sharpness;uniform vec2 g_InvResolutionDirection;uniform float kernelRadius;float getViewZ(const float depth){return perspectiveDepthToViewZ(depth,cameraNear,cameraFar);}vec4 BlurFunction(sampler2D texSource,sampler2D texLinearDepth,vec2 uv,float r,vec4 center_c,float center_d,inout float w_total,in float radius){vec4 c=texture2D(texSource,uv);float d=getViewZ(1./unpackRGBAToDepth(texture2D(texLinearDepth,uv)));float BlurSigma=radius*0.5;float BlurFalloff=1.0/(2.0*BlurSigma*BlurSigma);float ddiff=(d-center_d)*g_Sharpness*10.;float w=exp2(-r*r*BlurFalloff-ddiff*ddiff);w_total+=w;return c*w;}vec4 blur(sampler2D blurTexture,sampler2D depthTexture){vec4 center_c=texture2D(blurTexture,vUv);float center_d=getViewZ(1./unpackRGBAToDepth(texture2D(depthTexture,vUv)));float radius=kernelRadius;vec4 c_total=center_c;float w_total=1.0;vec2 uv;for(float r=1.;r<=radius;++r){uv=vUv+g_InvResolutionDirection*r;c_total+=BlurFunction(blurTexture,depthTexture,uv,r,center_c,center_d,w_total,radius);}for(float r=1.;r<=radius;++r){uv=vUv-g_InvResolutionDirection*r;c_total+=BlurFunction(blurTexture,depthTexture,uv,r,center_c,center_d,w_total,radius);}return c_total/w_total;}"; // eslint-disable-line | ||
var finalSSRShader = "#define GLSLIFY 1\n#define MODE_DEFAULT 0\n#define MODE_REFLECTIONS 1\n#define MODE_RAW_REFLECTION 2\n#define MODE_BLURRED_REFLECTIONS 3\n#define MODE_INPUT 4\n#define MODE_BLUR_MIX 5\n#define FLOAT_EPSILON 0.00001\n#define SQRT_3 1.7320508075688772 + FLOAT_EPSILON\nuniform sampler2D inputTexture;uniform sampler2D reflectionsTexture;\n#ifdef ENABLE_BLUR\nuniform sampler2D depthTexture;\n#endif\nuniform float samples;uniform float blurMix;\n#include <bilateralBlur>\nvoid mainImage(const in vec4 inputColor,const in vec2 uv,out vec4 outputColor){vec4 reflectionsTexel=texture2D(reflectionsTexture,vUv);vec3 reflectionClr=reflectionsTexel.xyz;\n#ifdef ENABLE_BLUR\nvec4 blurredReflectionsTexel=blur(reflectionsTexture,depthTexture);reflectionClr=mix(reflectionClr,blurredReflectionsTexel.xyz,blurMix);\n#endif\n#if RENDER_MODE == MODE_DEFAULT\noutputColor=vec4(inputColor.rgb+reflectionClr,1.);\n#endif\n#if RENDER_MODE == MODE_REFLECTIONS\noutputColor=vec4(reflectionClr,1.);\n#endif\n#if RENDER_MODE == MODE_RAW_REFLECTION\noutputColor=vec4(reflectionsTexel.xyz,1.);\n#endif\n#if RENDER_MODE == MODE_BLURRED_REFLECTIONS\n#ifdef ENABLE_BLUR\noutputColor=vec4(blurredReflectionsTexel.xyz,1.);\n#endif\n#endif\n#if RENDER_MODE == MODE_INPUT\noutputColor=vec4(inputColor.xyz,1.);\n#endif\n#if RENDER_MODE == MODE_BLUR_MIX\n#ifdef ENABLE_BLUR\noutputColor=vec4(vec3(blurMix),1.);\n#endif\n#endif\n}"; // eslint-disable-line | ||
var customTRComposeShader = "#define GLSLIFY 1\nfloat alpha=min(inputTexel.a,accumulatedTexel.a);alpha=didReproject&&(samples<4.||velocityDisocclusion<FLOAT_EPSILON)?(0.05+alpha): 0.;if(maxSamples!=0.&&samples>maxSamples&&alpha>1.-FLOAT_EPSILON){gl_FragColor=accumulatedTexel;return;}if(!didReproject){gl_FragColor=vec4(averageNeighborColor,alpha);return;}if(length(accumulatedTexel.rgb)>FLOAT_EPSILON&&length(inputTexel.rgb)==0.){gl_FragColor=accumulatedTexel;return;}if(alpha<1.){outputColor=mix(accumulatedTexel.rgb,inputTexel.rgb,(1.-alpha*alpha)*temporalResolveCorrectionMix);}else if(samples>4.&&movement<FLOAT_EPSILON&&length(accumulatedTexel.rgb)<FLOAT_EPSILON){outputColor=accumulatedTexel.rgb;}else if(1./samples>=1.-temporalResolveMix){outputColor=accumulatedTexel.rgb*temporalResolveMix+inputTexel.rgb*(1.-temporalResolveMix);}else{float mixVal=(1./samples)/EULER;if(alpha<FLOAT_EPSILON&&samples<15.)mixVal+=0.3;outputColor=mix(accumulatedTexel.rgb,inputTexel.rgb,mixVal);}"; // eslint-disable-line | ||
var customBasicComposeShader = "#define GLSLIFY 1\naccumulatedTexel=textureLod(accumulatedTexture,vUv,0.);float alpha=min(inputTexel.a,accumulatedTexel.a);alpha=samples<2.||movement<FLOAT_EPSILON ?(0.05+alpha): 0.;if(maxSamples!=0.&&samples>maxSamples&&alpha>1.-FLOAT_EPSILON){outputColor=accumulatedTexel.rgb;}else{float samplesMultiplier=pow(samples/32.,4.)+1.;if(samples>1.&&alpha>1.-FLOAT_EPSILON){outputColor=accumulatedTexel.rgb*(1.-1./(samples*samplesMultiplier))+inputTexel.rgb/(samples*samplesMultiplier);}else{outputColor=inputTexel.rgb;}}"; // eslint-disable-line | ||
var helperFunctions = "#define GLSLIFY 1\nvec3 getViewPosition(const float depth){float clipW=_projectionMatrix[2][3]*depth+_projectionMatrix[3][3];vec4 clipPosition=vec4((vec3(vUv,depth)-0.5)*2.0,1.0);clipPosition*=clipW;return(_inverseProjectionMatrix*clipPosition).xyz;}float getViewZ(const in float depth){\n#ifdef PERSPECTIVE_CAMERA\nreturn perspectiveDepthToViewZ(depth,cameraNear,cameraFar);\n#else\nreturn orthographicDepthToViewZ(depth,cameraNear,cameraFar);\n#endif\n}vec3 screenSpaceToWorldSpace(const vec2 uv,const float depth){vec4 ndc=vec4((uv.x-0.5)*2.0,(uv.y-0.5)*2.0,(depth-0.5)*2.0,1.0);vec4 clip=_inverseProjectionMatrix*ndc;vec4 view=cameraMatrixWorld*(clip/clip.w);return view.xyz;}\n#define Scale (vec3(0.8, 0.8, 0.8))\n#define K (19.19)\nvec3 hash(vec3 a){a=fract(a*Scale);a+=dot(a,a.yxz+K);return fract((a.xxy+a.yxx)*a.zyx);}float fresnel_dielectric_cos(float cosi,float eta){float c=abs(cosi);float g=eta*eta-1.0+c*c;float result;if(g>0.0){g=sqrt(g);float A=(g-c)/(g+c);float B=(c*(g+c)-1.0)/(c*(g-c)+1.0);result=0.5*A*A*(1.0+B*B);}else{result=1.0;}return result;}float fresnel_dielectric(vec3 Incoming,vec3 Normal,float eta){float cosine=dot(Incoming,Normal);return min(1.0,5.0*fresnel_dielectric_cos(cosine,eta));}float czm_luminance(vec3 rgb){const vec3 W=vec3(0.2125,0.7154,0.0721);return dot(rgb,W);}"; // eslint-disable-line | ||
// WebGL2: will render normals to the RGB channels of the "gNormal" buffer, roughness to the A channel of the "gNormal" buffer, and depth to the RGBA channels of the "gDepth" buffer
@@ -490,4 +163,6 @@ // and velocity to "gVelocity" buffer | ||
var fragmentShader = "#define GLSLIFY 1\nvarying vec2 vUv;uniform sampler2D inputTexture;uniform sampler2D accumulatedTexture;uniform sampler2D normalTexture;uniform sampler2D depthTexture;uniform mat4 _projectionMatrix;uniform mat4 _inverseProjectionMatrix;uniform mat4 cameraMatrixWorld;uniform float cameraNear;uniform float cameraFar;uniform float rayStep;uniform float intensity;uniform float maxDepthDifference;uniform float roughnessFadeOut;uniform float maxRoughness;uniform float maxDepth;uniform float rayFadeOut;uniform float thickness;uniform float ior;uniform float samples;\n#ifdef ENABLE_JITTERING\nuniform float jitter;uniform float jitterRough;uniform float jitterSpread;\n#endif\n#define FLOAT_EPSILON 0.00001\n#define EARLY_OUT_COLOR vec4(0., 0., 0., 1.)\nconst vec2 INVALID_RAY_COORDS=vec2(-1.);float _maxDepthDifference;float nearMinusFar;float nearMulFar;float farMinusNear;\n#include <packing>\n#include <helperFunctions>\nvec2 BinarySearch(inout vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference);vec2 RayMarch(vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference);float fastGetViewZ(const in float depth);void main(){vec4 depthTexel=textureLod(depthTexture,vUv,0.);if(dot(depthTexel.rgb,depthTexel.rgb)<FLOAT_EPSILON){gl_FragColor=EARLY_OUT_COLOR;return;}float unpackedDepth=unpackRGBAToDepth(depthTexel);if(unpackedDepth>maxDepth){gl_FragColor=EARLY_OUT_COLOR;return;}vec4 normalTexel=textureLod(normalTexture,vUv,0.);float roughness=normalTexel.a;if(roughness>maxRoughness||(roughness>1.-FLOAT_EPSILON&&roughnessFadeOut>1.-FLOAT_EPSILON)){gl_FragColor=EARLY_OUT_COLOR;return;}_maxDepthDifference=maxDepthDifference*0.01;nearMinusFar=cameraNear-cameraFar;nearMulFar=cameraNear*cameraFar;farMinusNear=cameraFar-cameraNear;float specular=1.-roughness;specular*=specular;normalTexel.rgb=unpackRGBToNormal(normalTexel.rgb);float depth=fastGetViewZ(unpackedDepth);vec3 viewNormal=normalTexel.xyz;vec3 viewPos=getViewPosition(depth);vec3 worldPos=screenSpaceToWorldSpace(vUv,unpackedDepth);vec3 jitt=vec3(0.);\n#ifdef ENABLE_JITTERING\nvec3 randomJitter=hash(5.*(samples*worldPos))-0.5;float spread=((2.-specular)+roughness*jitterRough)*jitterSpread;float jitterMix=jitter+jitterRough*roughness;if(jitterMix>1.)jitterMix=1.;jitt=mix(vec3(0.),randomJitter*spread,jitterMix);\n#endif\njitt=mix(jitt,vec3(0.),0.5);viewNormal+=jitt;vec3 reflected=normalize(reflect(normalize(viewPos),normalize(viewNormal)));vec3 rayDir=reflected*-viewPos.z;vec3 hitPos=viewPos;float rayHitDepthDifference;vec2 coords=RayMarch(rayDir,hitPos,rayHitDepthDifference);if(coords.x==-1.){gl_FragColor=EARLY_OUT_COLOR;return;}vec2 coordsNDC=(coords*2.0-1.0);float screenFade=0.1;float maxDimension=min(1.0,max(abs(coordsNDC.x),abs(coordsNDC.y)));float screenEdgefactor=1.0-(max(0.0,maxDimension-screenFade)/(1.0-screenFade));screenEdgefactor=max(0.,screenEdgefactor);vec4 SSRTexel=textureLod(inputTexture,coords.xy,0.);vec4 SSRTexelReflected=textureLod(accumulatedTexture,coords.xy,0.);vec3 SSR=SSRTexel.rgb+SSRTexelReflected.rgb;float roughnessFactor=mix(specular,1.,max(0.,1.-roughnessFadeOut));vec3 finalSSR=SSR*screenEdgefactor*roughnessFactor;if(rayFadeOut!=0.){vec3 hitWorldPos=screenSpaceToWorldSpace(coords,rayHitDepthDifference);float reflectionDistance=distance(hitWorldPos,worldPos);reflectionDistance+=1.;float opacity=1./(reflectionDistance*rayFadeOut*0.1);if(opacity>1.)opacity=1.;finalSSR*=opacity;}float 
fresnelFactor=fresnel_dielectric(normalize(viewPos),viewNormal,ior);finalSSR=finalSSR*fresnelFactor*intensity;finalSSR=min(vec3(1.),finalSSR);float alpha=hitPos.z==1. ? SSRTexel.a : SSRTexelReflected.a;gl_FragColor=vec4(finalSSR,alpha);}vec2 RayMarch(vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference){dir=normalize(dir);dir*=rayStep;float depth;vec4 projectedCoord;vec4 lastProjectedCoord;float unpackedDepth;float stepMultiplier=1.;vec4 depthTexel;for(int i=0;i<MAX_STEPS;i++){hitPos+=dir*stepMultiplier;projectedCoord=_projectionMatrix*vec4(hitPos,1.0);projectedCoord.xy/=projectedCoord.w;projectedCoord.xy=projectedCoord.xy*0.5+0.5;if(projectedCoord.x>1.||projectedCoord.y>1.){hitPos-=dir*stepMultiplier;stepMultiplier*=0.5;continue;}depthTexel=textureLod(depthTexture,projectedCoord.xy,0.);unpackedDepth=unpackRGBAToDepth(depthTexel);depth=fastGetViewZ(unpackedDepth);rayHitDepthDifference=depth-hitPos.z;if(rayHitDepthDifference>=0.&&rayHitDepthDifference<thickness){\n#if NUM_BINARY_SEARCH_STEPS == 0\nif(dot(depthTexel.rgb,depthTexel.rgb)<FLOAT_EPSILON)return INVALID_RAY_COORDS;\n#else\nreturn BinarySearch(dir,hitPos,rayHitDepthDifference);\n#endif\n}lastProjectedCoord=projectedCoord;}\n#ifndef STRETCH_MISSED_RAYS\nreturn INVALID_RAY_COORDS;\n#endif\nrayHitDepthDifference=unpackedDepth;hitPos.z=1.;return projectedCoord.xy;}vec2 BinarySearch(inout vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference){float depth;vec4 projectedCoord;vec2 lastMinProjectedCoordXY;float unpackedDepth;vec4 depthTexel;for(int i=0;i<NUM_BINARY_SEARCH_STEPS;i++){projectedCoord=_projectionMatrix*vec4(hitPos,1.0);projectedCoord.xy/=projectedCoord.w;projectedCoord.xy=projectedCoord.xy*0.5+0.5;if((lastMinProjectedCoordXY.x>1.||lastMinProjectedCoordXY.y>1.)&&(projectedCoord.x>1.||projectedCoord.y>1.))return INVALID_RAY_COORDS;depthTexel=textureLod(depthTexture,projectedCoord.xy,0.);unpackedDepth=unpackRGBAToDepth(depthTexel);depth=fastGetViewZ(unpackedDepth);rayHitDepthDifference=depth-hitPos.z;dir*=0.5;if(rayHitDepthDifference>0.0){hitPos-=dir;}else{hitPos+=dir;lastMinProjectedCoordXY=projectedCoord.xy;}}if(dot(depthTexel.rgb,depthTexel.rgb)<FLOAT_EPSILON)return INVALID_RAY_COORDS;if(abs(rayHitDepthDifference)>_maxDepthDifference)return INVALID_RAY_COORDS;projectedCoord=_projectionMatrix*vec4(hitPos,1.0);projectedCoord.xy/=projectedCoord.w;projectedCoord.xy=projectedCoord.xy*0.5+0.5;rayHitDepthDifference=unpackedDepth;return projectedCoord.xy;}float fastGetViewZ(const in float depth){\n#ifdef PERSPECTIVE_CAMERA\nreturn nearMulFar/(farMinusNear*depth-cameraFar);\n#else\nreturn depth*nearMinusFar-cameraNear;\n#endif\n}"; // eslint-disable-line | ||
var fragmentShader = "#define GLSLIFY 1\nvarying vec2 vUv;uniform sampler2D inputTexture;uniform sampler2D accumulatedTexture;uniform sampler2D normalTexture;uniform sampler2D depthTexture;uniform mat4 _projectionMatrix;uniform mat4 _inverseProjectionMatrix;uniform mat4 cameraMatrixWorld;uniform float cameraNear;uniform float cameraFar;uniform float rayDistance;uniform float intensity;uniform float maxDepthDifference;uniform float roughnessFadeOut;uniform float maxRoughness;uniform float rayFadeOut;uniform float thickness;uniform float ior;uniform float samples;uniform float jitter;uniform float jitterRough;uniform float jitterSpread;\n#define FLOAT_EPSILON 0.00001\n#define EARLY_OUT_COLOR vec4(0., 0., 0., 1.)\nconst vec2 INVALID_RAY_COORDS=vec2(-1.);float _maxDepthDifference;float nearMinusFar;float nearMulFar;float farMinusNear;\n#include <packing>\n#include <helperFunctions>\nvec2 BinarySearch(inout vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference);vec2 RayMarch(vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference);float fastGetViewZ(const in float depth);void main(){vec4 depthTexel=textureLod(depthTexture,vUv,0.);if(dot(depthTexel.rgb,depthTexel.rgb)<FLOAT_EPSILON){gl_FragColor=EARLY_OUT_COLOR;return;}float unpackedDepth=unpackRGBAToDepth(depthTexel);vec4 normalTexel=textureLod(normalTexture,vUv,0.);float roughness=normalTexel.a;if(roughness>maxRoughness||(roughness>1.-FLOAT_EPSILON&&roughnessFadeOut>1.-FLOAT_EPSILON)){gl_FragColor=EARLY_OUT_COLOR;return;}float specular=1.-roughness;_maxDepthDifference=maxDepthDifference*0.01;nearMinusFar=cameraNear-cameraFar;nearMulFar=cameraNear*cameraFar;farMinusNear=cameraFar-cameraNear;normalTexel.rgb=unpackRGBToNormal(normalTexel.rgb);float depth=fastGetViewZ(unpackedDepth);vec3 viewNormal=normalTexel.xyz;vec3 viewPos=getViewPosition(depth);vec3 worldPos=screenSpaceToWorldSpace(vUv,unpackedDepth);vec3 jitt=vec3(0.);if(jitterSpread!=0.&&(jitterRough!=0.||jitter==0.)){vec3 randomJitter=hash(5.*(samples*worldPos))-0.5;float spread=((2.-specular)+roughness*jitterRough)*jitterSpread;float jitterMix=jitter+jitterRough*roughness;if(jitterMix>1.)jitterMix=1.;jitt=mix(vec3(0.),randomJitter*spread,jitterMix);}viewNormal+=jitt;vec3 reflected=reflect(normalize(viewPos),normalize(viewNormal));vec3 rayDir=reflected*-viewPos.z;vec3 hitPos=viewPos;float rayHitDepthDifference;vec2 coords=RayMarch(rayDir,hitPos,rayHitDepthDifference);if(coords.x==-1.){gl_FragColor=EARLY_OUT_COLOR;return;}vec2 coordsNDC=(coords*2.0-1.0);float screenFade=0.1;float maxDimension=min(1.0,max(abs(coordsNDC.x),abs(coordsNDC.y)));float screenEdgefactor=1.0-(max(0.0,maxDimension-screenFade)/(1.0-screenFade));screenEdgefactor=max(0.,screenEdgefactor);vec4 SSRTexel=textureLod(inputTexture,coords.xy,0.);vec4 SSRTexelReflected=textureLod(accumulatedTexture,coords.xy,0.);vec3 SSR=SSRTexel.rgb+SSRTexelReflected.rgb;float roughnessFactor=mix(specular,1.,max(0.,1.-roughnessFadeOut));vec3 finalSSR=SSR*screenEdgefactor*roughnessFactor;if(rayFadeOut!=0.){vec3 hitWorldPos=screenSpaceToWorldSpace(coords,rayHitDepthDifference);float reflectionDistance=distance(hitWorldPos,worldPos);reflectionDistance+=1.;float opacity=1./(reflectionDistance*rayFadeOut*0.1);if(opacity>1.)opacity=1.;finalSSR*=opacity;}float fresnelFactor=fresnel_dielectric(normalize(viewPos),viewNormal,ior);finalSSR=finalSSR*fresnelFactor*intensity;finalSSR=min(vec3(1.),finalSSR);float alpha=hitPos.z==1. ? 
SSRTexel.a : SSRTexelReflected.a;gl_FragColor=vec4(finalSSR,alpha);}vec2 RayMarch(vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference){dir=normalize(dir);dir*=rayDistance/float(MAX_STEPS);float depth;vec4 projectedCoord;vec4 lastProjectedCoord;float unpackedDepth;float stepMultiplier=1.;vec4 depthTexel;for(int i=0;i<MAX_STEPS;i++){hitPos+=dir*stepMultiplier;projectedCoord=_projectionMatrix*vec4(hitPos,1.0);projectedCoord.xy/=projectedCoord.w;projectedCoord.xy=projectedCoord.xy*0.5+0.5;if(projectedCoord.x>1.||projectedCoord.y>1.){hitPos-=dir*stepMultiplier;stepMultiplier*=0.5;}else{depthTexel=textureLod(depthTexture,projectedCoord.xy,0.);unpackedDepth=unpackRGBAToDepth(depthTexel);depth=fastGetViewZ(unpackedDepth);rayHitDepthDifference=depth-hitPos.z;if(rayHitDepthDifference>=0.&&rayHitDepthDifference<thickness){\n#if NUM_BINARY_SEARCH_STEPS == 0\nif(dot(depthTexel.rgb,depthTexel.rgb)<FLOAT_EPSILON)return INVALID_RAY_COORDS;\n#else\nreturn BinarySearch(dir,hitPos,rayHitDepthDifference);\n#endif\n}lastProjectedCoord=projectedCoord;}}\n#ifndef ALLOW_MISSED_RAYS\nreturn INVALID_RAY_COORDS;\n#endif\nrayHitDepthDifference=unpackedDepth;hitPos.z=1.;return projectedCoord.xy;}vec2 BinarySearch(inout vec3 dir,inout vec3 hitPos,inout float rayHitDepthDifference){float depth;vec4 projectedCoord;vec2 lastMinProjectedCoordXY;float unpackedDepth;vec4 depthTexel;for(int i=0;i<NUM_BINARY_SEARCH_STEPS;i++){projectedCoord=_projectionMatrix*vec4(hitPos,1.0);projectedCoord.xy/=projectedCoord.w;projectedCoord.xy=projectedCoord.xy*0.5+0.5;if((lastMinProjectedCoordXY.x>1.||lastMinProjectedCoordXY.y>1.)&&(projectedCoord.x>1.||projectedCoord.y>1.))return INVALID_RAY_COORDS;depthTexel=textureLod(depthTexture,projectedCoord.xy,0.);unpackedDepth=unpackRGBAToDepth(depthTexel);depth=fastGetViewZ(unpackedDepth);rayHitDepthDifference=depth-hitPos.z;dir*=0.5;if(rayHitDepthDifference>0.0){hitPos-=dir;}else{hitPos+=dir;lastMinProjectedCoordXY=projectedCoord.xy;}}if(dot(depthTexel.rgb,depthTexel.rgb)<FLOAT_EPSILON)return INVALID_RAY_COORDS;if(abs(rayHitDepthDifference)>_maxDepthDifference)return INVALID_RAY_COORDS;projectedCoord=_projectionMatrix*vec4(hitPos,1.0);projectedCoord.xy/=projectedCoord.w;projectedCoord.xy=projectedCoord.xy*0.5+0.5;rayHitDepthDifference=unpackedDepth;return projectedCoord.xy;}float fastGetViewZ(const in float depth){\n#ifdef PERSPECTIVE_CAMERA\nreturn nearMulFar/(farMinusNear*depth-cameraFar);\n#else\nreturn depth*nearMinusFar-cameraNear;\n#endif\n}"; // eslint-disable-line | ||
var vertexShader = "#define GLSLIFY 1\nvarying vec2 vUv;void main(){vUv=position.xy*0.5+0.5;gl_Position=vec4(position.xy,1.0,1.0);}"; // eslint-disable-line | ||
class ReflectionsMaterial extends ShaderMaterial { | ||
@@ -507,14 +182,13 @@ constructor() { | ||
cameraFar: new Uniform(0), | ||
rayStep: new Uniform(0.1), | ||
intensity: new Uniform(1), | ||
roughnessFadeOut: new Uniform(1), | ||
rayDistance: new Uniform(0), | ||
intensity: new Uniform(0), | ||
roughnessFadeOut: new Uniform(0), | ||
rayFadeOut: new Uniform(0), | ||
thickness: new Uniform(10), | ||
ior: new Uniform(1.45), | ||
maxDepthDifference: new Uniform(1), | ||
maxDepth: new Uniform(1), | ||
jitter: new Uniform(0.5), | ||
jitterRough: new Uniform(0.5), | ||
jitterSpread: new Uniform(1), | ||
maxRoughness: new Uniform(1), | ||
thickness: new Uniform(0), | ||
ior: new Uniform(0), | ||
maxDepthDifference: new Uniform(0), | ||
jitter: new Uniform(0), | ||
jitterRough: new Uniform(0), | ||
jitterSpread: new Uniform(0), | ||
maxRoughness: new Uniform(0), | ||
samples: new Uniform(0) | ||
@@ -536,2 +210,18 @@ }, | ||
const getVisibleChildren = object => { | ||
const queue = [object]; | ||
const objects = []; | ||
while (queue.length !== 0) { | ||
const mesh = queue.shift(); | ||
if (mesh.material) objects.push(mesh); | ||
for (const c of mesh.children) { | ||
if (c.visible) queue.push(c); | ||
} | ||
} | ||
return objects; | ||
}; | ||
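// getVisibleChildren above walks the scene graph breadth-first, descending only into
// visible children, and returns every traversed object that has a material.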
const isWebGL2Available = () => { | ||
@@ -546,45 +236,11 @@ try { | ||
var _ssrEffect = /*#__PURE__*/_classPrivateFieldLooseKey("ssrEffect"); | ||
var _cachedMaterials = /*#__PURE__*/_classPrivateFieldLooseKey("cachedMaterials"); | ||
var _USE_MRT = /*#__PURE__*/_classPrivateFieldLooseKey("USE_MRT"); | ||
var _webgl1DepthPass = /*#__PURE__*/_classPrivateFieldLooseKey("webgl1DepthPass"); | ||
var _keepMaterialMapUpdated = /*#__PURE__*/_classPrivateFieldLooseKey("keepMaterialMapUpdated"); | ||
var _setMRTMaterialInScene = /*#__PURE__*/_classPrivateFieldLooseKey("setMRTMaterialInScene"); | ||
var _unsetMRTMaterialInScene = /*#__PURE__*/_classPrivateFieldLooseKey("unsetMRTMaterialInScene"); | ||
class ReflectionsPass extends Pass { | ||
constructor(ssrEffect, options = {}) { | ||
super("ReflectionsPass"); | ||
Object.defineProperty(this, _unsetMRTMaterialInScene, { | ||
value: _unsetMRTMaterialInScene2 | ||
}); | ||
Object.defineProperty(this, _setMRTMaterialInScene, { | ||
value: _setMRTMaterialInScene2 | ||
}); | ||
Object.defineProperty(this, _keepMaterialMapUpdated, { | ||
value: _keepMaterialMapUpdated2 | ||
}); | ||
Object.defineProperty(this, _ssrEffect, { | ||
writable: true, | ||
value: void 0 | ||
}); | ||
Object.defineProperty(this, _cachedMaterials, { | ||
writable: true, | ||
value: new WeakMap() | ||
}); | ||
Object.defineProperty(this, _USE_MRT, { | ||
writable: true, | ||
value: false | ||
}); | ||
Object.defineProperty(this, _webgl1DepthPass, { | ||
writable: true, | ||
value: null | ||
}); | ||
_classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect] = ssrEffect; | ||
this.ssrEffect = void 0; | ||
this.cachedMaterials = new WeakMap(); | ||
this.USE_MRT = false; | ||
this.webgl1DepthPass = null; | ||
this.visibleMeshes = []; | ||
this.ssrEffect = ssrEffect; | ||
this._scene = ssrEffect._scene; | ||
@@ -602,6 +258,6 @@ this._camera = ssrEffect._camera; | ||
this.renderPass = new RenderPass(this._scene, this._camera); | ||
_classPrivateFieldLooseBase(this, _USE_MRT)[_USE_MRT] = options.USE_MRT && isWebGL2Available(); | ||
this.USE_MRT = options.USE_MRT && isWebGL2Available(); | ||
if (_classPrivateFieldLooseBase(this, _USE_MRT)[_USE_MRT]) { | ||
// buffers: normal, depth, velocity (3), roughness will be written to the alpha channel of the normal buffer | ||
if (this.USE_MRT) { | ||
// buffers: normal, depth (2), roughness will be written to the alpha channel of the normal buffer | ||
this.gBuffersRenderTarget = new WebGLMultipleRenderTargets(width, height, 2, { | ||
@@ -615,11 +271,9 @@ minFilter: LinearFilter, | ||
// depth pass | ||
_classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass] = new DepthPass(this._scene, this._camera); | ||
_classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].renderTarget.minFilter = LinearFilter; | ||
_classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].renderTarget.magFilter = LinearFilter; | ||
_classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].renderTarget.texture.minFilter = LinearFilter; | ||
_classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].renderTarget.texture.magFilter = LinearFilter; | ||
this.webgl1DepthPass = new DepthPass(this._scene, this._camera); | ||
this.webgl1DepthPass.renderTarget.minFilter = LinearFilter; | ||
this.webgl1DepthPass.renderTarget.magFilter = LinearFilter; | ||
this.webgl1DepthPass.renderTarget.texture.minFilter = LinearFilter; | ||
this.webgl1DepthPass.renderTarget.texture.magFilter = LinearFilter; | ||
this.webgl1DepthPass.setSize(typeof window !== "undefined" ? window.innerWidth : 2000, typeof window !== "undefined" ? window.innerHeight : 1000); // render normals (in the rgb channel) and roughness (in the alpha channel) in gBuffersRenderTarget | ||
_classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].setSize(typeof window !== "undefined" ? window.innerWidth : 2000, typeof window !== "undefined" ? window.innerHeight : 1000); // render normals (in the rgb channel) and roughness (in the alpha channel) in gBuffersRenderTarget | ||
this.gBuffersRenderTarget = new WebGLRenderTarget(width, height, { | ||
@@ -630,3 +284,3 @@ minFilter: LinearFilter, | ||
this.normalTexture = this.gBuffersRenderTarget.texture; | ||
this.depthTexture = _classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].texture; | ||
this.depthTexture = this.webgl1DepthPass.texture; | ||
} // set up uniforms | ||
@@ -637,3 +291,3 @@ | ||
this.fullscreenMaterial.uniforms.depthTexture.value = this.depthTexture; | ||
this.fullscreenMaterial.uniforms.accumulatedTexture.value = _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].temporalResolvePass.accumulatedTexture; | ||
this.fullscreenMaterial.uniforms.accumulatedTexture.value = this.ssrEffect.temporalResolvePass.accumulatedTexture; | ||
this.fullscreenMaterial.uniforms.cameraMatrixWorld.value = this._camera.matrixWorld; | ||
@@ -645,5 +299,5 @@ this.fullscreenMaterial.uniforms._projectionMatrix.value = this._camera.projectionMatrix; | ||
setSize(width, height) { | ||
this.renderTarget.setSize(width * _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].resolutionScale, height * _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].resolutionScale); | ||
this.gBuffersRenderTarget.setSize(width * _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].resolutionScale, height * _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].resolutionScale); | ||
this.fullscreenMaterial.uniforms.accumulatedTexture.value = _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].temporalResolvePass.accumulatedTexture; | ||
this.renderTarget.setSize(width * this.ssrEffect.resolutionScale, height * this.ssrEffect.resolutionScale); | ||
this.gBuffersRenderTarget.setSize(width * this.ssrEffect.resolutionScale, height * this.ssrEffect.resolutionScale); | ||
this.fullscreenMaterial.uniforms.accumulatedTexture.value = this.ssrEffect.temporalResolvePass.accumulatedTexture; | ||
this.fullscreenMaterial.needsUpdate = true; | ||
@@ -656,3 +310,3 @@ } | ||
this.renderPass.dispose(); | ||
if (!_classPrivateFieldLooseBase(this, _USE_MRT)[_USE_MRT]) _classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].dispose(); | ||
if (!this.USE_MRT) this.webgl1DepthPass.dispose(); | ||
this.fullscreenMaterial.dispose(); | ||
@@ -664,103 +318,136 @@ this.normalTexture = null; | ||
render(renderer, inputBuffer) { | ||
_classPrivateFieldLooseBase(this, _setMRTMaterialInScene)[_setMRTMaterialInScene](); | ||
keepMaterialMapUpdated(mrtMaterial, originalMaterial, prop, define) { | ||
if (this.ssrEffect[define]) { | ||
if (originalMaterial[prop] !== mrtMaterial[prop]) { | ||
mrtMaterial[prop] = originalMaterial[prop]; | ||
mrtMaterial.uniforms[prop].value = originalMaterial[prop]; | ||
renderer.setRenderTarget(this.gBuffersRenderTarget); | ||
this.renderPass.render(renderer, this.gBuffersRenderTarget); | ||
if (originalMaterial[prop]) { | ||
mrtMaterial.defines[define] = ""; | ||
} else { | ||
delete mrtMaterial.defines[define]; | ||
} | ||
_classPrivateFieldLooseBase(this, _unsetMRTMaterialInScene)[_unsetMRTMaterialInScene](); // render depth and velocity in separate passes
mrtMaterial.needsUpdate = true; | ||
} | ||
} else if (mrtMaterial[prop] !== undefined) { | ||
mrtMaterial[prop] = undefined; | ||
mrtMaterial.uniforms[prop].value = undefined; | ||
delete mrtMaterial.defines[define]; | ||
mrtMaterial.needsUpdate = true; | ||
} | ||
} | ||
setMRTMaterialInScene() { | ||
this.visibleMeshes = getVisibleChildren(this._scene); | ||
if (!_classPrivateFieldLooseBase(this, _USE_MRT)[_USE_MRT]) _classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].renderPass.render(renderer, _classPrivateFieldLooseBase(this, _webgl1DepthPass)[_webgl1DepthPass].renderTarget); | ||
this.fullscreenMaterial.uniforms.inputTexture.value = inputBuffer.texture; | ||
this.fullscreenMaterial.uniforms.samples.value = _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].samples; | ||
this.fullscreenMaterial.uniforms.cameraNear.value = this._camera.near; | ||
this.fullscreenMaterial.uniforms.cameraFar.value = this._camera.far; | ||
renderer.setRenderTarget(this.renderTarget); | ||
renderer.render(this.scene, this.camera); | ||
} | ||
for (const c of this.visibleMeshes) { | ||
if (c.material) { | ||
const originalMaterial = c.material; | ||
let [cachedOriginalMaterial, mrtMaterial] = this.cachedMaterials.get(c) || []; | ||
} | ||
if (originalMaterial !== cachedOriginalMaterial) { | ||
if (mrtMaterial) mrtMaterial.dispose(); | ||
mrtMaterial = new MRTMaterial(); | ||
if (this.USE_MRT) mrtMaterial.defines.USE_MRT = ""; | ||
mrtMaterial.normalScale = originalMaterial.normalScale; | ||
mrtMaterial.uniforms.normalScale.value = originalMaterial.normalScale; | ||
const map = originalMaterial.map || originalMaterial.normalMap || originalMaterial.roughnessMap || originalMaterial.metalnessMap; | ||
if (map) mrtMaterial.uniforms.uvTransform.value = map.matrix; | ||
this.cachedMaterials.set(c, [originalMaterial, mrtMaterial]); | ||
} // update the child's MRT material | ||
function _keepMaterialMapUpdated2(mrtMaterial, originalMaterial, prop, define) { | ||
if (_classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect][define]) { | ||
if (originalMaterial[prop] !== mrtMaterial[prop]) { | ||
mrtMaterial[prop] = originalMaterial[prop]; | ||
mrtMaterial.uniforms[prop].value = originalMaterial[prop]; | ||
if (originalMaterial[prop]) { | ||
mrtMaterial.defines[define] = ""; | ||
} else { | ||
delete mrtMaterial.defines[define]; | ||
this.keepMaterialMapUpdated(mrtMaterial, originalMaterial, "normalMap", "USE_NORMALMAP"); | ||
this.keepMaterialMapUpdated(mrtMaterial, originalMaterial, "roughnessMap", "USE_ROUGHNESSMAP"); | ||
mrtMaterial.uniforms.roughness.value = this.ssrEffect.selection.size === 0 || this.ssrEffect.selection.has(c) ? originalMaterial.roughness || 0 : 10e10; | ||
c.material = mrtMaterial; | ||
} | ||
mrtMaterial.needsUpdate = true; | ||
} | ||
} else if (mrtMaterial[prop] !== undefined) { | ||
mrtMaterial[prop] = undefined; | ||
mrtMaterial.uniforms[prop].value = undefined; | ||
delete mrtMaterial.defines[define]; | ||
mrtMaterial.needsUpdate = true; | ||
} | ||
} | ||
function _setMRTMaterialInScene2() { | ||
this._scene.traverse(c => { | ||
if (c.material) { | ||
const originalMaterial = c.material; | ||
let [cachedOriginalMaterial, mrtMaterial] = _classPrivateFieldLooseBase(this, _cachedMaterials)[_cachedMaterials].get(c) || []; | ||
unsetMRTMaterialInScene() { | ||
for (const c of this.visibleMeshes) { | ||
var _c$material; | ||
if (!_classPrivateFieldLooseBase(this, _cachedMaterials)[_cachedMaterials].has(c) || originalMaterial !== cachedOriginalMaterial) { | ||
if (mrtMaterial) mrtMaterial.dispose(); | ||
mrtMaterial = new MRTMaterial(); | ||
if (_classPrivateFieldLooseBase(this, _USE_MRT)[_USE_MRT]) mrtMaterial.defines.USE_MRT = ""; | ||
mrtMaterial.normalScale = originalMaterial.normalScale; | ||
mrtMaterial.uniforms.normalScale.value = originalMaterial.normalScale; | ||
const map = originalMaterial.map || originalMaterial.normalMap || originalMaterial.roughnessMap || originalMaterial.metalnessMap; | ||
if (map) mrtMaterial.uniforms.uvTransform.value = map.matrix; | ||
if (((_c$material = c.material) == null ? void 0 : _c$material.type) === "MRTMaterial") { | ||
c.visible = true; // set material back to the original one | ||
_classPrivateFieldLooseBase(this, _cachedMaterials)[_cachedMaterials].set(c, [originalMaterial, mrtMaterial]); | ||
} // update the child's MRT material | ||
const [originalMaterial] = this.cachedMaterials.get(c); | ||
c.material = originalMaterial; | ||
} | ||
} | ||
} | ||
render(renderer, inputBuffer) { | ||
this.setMRTMaterialInScene(); | ||
renderer.setRenderTarget(this.gBuffersRenderTarget); | ||
this.renderPass.render(renderer, this.gBuffersRenderTarget); | ||
this.unsetMRTMaterialInScene(); // render depth and velocity in separate passes
_classPrivateFieldLooseBase(this, _keepMaterialMapUpdated)[_keepMaterialMapUpdated](mrtMaterial, originalMaterial, "normalMap", "USE_NORMALMAP"); | ||
if (!this.USE_MRT) this.webgl1DepthPass.renderPass.render(renderer, this.webgl1DepthPass.renderTarget); | ||
this.fullscreenMaterial.uniforms.inputTexture.value = inputBuffer.texture; | ||
this.fullscreenMaterial.uniforms.samples.value = this.ssrEffect.samples; | ||
this.fullscreenMaterial.uniforms.cameraNear.value = this._camera.near; | ||
this.fullscreenMaterial.uniforms.cameraFar.value = this._camera.far; | ||
renderer.setRenderTarget(this.renderTarget); | ||
renderer.render(this.scene, this.camera); | ||
} | ||
_classPrivateFieldLooseBase(this, _keepMaterialMapUpdated)[_keepMaterialMapUpdated](mrtMaterial, originalMaterial, "roughnessMap", "USE_ROUGHNESSMAP"); | ||
mrtMaterial.uniforms.roughness.value = _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].selection.size === 0 || _classPrivateFieldLooseBase(this, _ssrEffect)[_ssrEffect].selection.has(c) ? originalMaterial.roughness || 0 : 10e10; | ||
c.material = mrtMaterial; | ||
} | ||
}); | ||
} | ||
function _unsetMRTMaterialInScene2() { | ||
this._scene.traverse(c => { | ||
if (c.material?.type === "MRTMaterial") { | ||
// set material back to the original one | ||
const [originalMaterial] = _classPrivateFieldLooseBase(this, _cachedMaterials)[_cachedMaterials].get(c); | ||
/** | ||
* Options of the SSR effect | ||
* @typedef {Object} SSROptions | ||
* @property {boolean} [temporalResolve=true] whether you want to use Temporal Resolving to re-use reflections from the last frames; this will reduce noise tremendously but may result in "smearing" | ||
* @property {Number} [temporalResolveMix] a value between 0 and 1 to set how much the last frame's reflections should be blended in; higher values will result in less noisy reflections when moving the camera but a more smeary look | ||
* @property {Number} [resolutionScale] resolution of the SSR effect, a resolution of 0.5 means the effect will be rendered at half resolution | ||
* @property {Number} [velocityResolutionScale] resolution of the velocity buffer, a resolution of 0.5 means velocity will be rendered at half resolution | ||
* @property {Number} [width] width of the SSREffect | ||
* @property {Number} [height] height of the SSREffect | ||
* @property {Number} [blurMix] how much the blurred reflections should be mixed with the raw reflections | ||
* @property {Number} [blurSharpness] exponent of the Box Blur filter; higher values will result in more sharpness | ||
* @property {Number} [blurKernelSize] kernel size of the Box Blur Filter; higher kernel sizes will result in blurrier reflections with more artifacts | ||
* @property {Number} [rayDistance] maximum distance a reflection ray can travel to find what it reflects | ||
* @property {Number} [intensity] intensity of the reflections | ||
* @property {Number} [colorExponent] exponent by which reflections will be potentiated when composing the current frame's reflections and the accumulated reflections into a final reflection; higher values will make reflections clearer by highlighting darker spots less | ||
* @property {Number} [maxRoughness] maximum roughness a texel can have to have reflections calculated for it | ||
* @property {Number} [jitter] how intense jittering should be | ||
* @property {Number} [jitterSpread] how much the jittered rays should be spread; higher values will give a rougher look regarding the reflections but are more expensive to compute with | ||
* @property {Number} [jitterRough] how intense jittering should be in relation to a material's roughness | ||
* @property {Number} [roughnessFadeOut] how intense reflections should be on rough spots; a higher value will make reflections fade out quicker on rough spots | ||
* @property {Number} [rayFadeOut] how much reflections will fade out by distance | ||
* @property {Number} [MAX_STEPS] number of steps a reflection ray can maximally do to find an object it intersected (and thus reflects) | ||
* @property {Number} [NUM_BINARY_SEARCH_STEPS] once we had our ray intersect something, we need to find the exact point in space it intersected and thus it reflects; this can be done through binary search with the given number of maximum steps | ||
* @property {Number} [maxDepthDifference] maximum depth difference between a ray and the particular depth at its screen position after refining with binary search; higher values will result in better performance | ||
* @property {Number} [thickness] maximum depth difference between a ray and the particular depth at its screen position before refining with binary search; higher values will result in better performance | ||
* @property {Number} [ior] Index of Refraction, used for calculating fresnel; reflections tend to be more intense the steeper the angle between them and the viewer is, the ior parameter sets how much the intensity varies | ||
* @property {boolean} [CLAMP_RADIUS] how many surrounding pixels will be used for neighborhood clamping; a higher value can reduce noise when moving the camera but will result in less performance | ||
* @property {boolean} [ALLOW_MISSED_RAYS] if there should still be reflections for rays for which a reflecting point couldn't be found; enabling this will result in stretched looking reflections which can look good or bad depending on the angle | ||
* @property {boolean} [USE_MRT] WebGL2 only - whether to use multiple render targets when rendering the G-buffers (normals, depth and roughness); using them can improve performance as they will render all information to multiple buffers for each fragment in one run; this setting can't be changed during run-time | ||
 * @property {boolean} [USE_NORMALMAP] whether normal maps should be taken into account when calculating reflections | ||
 * @property {boolean} [USE_ROUGHNESSMAP] whether roughness maps should be taken into account when calculating reflections | ||
*/ | ||
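// Usage sketch (illustrative only, not part of this build): constructing the effect with a
// few of the options documented above; any option that is left out falls back to its default.
// Assumes an existing three.js scene and camera.
//
//   const ssrEffect = new SSREffect(scene, camera, {
//     temporalResolve: true,
//     maxRoughness: 1,
//     rayDistance: 10
//   });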
c.material = originalMaterial; | ||
} | ||
}); | ||
} | ||
const finalFragmentShader = finalSSRShader.replace("#include <helperFunctions>", helperFunctions).replace("#include <bilateralBlur>", bilateralBlur); | ||
/** | ||
* The options of the SSR effect | ||
* @type {SSROptions} | ||
*/ | ||
const defaultSSROptions = { | ||
temporalResolve: true, | ||
temporalResolveMix: 0.9, | ||
temporalResolveCorrectionMix: 1, | ||
maxSamples: 256, | ||
temporalResolveCorrection: 1, | ||
resolutionScale: 1, | ||
width: typeof window !== "undefined" ? window.innerWidth : 2000, | ||
velocityResolutionScale: 1, | ||
width: typeof window !== "undefined" ? window.innerWidth : 1000, | ||
height: typeof window !== "undefined" ? window.innerHeight : 1000, | ||
ENABLE_BLUR: false, | ||
blurMix: 0.5, | ||
blurKernelSize: 8, | ||
blurSharpness: 0.5, | ||
rayStep: 0.1, | ||
blurSharpness: 10, | ||
blurKernelSize: 1, | ||
rayDistance: 10, | ||
intensity: 1, | ||
maxRoughness: 0.1, | ||
ENABLE_JITTERING: false, | ||
jitter: 0.1, | ||
jitterSpread: 0.1, | ||
colorExponent: 1, | ||
maxRoughness: 1, | ||
jitter: 0, | ||
jitterSpread: 0, | ||
jitterRough: 0, | ||
@@ -772,113 +459,384 @@ roughnessFadeOut: 1, | ||
maxDepthDifference: 10, | ||
maxDepth: 1, | ||
thickness: 10, | ||
ior: 1.45, | ||
STRETCH_MISSED_RAYS: true, | ||
CLAMP_RADIUS: 1, | ||
ALLOW_MISSED_RAYS: true, | ||
USE_MRT: true, | ||
USE_NORMALMAP: true, | ||
USE_ROUGHNESSMAP: true | ||
}; // all the properties for which we don't have to resample | ||
}; | ||
const noResetSamplesProperties = ["ENABLE_BLUR", "blurSharpness", "blurKernelSize", "blurMix"]; | ||
var temporalResolve = "#define GLSLIFY 1\nuniform sampler2D inputTexture;uniform sampler2D accumulatedTexture;uniform sampler2D velocityTexture;uniform sampler2D lastVelocityTexture;uniform float temporalResolveCorrection;uniform vec2 invTexSize;uniform float colorExponent;varying vec2 vUv;\n#include <packing>\n#ifdef DILATION\nvec4 getDilatedTexture(sampler2D tex,vec2 uv,vec2 invTexSize){float closestDepth=0.;vec2 closestNeighborUv;vec2 neighborUv;float neighborDepth;for(int x=-1;x<=1;x++){for(int y=-1;y<=1;y++){neighborUv=vUv+vec2(x,y)*invTexSize;neighborDepth=textureLod(tex,neighborUv,0.).b;if(neighborDepth>closestDepth){closestNeighborUv=neighborUv;closestDepth=neighborDepth;}}}return textureLod(tex,closestNeighborUv,0.);}\n#endif\nvec3 transformColorExponent;vec3 undoColorTransformExponent;vec3 transformColor(vec3 color){if(colorExponent==1.)return color;return pow(color,transformColorExponent);}vec3 undoColorTransform(vec3 color){if(colorExponent==1.)return color;return pow(color,undoColorTransformExponent);}void main(){transformColorExponent=vec3(1./colorExponent);undoColorTransformExponent=vec3(colorExponent);vec4 inputTexel=textureLod(inputTexture,vUv,0.);vec3 inputColor=transformColor(inputTexel.rgb);vec3 accumulatedColor;vec3 outputColor;vec4 velocity;vec2 lastVelUv;\n#ifdef DILATION\nvelocity=getDilatedTexture(velocityTexture,vUv,invTexSize);\n#else\nvelocity=textureLod(velocityTexture,vUv,0.);\n#endif\nvec2 velUv=velocity.xy;vec2 reprojectedUv=vUv-velUv;float velocityLength=length(lastVelUv-velUv);\n#ifdef DILATION\nlastVelUv=getDilatedTexture(lastVelocityTexture,reprojectedUv,invTexSize).xy;\n#else\nlastVelUv=textureLod(lastVelocityTexture,reprojectedUv,0.).xy;\n#endif\nfloat velocityDisocclusion=(velocityLength-0.000005)*10.;velocityDisocclusion*=velocityDisocclusion;bool canReproject=reprojectedUv.x>=0.&&reprojectedUv.x<=1.&&reprojectedUv.y>=0.&&reprojectedUv.y<=1.;float movement=length(velUv)*100.;bool isMoving=velocityDisocclusion>0.001||movement>0.001;float alpha=inputTexel.a;if(isMoving){vec3 minNeighborColor=inputColor;vec3 maxNeighborColor=inputColor;vec2 neighborUv;vec3 col;vec3 boxBlurredColor;for(int x=-CLAMP_RADIUS;x<=CLAMP_RADIUS;x++){for(int y=-CLAMP_RADIUS;y<=CLAMP_RADIUS;y++){if(x!=0||y!=0){neighborUv=vUv+vec2(x,y)*invTexSize;col=textureLod(inputTexture,neighborUv,0.).xyz;col=transformColor(col);if(canReproject){minNeighborColor=min(col,minNeighborColor);maxNeighborColor=max(col,maxNeighborColor);}else{boxBlurredColor+=col;}}}}if(canReproject){vec4 accumulatedTexel=textureLod(accumulatedTexture,reprojectedUv,0.);accumulatedColor=transformColor(accumulatedTexel.rgb);vec3 clampedColor=clamp(accumulatedColor,minNeighborColor,maxNeighborColor);float mixFactor=temporalResolveCorrection*(1.+movement);mixFactor=min(mixFactor,1.);accumulatedColor=mix(accumulatedColor,clampedColor,mixFactor);}else{float pxRadius=pow(float(CLAMP_RADIUS*2+1),2.);accumulatedColor=boxBlurredColor/pxRadius;}}else{accumulatedColor=transformColor(textureLod(accumulatedTexture,vUv,0.).rgb);}if(velocity.r>1.-FLOAT_EPSILON&&velocity.g>1.-FLOAT_EPSILON){alpha=0.;velocityDisocclusion=10.0e10;movement=10.0e10;}\n#include <custom_compose_shader>\ngl_FragColor=vec4(undoColorTransform(outputColor),1.);}"; // eslint-disable-line | ||
var _lastSize = /*#__PURE__*/_classPrivateFieldLooseKey("lastSize"); | ||
// this shader is from: https://github.com/gkjohnson/threejs-sandbox | ||
// a second set of bone information from the previous frame | ||
var _lastCameraTransform = /*#__PURE__*/_classPrivateFieldLooseKey("lastCameraTransform"); | ||
const prev_skinning_pars_vertex = | ||
/* glsl */ | ||
` | ||
#ifdef USE_SKINNING | ||
#ifdef BONE_TEXTURE | ||
uniform sampler2D prevBoneTexture; | ||
mat4 getPrevBoneMatrix( const in float i ) { | ||
float j = i * 4.0; | ||
float x = mod( j, float( boneTextureSize ) ); | ||
float y = floor( j / float( boneTextureSize ) ); | ||
float dx = 1.0 / float( boneTextureSize ); | ||
float dy = 1.0 / float( boneTextureSize ); | ||
y = dy * ( y + 0.5 ); | ||
vec4 v1 = texture2D( prevBoneTexture, vec2( dx * ( x + 0.5 ), y ) ); | ||
vec4 v2 = texture2D( prevBoneTexture, vec2( dx * ( x + 1.5 ), y ) ); | ||
vec4 v3 = texture2D( prevBoneTexture, vec2( dx * ( x + 2.5 ), y ) ); | ||
vec4 v4 = texture2D( prevBoneTexture, vec2( dx * ( x + 3.5 ), y ) ); | ||
mat4 bone = mat4( v1, v2, v3, v4 ); | ||
return bone; | ||
} | ||
#else | ||
uniform mat4 prevBoneMatrices[ MAX_BONES ]; | ||
mat4 getPrevBoneMatrix( const in float i ) { | ||
mat4 bone = prevBoneMatrices[ int(i) ]; | ||
return bone; | ||
} | ||
#endif | ||
#endif | ||
`; // Returns the body of the vertex shader for the velocity buffer and | ||
// outputs the current and previous frame positions | ||
var _makeOptionsReactive = /*#__PURE__*/_classPrivateFieldLooseKey("makeOptionsReactive"); | ||
const velocity_vertex = | ||
/* glsl */ | ||
` | ||
vec3 transformed; | ||
class SSREffect extends Effect { | ||
constructor(scene, camera, _options = defaultSSROptions) { | ||
super("SSREffect", finalFragmentShader, { | ||
type: "FinalSSRMaterial", | ||
uniforms: new Map([["inputTexture", new Uniform(null)], ["reflectionsTexture", new Uniform(null)], ["depthTexture", new Uniform(null)], ["samples", new Uniform(0)], ["blurMix", new Uniform(0)], ["g_Sharpness", new Uniform(0)], ["g_InvResolutionDirection", new Uniform(new Vector2())], ["kernelRadius", new Uniform(0)]]), | ||
defines: new Map([["RENDER_MODE", "0"]]) | ||
// Get the normal | ||
${ShaderChunk.skinbase_vertex} | ||
${ShaderChunk.beginnormal_vertex} | ||
${ShaderChunk.skinnormal_vertex} | ||
${ShaderChunk.defaultnormal_vertex} | ||
// Get the current vertex position | ||
transformed = vec3( position ); | ||
${ShaderChunk.skinning_vertex} | ||
newPosition = velocityMatrix * vec4( transformed, 1.0 ); | ||
// Get the previous vertex position | ||
transformed = vec3( position ); | ||
${ShaderChunk.skinbase_vertex.replace(/mat4 /g, "").replace(/getBoneMatrix/g, "getPrevBoneMatrix")} | ||
${ShaderChunk.skinning_vertex.replace(/vec4 /g, "")} | ||
prevPosition = prevVelocityMatrix * vec4( transformed, 1.0 ); | ||
gl_Position = newPosition; | ||
`; | ||
class MeshVelocityMaterial extends ShaderMaterial { | ||
constructor() { | ||
super({ | ||
uniforms: { | ||
prevVelocityMatrix: { | ||
value: new Matrix4() | ||
}, | ||
velocityMatrix: { | ||
value: new Matrix4() | ||
}, | ||
prevBoneTexture: { | ||
value: null | ||
}, | ||
interpolateGeometry: { | ||
value: 0 | ||
}, | ||
intensity: { | ||
value: 1 | ||
}, | ||
boneTexture: { | ||
value: null | ||
}, | ||
alphaTest: { | ||
value: 0.0 | ||
}, | ||
map: { | ||
value: null | ||
}, | ||
alphaMap: { | ||
value: null | ||
}, | ||
opacity: { | ||
value: 1.0 | ||
} | ||
}, | ||
vertexShader: | ||
/* glsl */ | ||
` | ||
#define MAX_BONES 1024 | ||
${ShaderChunk.skinning_pars_vertex} | ||
${prev_skinning_pars_vertex} | ||
uniform mat4 velocityMatrix; | ||
uniform mat4 prevVelocityMatrix; | ||
uniform float interpolateGeometry; | ||
varying vec4 prevPosition; | ||
varying vec4 newPosition; | ||
void main() { | ||
${velocity_vertex} | ||
}`, | ||
fragmentShader: | ||
/* glsl */ | ||
` | ||
uniform float intensity; | ||
varying vec4 prevPosition; | ||
varying vec4 newPosition; | ||
void main() { | ||
#ifdef FULL_MOVEMENT | ||
gl_FragColor = vec4( 1., 1., 1. - gl_FragCoord.z, 0. ); | ||
return; | ||
#endif | ||
vec2 pos0 = (prevPosition.xy / prevPosition.w) * 0.5 + 0.5; | ||
vec2 pos1 = (newPosition.xy / newPosition.w) * 0.5 + 0.5; | ||
vec2 vel = pos1 - pos0; | ||
gl_FragColor = vec4( vel, 1. - gl_FragCoord.z, 0. ); | ||
}` | ||
}); | ||
Object.defineProperty(this, _makeOptionsReactive, { | ||
value: _makeOptionsReactive2 | ||
}); | ||
this.samples = 0; | ||
this.selection = new Selection(); | ||
Object.defineProperty(this, _lastSize, { | ||
writable: true, | ||
value: void 0 | ||
}); | ||
Object.defineProperty(this, _lastCameraTransform, { | ||
writable: true, | ||
value: { | ||
position: new Vector3(), | ||
quaternion: new Quaternion() | ||
} | ||
}); | ||
this.isMeshVelocityMaterial = true; | ||
} | ||
} | ||
const backgroundColor = new Color(0); | ||
const updateProperties = ["visible", "wireframe", "side"]; | ||
class VelocityPass extends Pass { | ||
constructor(scene, camera) { | ||
var _window, _window2; | ||
super("VelocityPass"); | ||
this.cachedMaterials = new WeakMap(); | ||
this.lastCameraTransform = { | ||
position: new Vector3(), | ||
quaternion: new Quaternion() | ||
}; | ||
this.visibleMeshes = []; | ||
this.renderedMeshesThisFrame = 0; | ||
this.renderedMeshesLastFrame = 0; | ||
this._scene = scene; | ||
this._camera = camera; | ||
_options = _extends({}, defaultSSROptions, _options); // set up passes | ||
// temporal resolve pass | ||
this.renderTarget = new WebGLRenderTarget(((_window = window) == null ? void 0 : _window.innerWidth) || 1000, ((_window2 = window) == null ? void 0 : _window2.innerHeight) || 1000, { | ||
minFilter: LinearFilter, | ||
magFilter: LinearFilter, | ||
type: HalfFloatType | ||
}); | ||
} | ||
this.temporalResolvePass = new TemporalResolvePass(scene, camera, "", _options); | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.samples = new Uniform(0); | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.maxSamples = new Uniform(0); | ||
this.temporalResolvePass.fullscreenMaterial.defines.EULER = 2.718281828459045; | ||
this.temporalResolvePass.fullscreenMaterial.defines.FLOAT_EPSILON = 0.00001; | ||
this.uniforms.get("reflectionsTexture").value = this.temporalResolvePass.renderTarget.texture; // reflections pass | ||
setVelocityMaterialInScene() { | ||
this.renderedMeshesThisFrame = 0; | ||
this.visibleMeshes = getVisibleChildren(this._scene); | ||
this.reflectionsPass = new ReflectionsPass(this, _options); | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.inputTexture.value = this.reflectionsPass.renderTarget.texture; | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.depthTexture.value = this.reflectionsPass.depthTexture; | ||
_classPrivateFieldLooseBase(this, _lastSize)[_lastSize] = { | ||
width: _options.width, | ||
height: _options.height, | ||
resolutionScale: _options.resolutionScale | ||
}; | ||
for (const c of this.visibleMeshes) { | ||
var _c$skeleton2; | ||
_classPrivateFieldLooseBase(this, _lastCameraTransform)[_lastCameraTransform].position.copy(camera.position); | ||
const originalMaterial = c.material; | ||
let [cachedOriginalMaterial, velocityMaterial] = this.cachedMaterials.get(c) || []; | ||
_classPrivateFieldLooseBase(this, _lastCameraTransform)[_lastCameraTransform].quaternion.copy(camera.quaternion); | ||
if (originalMaterial !== cachedOriginalMaterial) { | ||
var _c$skeleton; | ||
this.setSize(_options.width, _options.height); | ||
velocityMaterial = new MeshVelocityMaterial(); | ||
velocityMaterial.lastMatrixWorld = new Matrix4(); | ||
if ((_c$skeleton = c.skeleton) != null && _c$skeleton.boneTexture) this.saveBoneTexture(c); | ||
this.cachedMaterials.set(c, [originalMaterial, velocityMaterial]); | ||
} | ||
_classPrivateFieldLooseBase(this, _makeOptionsReactive)[_makeOptionsReactive](_options); | ||
velocityMaterial.uniforms.velocityMatrix.value.multiplyMatrices(this._camera.projectionMatrix, c.modelViewMatrix); | ||
if (c.userData.needsUpdatedReflections || originalMaterial.map instanceof VideoTexture) { | ||
if (!("FULL_MOVEMENT" in velocityMaterial.defines)) velocityMaterial.needsUpdate = true; | ||
velocityMaterial.defines.FULL_MOVEMENT = ""; | ||
} else { | ||
if ("FULL_MOVEMENT" in velocityMaterial.defines) { | ||
delete velocityMaterial.defines.FULL_MOVEMENT; | ||
velocityMaterial.needsUpdate = true; | ||
} | ||
} | ||
const childMovedThisFrame = !c.matrixWorld.equals(velocityMaterial.lastMatrixWorld); | ||
c.visible = this.cameraMovedThisFrame || childMovedThisFrame || c.skeleton || "FULL_MOVEMENT" in velocityMaterial.defines; | ||
c.material = velocityMaterial; | ||
if (!c.visible) continue; | ||
this.renderedMeshesThisFrame++; | ||
for (const prop of updateProperties) velocityMaterial[prop] = originalMaterial[prop]; | ||
if ((_c$skeleton2 = c.skeleton) != null && _c$skeleton2.boneTexture) { | ||
velocityMaterial.defines.USE_SKINNING = ""; | ||
velocityMaterial.defines.BONE_TEXTURE = ""; | ||
velocityMaterial.uniforms.boneTexture.value = c.skeleton.boneTexture; | ||
} | ||
} | ||
} | ||
saveBoneTexture(object) { | ||
let boneTexture = object.material.uniforms.prevBoneTexture.value; | ||
if (boneTexture && boneTexture.image.width === object.skeleton.boneTexture.width) { | ||
boneTexture = object.material.uniforms.prevBoneTexture.value; | ||
boneTexture.image.data.set(object.skeleton.boneTexture.image.data); | ||
} else { | ||
var _boneTexture; | ||
(_boneTexture = boneTexture) == null ? void 0 : _boneTexture.dispose(); | ||
const boneMatrices = object.skeleton.boneTexture.image.data.slice(); | ||
const size = object.skeleton.boneTexture.image.width; | ||
boneTexture = new DataTexture(boneMatrices, size, size, RGBAFormat, FloatType); | ||
object.material.uniforms.prevBoneTexture.value = boneTexture; | ||
boneTexture.needsUpdate = true; | ||
} | ||
} | ||
unsetVelocityMaterialInScene() { | ||
for (const c of this.visibleMeshes) { | ||
if (c.material.isMeshVelocityMaterial) { | ||
var _c$skeleton3; | ||
c.visible = true; | ||
c.material.lastMatrixWorld.copy(c.matrixWorld); | ||
c.material.uniforms.prevVelocityMatrix.value.multiplyMatrices(this._camera.projectionMatrix, c.modelViewMatrix); | ||
if ((_c$skeleton3 = c.skeleton) != null && _c$skeleton3.boneTexture) this.saveBoneTexture(c); | ||
c.material = this.cachedMaterials.get(c)[0]; | ||
} | ||
} | ||
} | ||
setSize(width, height) { | ||
if (width === _classPrivateFieldLooseBase(this, _lastSize)[_lastSize].width && height === _classPrivateFieldLooseBase(this, _lastSize)[_lastSize].height && this.resolutionScale === _classPrivateFieldLooseBase(this, _lastSize)[_lastSize].resolutionScale) return; | ||
this.temporalResolvePass.setSize(width, height); | ||
this.reflectionsPass.setSize(width, height); | ||
_classPrivateFieldLooseBase(this, _lastSize)[_lastSize] = { | ||
width, | ||
height, | ||
resolutionScale: this.resolutionScale | ||
}; | ||
this.renderTarget.setSize(width, height); | ||
} | ||
checkNeedsResample() { | ||
const moveDist = _classPrivateFieldLooseBase(this, _lastCameraTransform)[_lastCameraTransform].position.distanceToSquared(this._camera.position); | ||
renderVelocity(renderer) { | ||
renderer.setRenderTarget(this.renderTarget); | ||
const rotateDist = 8 * (1 - _classPrivateFieldLooseBase(this, _lastCameraTransform)[_lastCameraTransform].quaternion.dot(this._camera.quaternion)); | ||
if (this.renderedMeshesThisFrame > 0) { | ||
const { | ||
background | ||
} = this._scene; | ||
this._scene.background = backgroundColor; | ||
renderer.render(this._scene, this._camera); | ||
this._scene.background = background; | ||
} else { | ||
renderer.clearColor(); | ||
} | ||
} | ||
checkCameraMoved() { | ||
const moveDist = this.lastCameraTransform.position.distanceToSquared(this._camera.position); | ||
const rotateDist = 8 * (1 - this.lastCameraTransform.quaternion.dot(this._camera.quaternion)); | ||
if (moveDist > 0.000001 || rotateDist > 0.000001) { | ||
this.samples = 1; | ||
this.lastCameraTransform.position.copy(this._camera.position); | ||
this.lastCameraTransform.quaternion.copy(this._camera.quaternion); | ||
return true; | ||
} | ||
_classPrivateFieldLooseBase(this, _lastCameraTransform)[_lastCameraTransform].position.copy(this._camera.position); | ||
return false; | ||
} | ||
_classPrivateFieldLooseBase(this, _lastCameraTransform)[_lastCameraTransform].quaternion.copy(this._camera.quaternion); | ||
render(renderer) { | ||
this.cameraMovedThisFrame = this.checkCameraMoved(); | ||
this.setVelocityMaterialInScene(); | ||
if (this.renderedMeshesThisFrame > 0 || this.renderedMeshesLastFrame > 0) this.renderVelocity(renderer); | ||
this.unsetVelocityMaterialInScene(); | ||
this.renderedMeshesLastFrame = this.renderedMeshesThisFrame; | ||
} | ||
} | ||
const zeroVec2 = new Vector2(); | ||
class TemporalResolvePass extends Pass { | ||
constructor(scene, camera, customComposeShader, options = {}) { | ||
super("TemporalResolvePass"); | ||
this.velocityPass = null; | ||
this.velocityResolutionScale = 1; | ||
this._scene = scene; | ||
    const width = options.width || (typeof window !== "undefined" ? window.innerWidth : 2000); | ||
    const height = options.height || (typeof window !== "undefined" ? window.innerHeight : 1000); | ||
this.renderTarget = new WebGLRenderTarget(width, height, { | ||
minFilter: LinearFilter, | ||
magFilter: LinearFilter, | ||
type: HalfFloatType, | ||
depthBuffer: false | ||
}); | ||
this.velocityPass = new VelocityPass(scene, camera); | ||
const fragmentShader = temporalResolve.replace("#include <custom_compose_shader>", customComposeShader); | ||
this.fullscreenMaterial = new ShaderMaterial({ | ||
type: "TemporalResolveMaterial", | ||
uniforms: { | ||
inputTexture: new Uniform(null), | ||
accumulatedTexture: new Uniform(null), | ||
velocityTexture: new Uniform(this.velocityPass.renderTarget.texture), | ||
lastVelocityTexture: new Uniform(null), | ||
depthTexture: new Uniform(null), | ||
temporalResolveMix: new Uniform(0), | ||
temporalResolveCorrection: new Uniform(0), | ||
colorExponent: new Uniform(1), | ||
invTexSize: new Uniform(new Vector2()) | ||
}, | ||
defines: { | ||
CLAMP_RADIUS: 1 | ||
}, | ||
vertexShader, | ||
fragmentShader | ||
}); | ||
this.fullscreenMaterial.defines.DILATION = ""; | ||
if (!scene.userData.velocityTexture) { | ||
scene.userData.velocityTexture = this.velocityPass.renderTarget.texture; | ||
} | ||
this.setupAccumulatedTexture(width, height); | ||
} | ||
dispose() { | ||
super.dispose(); | ||
this.reflectionsPass.dispose(); | ||
this.temporalResolvePass.dispose(); | ||
if (this._scene.userData.velocityTexture === this.velocityPass.renderTarget.texture) { | ||
delete this._scene.userData.velocityTexture; | ||
} | ||
this.renderTarget.dispose(); | ||
this.accumulatedTexture.dispose(); | ||
this.fullscreenMaterial.dispose(); | ||
this.velocityPass.dispose(); | ||
} | ||
update(renderer, inputBuffer) { | ||
this.samples++; | ||
this.checkNeedsResample(); // update uniforms | ||
setSize(width, height) { | ||
this.renderTarget.setSize(width, height); | ||
this.velocityPass.setSize(width * this.velocityResolutionScale, height * this.velocityResolutionScale); | ||
this.fullscreenMaterial.uniforms.invTexSize.value.set(1 / width, 1 / height); | ||
this.setupAccumulatedTexture(width, height); | ||
} | ||
this.uniforms.get("samples").value = this.samples; // render reflections of current frame | ||
setupAccumulatedTexture(width, height) { | ||
if (this.accumulatedTexture) this.accumulatedTexture.dispose(); | ||
if (this.lastVelocityTexture) this.lastVelocityTexture.dispose(); | ||
this.accumulatedTexture = new FramebufferTexture(width, height, RGBAFormat); | ||
this.accumulatedTexture.minFilter = LinearFilter; | ||
this.accumulatedTexture.magFilter = LinearFilter; | ||
this.accumulatedTexture.type = HalfFloatType; | ||
this.lastVelocityTexture = new FramebufferTexture(width * this.velocityResolutionScale, height * this.velocityResolutionScale, RGBAFormat); | ||
this.lastVelocityTexture.minFilter = LinearFilter; | ||
this.lastVelocityTexture.magFilter = LinearFilter; | ||
this.lastVelocityTexture.type = HalfFloatType; | ||
this.fullscreenMaterial.uniforms.accumulatedTexture.value = this.accumulatedTexture; | ||
this.fullscreenMaterial.uniforms.lastVelocityTexture.value = this.lastVelocityTexture; | ||
this.fullscreenMaterial.needsUpdate = true; | ||
} | ||
this.reflectionsPass.render(renderer, inputBuffer); // compose reflection of last and current frame into one reflection | ||
render(renderer) { | ||
this.velocityPass.render(renderer); | ||
renderer.setRenderTarget(this.renderTarget); | ||
renderer.render(this.scene, this.camera); // save the render target's texture for use in next frame | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.samples.value = this.samples; | ||
this.temporalResolvePass.render(renderer); | ||
renderer.copyFramebufferToTexture(zeroVec2, this.accumulatedTexture); | ||
renderer.setRenderTarget(this.velocityPass.renderTarget); | ||
renderer.copyFramebufferToTexture(zeroVec2, this.lastVelocityTexture); | ||
} | ||
@@ -888,140 +846,222 @@ | ||
function _makeOptionsReactive2(options) { | ||
// this can't be toggled during run-time | ||
if (options.ENABLE_BLUR) { | ||
this.uniforms.get("depthTexture").value = this.reflectionsPass.depthTexture; | ||
this.defines.set("ENABLE_BLUR", ""); | ||
this.reflectionsPass.fullscreenMaterial.defines.ENABLE_BLUR = ""; | ||
const finalFragmentShader = finalSSRShader.replace("#include <helperFunctions>", helperFunctions).replace("#include <boxBlur>", boxBlur); // all the properties for which we don't have to resample | ||
const noResetSamplesProperties = ["blurMix", "blurSharpness", "blurKernelSize"]; | ||
class SSREffect extends Effect { | ||
/** | ||
* @param {THREE.Scene} scene The scene of the SSR effect | ||
* @param {THREE.Camera} camera The camera with which SSR is being rendered | ||
* @param {SSROptions} [options] The optional options for the SSR effect | ||
*/ | ||
constructor(scene, camera, options = defaultSSROptions) { | ||
super("SSREffect", finalFragmentShader, { | ||
type: "FinalSSRMaterial", | ||
uniforms: new Map([["inputTexture", new Uniform(null)], ["reflectionsTexture", new Uniform(null)], ["samples", new Uniform(0)], ["blurMix", new Uniform(0)], ["blurSharpness", new Uniform(0)], ["blurKernelSize", new Uniform(0)]]), | ||
defines: new Map([["RENDER_MODE", "0"]]) | ||
}); | ||
this.samples = 0; | ||
this.selection = new Selection(); | ||
this.lastSize = void 0; | ||
this.lastCameraTransform = { | ||
position: new Vector3(), | ||
quaternion: new Quaternion() | ||
}; | ||
this._scene = scene; | ||
this._camera = camera; | ||
options = _extends({}, defaultSSROptions, options); // set up passes | ||
// temporal resolve pass | ||
this.temporalResolvePass = new TemporalResolvePass(scene, camera, "", options); | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.samples = new Uniform(0); | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.colorExponent = new Uniform(1); | ||
this.temporalResolvePass.fullscreenMaterial.defines.EULER = 2.718281828459045; | ||
this.temporalResolvePass.fullscreenMaterial.defines.FLOAT_EPSILON = 0.00001; | ||
this.uniforms.get("reflectionsTexture").value = this.temporalResolvePass.renderTarget.texture; // reflections pass | ||
this.reflectionsPass = new ReflectionsPass(this, options); | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.inputTexture.value = this.reflectionsPass.renderTarget.texture; | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.depthTexture.value = this.reflectionsPass.depthTexture; | ||
this.lastSize = { | ||
width: options.width, | ||
height: options.height, | ||
resolutionScale: options.resolutionScale, | ||
velocityResolutionScale: options.velocityResolutionScale | ||
}; | ||
this.lastCameraTransform.position.copy(camera.position); | ||
this.lastCameraTransform.quaternion.copy(camera.quaternion); | ||
this.setSize(options.width, options.height); | ||
this.makeOptionsReactive(options); | ||
} | ||
const dpr = window.devicePixelRatio; | ||
let needsUpdate = false; | ||
const reflectionPassFullscreenMaterialUniforms = this.reflectionsPass.fullscreenMaterial.uniforms; | ||
const reflectionPassFullscreenMaterialUniformsKeys = Object.keys(reflectionPassFullscreenMaterialUniforms); | ||
makeOptionsReactive(options) { | ||
const dpr = window.devicePixelRatio; | ||
let needsUpdate = false; | ||
const reflectionPassFullscreenMaterialUniforms = this.reflectionsPass.fullscreenMaterial.uniforms; | ||
const reflectionPassFullscreenMaterialUniformsKeys = Object.keys(reflectionPassFullscreenMaterialUniforms); | ||
for (const key of Object.keys(options)) { | ||
Object.defineProperty(this, key, { | ||
get() { | ||
return options[key]; | ||
}, | ||
for (const key of Object.keys(options)) { | ||
Object.defineProperty(this, key, { | ||
get() { | ||
return options[key]; | ||
}, | ||
set(value) { | ||
if (options[key] === value && needsUpdate) return; | ||
options[key] = value; | ||
if (!noResetSamplesProperties.includes(key)) this.samples = 1; | ||
set(value) { | ||
if (options[key] === value && needsUpdate) return; | ||
options[key] = value; | ||
switch (key) { | ||
case "resolutionScale": | ||
this.setSize(options.width, options.height); | ||
break; | ||
if (!noResetSamplesProperties.includes(key)) { | ||
this.samples = 0; | ||
this.setSize(options.width, options.height, true); | ||
} | ||
case "width": | ||
if (value === undefined) return; | ||
this.setSize(value * dpr, options.height); | ||
this.uniforms.get("g_InvResolutionDirection").value.set(1 / (value * dpr), 1 / options.height); | ||
break; | ||
switch (key) { | ||
case "resolutionScale": | ||
this.setSize(options.width, options.height); | ||
break; | ||
case "height": | ||
if (value === undefined) return; | ||
this.setSize(options.width, value * dpr); | ||
this.uniforms.get("g_InvResolutionDirection").value.set(1 / options.width, 1 / (value * dpr)); | ||
break; | ||
case "velocityResolutionScale": | ||
this.temporalResolvePass.velocityResolutionScale = value; | ||
this.setSize(options.width, options.height, true); | ||
break; | ||
case "maxSamples": | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.maxSamples.value = this.maxSamples; | ||
break; | ||
case "width": | ||
if (value === undefined) return; | ||
this.setSize(value * dpr, options.height); | ||
break; | ||
case "blurMix": | ||
this.uniforms.get("blurMix").value = value; | ||
break; | ||
case "height": | ||
if (value === undefined) return; | ||
this.setSize(options.width, value * dpr); | ||
break; | ||
case "blurSharpness": | ||
this.uniforms.get("g_Sharpness").value = value; | ||
break; | ||
case "blurMix": | ||
this.uniforms.get("blurMix").value = value; | ||
break; | ||
case "blurKernelSize": | ||
this.uniforms.get("kernelRadius").value = value; | ||
break; | ||
// defines | ||
case "blurSharpness": | ||
this.uniforms.get("blurSharpness").value = value; | ||
break; | ||
case "MAX_STEPS": | ||
this.reflectionsPass.fullscreenMaterial.defines.MAX_STEPS = parseInt(value); | ||
this.reflectionsPass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
case "blurKernelSize": | ||
this.uniforms.get("blurKernelSize").value = value; | ||
break; | ||
// defines | ||
case "NUM_BINARY_SEARCH_STEPS": | ||
this.reflectionsPass.fullscreenMaterial.defines.NUM_BINARY_SEARCH_STEPS = parseInt(value); | ||
this.reflectionsPass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
case "MAX_STEPS": | ||
this.reflectionsPass.fullscreenMaterial.defines.MAX_STEPS = parseInt(value); | ||
this.reflectionsPass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
case "ENABLE_JITTERING": | ||
if (value) { | ||
this.reflectionsPass.fullscreenMaterial.defines.ENABLE_JITTERING = ""; | ||
} else { | ||
delete this.reflectionsPass.fullscreenMaterial.defines.ENABLE_JITTERING; | ||
} | ||
case "NUM_BINARY_SEARCH_STEPS": | ||
this.reflectionsPass.fullscreenMaterial.defines.NUM_BINARY_SEARCH_STEPS = parseInt(value); | ||
this.reflectionsPass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
this.reflectionsPass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
case "ALLOW_MISSED_RAYS": | ||
if (value) { | ||
this.reflectionsPass.fullscreenMaterial.defines.ALLOW_MISSED_RAYS = ""; | ||
} else { | ||
delete this.reflectionsPass.fullscreenMaterial.defines.ALLOW_MISSED_RAYS; | ||
} | ||
case "STRETCH_MISSED_RAYS": | ||
if (value) { | ||
this.reflectionsPass.fullscreenMaterial.defines.STRETCH_MISSED_RAYS = ""; | ||
} else { | ||
delete this.reflectionsPass.fullscreenMaterial.defines.STRETCH_MISSED_RAYS; | ||
} | ||
this.reflectionsPass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
this.reflectionsPass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
case "CLAMP_RADIUS": | ||
this.temporalResolvePass.fullscreenMaterial.defines.CLAMP_RADIUS = Math.round(value); | ||
this.temporalResolvePass.fullscreenMaterial.needsUpdate = needsUpdate; | ||
break; | ||
case "USE_NORMALMAP": | ||
case "USE_ROUGHNESSMAP": | ||
break; | ||
case "temporalResolve": | ||
const composeShader = value ? trCompose : accumulatedCompose; | ||
let fragmentShader = temporalResolve; // if we are not using temporal reprojection, then cut out the part that's doing the reprojection | ||
case "temporalResolve": | ||
const composeShader = value ? customTRComposeShader : customBasicComposeShader; | ||
let fragmentShader = temporalResolve; // if we are not using temporal reprojection, then cut out the part that's doing the reprojection | ||
if (!value) { | ||
const removePart = fragmentShader.slice(fragmentShader.indexOf("// REPROJECT_START"), fragmentShader.indexOf("// REPROJECT_END") + "// REPROJECT_END".length); | ||
fragmentShader = temporalResolve.replace(removePart, ""); | ||
} | ||
if (!value) { | ||
const removePart = fragmentShader.slice(fragmentShader.indexOf("// REPROJECT_START"), fragmentShader.indexOf("// REPROJECT_END") + "// REPROJECT_END".length); | ||
fragmentShader = temporalResolve.replace(removePart, ""); | ||
} | ||
fragmentShader = fragmentShader.replace("#include <custom_compose_shader>", composeShader); | ||
fragmentShader = | ||
/* glsl */ | ||
` | ||
fragmentShader = fragmentShader.replace("#include <custom_compose_shader>", composeShader); | ||
fragmentShader = | ||
/* glsl */ | ||
` | ||
uniform float samples; | ||
uniform float maxSamples; | ||
uniform float temporalResolveMix; | ||
` + fragmentShader; | ||
this.temporalResolvePass.fullscreenMaterial.fragmentShader = fragmentShader; | ||
this.temporalResolvePass.fullscreenMaterial.needsUpdate = true; | ||
break; | ||
this.temporalResolvePass.fullscreenMaterial.fragmentShader = fragmentShader; | ||
this.temporalResolvePass.fullscreenMaterial.needsUpdate = true; | ||
break; | ||
case "temporalResolveMix": | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.temporalResolveMix.value = value; | ||
break; | ||
case "temporalResolveMix": | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.temporalResolveMix.value = value; | ||
break; | ||
case "temporalResolveCorrectionMix": | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.temporalResolveCorrectionMix.value = value; | ||
break; | ||
// must be a uniform | ||
case "temporalResolveCorrection": | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.temporalResolveCorrection.value = value; | ||
break; | ||
default: | ||
if (reflectionPassFullscreenMaterialUniformsKeys.includes(key)) { | ||
reflectionPassFullscreenMaterialUniforms[key].value = value; | ||
} | ||
case "colorExponent": | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.colorExponent.value = value; | ||
break; | ||
// must be a uniform | ||
default: | ||
if (reflectionPassFullscreenMaterialUniformsKeys.includes(key)) { | ||
reflectionPassFullscreenMaterialUniforms[key].value = value; | ||
} | ||
} | ||
} | ||
} | ||
}); // apply all uniforms and defines | ||
}); // apply all uniforms and defines | ||
this[key] = options[key]; | ||
this[key] = options[key]; | ||
} | ||
needsUpdate = true; | ||
} | ||
needsUpdate = true; | ||
setSize(width, height, force = false) { | ||
if (!force && width === this.lastSize.width && height === this.lastSize.height && this.resolutionScale === this.lastSize.resolutionScale && this.velocityResolutionScale === this.lastSize.velocityResolutionScale) return; | ||
this.temporalResolvePass.setSize(width, height); | ||
this.reflectionsPass.setSize(width, height); | ||
this.lastSize = { | ||
width, | ||
height, | ||
resolutionScale: this.resolutionScale, | ||
velocityResolutionScale: this.velocityResolutionScale | ||
}; | ||
} | ||
checkNeedsResample() { | ||
const moveDist = this.lastCameraTransform.position.distanceToSquared(this._camera.position); | ||
const rotateDist = 8 * (1 - this.lastCameraTransform.quaternion.dot(this._camera.quaternion)); | ||
if (moveDist > 0.000001 || rotateDist > 0.000001) { | ||
this.samples = 1; | ||
this.lastCameraTransform.position.copy(this._camera.position); | ||
this.lastCameraTransform.quaternion.copy(this._camera.quaternion); | ||
} | ||
} | ||
dispose() { | ||
super.dispose(); | ||
this.reflectionsPass.dispose(); | ||
this.temporalResolvePass.dispose(); | ||
} | ||
update(renderer, inputBuffer) { | ||
this.samples++; | ||
this.checkNeedsResample(); // update uniforms | ||
this.uniforms.get("samples").value = this.samples; // render reflections of current frame | ||
this.reflectionsPass.render(renderer, inputBuffer); // compose reflection of last and current frame into one reflection | ||
this.temporalResolvePass.fullscreenMaterial.uniforms.samples.value = this.samples; | ||
this.temporalResolvePass.render(renderer); | ||
} | ||
} | ||
export { SSREffect, defaultSSROptions }; |
@@ -10,3 +10,4 @@ { | ||
"dev": "echo no tests yet", | ||
"prepare": "husky install" | ||
"prepare": "husky install", | ||
"markdown": "npx jsdoc2md src/SSROptions.js" | ||
}, | ||
@@ -30,2 +31,3 @@ "keywords": [ | ||
"@babel/plugin-proposal-class-properties": "^7.18.6", | ||
"@babel/plugin-proposal-optional-chaining": "^7.18.9", | ||
"@babel/plugin-transform-runtime": "^7.18.6", | ||
@@ -43,2 +45,3 @@ "@babel/preset-env": "^7.18.6", | ||
"husky": ">=6", | ||
"jsdoc-to-markdown": "^7.1.1", | ||
"lint-staged": ">=10", | ||
@@ -71,3 +74,3 @@ "prettier": "^2.7.1", | ||
}, | ||
"version": "2.1.1", | ||
"version": "2.1.2", | ||
"lint-staged": { | ||
@@ -74,0 +77,0 @@ "*.{js,css,md}": "prettier --write" |
124
readme.md
@@ -58,17 +58,16 @@ # three.js Screen Space Reflections | ||
temporalResolveMix: 0.9, | ||
temporalResolveCorrectionMix: 1, | ||
maxSamples: 256, | ||
temporalResolveCorrection: 1, | ||
resolutionScale: 1, | ||
width: typeof window !== "undefined" ? window.innerWidth : 2000, | ||
velocityResolutionScale: 1, | ||
width: typeof window !== "undefined" ? window.innerWidth : 1000, | ||
height: typeof window !== "undefined" ? window.innerHeight : 1000, | ||
ENABLE_BLUR: false, | ||
blurMix: 0.5, | ||
blurKernelSize: 8, | ||
blurSharpness: 0.5, | ||
rayStep: 0.1, | ||
blurSharpness: 10, | ||
blurKernelSize: 1, | ||
rayDistance: 10, | ||
intensity: 1, | ||
maxRoughness: 0.1, | ||
ENABLE_JITTERING: false, | ||
jitter: 0.1, | ||
jitterSpread: 0.1, | ||
colorExponent: 1, | ||
maxRoughness: 1, | ||
jitter: 0, | ||
jitterSpread: 0, | ||
jitterRough: 0, | ||
@@ -79,7 +78,7 @@ roughnessFadeOut: 1, | ||
NUM_BINARY_SEARCH_STEPS: 5, | ||
maxDepthDifference: 3, | ||
maxDepth: 1, | ||
maxDepthDifference: 10, | ||
thickness: 10, | ||
ior: 1.45, | ||
STRETCH_MISSED_RAYS: true, | ||
CLAMP_RADIUS: 1, | ||
ALLOW_MISSED_RAYS: true, | ||
USE_MRT: true, | ||
@@ -95,57 +94,33 @@ USE_NORMALMAP: true, | ||
<summary>Description of the properties</summary> | ||
| Name | Type | | Description | | ||
| --- | --- | --- | --- | | ||
| temporalResolve | <code>boolean</code> | | whether you want to use Temporal Resolving to re-use reflections from the last frames; this will reduce noise tremendously but may result in "smearing" | | ||
| temporalResolveMix | <code>Number</code> | | a value between 0 and 1 to set how much the last frame's reflections should be blended in; higher values will result in less noisy reflections when moving the camera but a more smeary look | | ||
| resolutionScale | <code>Number</code> | | resolution of the SSR effect, a resolution of 0.5 means the effect will be rendered at half resolution | | ||
| velocityResolutionScale | <code>Number</code> | | resolution of the velocity buffer, a resolution of 0.5 means velocity will be rendered at half resolution | | ||
| width | <code>Number</code> | | width of the SSREffect | | ||
| height | <code>Number</code> | | height of the SSREffect | | ||
| blurMix | <code>Number</code> | | how much the blurred reflections should be mixed with the raw reflections | | ||
| blurSharpness | <code>Number</code> | | exponent of the Box Blur filter; higher values will result in more sharpness | | ||
| blurKernelSize | <code>Number</code> | | kernel size of the Box Blur Filter; higher kernel sizes will result in blurrier reflections with more artifacts | | ||
| rayDistance | <code>Number</code> | | maximum distance a reflection ray can travel to find what it reflects | | ||
| intensity | <code>Number</code> | | intensity of the reflections | | ||
| colorExponent | <code>Number</code> | | exponent by which reflections will be potentiated when composing the current frame's reflections and the accumulated reflections into a final reflection; higher values will make reflections clearer by highlighting darker spots less | | ||
| maxRoughness | <code>Number</code> | | maximum roughness a texel can have to have reflections calculated for it | | ||
| jitter | <code>Number</code> | | how intense jittering should be | | ||
| jitterSpread | <code>Number</code> | | how much the jittered rays should be spread; higher values will give a rougher look regarding the reflections but are more expensive to compute with | | ||
| jitterRough | <code>Number</code> | | how intense jittering should be in relation to a material's roughness | | ||
| roughnessFadeOut | <code>Number</code> | | how intense reflections should be on rough spots; a higher value will make reflections fade out quicker on rough spots | | ||
| rayFadeOut | <code>Number</code> | | how much reflections will fade out by distance | | ||
| MAX_STEPS | <code>Number</code> | | number of steps a reflection ray can maximally do to find an object it intersected (and thus reflects) | | ||
| NUM_BINARY_SEARCH_STEPS | <code>Number</code> | | once we had our ray intersect something, we need to find the exact point in space it intersected and thus it reflects; this can be done through binary search with the given number of maximum steps | | ||
| maxDepthDifference | <code>Number</code> | | maximum depth difference between a ray and the particular depth at its screen position after refining with binary search; higher values will result in better performance | | ||
| thickness | <code>Number</code> | | maximum depth difference between a ray and the particular depth at its screen position before refining with binary search; higher values will result in better performance | | ||
| ior | <code>Number</code> | | Index of Refraction, used for calculating fresnel; reflections tend to be more intense the steeper the angle between them and the viewer is, the ior parameter sets how much the intensity varies | | ||
| CLAMP_RADIUS | <code>Number</code> | | how many surrounding pixels will be used for neighborhood clamping; a higher value can reduce noise when moving the camera but will result in less performance | | ||
| ALLOW_MISSED_RAYS | <code>boolean</code> | | if there should still be reflections for rays for which a reflecting point couldn't be found; enabling this will result in stretched looking reflections which can look good or bad depending on the angle | | ||
| USE_MRT | <code>boolean</code> | | WebGL2 only - whether to use multiple render targets when rendering the G-buffers (normals, depth and roughness); using them can improve performance as they will render all information to multiple buffers for each fragment in one run; this setting can't be changed during run-time | | ||
| USE_NORMALMAP | <code>boolean</code> | | whether normal maps should be taken into account when calculating reflections | | ||
| USE_ROUGHNESSMAP | <code>boolean</code> | | whether roughness maps should be taken into account when calculating reflections | | ||
- `width`: width of the SSREffect | ||
- `height`: height of the SSREffect | ||
- `temporalResolve`: whether you want to use Temporal Resolving to re-use reflections from the last frames; this will reduce noise tremendously but may result in "smearing" | ||
- `temporalResolveMix`: a value between 0 and 1 to set how much the last frame's reflections should be blended in; higher values will result in less noisy reflections when moving the camera but a more smeary look | ||
- `temporalResolveCorrectionMix`: a value between 0 and 1 to set how much the reprojected reflection should be corrected; higher values will reduce smearing but can result in more flickering at reflection edges | ||
- `maxSamples`: the maximum number of samples for reflections; setting it to 0 means unlimited samples; setting it to a value like 6 can help make camera movements less disruptive when calculating reflections | ||
- `ENABLE_BLUR`: whether to blur the reflections and blend these blurred reflections with the raw ones depending on the blurMix value | ||
- `blurMix`: how much the blurred reflections should be mixed with the raw reflections | ||
- `blurSharpness`: the sharpness of the Bilateral Filter used to blur reflections | ||
- `blurKernelSize`: the kernel size of the Bilateral Blur Filter; higher kernel sizes will result in blurrier reflections with more artifacts | ||
- `rayStep`: how much the reflection ray should travel in each of its iteration; higher values will give deeper reflections but with more artifacts | ||
- `intensity`: the intensity of the reflections | ||
- `maxRoughness`: the maximum roughness a texel can have to have reflections calculated for it | ||
- `ENABLE_JITTERING`: whether jittering is enabled; jittering will randomly jitter the reflections resulting in a more noisy but overall more realistic look, enabling jittering can be expensive depending on the view angle | ||
- `jitter`: how intense jittering should be | ||
- `jitterSpread`: how much the jittered rays should be spread; higher values will give a rougher look regarding the reflections but are more expensive to compute with | ||
- `jitterRough`: how intense jittering should be in relation to a material's roughness | ||
- `MAX_STEPS`: the number of steps a reflection ray can maximally do to find an object it intersected (and thus reflects) | ||
- `NUM_BINARY_SEARCH_STEPS`: once we had our ray intersect something, we need to find the exact point in space it intersected and thus it reflects; this can be done through binary search with the given number of maximum steps | ||
- `maxDepthDifference`: the maximum depth difference between a ray and the particular depth at its screen position after refining with binary search; lower values will result in better performance | ||
- `maxDepth`: the maximum depth for which reflections will be calculated | ||
- `thickness`: the maximum depth difference between a ray and the particular depth at its screen position before refining with binary search; lower values will result in better performance | ||
- `ior`: Index of Refraction, used for calculating fresnel; reflections tend to be more intense the steeper the angle between them and the viewer is; the ior parameter sets how much the intensity varies | ||
- `STRETCH_MISSED_RAYS`: if there should still be reflections for rays for which a reflecting point couldn't be found; enabling this will result in stretched looking reflections which can look good or bad depending on the angle | ||
- `USE_MRT`: WebGL2 only - whether to use multiple render targets when rendering the G-buffers (normals, depth and roughness); using them can improve performance as they will render all information to multiple buffers for each fragment in one run; this setting can't be changed during run-time | ||
- `USE_ROUGHNESSMAP`: if roughness maps should be taken account of when calculating reflections | ||
- `USE_NORMALMAP`: if normal maps should be taken account of when calculating reflections | ||
</details> | ||
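A minimal setup sketch using a few of these options with the `postprocessing` composer (the option values below are purely illustrative; anything you leave out falls back to the defaults listed above):

```js
import { EffectComposer, RenderPass, EffectPass } from "postprocessing"
import { SSREffect } from "screen-space-reflections"

const composer = new EffectComposer(renderer)
composer.addPass(new RenderPass(scene, camera))

// create the effect with a few overrides and add it as an effect pass
const ssrEffect = new SSREffect(scene, camera, {
  temporalResolveMix: 0.9,
  blurMix: 0.5,
  MAX_STEPS: 20,
  intensity: 1
})

composer.addPass(new EffectPass(camera, ssrEffect))
```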
@@ -155,3 +130,3 @@ | ||
Since the right options for an SSR effect depend a lot on the scene, it can happen that you don't seem to have an effect at all in your scene when you use the SSR effect for the first time in it without any configuration. This can have multiple causes such as `rayStep` being way too low for your scene for example. So to find out which SSR options are right for your scene, you should use a GUI to find the right values easily. | ||
Since the right options for an SSR effect depend a lot on the scene, it can happen that you don't seem to get any effect at all when you use the SSR effect in a scene for the first time without any configuration. This can have multiple causes, such as `rayDistance` being way too low for your scene. So to find out which SSR options are right for your scene, you should use a GUI to find the right values easily. | ||
The [example](https://github.com/0beqz/screen-space-reflections/tree/main/example) already comes with a simple one-file GUI [`SSRDebugGUI.js`](https://github.com/0beqz/screen-space-reflections/blob/main/example/SSRDebugGUI.js) that you can use in your project like so: | ||
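For example (a sketch only; the exact constructor signature of `SSRDebugGUI` may differ, so check the example's source):

```js
import { SSRDebugGUI } from "./SSRDebugGUI"

// assuming ssrEffect is the SSREffect instance created earlier,
// this exposes its reactive options as GUI controls
const gui = new SSRDebugGUI(ssrEffect)
```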
@@ -194,3 +169,3 @@ | ||
- Early out cases to compute only possible reflections and boost performance | ||
- Using an edge-preserving bilateral blur filter to keep details while blurring noise | ||
- Box Blur to reduce noise | ||
@@ -201,5 +176,4 @@ ## What's new in v2 | ||
- Implemented accumulative sampling by saving and re-using the last frame's reflections to accumulate especially jittered reflections over frames | ||
- Made all SSR-related options (e.g. `thickness`, `ior`, `rayStep`,...) reactive so that you now just need to set `ssrEffect.rayStep = value` for example to update values | ||
- Made all SSR-related options (e.g. `thickness`, `ior`, `rayDistance`,...) reactive so that you now just need to set `ssrEffect.rayDistance = value` for example to update values | ||
- Fixed jittering so that it's actually correct from all angles (it used to be less intense the higher you were looking down at a reflection) | ||
- Removed Kawase Blur in favor of Bilateral Blur to preserve edges and keep details as the blur method of the SSR effect | ||
- Changed the SSR implementation from a pass to an effect to improve performance | ||
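Since the options are reactive, they can now be tuned at runtime directly on the effect instance; changing most of them also resets the accumulated samples so the result updates right away. A small sketch (values are only illustrative):

```js
// each assignment updates the underlying uniform or define immediately
ssrEffect.rayDistance = 25
ssrEffect.jitter = 0.1
ssrEffect.MAX_STEPS = 40
```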
@@ -224,4 +198,3 @@ - Optimizations regarding computation of required buffers and reflections | ||
- increase `maxDepthDifference` | ||
- increase `maxDepth` or set it directly to 1 | ||
- decrease `rayStep` and increase `MAX_STEPS` if reflections are cutting off now | ||
- decrease `rayDistance` and increase `MAX_STEPS` if reflections are cutting off now | ||
- increase `NUM_BINARY_SEARCH_STEPS` | ||
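A sketch of what such a tweak could look like (the exact numbers depend entirely on your scene):

```js
ssrEffect.maxDepthDifference = 20
ssrEffect.maxDepth = 1
ssrEffect.rayDistance = 5            // shorter rays...
ssrEffect.MAX_STEPS = 40             // ...but more steps so reflections don't cut off
ssrEffect.NUM_BINARY_SEARCH_STEPS = 8
```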
@@ -267,3 +240,2 @@ | ||
- [ ] Reprojection: support skinned meshes | ||
- [ ] Proper upsampling to still get quality reflections when using half-res buffers | ||
@@ -279,3 +251,3 @@ | ||
- Bilateral Blur Filter: [gl_ssao](https://github.com/nvpro-samples/gl_ssao/blob/master/bilateralblur.frag.glsl) | ||
- Box Blur filter: [glfx.js](https://github.com/evanw/glfx.js) | ||
@@ -282,0 +254,0 @@ - Video texture: [Uzunov Rostislav](https://www.pexels.com/@rostislav/) |