three#UniformsLib JavaScript Examples
The following examples show how to use three#UniformsLib. Follow the Source File link above each example to view it in its original project.
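UniformsLib exposes the named uniform groups (common, lights, fog, and so on) that three.js assembles its built-in materials from; a custom ShaderMaterial can reuse them by merging the groups it needs with its own uniforms. Below is a minimal, hypothetical sketch of that pattern (the shader strings and the custom time uniform are placeholders, not taken from the projects listed here):

import { ShaderMaterial, UniformsLib, UniformsUtils } from 'three';

// UniformsUtils.merge() deep-clones every group, so the material gets its own copies.
const uniforms = UniformsUtils.merge( [
	UniformsLib.common,
	UniformsLib.fog,
	{ time: { value: 0 } } // custom uniform, illustrative only
] );

const material = new ShaderMaterial( {
	uniforms: uniforms,
	vertexShader: 'void main() { gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); }',
	fragmentShader: 'uniform float opacity; void main() { gl_FragColor = vec4( 1.0, 1.0, 1.0, opacity ); }',
	fog: true // lets the renderer keep the merged fog uniforms up to date
} );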
Example #1
Source File: LineMaterial.js From BlueMapWeb with MIT License | 6 votes |
/**
* parameters = {
* color: <hex>,
* linewidth: <float>,
* dashed: <boolean>,
* dashScale: <float>,
* dashSize: <float>,
* gapSize: <float>,
* resolution: <Vector2>, // to be set by renderer
* }
*/
UniformsLib.line = {
linewidth: { value: 1 },
resolution: { value: new Vector2( 1, 1 ) },
dashScale: { value: 1 },
dashSize: { value: 1 },
gapSize: { value: 1 },
opacity: { value: 1 }
};
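The comment block above lists the options accepted by the LineMaterial that is built on these uniforms. A hedged usage sketch, assuming the LineMaterial class defined later in this same file follows the stock three.js pattern:

// Hypothetical usage; the values are illustrative.
const material = new LineMaterial( {
	color: 0xff0000,
	linewidth: 5, // in pixels
	dashed: false,
	resolution: new Vector2( window.innerWidth, window.innerHeight ) // normally kept in sync by the app or renderer
} );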
Example #2
Source File: QuadTextureMaterial.js From map33.js with MIT License | 6 votes |
QuadTextureMaterial = (urls) => {
return Promise.all(urls.map(url => loader.loadAsync(url))).then(maps => {
return new ShaderMaterial({
uniforms: {
mapNW: {value: maps[0]},
mapSW: {value: maps[1]},
mapNE: {value: maps[2]},
mapSE: {value: maps[3]},
...UniformsLib.common,
...UniformsLib.lights,
...UniformsLib.fog,
},
vertexShader,
fragmentShader,
defines: {
USE_MAP: true,
USE_UV: true,
},
lights: true,
fog: true,
})
})
}
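QuadTextureMaterial resolves asynchronously because the four tile textures are loaded first, so the material is consumed inside the returned promise. A hedged usage sketch (the tile URLs and the surrounding scene are hypothetical; loader, vertexShader, and fragmentShader are assumed to be defined elsewhere in the module, as above):

import { Mesh, PlaneBufferGeometry } from 'three';

const urls = [ 'tiles/nw.png', 'tiles/sw.png', 'tiles/ne.png', 'tiles/se.png' ]; // placeholder URLs, in NW, SW, NE, SE order

QuadTextureMaterial( urls ).then( ( material ) => {
	const tile = new Mesh( new PlaneBufferGeometry( 10, 10, 32, 32 ), material );
	scene.add( tile ); // assumes an existing Scene named `scene`
} );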
Example #3
Source File: LineMaterial.js From BlueMapWeb with MIT License | 4 votes |
ShaderLib[ 'line' ] = {
uniforms: UniformsUtils.merge( [
UniformsLib.common,
UniformsLib.fog,
UniformsLib.line
] ),
vertexShader:
`
#include <common>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
uniform float linewidth;
uniform vec2 resolution;
attribute vec3 instanceStart;
attribute vec3 instanceEnd;
attribute vec3 instanceColorStart;
attribute vec3 instanceColorEnd;
varying vec2 vUv;
varying float vDistance;
#ifdef USE_DASH
uniform float dashScale;
attribute float instanceDistanceStart;
attribute float instanceDistanceEnd;
varying float vLineDistance;
#endif
void trimSegment( const in vec4 start, inout vec4 end ) {
// trim end segment so it terminates between the camera plane and the near plane
// conservative estimate of the near plane
float a = projectionMatrix[ 2 ][ 2 ]; // 3rd entry in 3rd column
float b = projectionMatrix[ 3 ][ 2 ]; // 3rd entry in 4th column
float nearEstimate = - 0.5 * b / a;
float alpha = ( nearEstimate - start.z ) / ( end.z - start.z );
end.xyz = mix( start.xyz, end.xyz, alpha );
}
void main() {
#ifdef USE_COLOR
vColor.xyz = ( position.y < 0.5 ) ? instanceColorStart : instanceColorEnd;
#endif
#ifdef USE_DASH
vLineDistance = ( position.y < 0.5 ) ? dashScale * instanceDistanceStart : dashScale * instanceDistanceEnd;
#endif
float aspect = resolution.x / resolution.y;
vUv = uv;
// camera space
vec4 start = modelViewMatrix * vec4( instanceStart, 1.0 );
vec4 end = modelViewMatrix * vec4( instanceEnd, 1.0 );
// special case for perspective projection, and segments that terminate either in, or behind, the camera plane
// clearly the gpu firmware has a way of addressing this issue when projecting into ndc space
// but we need to perform ndc-space calculations in the shader, so we must address this issue directly
// perhaps there is a more elegant solution -- WestLangley
bool perspective = ( projectionMatrix[ 2 ][ 3 ] == - 1.0 ); // 4th entry in the 3rd column
if ( perspective ) {
if ( start.z < 0.0 && end.z >= 0.0 ) {
trimSegment( start, end );
} else if ( end.z < 0.0 && start.z >= 0.0 ) {
trimSegment( end, start );
}
}
// clip space
vec4 clipStart = projectionMatrix * start;
vec4 clipEnd = projectionMatrix * end;
// ndc space
vec2 ndcStart = clipStart.xy / clipStart.w;
vec2 ndcEnd = clipEnd.xy / clipEnd.w;
// direction
vec2 dir = ndcEnd - ndcStart;
// account for clip-space aspect ratio
dir.x *= aspect;
dir = normalize( dir );
// perpendicular to dir
vec2 offset = vec2( dir.y, - dir.x );
// undo aspect ratio adjustment
dir.x /= aspect;
offset.x /= aspect;
// sign flip
if ( position.x < 0.0 ) offset *= - 1.0;
// endcaps
if ( position.y < 0.0 ) {
offset += - dir;
} else if ( position.y > 1.0 ) {
offset += dir;
}
// adjust for linewidth
offset *= linewidth;
// adjust for clip-space to screen-space conversion // maybe resolution should be based on viewport ...
offset /= resolution.y;
// select end
vec4 clip = ( position.y < 0.5 ) ? clipStart : clipEnd;
// back to clip space
offset *= clip.w;
clip.xy += offset;
gl_Position = clip;
vec4 mvPosition = ( position.y < 0.5 ) ? start : end; // this is an approximation
vDistance = -mvPosition.z;
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
#include <fog_vertex>
}
`,
fragmentShader:
`
#define FLT_MAX 3.402823466e+38
uniform vec3 diffuse;
uniform float opacity;
uniform float fadeDistanceMax;
uniform float fadeDistanceMin;
#ifdef USE_DASH
uniform float dashSize;
uniform float gapSize;
#endif
varying float vLineDistance;
#include <common>
#include <color_pars_fragment>
#include <fog_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
varying vec2 vUv;
varying float vDistance;
void main() {
#include <clipping_planes_fragment>
#ifdef USE_DASH
if ( vUv.y < - 1.0 || vUv.y > 1.0 ) discard; // discard endcaps
if ( mod( vLineDistance, dashSize + gapSize ) > dashSize ) discard; // todo - FIX
#endif
if ( abs( vUv.y ) > 1.0 ) {
float a = vUv.x;
float b = ( vUv.y > 0.0 ) ? vUv.y - 1.0 : vUv.y + 1.0;
float len2 = a * a + b * b;
if ( len2 > 1.0 ) discard;
}
// distance fading
float fdMax = FLT_MAX;
if ( fadeDistanceMax > 0.0 ) fdMax = fadeDistanceMax;
float minDelta = (vDistance - fadeDistanceMin) / fadeDistanceMin;
float maxDelta = (vDistance - fadeDistanceMax) / (fadeDistanceMax * 0.5);
float distanceOpacity = min(
clamp(minDelta, 0.0, 1.0),
1.0 - clamp(maxDelta + 1.0, 0.0, 1.0)
);
vec4 diffuseColor = vec4( diffuse, opacity * distanceOpacity );
#include <logdepthbuf_fragment>
#include <color_fragment>
gl_FragColor = vec4( diffuseColor.rgb, diffuseColor.a );
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
#include <premultiplied_alpha_fragment>
}
`
};
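Registering the merged uniforms and shader source under ShaderLib[ 'line' ] lets each material instance clone them later. A minimal sketch of that consumption, assuming the rest of LineMaterial.js follows the stock three.js LineMaterial pattern:

// Hypothetical: build a per-instance material from the registered shader.
const material = new ShaderMaterial( {
	uniforms: UniformsUtils.clone( ShaderLib[ 'line' ].uniforms ), // private copy per material
	vertexShader: ShaderLib[ 'line' ].vertexShader,
	fragmentShader: ShaderLib[ 'line' ].fragmentShader,
	clipping: true // the shader includes the clipping_planes chunks
} );
material.uniforms.linewidth.value = 3;
material.uniforms.resolution.value.set( window.innerWidth, window.innerHeight );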
Example #4
Source File: DisplacementSphere.js From personal-website-react with MIT License | 4 votes |
DisplacementSphere = (props) => {
const { theme } = useContext(ThemeContext);
const rgbBackground = theme === "light" ? "250 250 250" : "17 17 17";
const width = useRef(window.innerWidth);
const height = useRef(window.innerHeight);
const start = useRef(Date.now());
const canvasRef = useRef();
const mouse = useRef();
const renderer = useRef();
const camera = useRef();
const scene = useRef();
const lights = useRef();
const uniforms = useRef();
const material = useRef();
const geometry = useRef();
const sphere = useRef();
const tweenRef = useRef();
const sphereSpring = useRef();
const prefersReducedMotion = Boolean(usePrefersReducedMotion() && false); // disabled until theme switching is fixed
const isInViewport = useInViewport(canvasRef);
useEffect(() => {
mouse.current = new Vector2(0.8, 0.5);
renderer.current = new WebGLRenderer({
canvas: canvasRef.current,
powerPreference: "high-performance",
});
renderer.current.setSize(width.current, height.current);
renderer.current.setPixelRatio(1);
renderer.current.outputEncoding = sRGBEncoding;
camera.current = new PerspectiveCamera(
55,
width.current / height.current,
0.1,
200
);
camera.current.position.z = 52;
scene.current = new Scene();
material.current = new MeshPhongMaterial();
material.current.onBeforeCompile = (shader) => {
uniforms.current = UniformsUtils.merge([
UniformsLib["ambient"],
UniformsLib["lights"],
shader.uniforms,
{ time: { type: "f", value: 0 } },
]);
shader.uniforms = uniforms.current;
shader.vertexShader = vertShader;
shader.fragmentShader = fragShader;
shader.lights = true;
};
geometry.current = new SphereBufferGeometry(32, 128, 128);
sphere.current = new Mesh(geometry.current, material.current);
sphere.current.position.z = 0;
sphere.current.modifier = Math.random();
scene.current.add(sphere.current);
return () => {
cleanScene(scene.current);
cleanRenderer(renderer.current);
};
}, []);
useEffect(() => {
const dirLight = new DirectionalLight(
rgbToThreeColor("250 250 250"),
0.6
);
const ambientLight = new AmbientLight(
rgbToThreeColor("250 250 250"),
theme === "light" ? 0.8 : 0.1
);
dirLight.position.z = 200;
dirLight.position.x = 100;
dirLight.position.y = 100;
lights.current = [dirLight, ambientLight];
scene.current.background = rgbToThreeColor(rgbBackground);
lights.current.forEach((light) => scene.current.add(light));
return () => {
removeLights(lights.current);
};
}, [rgbBackground, theme]);
useEffect(() => {
const handleResize = () => {
const canvasHeight = innerHeight();
const windowWidth = window.innerWidth;
const fullHeight = canvasHeight + canvasHeight * 0.3;
canvasRef.current.style.height = fullHeight;
renderer.current.setSize(windowWidth, fullHeight);
camera.current.aspect = windowWidth / fullHeight;
camera.current.updateProjectionMatrix();
// Render a single frame on resize when not animating
if (prefersReducedMotion) {
renderer.current.render(scene.current, camera.current);
}
if (windowWidth <= media.mobile) {
sphere.current.position.x = 14;
sphere.current.position.y = 10;
} else if (windowWidth <= media.tablet) {
sphere.current.position.x = 18;
sphere.current.position.y = 14;
} else {
sphere.current.position.x = 22;
sphere.current.position.y = 16;
}
};
window.addEventListener("resize", handleResize);
handleResize();
return () => {
window.removeEventListener("resize", handleResize);
};
}, [prefersReducedMotion]);
useEffect(() => {
const onMouseMove = (event) => {
const { rotation } = sphere.current;
const position = {
x: event.clientX / window.innerWidth,
y: event.clientY / window.innerHeight,
};
if (!sphereSpring.current) {
sphereSpring.current = value(rotation.toArray(), (values) =>
rotation.set(
values[0],
values[1],
sphere.current.rotation.z
)
);
}
tweenRef.current = spring({
from: sphereSpring.current.get(),
to: [position.y / 2, position.x / 2],
stiffness: 30,
damping: 20,
velocity: sphereSpring.current.getVelocity(),
mass: 2,
restSpeed: 0.0001,
}).start(sphereSpring.current);
};
if (!prefersReducedMotion && isInViewport) {
window.addEventListener("mousemove", onMouseMove);
}
return () => {
window.removeEventListener("mousemove", onMouseMove);
if (tweenRef.current) {
tweenRef.current.stop();
}
};
}, [isInViewport, prefersReducedMotion]);
useEffect(() => {
let animation;
const animate = () => {
animation = requestAnimationFrame(animate);
if (uniforms.current !== undefined) {
uniforms.current.time.value =
0.00005 * (Date.now() - start.current);
}
sphere.current.rotation.z += 0.001;
renderer.current.render(scene.current, camera.current);
};
if (!prefersReducedMotion && isInViewport) {
animate();
} else {
renderer.current.render(scene.current, camera.current);
}
return () => {
cancelAnimationFrame(animation);
};
}, [isInViewport, prefersReducedMotion]);
return (
<Transition appear in onEnter={reflow} timeout={3000}>
{(status) => (
<canvas
aria-hidden
className={classNames(
"displacement-sphere",
`displacement-sphere--${status}`
)}
ref={canvasRef}
{...props}
/>
)}
</Transition>
);
}
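Since DisplacementSphere is an ordinary React component that renders its own <canvas>, using it only requires mounting it somewhere in the tree. A hedged usage sketch (the import path is hypothetical):

import DisplacementSphere from './components/DisplacementSphere'; // hypothetical path

const Hero = () => (
	<section className="hero">
		{/* extra props are spread onto the underlying <canvas> element */}
		<DisplacementSphere />
	</section>
);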
Example #5
Source File: Water.js From canvas with Apache License 2.0 | 4 votes |
Water = function ( geometry, options ) {
Mesh.call( this, geometry );
var scope = this;
options = options || {};
var textureWidth = options.textureWidth !== undefined ? options.textureWidth : 512;
var textureHeight = options.textureHeight !== undefined ? options.textureHeight : 512;
var clipBias = options.clipBias !== undefined ? options.clipBias : 0.0;
var alpha = options.alpha !== undefined ? options.alpha : 1.0;
var time = options.time !== undefined ? options.time : 0.0;
var normalSampler = options.waterNormals !== undefined ? options.waterNormals : null;
var sunDirection = options.sunDirection !== undefined ? options.sunDirection : new Vector3( 0.70707, 0.70707, 0.0 );
var sunColor = new Color( options.sunColor !== undefined ? options.sunColor : 0xffffff );
var waterColor = new Color( options.waterColor !== undefined ? options.waterColor : 0x7F7F7F );
var eye = options.eye !== undefined ? options.eye : new Vector3( 0, 0, 0 );
var distortionScale = options.distortionScale !== undefined ? options.distortionScale : 20.0;
var side = options.side !== undefined ? options.side : FrontSide;
var fog = options.fog !== undefined ? options.fog : false;
//
var mirrorPlane = new Plane();
var normal = new Vector3();
var mirrorWorldPosition = new Vector3();
var cameraWorldPosition = new Vector3();
var rotationMatrix = new Matrix4();
var lookAtPosition = new Vector3( 0, 0, - 1 );
var clipPlane = new Vector4();
var view = new Vector3();
var target = new Vector3();
var q = new Vector4();
var textureMatrix = new Matrix4();
var mirrorCamera = new PerspectiveCamera();
var parameters = {
minFilter: LinearFilter,
magFilter: LinearFilter,
format: RGBFormat,
stencilBuffer: false
};
var renderTarget = new WebGLRenderTarget( textureWidth, textureHeight, parameters );
if ( ! MathUtils.isPowerOfTwo( textureWidth ) || ! MathUtils.isPowerOfTwo( textureHeight ) ) {
renderTarget.texture.generateMipmaps = false;
}
var mirrorShader = {
uniforms: UniformsUtils.merge( [
UniformsLib[ 'fog' ],
UniformsLib[ 'lights' ],
{
"normalSampler": { value: null },
"mirrorSampler": { value: null },
"alpha": { value: 1.0 },
"time": { value: 0.0 },
"size": { value: 1.0 },
"distortionScale": { value: 20.0 },
"textureMatrix": { value: new Matrix4() },
"sunColor": { value: new Color( 0x7F7F7F ) },
"sunDirection": { value: new Vector3( 0.70707, 0.70707, 0 ) },
"eye": { value: new Vector3() },
"waterColor": { value: new Color( 0x555555 ) }
}
] ),
vertexShader: [
'uniform mat4 textureMatrix;',
'uniform float time;',
'varying vec4 mirrorCoord;',
'varying vec4 worldPosition;',
'#include <common>',
'#include <fog_pars_vertex>',
'#include <shadowmap_pars_vertex>',
'#include <logdepthbuf_pars_vertex>',
'void main() {',
' mirrorCoord = modelMatrix * vec4( position, 1.0 );',
' worldPosition = mirrorCoord.xyzw;',
' mirrorCoord = textureMatrix * mirrorCoord;',
' vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );',
' gl_Position = projectionMatrix * mvPosition;',
'#include <logdepthbuf_vertex>',
'#include <fog_vertex>',
'#include <shadowmap_vertex>',
'}'
].join( '\n' ),
fragmentShader: [
'uniform sampler2D mirrorSampler;',
'uniform float alpha;',
'uniform float time;',
'uniform float size;',
'uniform float distortionScale;',
'uniform sampler2D normalSampler;',
'uniform vec3 sunColor;',
'uniform vec3 sunDirection;',
'uniform vec3 eye;',
'uniform vec3 waterColor;',
'varying vec4 mirrorCoord;',
'varying vec4 worldPosition;',
'vec4 getNoise( vec2 uv ) {',
' vec2 uv0 = ( uv / 103.0 ) + vec2(time / 17.0, time / 29.0);',
' vec2 uv1 = uv / 107.0-vec2( time / -19.0, time / 31.0 );',
' vec2 uv2 = uv / vec2( 8907.0, 9803.0 ) + vec2( time / 101.0, time / 97.0 );',
' vec2 uv3 = uv / vec2( 1091.0, 1027.0 ) - vec2( time / 109.0, time / -113.0 );',
' vec4 noise = texture2D( normalSampler, uv0 ) +',
' texture2D( normalSampler, uv1 ) +',
' texture2D( normalSampler, uv2 ) +',
' texture2D( normalSampler, uv3 );',
' return noise * 0.5 - 1.0;',
'}',
'void sunLight( const vec3 surfaceNormal, const vec3 eyeDirection, float shiny, float spec, float diffuse, inout vec3 diffuseColor, inout vec3 specularColor ) {',
' vec3 reflection = normalize( reflect( -sunDirection, surfaceNormal ) );',
' float direction = max( 0.0, dot( eyeDirection, reflection ) );',
' specularColor += pow( direction, shiny ) * sunColor * spec;',
' diffuseColor += max( dot( sunDirection, surfaceNormal ), 0.0 ) * sunColor * diffuse;',
'}',
'#include <common>',
'#include <packing>',
'#include <bsdfs>',
'#include <fog_pars_fragment>',
'#include <logdepthbuf_pars_fragment>',
'#include <lights_pars_begin>',
'#include <shadowmap_pars_fragment>',
'#include <shadowmask_pars_fragment>',
'void main() {',
'#include <logdepthbuf_fragment>',
' vec4 noise = getNoise( worldPosition.xz * size );',
' vec3 surfaceNormal = normalize( noise.xzy * vec3( 1.5, 1.0, 1.5 ) );',
' vec3 diffuseLight = vec3(0.0);',
' vec3 specularLight = vec3(0.0);',
' vec3 worldToEye = eye-worldPosition.xyz;',
' vec3 eyeDirection = normalize( worldToEye );',
' sunLight( surfaceNormal, eyeDirection, 100.0, 2.0, 0.5, diffuseLight, specularLight );',
' float distance = length(worldToEye);',
' vec2 distortion = surfaceNormal.xz * ( 0.001 + 1.0 / distance ) * distortionScale;',
' vec3 reflectionSample = vec3( texture2D( mirrorSampler, mirrorCoord.xy / mirrorCoord.w + distortion ) );',
' float theta = max( dot( eyeDirection, surfaceNormal ), 0.0 );',
' float rf0 = 0.3;',
' float reflectance = rf0 + ( 1.0 - rf0 ) * pow( ( 1.0 - theta ), 5.0 );',
' vec3 scatter = max( 0.0, dot( surfaceNormal, eyeDirection ) ) * waterColor;',
' vec3 albedo = mix( ( sunColor * diffuseLight * 0.3 + scatter ) * getShadowMask(), ( vec3( 0.1 ) + reflectionSample * 0.9 + reflectionSample * specularLight ), reflectance);',
' vec3 outgoingLight = albedo;',
' gl_FragColor = vec4( outgoingLight, alpha );',
'#include <tonemapping_fragment>',
'#include <fog_fragment>',
'}'
].join( '\n' )
};
var material = new ShaderMaterial( {
fragmentShader: mirrorShader.fragmentShader,
vertexShader: mirrorShader.vertexShader,
uniforms: UniformsUtils.clone( mirrorShader.uniforms ),
lights: true,
side: side,
fog: fog
} );
material.uniforms[ "mirrorSampler" ].value = renderTarget.texture;
material.uniforms[ "textureMatrix" ].value = textureMatrix;
material.uniforms[ "alpha" ].value = alpha;
material.uniforms[ "time" ].value = time;
material.uniforms[ "normalSampler" ].value = normalSampler;
material.uniforms[ "sunColor" ].value = sunColor;
material.uniforms[ "waterColor" ].value = waterColor;
material.uniforms[ "sunDirection" ].value = sunDirection;
material.uniforms[ "distortionScale" ].value = distortionScale;
material.uniforms[ "eye" ].value = eye;
scope.material = material;
scope.onBeforeRender = function ( renderer, scene, camera ) {
mirrorWorldPosition.setFromMatrixPosition( scope.matrixWorld );
cameraWorldPosition.setFromMatrixPosition( camera.matrixWorld );
rotationMatrix.extractRotation( scope.matrixWorld );
normal.set( 0, 0, 1 );
normal.applyMatrix4( rotationMatrix );
view.subVectors( mirrorWorldPosition, cameraWorldPosition );
// Avoid rendering when mirror is facing away
if ( view.dot( normal ) > 0 ) return;
view.reflect( normal ).negate();
view.add( mirrorWorldPosition );
rotationMatrix.extractRotation( camera.matrixWorld );
lookAtPosition.set( 0, 0, - 1 );
lookAtPosition.applyMatrix4( rotationMatrix );
lookAtPosition.add( cameraWorldPosition );
target.subVectors( mirrorWorldPosition, lookAtPosition );
target.reflect( normal ).negate();
target.add( mirrorWorldPosition );
mirrorCamera.position.copy( view );
mirrorCamera.up.set( 0, 1, 0 );
mirrorCamera.up.applyMatrix4( rotationMatrix );
mirrorCamera.up.reflect( normal );
mirrorCamera.lookAt( target );
mirrorCamera.far = camera.far; // Used in WebGLBackground
mirrorCamera.updateMatrixWorld();
mirrorCamera.projectionMatrix.copy( camera.projectionMatrix );
// Update the texture matrix
textureMatrix.set(
0.5, 0.0, 0.0, 0.5,
0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 0.5, 0.5,
0.0, 0.0, 0.0, 1.0
);
textureMatrix.multiply( mirrorCamera.projectionMatrix );
textureMatrix.multiply( mirrorCamera.matrixWorldInverse );
// Now update projection matrix with new clip plane, implementing code from: http://www.terathon.com/code/oblique.html
// Paper explaining this technique: http://www.terathon.com/lengyel/Lengyel-Oblique.pdf
mirrorPlane.setFromNormalAndCoplanarPoint( normal, mirrorWorldPosition );
mirrorPlane.applyMatrix4( mirrorCamera.matrixWorldInverse );
clipPlane.set( mirrorPlane.normal.x, mirrorPlane.normal.y, mirrorPlane.normal.z, mirrorPlane.constant );
var projectionMatrix = mirrorCamera.projectionMatrix;
q.x = ( Math.sign( clipPlane.x ) + projectionMatrix.elements[ 8 ] ) / projectionMatrix.elements[ 0 ];
q.y = ( Math.sign( clipPlane.y ) + projectionMatrix.elements[ 9 ] ) / projectionMatrix.elements[ 5 ];
q.z = - 1.0;
q.w = ( 1.0 + projectionMatrix.elements[ 10 ] ) / projectionMatrix.elements[ 14 ];
// Calculate the scaled plane vector
clipPlane.multiplyScalar( 2.0 / clipPlane.dot( q ) );
// Replacing the third row of the projection matrix
projectionMatrix.elements[ 2 ] = clipPlane.x;
projectionMatrix.elements[ 6 ] = clipPlane.y;
projectionMatrix.elements[ 10 ] = clipPlane.z + 1.0 - clipBias;
projectionMatrix.elements[ 14 ] = clipPlane.w;
eye.setFromMatrixPosition( camera.matrixWorld );
//
var currentRenderTarget = renderer.getRenderTarget();
var currentXrEnabled = renderer.xr.enabled;
var currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;
scope.visible = false;
renderer.xr.enabled = false; // Avoid camera modification and recursion
renderer.shadowMap.autoUpdate = false; // Avoid re-computing shadows
renderer.setRenderTarget( renderTarget );
renderer.state.buffers.depth.setMask( true ); // make sure the depth buffer is writable so it can be properly cleared, see #18897
if ( renderer.autoClear === false ) renderer.clear();
renderer.render( scene, mirrorCamera );
scope.visible = true;
renderer.xr.enabled = currentXrEnabled;
renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;
renderer.setRenderTarget( currentRenderTarget );
// Restore viewport
var viewport = camera.viewport;
if ( viewport !== undefined ) {
renderer.state.viewport( viewport );
}
};
}
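A hedged usage sketch for the Water constructor above, following the stock three.js ocean example (the normals texture path and the surrounding scene are placeholders):

import { PlaneBufferGeometry, TextureLoader, RepeatWrapping, Vector3 } from 'three';

const waterGeometry = new PlaneBufferGeometry( 10000, 10000 );
const water = new Water( waterGeometry, {
	textureWidth: 512,
	textureHeight: 512,
	waterNormals: new TextureLoader().load( 'textures/waternormals.jpg', ( texture ) => { // placeholder path
		texture.wrapS = texture.wrapT = RepeatWrapping; // tile the normal map
	} ),
	sunDirection: new Vector3( 0.70707, 0.70707, 0.0 ),
	sunColor: 0xffffff,
	waterColor: 0x001e0f,
	distortionScale: 3.7,
	fog: false
} );
water.rotation.x = - Math.PI / 2;
scene.add( water ); // assumes an existing Scene named `scene`

// In the render loop, advance the animated noise:
// water.material.uniforms[ 'time' ].value += 1.0 / 60.0;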