three#LinearFilter JavaScript Examples
The following examples show how to use three#LinearFilter.
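As a quick orientation, here is a minimal sketch (not taken from any project below) that applies LinearFilter to a texture loaded with TextureLoader; the texture path is a placeholder.
import { TextureLoader, LinearFilter } from 'three';

// Placeholder path, used for illustration only.
const texture = new TextureLoader().load('textures/diffuse.png');

// Sample the texture with bilinear filtering when minified and magnified.
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;

// With a non-mipmapped minFilter, mipmap generation can be skipped.
texture.generateMipmaps = false;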
Example #1
Source File: TileMap.js From BlueMapWeb with MIT License | 6 votes |
/**
 * @param width {number}
 * @param height {number}
 */
constructor(width, height) {
  this.canvas = document.createElementNS('http://www.w3.org/1999/xhtml', 'canvas');
  this.canvas.width = width;
  this.canvas.height = height;

  /**
   * @type CanvasRenderingContext2D
   */
  this.tileMapContext = this.canvas.getContext('2d', {
    alpha: false,
    willReadFrequently: true,
  });

  this.texture = new Texture(this.canvas);
  this.texture.generateMipmaps = false;
  this.texture.magFilter = LinearFilter;
  this.texture.minFilter = LinearFilter;
  this.texture.wrapS = ClampToEdgeWrapping;
  this.texture.wrapT = ClampToEdgeWrapping;
  this.texture.flipY = false;
  this.texture.needsUpdate = true;
}
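A possible way to use the texture created above, assuming the surrounding TileMap class is instantiated as in BlueMapWeb; the drawing commands and material choice are illustrative only.
import { MeshBasicMaterial } from 'three';

// Illustrative usage of the class whose constructor is shown above.
const tileMap = new TileMap(512, 512);

// Draw into the backing canvas, then flag the texture for re-upload.
tileMap.tileMapContext.fillStyle = '#ff0000';
tileMap.tileMapContext.fillRect(0, 0, 16, 16);
tileMap.texture.needsUpdate = true;

// The canvas-backed texture behaves like any other three.js texture.
const material = new MeshBasicMaterial({ map: tileMap.texture });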
Example #2
Source File: FontLibrary.js From three-mesh-ui with MIT License | 6 votes |
/*
  This method is intended for adding manually loaded fonts. It assumes the font hasn't been
  loaded or requested yet. If it was, the font with the specified name will be overwritten,
  but components already using it won't be updated.
*/
function addFont( name, json, texture ) {

  requiredFontFamilies.push( name );
  fontFamilies[ name ] = json;

  // Ensure the font json is processed
  _buildFriendlyKerningValues( json );

  if ( texture ) {

    // Only touch the texture when one was actually provided.
    texture.generateMipmaps = false;
    texture.minFilter = LinearFilter;
    texture.magFilter = LinearFilter;

    requiredFontTextures.push( name );
    fontTextures[ name ] = texture;

  }

}
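A hedged sketch of calling addFont with assets the application has already fetched; the file names are placeholders, and the way addFont is reached (it lives inside FontLibrary) is an assumption, not shown above.
import { TextureLoader, FileLoader } from 'three';

// Placeholder asset paths for an MSDF font.
const fontTexture = new TextureLoader().load('fonts/Roboto-msdf.png');

new FileLoader().load('fonts/Roboto-msdf.json', (text) => {
  const fontJSON = JSON.parse(text);

  // Register the manually loaded font under a family name of our choosing.
  addFont('Roboto', fontJSON, fontTexture);
});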
Example #3
Source File: SavePass.js From threejs-tutorial with MIT License | 6 votes |
SavePass = function (renderTarget) {
  Pass.call(this);

  if (CopyShader === undefined)
    console.error("SavePass relies on CopyShader");

  var shader = CopyShader;

  this.textureID = "tDiffuse";
  this.uniforms = UniformsUtils.clone(shader.uniforms);

  this.material = new ShaderMaterial({
    uniforms: this.uniforms,
    vertexShader: shader.vertexShader,
    fragmentShader: shader.fragmentShader,
  });

  this.renderTarget = renderTarget;

  if (this.renderTarget === undefined) {
    this.renderTarget = new WebGLRenderTarget(
      window.innerWidth,
      window.innerHeight,
      {
        minFilter: LinearFilter,
        magFilter: LinearFilter,
        format: RGBFormat,
        stencilBuffer: false,
      }
    );
    this.renderTarget.texture.name = "SavePass.rt";
  }

  this.needsSwap = false;

  this.fsQuad = new Pass.FullScreenQuad(this.material);
}
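A hedged usage sketch: SavePass is normally inserted into an EffectComposer chain so the current read buffer gets copied into its render target; the composer, renderer, scene and camera set-up is assumed here.
// Assumes renderer, scene, camera and the postprocessing imports used above.
const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));

// No render target passed in, so SavePass creates its own (LinearFilter, RGBFormat).
const savePass = new SavePass();
composer.addPass(savePass);

// savePass.renderTarget.texture can now be read by later passes or materials.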
Example #4
Source File: NormalPass.js From three-viewer with MIT License | 5 votes |
/**
 * Constructs a new normal pass.
 *
 * @param {Scene} scene - The scene to render.
 * @param {Camera} camera - The camera to use to render the scene.
 * @param {Object} [options] - The options.
 * @param {Number} [options.resolutionScale=1.0] - Deprecated. Adjust the height or width instead for consistent results.
 * @param {Number} [options.width=Resizer.AUTO_SIZE] - The render width.
 * @param {Number} [options.height=Resizer.AUTO_SIZE] - The render height.
 * @param {WebGLRenderTarget} [options.renderTarget] - A custom render target.
 */
constructor(scene, camera, {
  resolutionScale = 1.0,
  width = Resizer.AUTO_SIZE,
  height = Resizer.AUTO_SIZE,
  renderTarget
} = {}) {

  super("NormalPass");

  this.needsSwap = false;

  /**
   * A render pass.
   *
   * @type {RenderPass}
   * @private
   */
  this.renderPass = new RenderPass(scene, camera, new MeshNormalMaterial({
    morphTargets: true,
    morphNormals: true,
    skinning: true
  }));

  const clearPass = this.renderPass.getClearPass();
  clearPass.overrideClearColor = new Color(0x7777ff);
  clearPass.overrideClearAlpha = 1.0;

  /**
   * A render target that contains the scene normals.
   *
   * @type {WebGLRenderTarget}
   */
  this.renderTarget = renderTarget;

  if(this.renderTarget === undefined) {

    this.renderTarget = new WebGLRenderTarget(1, 1, {
      minFilter: LinearFilter,
      magFilter: LinearFilter,
      format: RGBFormat,
      stencilBuffer: false
    });

    this.renderTarget.texture.name = "NormalPass.Target";

  }

  /**
   * The desired render resolution.
   *
   * Use {@link Resizer.AUTO_SIZE} for the width or height to automatically
   * calculate it based on its counterpart and the original aspect ratio.
   *
   * @type {Resizer}
   */
  this.resolution = new Resizer(this, width, height);
  this.resolution.scale = resolutionScale;

}
Example #5
Source File: BloomEffect.js From three-viewer with MIT License | 5 votes |
/**
 * Constructs a new bloom effect.
 *
 * @param {Object} [options] - The options.
 * @param {BlendFunction} [options.blendFunction=BlendFunction.SCREEN] - The blend function of this effect.
 * @param {Number} [options.luminanceThreshold=0.9] - The luminance threshold. Raise this value to mask out darker elements in the scene. Range is [0, 1].
 * @param {Number} [options.luminanceSmoothing=0.025] - Controls the smoothness of the luminance threshold. Range is [0, 1].
 * @param {Number} [options.resolutionScale=0.5] - Deprecated. Use height or width instead.
 * @param {Number} [options.intensity=1.0] - The intensity.
 * @param {Number} [options.width=Resizer.AUTO_SIZE] - The render width.
 * @param {Number} [options.height=Resizer.AUTO_SIZE] - The render height.
 * @param {KernelSize} [options.kernelSize=KernelSize.LARGE] - The blur kernel size.
 */
constructor({
  blendFunction = BlendFunction.SCREEN,
  luminanceThreshold = 0.9,
  luminanceSmoothing = 0.025,
  resolutionScale = 0.5,
  intensity = 1.0,
  width = Resizer.AUTO_SIZE,
  height = Resizer.AUTO_SIZE,
  kernelSize = KernelSize.LARGE
} = {}) {

  super("BloomEffect", fragmentShader, {
    blendFunction,
    uniforms: new Map([
      ["texture", new Uniform(null)],
      ["intensity", new Uniform(intensity)]
    ])
  });

  /**
   * A render target.
   *
   * @type {WebGLRenderTarget}
   * @private
   */
  this.renderTarget = new WebGLRenderTarget(1, 1, {
    minFilter: LinearFilter,
    magFilter: LinearFilter,
    stencilBuffer: false,
    depthBuffer: false
  });

  this.renderTarget.texture.name = "Bloom.Target";
  this.renderTarget.texture.generateMipmaps = false;

  this.uniforms.get("texture").value = this.renderTarget.texture;

  /**
   * A blur pass.
   *
   * @type {BlurPass}
   */
  this.blurPass = new BlurPass({ resolutionScale, width, height, kernelSize });
  this.blurPass.resolution.resizable = this;

  /**
   * A luminance shader pass.
   *
   * You may disable this pass to deactivate luminance filtering.
   *
   * @type {ShaderPass}
   */
  this.luminancePass = new ShaderPass(new LuminanceMaterial(true));
  this.luminanceMaterial.threshold = luminanceThreshold;
  this.luminanceMaterial.smoothing = luminanceSmoothing;

}
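A hedged sketch of constructing this effect; it assumes the surrounding postprocessing library's EffectComposer/EffectPass wiring, which is not part of the snippet above.
const bloomEffect = new BloomEffect({
  luminanceThreshold: 0.6,
  luminanceSmoothing: 0.1,
  intensity: 2.0,
  kernelSize: KernelSize.MEDIUM
});

// Assumed wiring: effects are wrapped in an EffectPass before being added to a composer.
const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));
composer.addPass(new EffectPass(camera, bloomEffect));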
Example #6
Source File: EffectComposer.js From three-viewer with MIT License | 5 votes |
/**
 * Creates a new render target by replicating the renderer's canvas.
 *
 * The created render target uses a linear filter for texel minification and
 * magnification. Its render texture format depends on whether the renderer
 * uses the alpha channel. Mipmaps are disabled.
 *
 * Note: The buffer format will also be set to RGBA if the frame buffer type
 * is HalfFloatType because RGB16F buffers are not renderable.
 *
 * @param {Boolean} depthBuffer - Whether the render target should have a depth buffer.
 * @param {Boolean} stencilBuffer - Whether the render target should have a stencil buffer.
 * @param {Number} type - The frame buffer type.
 * @param {Number} multisampling - The number of samples to use for antialiasing.
 * @return {WebGLRenderTarget} A new render target that equals the renderer's canvas.
 */
createBuffer(depthBuffer, stencilBuffer, type, multisampling) {

  const size = this.renderer.getDrawingBufferSize(new Vector2());
  const alpha = this.renderer.getContext().getContextAttributes().alpha;

  const options = {
    format: (!alpha && type === UnsignedByteType) ? RGBFormat : RGBAFormat,
    minFilter: LinearFilter,
    magFilter: LinearFilter,
    stencilBuffer,
    depthBuffer,
    type
  };

  const renderTarget = (multisampling > 0) ?
    new WebGLMultisampleRenderTarget(size.width, size.height, options) :
    new WebGLRenderTarget(size.width, size.height, options);

  if(multisampling > 0) {
    renderTarget.samples = multisampling;
  }

  renderTarget.texture.name = "EffectComposer.Buffer";
  renderTarget.texture.generateMipmaps = false;

  return renderTarget;

}
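For reference, a sketch of how createBuffer might be called on a composer instance; the argument values are illustrative.
// With an opaque canvas and UnsignedByteType this yields an RGBFormat buffer,
// otherwise RGBAFormat, as the branch above shows.
const inputBuffer = composer.createBuffer(
  true,             // depthBuffer
  false,            // stencilBuffer
  UnsignedByteType, // frame buffer type
  0                 // multisampling disabled
);

// Either way, the buffer texture uses LinearFilter for minification and magnification.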
Example #7
Source File: BloomPass.js From Computer-Graphics with MIT License | 5 votes |
constructor( strength = 1, kernelSize = 25, sigma = 4, resolution = 256 ) {

  super();

  // render targets
  const pars = { minFilter: LinearFilter, magFilter: LinearFilter, format: RGBAFormat };

  this.renderTargetX = new WebGLRenderTarget( resolution, resolution, pars );
  this.renderTargetX.texture.name = 'BloomPass.x';
  this.renderTargetY = new WebGLRenderTarget( resolution, resolution, pars );
  this.renderTargetY.texture.name = 'BloomPass.y';

  // copy material
  if ( CopyShader === undefined ) console.error( 'THREE.BloomPass relies on CopyShader' );

  const copyShader = CopyShader;

  this.copyUniforms = UniformsUtils.clone( copyShader.uniforms );
  this.copyUniforms[ 'opacity' ].value = strength;

  this.materialCopy = new ShaderMaterial( {
    uniforms: this.copyUniforms,
    vertexShader: copyShader.vertexShader,
    fragmentShader: copyShader.fragmentShader,
    blending: AdditiveBlending,
    transparent: true
  } );

  // convolution material
  if ( ConvolutionShader === undefined ) console.error( 'THREE.BloomPass relies on ConvolutionShader' );

  const convolutionShader = ConvolutionShader;

  this.convolutionUniforms = UniformsUtils.clone( convolutionShader.uniforms );
  this.convolutionUniforms[ 'uImageIncrement' ].value = BloomPass.blurX;
  this.convolutionUniforms[ 'cKernel' ].value = ConvolutionShader.buildKernel( sigma );

  this.materialConvolution = new ShaderMaterial( {
    uniforms: this.convolutionUniforms,
    vertexShader: convolutionShader.vertexShader,
    fragmentShader: convolutionShader.fragmentShader,
    defines: {
      'KERNEL_SIZE_FLOAT': kernelSize.toFixed( 1 ),
      'KERNEL_SIZE_INT': kernelSize.toFixed( 0 )
    }
  } );

  this.needsSwap = false;

  this.fsQuad = new FullScreenQuad( null );

}
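A hedged sketch of wiring this BloomPass into a composer, in the style of the classic three.js postprocessing examples; renderer, scene and camera are assumed.
const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));

// Bloom strength 1.5; kernel size, sigma and resolution keep their defaults.
composer.addPass(new BloomPass(1.5));

// BloomPass sets needsSwap = false, so a copy pass is typically added afterwards
// to put the result on screen.
composer.addPass(new ShaderPass(CopyShader));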
Example #8
Source File: AfterimagePass.js From Computer-Graphics with MIT License | 5 votes |
constructor( damp = 0.96 ) {

  super();

  if ( AfterimageShader === undefined ) console.error( 'THREE.AfterimagePass relies on AfterimageShader' );

  this.shader = AfterimageShader;

  this.uniforms = UniformsUtils.clone( this.shader.uniforms );
  this.uniforms[ 'damp' ].value = damp;

  this.textureComp = new WebGLRenderTarget( window.innerWidth, window.innerHeight, {
    minFilter: LinearFilter,
    magFilter: NearestFilter,
    format: RGBAFormat
  } );

  this.textureOld = new WebGLRenderTarget( window.innerWidth, window.innerHeight, {
    minFilter: LinearFilter,
    magFilter: NearestFilter,
    format: RGBAFormat
  } );

  this.shaderMaterial = new ShaderMaterial( {
    uniforms: this.uniforms,
    vertexShader: this.shader.vertexShader,
    fragmentShader: this.shader.fragmentShader
  } );

  this.compFsQuad = new FullScreenQuad( this.shaderMaterial );

  const material = new MeshBasicMaterial();
  this.copyFsQuad = new FullScreenQuad( material );

}
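A hedged sketch of using the pass; a damp value close to 1.0 keeps the afterimage trail visible longer.
// Assumes renderer, scene and camera, plus the usual EffectComposer/RenderPass imports.
const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));
composer.addPass(new AfterimagePass(0.9));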
Example #9
Source File: AdaptiveToneMappingPass.js From Computer-Graphics with MIT License | 5 votes |
reset() {

  // render targets
  if ( this.luminanceRT ) {
    this.luminanceRT.dispose();
  }

  if ( this.currentLuminanceRT ) {
    this.currentLuminanceRT.dispose();
  }

  if ( this.previousLuminanceRT ) {
    this.previousLuminanceRT.dispose();
  }

  const pars = { minFilter: LinearFilter, magFilter: LinearFilter, format: RGBAFormat }; // was RGB format. changed to RGBA format. see discussion in #8415 / #8450

  this.luminanceRT = new WebGLRenderTarget( this.resolution, this.resolution, pars );
  this.luminanceRT.texture.name = 'AdaptiveToneMappingPass.l';
  this.luminanceRT.texture.generateMipmaps = false;

  this.previousLuminanceRT = new WebGLRenderTarget( this.resolution, this.resolution, pars );
  this.previousLuminanceRT.texture.name = 'AdaptiveToneMappingPass.pl';
  this.previousLuminanceRT.texture.generateMipmaps = false;

  // We only need mipmapping for the current luminosity because we want a down-sampled version to sample in our adaptive shader
  pars.minFilter = LinearMipmapLinearFilter;
  pars.generateMipmaps = true;

  this.currentLuminanceRT = new WebGLRenderTarget( this.resolution, this.resolution, pars );
  this.currentLuminanceRT.texture.name = 'AdaptiveToneMappingPass.cl';

  if ( this.adaptive ) {
    this.materialToneMap.defines[ 'ADAPTED_LUMINANCE' ] = '';
    this.materialToneMap.uniforms.luminanceMap.value = this.luminanceRT.texture;
  }

  // Put something in the adaptive luminance texture so that the scene can render initially
  this.fsQuad.material = new MeshBasicMaterial( { color: 0x777777 } );

  this.materialLuminance.needsUpdate = true;
  this.materialAdaptiveLum.needsUpdate = true;
  this.materialToneMap.needsUpdate = true;

  // renderer.render( this.scene, this.camera, this.luminanceRT );
  // renderer.render( this.scene, this.camera, this.previousLuminanceRT );
  // renderer.render( this.scene, this.camera, this.currentLuminanceRT );

}
Example #10
Source File: FontLibrary.js From three-mesh-ui with MIT License | 5 votes |
/**
 * Called by MeshUIComponent after fontTexture was set.
 * When done, it calls MeshUIComponent.update, to actually display
 * the text with the loaded font.
 */
function setFontTexture( component, url ) {

  // if this font was never asked for, we load it
  if ( requiredFontTextures.indexOf( url ) === -1 ) {

    requiredFontTextures.push( url );

    textureLoader.load( url, ( texture ) => {

      texture.generateMipmaps = false;
      texture.minFilter = LinearFilter;
      texture.magFilter = LinearFilter;

      fontTextures[ url ] = texture;

      for ( const recordID of Object.keys( records ) ) {

        if ( url === records[ recordID ].textureURL ) {

          // update all the components that were waiting for this font for an update
          records[ recordID ].component._updateFontTexture( texture );

        }

      }

    } );

  }

  // keep record of the font that this component uses
  if ( !records[ component.id ] ) records[ component.id ] = { component };

  records[ component.id ].textureURL = url;

  // update the component, only if the font is already requested and loaded
  if ( fontTextures[ url ] ) {
    component._updateFontTexture( fontTextures[ url ] );
  }

}
Example #11
Source File: EffectComposer.js From threejs-tutorial with MIT License | 5 votes |
EffectComposer = function (renderer, renderTarget) {
  this.renderer = renderer;

  if (renderTarget === undefined) {
    var parameters = {
      minFilter: LinearFilter,
      magFilter: LinearFilter,
      format: RGBAFormat,
      stencilBuffer: false,
    };

    var size = renderer.getSize(new Vector2());
    this._pixelRatio = renderer.getPixelRatio();
    this._width = size.width;
    this._height = size.height;

    renderTarget = new WebGLRenderTarget(
      this._width * this._pixelRatio,
      this._height * this._pixelRatio,
      parameters
    );
    renderTarget.texture.name = "EffectComposer.rt1";
  } else {
    this._pixelRatio = 1;
    this._width = renderTarget.width;
    this._height = renderTarget.height;
  }

  this.renderTarget1 = renderTarget;
  this.renderTarget2 = renderTarget.clone();
  this.renderTarget2.texture.name = "EffectComposer.rt2";

  this.writeBuffer = this.renderTarget1;
  this.readBuffer = this.renderTarget2;

  this.renderToScreen = true;

  this.passes = [];

  // dependencies
  if (CopyShader === undefined) {
    console.error("THREE.EffectComposer relies on CopyShader");
  }

  if (ShaderPass === undefined) {
    console.error("THREE.EffectComposer relies on ShaderPass");
  }

  this.copyPass = new ShaderPass(CopyShader);

  this.clock = new Clock();
}
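A minimal, hedged sketch of driving this composer each frame and keeping it sized to the canvas; renderer, scene and camera are assumed to exist.
const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));

function animate() {
  requestAnimationFrame(animate);
  composer.render();
}
animate();

// Keep the internal render targets in sync with the canvas on resize.
window.addEventListener('resize', () => {
  composer.setSize(window.innerWidth, window.innerHeight);
});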
Example #12
Source File: BlurPass.js From threejs-tutorial with MIT License | 5 votes |
BlurPass = function (blur, resolution) {
  Pass.call(this);

  this.downSampleRatio = 2;
  this.blur = blur !== undefined ? blur : 0.8;
  this.resolution =
    resolution !== undefined
      ? new Vector2(resolution.x, resolution.y)
      : new Vector2(256, 256);

  var pars = {
    minFilter: LinearFilter,
    magFilter: LinearFilter,
    format: RGBAFormat,
  };

  var resx = Math.round(this.resolution.x / this.downSampleRatio);
  var resy = Math.round(this.resolution.y / this.downSampleRatio);

  this.renderTargetBlurBuffer1 = new WebGLRenderTarget(resx, resy, pars);
  this.renderTargetBlurBuffer1.texture.name = "BlurPass.blur1";
  this.renderTargetBlurBuffer1.texture.generateMipmaps = false;

  this.renderTargetBlurBuffer2 = new WebGLRenderTarget(
    Math.round(resx / 2),
    Math.round(resy / 2),
    pars
  );
  this.renderTargetBlurBuffer2.texture.name = "BlurPass.blur2";
  this.renderTargetBlurBuffer2.texture.generateMipmaps = false;

  this.separableBlurMaterial1 = this.getSeperableBlurMaterial(16);
  this.separableBlurMaterial1.uniforms["texSize"].value = new Vector2(
    resx,
    resy
  );
  this.separableBlurMaterial1.uniforms["kernelRadius"].value = 1;

  this.separableBlurMaterial2 = this.getSeperableBlurMaterial(16);
  this.separableBlurMaterial2.uniforms["texSize"].value = new Vector2(
    Math.round(resx / 2),
    Math.round(resy / 2)
  );
  this.separableBlurMaterial2.uniforms["kernelRadius"].value = 1;

  var copyShader = CopyShader;

  this.copyUniforms = UniformsUtils.clone(copyShader.uniforms);
  this.materialCopy = new ShaderMaterial({
    uniforms: this.copyUniforms,
    vertexShader: copyShader.vertexShader,
    fragmentShader: copyShader.fragmentShader,
    depthTest: false,
    depthWrite: false,
    transparent: true,
  });

  //this.needsSwap = false;
  this.fsQuad = new Pass.FullScreenQuad(null);
}
Example #13
Source File: RGBELoader.js From FirstPersonCameraControl with MIT License | 4 votes |
RGBELoader.prototype = Object.assign( Object.create( DataTextureLoader.prototype ), {
constructor: RGBELoader,
// adapted from http://www.graphics.cornell.edu/~bjw/rgbe.html
parse: function ( buffer ) {
var
/* return codes for rgbe routines */
//RGBE_RETURN_SUCCESS = 0,
RGBE_RETURN_FAILURE = - 1,
/* default error routine. change this to change error handling */
rgbe_read_error = 1,
rgbe_write_error = 2,
rgbe_format_error = 3,
rgbe_memory_error = 4,
rgbe_error = function ( rgbe_error_code, msg ) {
switch ( rgbe_error_code ) {
case rgbe_read_error: console.error( "RGBELoader Read Error: " + ( msg || '' ) );
break;
case rgbe_write_error: console.error( "RGBELoader Write Error: " + ( msg || '' ) );
break;
case rgbe_format_error: console.error( "RGBELoader Bad File Format: " + ( msg || '' ) );
break;
default:
case rgbe_memory_error: console.error( "RGBELoader: Error: " + ( msg || '' ) );
}
return RGBE_RETURN_FAILURE;
},
/* offsets to red, green, and blue components in a data (float) pixel */
//RGBE_DATA_RED = 0,
//RGBE_DATA_GREEN = 1,
//RGBE_DATA_BLUE = 2,
/* number of floats per pixel, use 4 since stored in rgba image format */
//RGBE_DATA_SIZE = 4,
/* flags indicating which fields in an rgbe_header_info are valid */
RGBE_VALID_PROGRAMTYPE = 1,
RGBE_VALID_FORMAT = 2,
RGBE_VALID_DIMENSIONS = 4,
NEWLINE = "\n",
fgets = function ( buffer, lineLimit, consume ) {
lineLimit = ! lineLimit ? 1024 : lineLimit;
var p = buffer.pos,
i = - 1, len = 0, s = '', chunkSize = 128,
chunk = String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) )
;
while ( ( 0 > ( i = chunk.indexOf( NEWLINE ) ) ) && ( len < lineLimit ) && ( p < buffer.byteLength ) ) {
s += chunk; len += chunk.length;
p += chunkSize;
chunk += String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
}
if ( - 1 < i ) {
/*for (i=l-1; i>=0; i--) {
byteCode = m.charCodeAt(i);
if (byteCode > 0x7f && byteCode <= 0x7ff) byteLen++;
else if (byteCode > 0x7ff && byteCode <= 0xffff) byteLen += 2;
if (byteCode >= 0xDC00 && byteCode <= 0xDFFF) i--; //trail surrogate
}*/
if ( false !== consume ) buffer.pos += len + i + 1;
return s + chunk.slice( 0, i );
}
return false;
},
/* minimal header reading. modify if you want to parse more information */
RGBE_ReadHeader = function ( buffer ) {
var line, match,
// regexes to parse header info fields
magic_token_re = /^#\?(\S+)$/,
gamma_re = /^\s*GAMMA\s*=\s*(\d+(\.\d+)?)\s*$/,
exposure_re = /^\s*EXPOSURE\s*=\s*(\d+(\.\d+)?)\s*$/,
format_re = /^\s*FORMAT=(\S+)\s*$/,
dimensions_re = /^\s*\-Y\s+(\d+)\s+\+X\s+(\d+)\s*$/,
// RGBE format header struct
header = {
valid: 0, /* indicate which fields are valid */
string: '', /* the actual header string */
comments: '', /* comments found in header */
programtype: 'RGBE', /* listed at beginning of file to identify it after "#?". defaults to "RGBE" */
format: '', /* RGBE format, default 32-bit_rle_rgbe */
gamma: 1.0, /* image has already been gamma corrected with given gamma. defaults to 1.0 (no correction) */
exposure: 1.0, /* a value of 1.0 in an image corresponds to <exposure> watts/steradian/m^2. defaults to 1.0 */
width: 0, height: 0 /* image dimensions, width/height */
};
if ( buffer.pos >= buffer.byteLength || ! ( line = fgets( buffer ) ) ) {
return rgbe_error( rgbe_read_error, "no header found" );
}
/* if you want to require the magic token then uncomment the next line */
if ( ! ( match = line.match( magic_token_re ) ) ) {
return rgbe_error( rgbe_format_error, "bad initial token" );
}
header.valid |= RGBE_VALID_PROGRAMTYPE;
header.programtype = match[ 1 ];
header.string += line + "\n";
while ( true ) {
line = fgets( buffer );
if ( false === line ) break;
header.string += line + "\n";
if ( '#' === line.charAt( 0 ) ) {
header.comments += line + "\n";
continue; // comment line
}
if ( match = line.match( gamma_re ) ) {
header.gamma = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( exposure_re ) ) {
header.exposure = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( format_re ) ) {
header.valid |= RGBE_VALID_FORMAT;
header.format = match[ 1 ];//'32-bit_rle_rgbe';
}
if ( match = line.match( dimensions_re ) ) {
header.valid |= RGBE_VALID_DIMENSIONS;
header.height = parseInt( match[ 1 ], 10 );
header.width = parseInt( match[ 2 ], 10 );
}
if ( ( header.valid & RGBE_VALID_FORMAT ) && ( header.valid & RGBE_VALID_DIMENSIONS ) ) break;
}
if ( ! ( header.valid & RGBE_VALID_FORMAT ) ) {
return rgbe_error( rgbe_format_error, "missing format specifier" );
}
if ( ! ( header.valid & RGBE_VALID_DIMENSIONS ) ) {
return rgbe_error( rgbe_format_error, "missing image size specifier" );
}
return header;
},
RGBE_ReadPixels_RLE = function ( buffer, w, h ) {
var data_rgba, offset, pos, count, byteValue,
scanline_buffer, ptr, ptr_end, i, l, off, isEncodedRun,
scanline_width = w, num_scanlines = h, rgbeStart
;
if (
// run length encoding is not allowed so read flat
( ( scanline_width < 8 ) || ( scanline_width > 0x7fff ) ) ||
// this file is not run length encoded
( ( 2 !== buffer[ 0 ] ) || ( 2 !== buffer[ 1 ] ) || ( buffer[ 2 ] & 0x80 ) )
) {
// return the flat buffer
return new Uint8Array( buffer );
}
if ( scanline_width !== ( ( buffer[ 2 ] << 8 ) | buffer[ 3 ] ) ) {
return rgbe_error( rgbe_format_error, "wrong scanline width" );
}
data_rgba = new Uint8Array( 4 * w * h );
if ( ! data_rgba || ! data_rgba.length ) {
return rgbe_error( rgbe_memory_error, "unable to allocate buffer space" );
}
offset = 0; pos = 0; ptr_end = 4 * scanline_width;
rgbeStart = new Uint8Array( 4 );
scanline_buffer = new Uint8Array( ptr_end );
// read in each successive scanline
while ( ( num_scanlines > 0 ) && ( pos < buffer.byteLength ) ) {
if ( pos + 4 > buffer.byteLength ) {
return rgbe_error( rgbe_read_error );
}
rgbeStart[ 0 ] = buffer[ pos ++ ];
rgbeStart[ 1 ] = buffer[ pos ++ ];
rgbeStart[ 2 ] = buffer[ pos ++ ];
rgbeStart[ 3 ] = buffer[ pos ++ ];
if ( ( 2 != rgbeStart[ 0 ] ) || ( 2 != rgbeStart[ 1 ] ) || ( ( ( rgbeStart[ 2 ] << 8 ) | rgbeStart[ 3 ] ) != scanline_width ) ) {
return rgbe_error( rgbe_format_error, "bad rgbe scanline format" );
}
// read each of the four channels for the scanline into the buffer
// first red, then green, then blue, then exponent
ptr = 0;
while ( ( ptr < ptr_end ) && ( pos < buffer.byteLength ) ) {
count = buffer[ pos ++ ];
isEncodedRun = count > 128;
if ( isEncodedRun ) count -= 128;
if ( ( 0 === count ) || ( ptr + count > ptr_end ) ) {
return rgbe_error( rgbe_format_error, "bad scanline data" );
}
if ( isEncodedRun ) {
// a (encoded) run of the same value
byteValue = buffer[ pos ++ ];
for ( i = 0; i < count; i ++ ) {
scanline_buffer[ ptr ++ ] = byteValue;
}
//ptr += count;
} else {
// a literal-run
scanline_buffer.set( buffer.subarray( pos, pos + count ), ptr );
ptr += count; pos += count;
}
}
// now convert data from buffer into rgba
// first red, then green, then blue, then exponent (alpha)
l = scanline_width; //scanline_buffer.byteLength;
for ( i = 0; i < l; i ++ ) {
off = 0;
data_rgba[ offset ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 1 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 2 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 3 ] = scanline_buffer[ i + off ];
offset += 4;
}
num_scanlines --;
}
return data_rgba;
};
var RGBEByteToRGBFloat = function ( sourceArray, sourceOffset, destArray, destOffset ) {
var e = sourceArray[ sourceOffset + 3 ];
var scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = sourceArray[ sourceOffset + 0 ] * scale;
destArray[ destOffset + 1 ] = sourceArray[ sourceOffset + 1 ] * scale;
destArray[ destOffset + 2 ] = sourceArray[ sourceOffset + 2 ] * scale;
};
var RGBEByteToRGBHalf = ( function () {
// Source: http://gamedev.stackexchange.com/questions/17326/conversion-of-a-number-from-single-precision-floating-point-representation-to-a/17410#17410
var floatView = new Float32Array( 1 );
var int32View = new Int32Array( floatView.buffer );
/* This method is faster than the OpenEXR implementation (very often
* used, eg. in Ogre), with the additional benefit of rounding, inspired
* by James Tursa's half-precision code. */
function toHalf( val ) {
floatView[ 0 ] = val;
var x = int32View[ 0 ];
var bits = ( x >> 16 ) & 0x8000; /* Get the sign */
var m = ( x >> 12 ) & 0x07ff; /* Keep one extra bit for rounding */
var e = ( x >> 23 ) & 0xff; /* Using int is faster here */
/* If zero, or denormal, or exponent underflows too much for a denormal
* half, return signed zero. */
if ( e < 103 ) return bits;
/* If NaN, return NaN. If Inf or exponent overflow, return Inf. */
if ( e > 142 ) {
bits |= 0x7c00;
/* If exponent was 0xff and one mantissa bit was set, it means NaN,
* not Inf, so make sure we set one mantissa bit too. */
bits |= ( ( e == 255 ) ? 0 : 1 ) && ( x & 0x007fffff );
return bits;
}
/* If exponent underflows but not too much, return a denormal */
if ( e < 113 ) {
m |= 0x0800;
/* Extra rounding may overflow and set mantissa to 0 and exponent
* to 1, which is OK. */
bits |= ( m >> ( 114 - e ) ) + ( ( m >> ( 113 - e ) ) & 1 );
return bits;
}
bits |= ( ( e - 112 ) << 10 ) | ( m >> 1 );
/* Extra rounding. An overflow will set mantissa to 0 and increment
* the exponent, which is OK. */
bits += m & 1;
return bits;
}
return function ( sourceArray, sourceOffset, destArray, destOffset ) {
var e = sourceArray[ sourceOffset + 3 ];
var scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = toHalf( sourceArray[ sourceOffset + 0 ] * scale );
destArray[ destOffset + 1 ] = toHalf( sourceArray[ sourceOffset + 1 ] * scale );
destArray[ destOffset + 2 ] = toHalf( sourceArray[ sourceOffset + 2 ] * scale );
};
} )();
var byteArray = new Uint8Array( buffer );
byteArray.pos = 0;
var rgbe_header_info = RGBE_ReadHeader( byteArray );
if ( RGBE_RETURN_FAILURE !== rgbe_header_info ) {
var w = rgbe_header_info.width,
h = rgbe_header_info.height,
image_rgba_data = RGBE_ReadPixels_RLE( byteArray.subarray( byteArray.pos ), w, h );
if ( RGBE_RETURN_FAILURE !== image_rgba_data ) {
switch ( this.type ) {
case UnsignedByteType:
var data = image_rgba_data;
var format = RGBEFormat; // handled as THREE.RGBAFormat in shaders
var type = UnsignedByteType;
break;
case FloatType:
var numElements = ( image_rgba_data.length / 4 ) * 3;
var floatArray = new Float32Array( numElements );
for ( var j = 0; j < numElements; j ++ ) {
RGBEByteToRGBFloat( image_rgba_data, j * 4, floatArray, j * 3 );
}
var data = floatArray;
var format = RGBFormat;
var type = FloatType;
break;
case HalfFloatType:
var numElements = ( image_rgba_data.length / 4 ) * 3;
var halfArray = new Uint16Array( numElements );
for ( var j = 0; j < numElements; j ++ ) {
RGBEByteToRGBHalf( image_rgba_data, j * 4, halfArray, j * 3 );
}
var data = halfArray;
var format = RGBFormat;
var type = HalfFloatType;
break;
default:
console.error( 'THREE.RGBELoader: unsupported type: ', this.type );
break;
}
return {
width: w, height: h,
data: data,
header: rgbe_header_info.string,
gamma: rgbe_header_info.gamma,
exposure: rgbe_header_info.exposure,
format: format,
type: type
};
}
}
return null;
},
setDataType: function ( value ) {
this.type = value;
return this;
},
load: function ( url, onLoad, onProgress, onError ) {
function onLoadCallback( texture, texData ) {
switch ( texture.type ) {
case UnsignedByteType:
texture.encoding = RGBEEncoding;
texture.minFilter = NearestFilter;
texture.magFilter = NearestFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case FloatType:
texture.encoding = LinearEncoding;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case HalfFloatType:
texture.encoding = LinearEncoding;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
}
if ( onLoad ) onLoad( texture, texData );
}
return DataTextureLoader.prototype.load.call( this, url, onLoadCallback, onProgress, onError );
}
} );
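A hedged sketch of using the loader with half-float output; the .hdr path is a placeholder and the callback arguments follow the onLoadCallback signature shown above.
import { HalfFloatType } from 'three';

new RGBELoader()
  .setDataType(HalfFloatType)
  .load('textures/studio.hdr', (hdrTexture, texData) => {

    // For FloatType/HalfFloatType data the loader applies LinearFilter (see load() above).
    console.log('Loaded RGBE image:', texData.width, 'x', texData.height);

    // hdrTexture is now ready to be used, e.g. as an environment map source.
  });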
Example #14
Source File: GLTFLoader.js From FirstPersonCameraControl with MIT License | 4 votes |
GLTFLoader = ( function () {
function GLTFLoader( manager ) {
Loader.call( this, manager );
this.dracoLoader = null;
this.ddsLoader = null;
}
GLTFLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
constructor: GLTFLoader,
load: function ( url, onLoad, onProgress, onError ) {
var scope = this;
var resourcePath;
if ( this.resourcePath !== '' ) {
resourcePath = this.resourcePath;
} else if ( this.path !== '' ) {
resourcePath = this.path;
} else {
resourcePath = LoaderUtils.extractUrlBase( url );
}
// Tells the LoadingManager to track an extra item, which resolves after
// the model is fully loaded. This means the count of items loaded will
// be incorrect, but ensures manager.onLoad() does not fire early.
scope.manager.itemStart( url );
var _onError = function ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
scope.manager.itemEnd( url );
};
var loader = new FileLoader( scope.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
if ( scope.crossOrigin === 'use-credentials' ) {
loader.setWithCredentials( true );
}
loader.load( url, function ( data ) {
try {
scope.parse( data, resourcePath, function ( gltf ) {
onLoad( gltf );
scope.manager.itemEnd( url );
}, _onError );
} catch ( e ) {
_onError( e );
}
}, onProgress, _onError );
},
setDRACOLoader: function ( dracoLoader ) {
this.dracoLoader = dracoLoader;
return this;
},
setDDSLoader: function ( ddsLoader ) {
this.ddsLoader = ddsLoader;
return this;
},
parse: function ( data, path, onLoad, onError ) {
var content;
var extensions = {};
if ( typeof data === 'string' ) {
content = data;
} else {
var magic = LoaderUtils.decodeText( new Uint8Array( data, 0, 4 ) );
if ( magic === BINARY_EXTENSION_HEADER_MAGIC ) {
try {
extensions[ EXTENSIONS.KHR_BINARY_GLTF ] = new GLTFBinaryExtension( data );
} catch ( error ) {
if ( onError ) onError( error );
return;
}
content = extensions[ EXTENSIONS.KHR_BINARY_GLTF ].content;
} else {
content = LoaderUtils.decodeText( new Uint8Array( data ) );
}
}
var json = JSON.parse( content );
if ( json.asset === undefined || json.asset.version[ 0 ] < 2 ) {
if ( onError ) onError( new Error( 'THREE.GLTFLoader: Unsupported asset. glTF versions >=2.0 are supported.' ) );
return;
}
if ( json.extensionsUsed ) {
for ( var i = 0; i < json.extensionsUsed.length; ++ i ) {
var extensionName = json.extensionsUsed[ i ];
var extensionsRequired = json.extensionsRequired || [];
switch ( extensionName ) {
case EXTENSIONS.KHR_LIGHTS_PUNCTUAL:
extensions[ extensionName ] = new GLTFLightsExtension( json );
break;
case EXTENSIONS.KHR_MATERIALS_CLEARCOAT:
extensions[ extensionName ] = new GLTFMaterialsClearcoatExtension();
break;
case EXTENSIONS.KHR_MATERIALS_UNLIT:
extensions[ extensionName ] = new GLTFMaterialsUnlitExtension();
break;
case EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS:
extensions[ extensionName ] = new GLTFMaterialsPbrSpecularGlossinessExtension();
break;
case EXTENSIONS.KHR_DRACO_MESH_COMPRESSION:
extensions[ extensionName ] = new GLTFDracoMeshCompressionExtension( json, this.dracoLoader );
break;
case EXTENSIONS.MSFT_TEXTURE_DDS:
extensions[ extensionName ] = new GLTFTextureDDSExtension( this.ddsLoader );
break;
case EXTENSIONS.KHR_TEXTURE_TRANSFORM:
extensions[ extensionName ] = new GLTFTextureTransformExtension();
break;
case EXTENSIONS.KHR_MESH_QUANTIZATION:
extensions[ extensionName ] = new GLTFMeshQuantizationExtension();
break;
default:
if ( extensionsRequired.indexOf( extensionName ) >= 0 ) {
console.warn( 'THREE.GLTFLoader: Unknown extension "' + extensionName + '".' );
}
}
}
}
var parser = new GLTFParser( json, extensions, {
path: path || this.resourcePath || '',
crossOrigin: this.crossOrigin,
manager: this.manager
} );
parser.parse( onLoad, onError );
}
} );
/* GLTFREGISTRY */
function GLTFRegistry() {
var objects = {};
return {
get: function ( key ) {
return objects[ key ];
},
add: function ( key, object ) {
objects[ key ] = object;
},
remove: function ( key ) {
delete objects[ key ];
},
removeAll: function () {
objects = {};
}
};
}
/*********************************/
/********** EXTENSIONS ***********/
/*********************************/
var EXTENSIONS = {
KHR_BINARY_GLTF: 'KHR_binary_glTF',
KHR_DRACO_MESH_COMPRESSION: 'KHR_draco_mesh_compression',
KHR_LIGHTS_PUNCTUAL: 'KHR_lights_punctual',
KHR_MATERIALS_CLEARCOAT: 'KHR_materials_clearcoat',
KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS: 'KHR_materials_pbrSpecularGlossiness',
KHR_MATERIALS_UNLIT: 'KHR_materials_unlit',
KHR_TEXTURE_TRANSFORM: 'KHR_texture_transform',
KHR_MESH_QUANTIZATION: 'KHR_mesh_quantization',
MSFT_TEXTURE_DDS: 'MSFT_texture_dds'
};
/**
* DDS Texture Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Vendor/MSFT_texture_dds
*
*/
function GLTFTextureDDSExtension( ddsLoader ) {
if ( ! ddsLoader ) {
throw new Error( 'THREE.GLTFLoader: Attempting to load .dds texture without importing DDSLoader' );
}
this.name = EXTENSIONS.MSFT_TEXTURE_DDS;
this.ddsLoader = ddsLoader;
}
/**
* Punctual Lights Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_lights_punctual
*/
function GLTFLightsExtension( json ) {
this.name = EXTENSIONS.KHR_LIGHTS_PUNCTUAL;
var extension = ( json.extensions && json.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ] ) || {};
this.lightDefs = extension.lights || [];
}
GLTFLightsExtension.prototype.loadLight = function ( lightIndex ) {
var lightDef = this.lightDefs[ lightIndex ];
var lightNode;
var color = new Color( 0xffffff );
if ( lightDef.color !== undefined ) color.fromArray( lightDef.color );
var range = lightDef.range !== undefined ? lightDef.range : 0;
switch ( lightDef.type ) {
case 'directional':
lightNode = new DirectionalLight( color );
lightNode.target.position.set( 0, 0, - 1 );
lightNode.add( lightNode.target );
break;
case 'point':
lightNode = new PointLight( color );
lightNode.distance = range;
break;
case 'spot':
lightNode = new SpotLight( color );
lightNode.distance = range;
// Handle spotlight properties.
lightDef.spot = lightDef.spot || {};
lightDef.spot.innerConeAngle = lightDef.spot.innerConeAngle !== undefined ? lightDef.spot.innerConeAngle : 0;
lightDef.spot.outerConeAngle = lightDef.spot.outerConeAngle !== undefined ? lightDef.spot.outerConeAngle : Math.PI / 4.0;
lightNode.angle = lightDef.spot.outerConeAngle;
lightNode.penumbra = 1.0 - lightDef.spot.innerConeAngle / lightDef.spot.outerConeAngle;
lightNode.target.position.set( 0, 0, - 1 );
lightNode.add( lightNode.target );
break;
default:
throw new Error( 'THREE.GLTFLoader: Unexpected light type, "' + lightDef.type + '".' );
}
// Some lights (e.g. spot) default to a position other than the origin. Reset the position
// here, because node-level parsing will only override position if explicitly specified.
lightNode.position.set( 0, 0, 0 );
lightNode.decay = 2;
if ( lightDef.intensity !== undefined ) lightNode.intensity = lightDef.intensity;
lightNode.name = lightDef.name || ( 'light_' + lightIndex );
return Promise.resolve( lightNode );
};
/**
* Unlit Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_unlit
*/
function GLTFMaterialsUnlitExtension() {
this.name = EXTENSIONS.KHR_MATERIALS_UNLIT;
}
GLTFMaterialsUnlitExtension.prototype.getMaterialType = function () {
return MeshBasicMaterial;
};
GLTFMaterialsUnlitExtension.prototype.extendParams = function ( materialParams, materialDef, parser ) {
var pending = [];
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
var metallicRoughness = materialDef.pbrMetallicRoughness;
if ( metallicRoughness ) {
if ( Array.isArray( metallicRoughness.baseColorFactor ) ) {
var array = metallicRoughness.baseColorFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( metallicRoughness.baseColorTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', metallicRoughness.baseColorTexture ) );
}
}
return Promise.all( pending );
};
/**
* Clearcoat Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_clearcoat
*/
function GLTFMaterialsClearcoatExtension() {
this.name = EXTENSIONS.KHR_MATERIALS_CLEARCOAT;
}
GLTFMaterialsClearcoatExtension.prototype.getMaterialType = function () {
return MeshPhysicalMaterial;
};
GLTFMaterialsClearcoatExtension.prototype.extendParams = function ( materialParams, materialDef, parser ) {
var pending = [];
var extension = materialDef.extensions[ this.name ];
if ( extension.clearcoatFactor !== undefined ) {
materialParams.clearcoat = extension.clearcoatFactor;
}
if ( extension.clearcoatTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatMap', extension.clearcoatTexture ) );
}
if ( extension.clearcoatRoughnessFactor !== undefined ) {
materialParams.clearcoatRoughness = extension.clearcoatRoughnessFactor;
}
if ( extension.clearcoatRoughnessTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatRoughnessMap', extension.clearcoatRoughnessTexture ) );
}
if ( extension.clearcoatNormalTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatNormalMap', extension.clearcoatNormalTexture ) );
if ( extension.clearcoatNormalTexture.scale !== undefined ) {
var scale = extension.clearcoatNormalTexture.scale;
materialParams.clearcoatNormalScale = new Vector2( scale, scale );
}
}
return Promise.all( pending );
};
/* BINARY EXTENSION */
var BINARY_EXTENSION_HEADER_MAGIC = 'glTF';
var BINARY_EXTENSION_HEADER_LENGTH = 12;
var BINARY_EXTENSION_CHUNK_TYPES = { JSON: 0x4E4F534A, BIN: 0x004E4942 };
function GLTFBinaryExtension( data ) {
this.name = EXTENSIONS.KHR_BINARY_GLTF;
this.content = null;
this.body = null;
var headerView = new DataView( data, 0, BINARY_EXTENSION_HEADER_LENGTH );
this.header = {
magic: LoaderUtils.decodeText( new Uint8Array( data.slice( 0, 4 ) ) ),
version: headerView.getUint32( 4, true ),
length: headerView.getUint32( 8, true )
};
if ( this.header.magic !== BINARY_EXTENSION_HEADER_MAGIC ) {
throw new Error( 'THREE.GLTFLoader: Unsupported glTF-Binary header.' );
} else if ( this.header.version < 2.0 ) {
throw new Error( 'THREE.GLTFLoader: Legacy binary file detected.' );
}
var chunkView = new DataView( data, BINARY_EXTENSION_HEADER_LENGTH );
var chunkIndex = 0;
while ( chunkIndex < chunkView.byteLength ) {
var chunkLength = chunkView.getUint32( chunkIndex, true );
chunkIndex += 4;
var chunkType = chunkView.getUint32( chunkIndex, true );
chunkIndex += 4;
if ( chunkType === BINARY_EXTENSION_CHUNK_TYPES.JSON ) {
var contentArray = new Uint8Array( data, BINARY_EXTENSION_HEADER_LENGTH + chunkIndex, chunkLength );
this.content = LoaderUtils.decodeText( contentArray );
} else if ( chunkType === BINARY_EXTENSION_CHUNK_TYPES.BIN ) {
var byteOffset = BINARY_EXTENSION_HEADER_LENGTH + chunkIndex;
this.body = data.slice( byteOffset, byteOffset + chunkLength );
}
// Clients must ignore chunks with unknown types.
chunkIndex += chunkLength;
}
if ( this.content === null ) {
throw new Error( 'THREE.GLTFLoader: JSON content not found.' );
}
}
/**
* DRACO Mesh Compression Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_draco_mesh_compression
*/
function GLTFDracoMeshCompressionExtension( json, dracoLoader ) {
if ( ! dracoLoader ) {
throw new Error( 'THREE.GLTFLoader: No DRACOLoader instance provided.' );
}
this.name = EXTENSIONS.KHR_DRACO_MESH_COMPRESSION;
this.json = json;
this.dracoLoader = dracoLoader;
this.dracoLoader.preload();
}
GLTFDracoMeshCompressionExtension.prototype.decodePrimitive = function ( primitive, parser ) {
var json = this.json;
var dracoLoader = this.dracoLoader;
var bufferViewIndex = primitive.extensions[ this.name ].bufferView;
var gltfAttributeMap = primitive.extensions[ this.name ].attributes;
var threeAttributeMap = {};
var attributeNormalizedMap = {};
var attributeTypeMap = {};
for ( var attributeName in gltfAttributeMap ) {
var threeAttributeName = ATTRIBUTES[ attributeName ] || attributeName.toLowerCase();
threeAttributeMap[ threeAttributeName ] = gltfAttributeMap[ attributeName ];
}
for ( attributeName in primitive.attributes ) {
var threeAttributeName = ATTRIBUTES[ attributeName ] || attributeName.toLowerCase();
if ( gltfAttributeMap[ attributeName ] !== undefined ) {
var accessorDef = json.accessors[ primitive.attributes[ attributeName ] ];
var componentType = WEBGL_COMPONENT_TYPES[ accessorDef.componentType ];
attributeTypeMap[ threeAttributeName ] = componentType;
attributeNormalizedMap[ threeAttributeName ] = accessorDef.normalized === true;
}
}
return parser.getDependency( 'bufferView', bufferViewIndex ).then( function ( bufferView ) {
return new Promise( function ( resolve ) {
dracoLoader.decodeDracoFile( bufferView, function ( geometry ) {
for ( var attributeName in geometry.attributes ) {
var attribute = geometry.attributes[ attributeName ];
var normalized = attributeNormalizedMap[ attributeName ];
if ( normalized !== undefined ) attribute.normalized = normalized;
}
resolve( geometry );
}, threeAttributeMap, attributeTypeMap );
} );
} );
};
/**
* Texture Transform Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_texture_transform
*/
function GLTFTextureTransformExtension() {
this.name = EXTENSIONS.KHR_TEXTURE_TRANSFORM;
}
GLTFTextureTransformExtension.prototype.extendTexture = function ( texture, transform ) {
texture = texture.clone();
if ( transform.offset !== undefined ) {
texture.offset.fromArray( transform.offset );
}
if ( transform.rotation !== undefined ) {
texture.rotation = transform.rotation;
}
if ( transform.scale !== undefined ) {
texture.repeat.fromArray( transform.scale );
}
if ( transform.texCoord !== undefined ) {
console.warn( 'THREE.GLTFLoader: Custom UV sets in "' + this.name + '" extension not yet supported.' );
}
texture.needsUpdate = true;
return texture;
};
/**
* Specular-Glossiness Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_pbrSpecularGlossiness
*/
/**
* A sub class of StandardMaterial with some of the functionality
* changed via the `onBeforeCompile` callback
* @pailhead
*/
function GLTFMeshStandardSGMaterial( params ) {
MeshStandardMaterial.call( this );
this.isGLTFSpecularGlossinessMaterial = true;
//various chunks that need replacing
var specularMapParsFragmentChunk = [
'#ifdef USE_SPECULARMAP',
' uniform sampler2D specularMap;',
'#endif'
].join( '\n' );
var glossinessMapParsFragmentChunk = [
'#ifdef USE_GLOSSINESSMAP',
' uniform sampler2D glossinessMap;',
'#endif'
].join( '\n' );
var specularMapFragmentChunk = [
'vec3 specularFactor = specular;',
'#ifdef USE_SPECULARMAP',
' vec4 texelSpecular = texture2D( specularMap, vUv );',
' texelSpecular = sRGBToLinear( texelSpecular );',
' // reads channel RGB, compatible with a glTF Specular-Glossiness (RGBA) texture',
' specularFactor *= texelSpecular.rgb;',
'#endif'
].join( '\n' );
var glossinessMapFragmentChunk = [
'float glossinessFactor = glossiness;',
'#ifdef USE_GLOSSINESSMAP',
' vec4 texelGlossiness = texture2D( glossinessMap, vUv );',
' // reads channel A, compatible with a glTF Specular-Glossiness (RGBA) texture',
' glossinessFactor *= texelGlossiness.a;',
'#endif'
].join( '\n' );
var lightPhysicalFragmentChunk = [
'PhysicalMaterial material;',
'material.diffuseColor = diffuseColor.rgb;',
'vec3 dxy = max( abs( dFdx( geometryNormal ) ), abs( dFdy( geometryNormal ) ) );',
'float geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );',
'material.specularRoughness = max( 1.0 - glossinessFactor, 0.0525 );// 0.0525 corresponds to the base mip of a 256 cubemap.',
'material.specularRoughness += geometryRoughness;',
'material.specularRoughness = min( material.specularRoughness, 1.0 );',
'material.specularColor = specularFactor.rgb;',
].join( '\n' );
var uniforms = {
specular: { value: new Color().setHex( 0xffffff ) },
glossiness: { value: 1 },
specularMap: { value: null },
glossinessMap: { value: null }
};
this._extraUniforms = uniforms;
// please see #14031 or #13198 for an alternate approach
this.onBeforeCompile = function ( shader ) {
for ( var uniformName in uniforms ) {
shader.uniforms[ uniformName ] = uniforms[ uniformName ];
}
shader.fragmentShader = shader.fragmentShader.replace( 'uniform float roughness;', 'uniform vec3 specular;' );
shader.fragmentShader = shader.fragmentShader.replace( 'uniform float metalness;', 'uniform float glossiness;' );
shader.fragmentShader = shader.fragmentShader.replace( '#include <roughnessmap_pars_fragment>', specularMapParsFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <metalnessmap_pars_fragment>', glossinessMapParsFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <roughnessmap_fragment>', specularMapFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <metalnessmap_fragment>', glossinessMapFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <lights_physical_fragment>', lightPhysicalFragmentChunk );
};
/*eslint-disable*/
Object.defineProperties(
this,
{
specular: {
get: function () { return uniforms.specular.value; },
set: function ( v ) { uniforms.specular.value = v; }
},
specularMap: {
get: function () { return uniforms.specularMap.value; },
set: function ( v ) { uniforms.specularMap.value = v; }
},
glossiness: {
get: function () { return uniforms.glossiness.value; },
set: function ( v ) { uniforms.glossiness.value = v; }
},
glossinessMap: {
get: function () { return uniforms.glossinessMap.value; },
set: function ( v ) {
uniforms.glossinessMap.value = v;
//how about something like this - @pailhead
if ( v ) {
this.defines.USE_GLOSSINESSMAP = '';
// set USE_ROUGHNESSMAP to enable vUv
this.defines.USE_ROUGHNESSMAP = '';
} else {
delete this.defines.USE_ROUGHNESSMAP;
delete this.defines.USE_GLOSSINESSMAP;
}
}
}
}
);
/*eslint-enable*/
delete this.metalness;
delete this.roughness;
delete this.metalnessMap;
delete this.roughnessMap;
this.setValues( params );
}
GLTFMeshStandardSGMaterial.prototype = Object.create( MeshStandardMaterial.prototype );
GLTFMeshStandardSGMaterial.prototype.constructor = GLTFMeshStandardSGMaterial;
GLTFMeshStandardSGMaterial.prototype.copy = function ( source ) {
MeshStandardMaterial.prototype.copy.call( this, source );
this.specularMap = source.specularMap;
this.specular.copy( source.specular );
this.glossinessMap = source.glossinessMap;
this.glossiness = source.glossiness;
delete this.metalness;
delete this.roughness;
delete this.metalnessMap;
delete this.roughnessMap;
return this;
};
function GLTFMaterialsPbrSpecularGlossinessExtension() {
return {
name: EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS,
specularGlossinessParams: [
'color',
'map',
'lightMap',
'lightMapIntensity',
'aoMap',
'aoMapIntensity',
'emissive',
'emissiveIntensity',
'emissiveMap',
'bumpMap',
'bumpScale',
'normalMap',
'normalMapType',
'displacementMap',
'displacementScale',
'displacementBias',
'specularMap',
'specular',
'glossinessMap',
'glossiness',
'alphaMap',
'envMap',
'envMapIntensity',
'refractionRatio',
],
getMaterialType: function () {
return GLTFMeshStandardSGMaterial;
},
extendParams: function ( materialParams, materialDef, parser ) {
var pbrSpecularGlossiness = materialDef.extensions[ this.name ];
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
var pending = [];
if ( Array.isArray( pbrSpecularGlossiness.diffuseFactor ) ) {
var array = pbrSpecularGlossiness.diffuseFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( pbrSpecularGlossiness.diffuseTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', pbrSpecularGlossiness.diffuseTexture ) );
}
materialParams.emissive = new Color( 0.0, 0.0, 0.0 );
materialParams.glossiness = pbrSpecularGlossiness.glossinessFactor !== undefined ? pbrSpecularGlossiness.glossinessFactor : 1.0;
materialParams.specular = new Color( 1.0, 1.0, 1.0 );
if ( Array.isArray( pbrSpecularGlossiness.specularFactor ) ) {
materialParams.specular.fromArray( pbrSpecularGlossiness.specularFactor );
}
if ( pbrSpecularGlossiness.specularGlossinessTexture !== undefined ) {
var specGlossMapDef = pbrSpecularGlossiness.specularGlossinessTexture;
pending.push( parser.assignTexture( materialParams, 'glossinessMap', specGlossMapDef ) );
pending.push( parser.assignTexture( materialParams, 'specularMap', specGlossMapDef ) );
}
return Promise.all( pending );
},
createMaterial: function ( materialParams ) {
var material = new GLTFMeshStandardSGMaterial( materialParams );
material.fog = true;
material.color = materialParams.color;
material.map = materialParams.map === undefined ? null : materialParams.map;
material.lightMap = null;
material.lightMapIntensity = 1.0;
material.aoMap = materialParams.aoMap === undefined ? null : materialParams.aoMap;
material.aoMapIntensity = 1.0;
material.emissive = materialParams.emissive;
material.emissiveIntensity = 1.0;
material.emissiveMap = materialParams.emissiveMap === undefined ? null : materialParams.emissiveMap;
material.bumpMap = materialParams.bumpMap === undefined ? null : materialParams.bumpMap;
material.bumpScale = 1;
material.normalMap = materialParams.normalMap === undefined ? null : materialParams.normalMap;
material.normalMapType = TangentSpaceNormalMap;
if ( materialParams.normalScale ) material.normalScale = materialParams.normalScale;
material.displacementMap = null;
material.displacementScale = 1;
material.displacementBias = 0;
material.specularMap = materialParams.specularMap === undefined ? null : materialParams.specularMap;
material.specular = materialParams.specular;
material.glossinessMap = materialParams.glossinessMap === undefined ? null : materialParams.glossinessMap;
material.glossiness = materialParams.glossiness;
material.alphaMap = null;
material.envMap = materialParams.envMap === undefined ? null : materialParams.envMap;
material.envMapIntensity = 1.0;
material.refractionRatio = 0.98;
return material;
},
};
}
/**
* Mesh Quantization Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_mesh_quantization
*/
function GLTFMeshQuantizationExtension() {
this.name = EXTENSIONS.KHR_MESH_QUANTIZATION;
}
/*********************************/
/********** INTERPOLATION ********/
/*********************************/
// Spline Interpolation
// Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#appendix-c-spline-interpolation
function GLTFCubicSplineInterpolant( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
Interpolant.call( this, parameterPositions, sampleValues, sampleSize, resultBuffer );
}
GLTFCubicSplineInterpolant.prototype = Object.create( Interpolant.prototype );
GLTFCubicSplineInterpolant.prototype.constructor = GLTFCubicSplineInterpolant;
GLTFCubicSplineInterpolant.prototype.copySampleValue_ = function ( index ) {
// Copies a sample value to the result buffer. See description of glTF
// CUBICSPLINE values layout in interpolate_() function below.
var result = this.resultBuffer,
values = this.sampleValues,
valueSize = this.valueSize,
offset = index * valueSize * 3 + valueSize;
for ( var i = 0; i !== valueSize; i ++ ) {
result[ i ] = values[ offset + i ];
}
return result;
};
GLTFCubicSplineInterpolant.prototype.beforeStart_ = GLTFCubicSplineInterpolant.prototype.copySampleValue_;
GLTFCubicSplineInterpolant.prototype.afterEnd_ = GLTFCubicSplineInterpolant.prototype.copySampleValue_;
GLTFCubicSplineInterpolant.prototype.interpolate_ = function ( i1, t0, t, t1 ) {
var result = this.resultBuffer;
var values = this.sampleValues;
var stride = this.valueSize;
var stride2 = stride * 2;
var stride3 = stride * 3;
var td = t1 - t0;
var p = ( t - t0 ) / td;
var pp = p * p;
var ppp = pp * p;
var offset1 = i1 * stride3;
var offset0 = offset1 - stride3;
var s2 = - 2 * ppp + 3 * pp;
var s3 = ppp - pp;
var s0 = 1 - s2;
var s1 = s3 - pp + p;
// Layout of keyframe output values for CUBICSPLINE animations:
// [ inTangent_1, splineVertex_1, outTangent_1, inTangent_2, splineVertex_2, ... ]
for ( var i = 0; i !== stride; i ++ ) {
var p0 = values[ offset0 + i + stride ]; // splineVertex_k
var m0 = values[ offset0 + i + stride2 ] * td; // outTangent_k * (t_k+1 - t_k)
var p1 = values[ offset1 + i + stride ]; // splineVertex_k+1
var m1 = values[ offset1 + i ] * td; // inTangent_k+1 * (t_k+1 - t_k)
result[ i ] = s0 * p0 + s1 * m0 + s2 * p1 + s3 * m1;
}
return result;
};
/*********************************/
/********** INTERNALS ************/
/*********************************/
/* CONSTANTS */
var WEBGL_CONSTANTS = {
FLOAT: 5126,
//FLOAT_MAT2: 35674,
FLOAT_MAT3: 35675,
FLOAT_MAT4: 35676,
FLOAT_VEC2: 35664,
FLOAT_VEC3: 35665,
FLOAT_VEC4: 35666,
LINEAR: 9729,
REPEAT: 10497,
SAMPLER_2D: 35678,
POINTS: 0,
LINES: 1,
LINE_LOOP: 2,
LINE_STRIP: 3,
TRIANGLES: 4,
TRIANGLE_STRIP: 5,
TRIANGLE_FAN: 6,
UNSIGNED_BYTE: 5121,
UNSIGNED_SHORT: 5123
};
var WEBGL_COMPONENT_TYPES = {
5120: Int8Array,
5121: Uint8Array,
5122: Int16Array,
5123: Uint16Array,
5125: Uint32Array,
5126: Float32Array
};
var WEBGL_FILTERS = {
9728: NearestFilter,
9729: LinearFilter,
9984: NearestMipmapNearestFilter,
9985: LinearMipmapNearestFilter,
9986: NearestMipmapLinearFilter,
9987: LinearMipmapLinearFilter
};
var WEBGL_WRAPPINGS = {
33071: ClampToEdgeWrapping,
33648: MirroredRepeatWrapping,
10497: RepeatWrapping
};
var WEBGL_TYPE_SIZES = {
'SCALAR': 1,
'VEC2': 2,
'VEC3': 3,
'VEC4': 4,
'MAT2': 4,
'MAT3': 9,
'MAT4': 16
};
var ATTRIBUTES = {
POSITION: 'position',
NORMAL: 'normal',
TANGENT: 'tangent',
TEXCOORD_0: 'uv',
TEXCOORD_1: 'uv2',
COLOR_0: 'color',
WEIGHTS_0: 'skinWeight',
JOINTS_0: 'skinIndex',
};
var PATH_PROPERTIES = {
scale: 'scale',
translation: 'position',
rotation: 'quaternion',
weights: 'morphTargetInfluences'
};
var INTERPOLATION = {
CUBICSPLINE: undefined, // We use a custom interpolant (GLTFCubicSplineInterpolation) for CUBICSPLINE tracks. Each
// keyframe track will be initialized with a default interpolation type, then modified.
LINEAR: InterpolateLinear,
STEP: InterpolateDiscrete
};
var ALPHA_MODES = {
OPAQUE: 'OPAQUE',
MASK: 'MASK',
BLEND: 'BLEND'
};
var MIME_TYPE_FORMATS = {
'image/png': RGBAFormat,
'image/jpeg': RGBFormat
};
/* UTILITY FUNCTIONS */
function resolveURL( url, path ) {
// Invalid URL
if ( typeof url !== 'string' || url === '' ) return '';
// Host Relative URL
if ( /^https?:\/\//i.test( path ) && /^\//.test( url ) ) {
path = path.replace( /(^https?:\/\/[^\/]+).*/i, '$1' );
}
// Absolute URL http://,https://,//
if ( /^(https?:)?\/\//i.test( url ) ) return url;
// Data URI
if ( /^data:.*,.*$/i.test( url ) ) return url;
// Blob URL
if ( /^blob:.*$/i.test( url ) ) return url;
// Relative URL
return path + url;
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#default-material
*/
function createDefaultMaterial( cache ) {
if ( cache[ 'DefaultMaterial' ] === undefined ) {
cache[ 'DefaultMaterial' ] = new MeshStandardMaterial( {
color: 0xFFFFFF,
emissive: 0x000000,
metalness: 1,
roughness: 1,
transparent: false,
depthTest: true,
side: FrontSide
} );
}
return cache[ 'DefaultMaterial' ];
}
function addUnknownExtensionsToUserData( knownExtensions, object, objectDef ) {
// Add unknown glTF extensions to an object's userData.
for ( var name in objectDef.extensions ) {
if ( knownExtensions[ name ] === undefined ) {
object.userData.gltfExtensions = object.userData.gltfExtensions || {};
object.userData.gltfExtensions[ name ] = objectDef.extensions[ name ];
}
}
}
/**
* @param {Object3D|Material|BufferGeometry} object
* @param {GLTF.definition} gltfDef
*/
function assignExtrasToUserData( object, gltfDef ) {
if ( gltfDef.extras !== undefined ) {
if ( typeof gltfDef.extras === 'object' ) {
Object.assign( object.userData, gltfDef.extras );
} else {
console.warn( 'THREE.GLTFLoader: Ignoring primitive type .extras, ' + gltfDef.extras );
}
}
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#morph-targets
*
* @param {BufferGeometry} geometry
* @param {Array<GLTF.Target>} targets
* @param {GLTFParser} parser
* @return {Promise<BufferGeometry>}
*/
function addMorphTargets( geometry, targets, parser ) {
var hasMorphPosition = false;
var hasMorphNormal = false;
for ( var i = 0, il = targets.length; i < il; i ++ ) {
var target = targets[ i ];
if ( target.POSITION !== undefined ) hasMorphPosition = true;
if ( target.NORMAL !== undefined ) hasMorphNormal = true;
if ( hasMorphPosition && hasMorphNormal ) break;
}
if ( ! hasMorphPosition && ! hasMorphNormal ) return Promise.resolve( geometry );
var pendingPositionAccessors = [];
var pendingNormalAccessors = [];
for ( var i = 0, il = targets.length; i < il; i ++ ) {
var target = targets[ i ];
if ( hasMorphPosition ) {
var pendingAccessor = target.POSITION !== undefined
? parser.getDependency( 'accessor', target.POSITION )
: geometry.attributes.position;
pendingPositionAccessors.push( pendingAccessor );
}
if ( hasMorphNormal ) {
var pendingAccessor = target.NORMAL !== undefined
? parser.getDependency( 'accessor', target.NORMAL )
: geometry.attributes.normal;
pendingNormalAccessors.push( pendingAccessor );
}
}
return Promise.all( [
Promise.all( pendingPositionAccessors ),
Promise.all( pendingNormalAccessors )
] ).then( function ( accessors ) {
var morphPositions = accessors[ 0 ];
var morphNormals = accessors[ 1 ];
if ( hasMorphPosition ) geometry.morphAttributes.position = morphPositions;
if ( hasMorphNormal ) geometry.morphAttributes.normal = morphNormals;
geometry.morphTargetsRelative = true;
return geometry;
} );
}
/**
* @param {Mesh} mesh
* @param {GLTF.Mesh} meshDef
*/
function updateMorphTargets( mesh, meshDef ) {
mesh.updateMorphTargets();
if ( meshDef.weights !== undefined ) {
for ( var i = 0, il = meshDef.weights.length; i < il; i ++ ) {
mesh.morphTargetInfluences[ i ] = meshDef.weights[ i ];
}
}
// .extras has user-defined data, so check that .extras.targetNames is an array.
if ( meshDef.extras && Array.isArray( meshDef.extras.targetNames ) ) {
var targetNames = meshDef.extras.targetNames;
if ( mesh.morphTargetInfluences.length === targetNames.length ) {
mesh.morphTargetDictionary = {};
for ( var i = 0, il = targetNames.length; i < il; i ++ ) {
mesh.morphTargetDictionary[ targetNames[ i ] ] = i;
}
} else {
console.warn( 'THREE.GLTFLoader: Invalid extras.targetNames length. Ignoring names.' );
}
}
}
function createPrimitiveKey( primitiveDef ) {
var dracoExtension = primitiveDef.extensions && primitiveDef.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ];
var geometryKey;
if ( dracoExtension ) {
geometryKey = 'draco:' + dracoExtension.bufferView
+ ':' + dracoExtension.indices
+ ':' + createAttributesKey( dracoExtension.attributes );
} else {
geometryKey = primitiveDef.indices + ':' + createAttributesKey( primitiveDef.attributes ) + ':' + primitiveDef.mode;
}
return geometryKey;
}
function createAttributesKey( attributes ) {
var attributesKey = '';
var keys = Object.keys( attributes ).sort();
for ( var i = 0, il = keys.length; i < il; i ++ ) {
attributesKey += keys[ i ] + ':' + attributes[ keys[ i ] ] + ';';
}
return attributesKey;
}
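// For illustration: createAttributesKey( { POSITION: 0, NORMAL: 1 } ) returns 'NORMAL:1;POSITION:0;'
// because the keys are sorted, so equivalent primitives always produce the same cache key.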
/* GLTF PARSER */
function GLTFParser( json, extensions, options ) {
this.json = json || {};
this.extensions = extensions || {};
this.options = options || {};
// loader object cache
this.cache = new GLTFRegistry();
// BufferGeometry caching
this.primitiveCache = {};
this.textureLoader = new TextureLoader( this.options.manager );
this.textureLoader.setCrossOrigin( this.options.crossOrigin );
this.fileLoader = new FileLoader( this.options.manager );
this.fileLoader.setResponseType( 'arraybuffer' );
if ( this.options.crossOrigin === 'use-credentials' ) {
this.fileLoader.setWithCredentials( true );
}
}
GLTFParser.prototype.parse = function ( onLoad, onError ) {
var parser = this;
var json = this.json;
var extensions = this.extensions;
// Clear the loader cache
this.cache.removeAll();
// Mark the special nodes/meshes in json for efficient parse
this.markDefs();
Promise.all( [
this.getDependencies( 'scene' ),
this.getDependencies( 'animation' ),
this.getDependencies( 'camera' ),
] ).then( function ( dependencies ) {
var result = {
scene: dependencies[ 0 ][ json.scene || 0 ],
scenes: dependencies[ 0 ],
animations: dependencies[ 1 ],
cameras: dependencies[ 2 ],
asset: json.asset,
parser: parser,
userData: {}
};
addUnknownExtensionsToUserData( extensions, result, json );
assignExtrasToUserData( result, json );
onLoad( result );
} ).catch( onError );
};
/**
* Marks the special nodes/meshes in json for efficient parse.
*/
GLTFParser.prototype.markDefs = function () {
var nodeDefs = this.json.nodes || [];
var skinDefs = this.json.skins || [];
var meshDefs = this.json.meshes || [];
var meshReferences = {};
var meshUses = {};
// Nothing in the node definition indicates whether it is a Bone or an
// Object3D. Use the skins' joint references to mark bones.
for ( var skinIndex = 0, skinLength = skinDefs.length; skinIndex < skinLength; skinIndex ++ ) {
var joints = skinDefs[ skinIndex ].joints;
for ( var i = 0, il = joints.length; i < il; i ++ ) {
nodeDefs[ joints[ i ] ].isBone = true;
}
}
// Meshes can (and should) be reused by multiple nodes in a glTF asset. To
// avoid having more than one Mesh with the same name, count
// references and rename instances below.
//
// Example: CesiumMilkTruck sample model reuses "Wheel" meshes.
for ( var nodeIndex = 0, nodeLength = nodeDefs.length; nodeIndex < nodeLength; nodeIndex ++ ) {
var nodeDef = nodeDefs[ nodeIndex ];
if ( nodeDef.mesh !== undefined ) {
if ( meshReferences[ nodeDef.mesh ] === undefined ) {
meshReferences[ nodeDef.mesh ] = meshUses[ nodeDef.mesh ] = 0;
}
meshReferences[ nodeDef.mesh ] ++;
// Nothing in the mesh definition indicates whether it is
// a SkinnedMesh or Mesh. Use the node's mesh reference
// to mark SkinnedMesh if node has skin.
if ( nodeDef.skin !== undefined ) {
meshDefs[ nodeDef.mesh ].isSkinnedMesh = true;
}
}
}
this.json.meshReferences = meshReferences;
this.json.meshUses = meshUses;
};
/**
* Requests the specified dependency asynchronously, with caching.
* @param {string} type
* @param {number} index
* @return {Promise<Object3D|Material|THREE.Texture|AnimationClip|ArrayBuffer|Object>}
*/
GLTFParser.prototype.getDependency = function ( type, index ) {
var cacheKey = type + ':' + index;
var dependency = this.cache.get( cacheKey );
if ( ! dependency ) {
switch ( type ) {
case 'scene':
dependency = this.loadScene( index );
break;
case 'node':
dependency = this.loadNode( index );
break;
case 'mesh':
dependency = this.loadMesh( index );
break;
case 'accessor':
dependency = this.loadAccessor( index );
break;
case 'bufferView':
dependency = this.loadBufferView( index );
break;
case 'buffer':
dependency = this.loadBuffer( index );
break;
case 'material':
dependency = this.loadMaterial( index );
break;
case 'texture':
dependency = this.loadTexture( index );
break;
case 'skin':
dependency = this.loadSkin( index );
break;
case 'animation':
dependency = this.loadAnimation( index );
break;
case 'camera':
dependency = this.loadCamera( index );
break;
case 'light':
dependency = this.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ].loadLight( index );
break;
default:
throw new Error( 'Unknown type: ' + type );
}
this.cache.add( cacheKey, dependency );
}
return dependency;
};
/**
* Requests all dependencies of the specified type asynchronously, with caching.
* @param {string} type
* @return {Promise<Array<Object>>}
*/
GLTFParser.prototype.getDependencies = function ( type ) {
var dependencies = this.cache.get( type );
if ( ! dependencies ) {
var parser = this;
var defs = this.json[ type + ( type === 'mesh' ? 'es' : 's' ) ] || [];
dependencies = Promise.all( defs.map( function ( def, index ) {
return parser.getDependency( type, index );
} ) );
this.cache.add( type, dependencies );
}
return dependencies;
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#buffers-and-buffer-views
* @param {number} bufferIndex
* @return {Promise<ArrayBuffer>}
*/
GLTFParser.prototype.loadBuffer = function ( bufferIndex ) {
var bufferDef = this.json.buffers[ bufferIndex ];
var loader = this.fileLoader;
if ( bufferDef.type && bufferDef.type !== 'arraybuffer' ) {
throw new Error( 'THREE.GLTFLoader: ' + bufferDef.type + ' buffer type is not supported.' );
}
// If present, GLB container is required to be the first buffer.
if ( bufferDef.uri === undefined && bufferIndex === 0 ) {
return Promise.resolve( this.extensions[ EXTENSIONS.KHR_BINARY_GLTF ].body );
}
var options = this.options;
return new Promise( function ( resolve, reject ) {
loader.load( resolveURL( bufferDef.uri, options.path ), resolve, undefined, function () {
reject( new Error( 'THREE.GLTFLoader: Failed to load buffer "' + bufferDef.uri + '".' ) );
} );
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#buffers-and-buffer-views
* @param {number} bufferViewIndex
* @return {Promise<ArrayBuffer>}
*/
GLTFParser.prototype.loadBufferView = function ( bufferViewIndex ) {
var bufferViewDef = this.json.bufferViews[ bufferViewIndex ];
return this.getDependency( 'buffer', bufferViewDef.buffer ).then( function ( buffer ) {
var byteLength = bufferViewDef.byteLength || 0;
var byteOffset = bufferViewDef.byteOffset || 0;
return buffer.slice( byteOffset, byteOffset + byteLength );
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#accessors
* @param {number} accessorIndex
* @return {Promise<BufferAttribute|InterleavedBufferAttribute>}
*/
GLTFParser.prototype.loadAccessor = function ( accessorIndex ) {
var parser = this;
var json = this.json;
var accessorDef = this.json.accessors[ accessorIndex ];
if ( accessorDef.bufferView === undefined && accessorDef.sparse === undefined ) {
// Ignore empty accessors, which may be used to declare runtime
// information about attributes coming from another source (e.g. Draco
// compression extension).
return Promise.resolve( null );
}
var pendingBufferViews = [];
if ( accessorDef.bufferView !== undefined ) {
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.bufferView ) );
} else {
pendingBufferViews.push( null );
}
if ( accessorDef.sparse !== undefined ) {
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.sparse.indices.bufferView ) );
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.sparse.values.bufferView ) );
}
return Promise.all( pendingBufferViews ).then( function ( bufferViews ) {
var bufferView = bufferViews[ 0 ];
var itemSize = WEBGL_TYPE_SIZES[ accessorDef.type ];
var TypedArray = WEBGL_COMPONENT_TYPES[ accessorDef.componentType ];
// For VEC3: itemSize is 3, elementBytes is 4, itemBytes is 12.
var elementBytes = TypedArray.BYTES_PER_ELEMENT;
var itemBytes = elementBytes * itemSize;
var byteOffset = accessorDef.byteOffset || 0;
var byteStride = accessorDef.bufferView !== undefined ? json.bufferViews[ accessorDef.bufferView ].byteStride : undefined;
var normalized = accessorDef.normalized === true;
var array, bufferAttribute;
// The buffer is not interleaved if the stride is the item size in bytes.
if ( byteStride && byteStride !== itemBytes ) {
// Each "slice" of the buffer, as defined by 'count' elements of 'byteStride' bytes, gets its own InterleavedBuffer
// This makes sure that IBA.count reflects accessor.count properly
var ibSlice = Math.floor( byteOffset / byteStride );
var ibCacheKey = 'InterleavedBuffer:' + accessorDef.bufferView + ':' + accessorDef.componentType + ':' + ibSlice + ':' + accessorDef.count;
var ib = parser.cache.get( ibCacheKey );
if ( ! ib ) {
array = new TypedArray( bufferView, ibSlice * byteStride, accessorDef.count * byteStride / elementBytes );
// Integer parameters to IB/IBA are in array elements, not bytes.
ib = new InterleavedBuffer( array, byteStride / elementBytes );
parser.cache.add( ibCacheKey, ib );
}
bufferAttribute = new InterleavedBufferAttribute( ib, itemSize, ( byteOffset % byteStride ) / elementBytes, normalized );
} else {
if ( bufferView === null ) {
array = new TypedArray( accessorDef.count * itemSize );
} else {
array = new TypedArray( bufferView, byteOffset, accessorDef.count * itemSize );
}
bufferAttribute = new BufferAttribute( array, itemSize, normalized );
}
// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#sparse-accessors
if ( accessorDef.sparse !== undefined ) {
var itemSizeIndices = WEBGL_TYPE_SIZES.SCALAR;
var TypedArrayIndices = WEBGL_COMPONENT_TYPES[ accessorDef.sparse.indices.componentType ];
var byteOffsetIndices = accessorDef.sparse.indices.byteOffset || 0;
var byteOffsetValues = accessorDef.sparse.values.byteOffset || 0;
var sparseIndices = new TypedArrayIndices( bufferViews[ 1 ], byteOffsetIndices, accessorDef.sparse.count * itemSizeIndices );
var sparseValues = new TypedArray( bufferViews[ 2 ], byteOffsetValues, accessorDef.sparse.count * itemSize );
if ( bufferView !== null ) {
// Avoid modifying the original ArrayBuffer, if the bufferView wasn't initialized with zeroes.
bufferAttribute = new BufferAttribute( bufferAttribute.array.slice(), bufferAttribute.itemSize, bufferAttribute.normalized );
}
for ( var i = 0, il = sparseIndices.length; i < il; i ++ ) {
var index = sparseIndices[ i ];
bufferAttribute.setX( index, sparseValues[ i * itemSize ] );
if ( itemSize >= 2 ) bufferAttribute.setY( index, sparseValues[ i * itemSize + 1 ] );
if ( itemSize >= 3 ) bufferAttribute.setZ( index, sparseValues[ i * itemSize + 2 ] );
if ( itemSize >= 4 ) bufferAttribute.setW( index, sparseValues[ i * itemSize + 3 ] );
if ( itemSize >= 5 ) throw new Error( 'THREE.GLTFLoader: Unsupported itemSize in sparse BufferAttribute.' );
}
}
return bufferAttribute;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#textures
* @param {number} textureIndex
* @return {Promise<THREE.Texture>}
*/
GLTFParser.prototype.loadTexture = function ( textureIndex ) {
var parser = this;
var json = this.json;
var options = this.options;
var textureLoader = this.textureLoader;
var URL = self.URL || self.webkitURL;
var textureDef = json.textures[ textureIndex ];
var textureExtensions = textureDef.extensions || {};
var source;
if ( textureExtensions[ EXTENSIONS.MSFT_TEXTURE_DDS ] ) {
source = json.images[ textureExtensions[ EXTENSIONS.MSFT_TEXTURE_DDS ].source ];
} else {
source = json.images[ textureDef.source ];
}
var sourceURI = source.uri;
var isObjectURL = false;
if ( source.bufferView !== undefined ) {
// Load binary image data from bufferView, if provided.
sourceURI = parser.getDependency( 'bufferView', source.bufferView ).then( function ( bufferView ) {
isObjectURL = true;
var blob = new Blob( [ bufferView ], { type: source.mimeType } );
sourceURI = URL.createObjectURL( blob );
return sourceURI;
} );
}
return Promise.resolve( sourceURI ).then( function ( sourceURI ) {
// Load Texture resource.
var loader = options.manager.getHandler( sourceURI );
if ( ! loader ) {
loader = textureExtensions[ EXTENSIONS.MSFT_TEXTURE_DDS ]
? parser.extensions[ EXTENSIONS.MSFT_TEXTURE_DDS ].ddsLoader
: textureLoader;
}
return new Promise( function ( resolve, reject ) {
loader.load( resolveURL( sourceURI, options.path ), resolve, undefined, reject );
} );
} ).then( function ( texture ) {
// Clean up resources and configure Texture.
if ( isObjectURL === true ) {
URL.revokeObjectURL( sourceURI );
}
texture.flipY = false;
if ( textureDef.name ) texture.name = textureDef.name;
// Ignore unknown mime types, like DDS files.
if ( source.mimeType in MIME_TYPE_FORMATS ) {
texture.format = MIME_TYPE_FORMATS[ source.mimeType ];
}
var samplers = json.samplers || {};
var sampler = samplers[ textureDef.sampler ] || {};
texture.magFilter = WEBGL_FILTERS[ sampler.magFilter ] || LinearFilter;
texture.minFilter = WEBGL_FILTERS[ sampler.minFilter ] || LinearMipmapLinearFilter;
texture.wrapS = WEBGL_WRAPPINGS[ sampler.wrapS ] || RepeatWrapping;
texture.wrapT = WEBGL_WRAPPINGS[ sampler.wrapT ] || RepeatWrapping;
return texture;
} );
};
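// For illustration: a glTF sampler of { magFilter: 9729, minFilter: 9987, wrapS: 10497, wrapT: 10497 }
// yields texture.magFilter === LinearFilter, texture.minFilter === LinearMipmapLinearFilter and
// RepeatWrapping on both axes; a texture without a sampler falls back to those same defaults.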
/**
* Asynchronously assigns a texture to the given material parameters.
* @param {Object} materialParams
* @param {string} mapName
* @param {Object} mapDef
* @return {Promise}
*/
GLTFParser.prototype.assignTexture = function ( materialParams, mapName, mapDef ) {
var parser = this;
return this.getDependency( 'texture', mapDef.index ).then( function ( texture ) {
if ( ! texture.isCompressedTexture ) {
switch ( mapName ) {
case 'aoMap':
case 'emissiveMap':
case 'metalnessMap':
case 'normalMap':
case 'roughnessMap':
texture.format = RGBFormat;
break;
}
}
// Materials sample aoMap from UV set 1 and other maps from UV set 0 - this can't be configured
// However, we will copy UV set 0 to UV set 1 on demand for aoMap
if ( mapDef.texCoord !== undefined && mapDef.texCoord != 0 && ! ( mapName === 'aoMap' && mapDef.texCoord == 1 ) ) {
console.warn( 'THREE.GLTFLoader: Custom UV set ' + mapDef.texCoord + ' for texture ' + mapName + ' not yet supported.' );
}
if ( parser.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ] ) {
var transform = mapDef.extensions !== undefined ? mapDef.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ] : undefined;
if ( transform ) {
texture = parser.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ].extendTexture( texture, transform );
}
}
materialParams[ mapName ] = texture;
} );
};
/**
* Assigns final material to a Mesh, Line, or Points instance. The instance
* already has a material (generated from the glTF material options alone)
* but reuse of the same glTF material may require multiple three.js materials
* to accommodate different primitive types, defines, etc. New materials will
* be created if necessary, and reused from a cache.
* @param {Object3D} mesh Mesh, Line, or Points instance.
*/
GLTFParser.prototype.assignFinalMaterial = function ( mesh ) {
var geometry = mesh.geometry;
var material = mesh.material;
var useVertexTangents = geometry.attributes.tangent !== undefined;
var useVertexColors = geometry.attributes.color !== undefined;
var useFlatShading = geometry.attributes.normal === undefined;
var useSkinning = mesh.isSkinnedMesh === true;
var useMorphTargets = Object.keys( geometry.morphAttributes ).length > 0;
var useMorphNormals = useMorphTargets && geometry.morphAttributes.normal !== undefined;
if ( mesh.isPoints ) {
var cacheKey = 'PointsMaterial:' + material.uuid;
var pointsMaterial = this.cache.get( cacheKey );
if ( ! pointsMaterial ) {
pointsMaterial = new PointsMaterial();
Material.prototype.copy.call( pointsMaterial, material );
pointsMaterial.color.copy( material.color );
pointsMaterial.map = material.map;
pointsMaterial.sizeAttenuation = false; // glTF spec says points should be 1px
this.cache.add( cacheKey, pointsMaterial );
}
material = pointsMaterial;
} else if ( mesh.isLine ) {
var cacheKey = 'LineBasicMaterial:' + material.uuid;
var lineMaterial = this.cache.get( cacheKey );
if ( ! lineMaterial ) {
lineMaterial = new LineBasicMaterial();
Material.prototype.copy.call( lineMaterial, material );
lineMaterial.color.copy( material.color );
this.cache.add( cacheKey, lineMaterial );
}
material = lineMaterial;
}
// Clone the material if it will be modified
if ( useVertexTangents || useVertexColors || useFlatShading || useSkinning || useMorphTargets ) {
var cacheKey = 'ClonedMaterial:' + material.uuid + ':';
if ( material.isGLTFSpecularGlossinessMaterial ) cacheKey += 'specular-glossiness:';
if ( useSkinning ) cacheKey += 'skinning:';
if ( useVertexTangents ) cacheKey += 'vertex-tangents:';
if ( useVertexColors ) cacheKey += 'vertex-colors:';
if ( useFlatShading ) cacheKey += 'flat-shading:';
if ( useMorphTargets ) cacheKey += 'morph-targets:';
if ( useMorphNormals ) cacheKey += 'morph-normals:';
var cachedMaterial = this.cache.get( cacheKey );
if ( ! cachedMaterial ) {
cachedMaterial = material.clone();
if ( useSkinning ) cachedMaterial.skinning = true;
if ( useVertexTangents ) cachedMaterial.vertexTangents = true;
if ( useVertexColors ) cachedMaterial.vertexColors = true;
if ( useFlatShading ) cachedMaterial.flatShading = true;
if ( useMorphTargets ) cachedMaterial.morphTargets = true;
if ( useMorphNormals ) cachedMaterial.morphNormals = true;
this.cache.add( cacheKey, cachedMaterial );
}
material = cachedMaterial;
}
// workarounds for mesh and geometry
if ( material.aoMap && geometry.attributes.uv2 === undefined && geometry.attributes.uv !== undefined ) {
geometry.setAttribute( 'uv2', new BufferAttribute( geometry.attributes.uv.array, 2 ) );
}
// https://github.com/mrdoob/three.js/issues/11438#issuecomment-507003995
if ( material.normalScale && ! useVertexTangents ) {
material.normalScale.y = - material.normalScale.y;
}
if ( material.clearcoatNormalScale && ! useVertexTangents ) {
material.clearcoatNormalScale.y = - material.clearcoatNormalScale.y;
}
mesh.material = material;
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#materials
* @param {number} materialIndex
* @return {Promise<Material>}
*/
GLTFParser.prototype.loadMaterial = function ( materialIndex ) {
var parser = this;
var json = this.json;
var extensions = this.extensions;
var materialDef = json.materials[ materialIndex ];
var materialType;
var materialParams = {};
var materialExtensions = materialDef.extensions || {};
var pending = [];
if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ] ) {
var sgExtension = extensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ];
materialType = sgExtension.getMaterialType();
pending.push( sgExtension.extendParams( materialParams, materialDef, parser ) );
} else if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_UNLIT ] ) {
var kmuExtension = extensions[ EXTENSIONS.KHR_MATERIALS_UNLIT ];
materialType = kmuExtension.getMaterialType();
pending.push( kmuExtension.extendParams( materialParams, materialDef, parser ) );
} else {
// Specification:
// https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#metallic-roughness-material
materialType = MeshStandardMaterial;
var metallicRoughness = materialDef.pbrMetallicRoughness || {};
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
if ( Array.isArray( metallicRoughness.baseColorFactor ) ) {
var array = metallicRoughness.baseColorFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( metallicRoughness.baseColorTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', metallicRoughness.baseColorTexture ) );
}
materialParams.metalness = metallicRoughness.metallicFactor !== undefined ? metallicRoughness.metallicFactor : 1.0;
materialParams.roughness = metallicRoughness.roughnessFactor !== undefined ? metallicRoughness.roughnessFactor : 1.0;
if ( metallicRoughness.metallicRoughnessTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'metalnessMap', metallicRoughness.metallicRoughnessTexture ) );
pending.push( parser.assignTexture( materialParams, 'roughnessMap', metallicRoughness.metallicRoughnessTexture ) );
}
}
if ( materialDef.doubleSided === true ) {
materialParams.side = DoubleSide;
}
var alphaMode = materialDef.alphaMode || ALPHA_MODES.OPAQUE;
if ( alphaMode === ALPHA_MODES.BLEND ) {
materialParams.transparent = true;
// See: https://github.com/mrdoob/three.js/issues/17706
materialParams.depthWrite = false;
} else {
materialParams.transparent = false;
if ( alphaMode === ALPHA_MODES.MASK ) {
materialParams.alphaTest = materialDef.alphaCutoff !== undefined ? materialDef.alphaCutoff : 0.5;
}
}
if ( materialDef.normalTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'normalMap', materialDef.normalTexture ) );
materialParams.normalScale = new Vector2( 1, 1 );
if ( materialDef.normalTexture.scale !== undefined ) {
materialParams.normalScale.set( materialDef.normalTexture.scale, materialDef.normalTexture.scale );
}
}
if ( materialDef.occlusionTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'aoMap', materialDef.occlusionTexture ) );
if ( materialDef.occlusionTexture.strength !== undefined ) {
materialParams.aoMapIntensity = materialDef.occlusionTexture.strength;
}
}
if ( materialDef.emissiveFactor !== undefined && materialType !== MeshBasicMaterial ) {
materialParams.emissive = new Color().fromArray( materialDef.emissiveFactor );
}
if ( materialDef.emissiveTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'emissiveMap', materialDef.emissiveTexture ) );
}
if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_CLEARCOAT ] ) {
var clearcoatExtension = extensions[ EXTENSIONS.KHR_MATERIALS_CLEARCOAT ];
materialType = clearcoatExtension.getMaterialType();
pending.push( clearcoatExtension.extendParams( materialParams, { extensions: materialExtensions }, parser ) );
}
return Promise.all( pending ).then( function () {
var material;
if ( materialType === GLTFMeshStandardSGMaterial ) {
material = extensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ].createMaterial( materialParams );
} else {
material = new materialType( materialParams );
}
if ( materialDef.name ) material.name = materialDef.name;
// baseColorTexture, emissiveTexture, and specularGlossinessTexture use sRGB encoding.
if ( material.map ) material.map.encoding = sRGBEncoding;
if ( material.emissiveMap ) material.emissiveMap.encoding = sRGBEncoding;
assignExtrasToUserData( material, materialDef );
if ( materialDef.extensions ) addUnknownExtensionsToUserData( extensions, material, materialDef );
return material;
} );
};
/**
* @param {BufferGeometry} geometry
* @param {GLTF.Primitive} primitiveDef
* @param {GLTFParser} parser
*/
function computeBounds( geometry, primitiveDef, parser ) {
var attributes = primitiveDef.attributes;
var box = new Box3();
if ( attributes.POSITION !== undefined ) {
var accessor = parser.json.accessors[ attributes.POSITION ];
var min = accessor.min;
var max = accessor.max;
// glTF requires 'min' and 'max', but VRM (which extends glTF) currently ignores that requirement.
if ( min !== undefined && max !== undefined ) {
box.set(
new Vector3( min[ 0 ], min[ 1 ], min[ 2 ] ),
new Vector3( max[ 0 ], max[ 1 ], max[ 2 ] ) );
} else {
console.warn( 'THREE.GLTFLoader: Missing min/max properties for accessor POSITION.' );
return;
}
} else {
return;
}
var targets = primitiveDef.targets;
if ( targets !== undefined ) {
var maxDisplacement = new Vector3();
var vector = new Vector3();
for ( var i = 0, il = targets.length; i < il; i ++ ) {
var target = targets[ i ];
if ( target.POSITION !== undefined ) {
var accessor = parser.json.accessors[ target.POSITION ];
var min = accessor.min;
var max = accessor.max;
// glTF requires 'min' and 'max', but VRM (which extends glTF) currently ignores that requirement.
if ( min !== undefined && max !== undefined ) {
// we need to get max of absolute components because target weight is [-1,1]
vector.setX( Math.max( Math.abs( min[ 0 ] ), Math.abs( max[ 0 ] ) ) );
vector.setY( Math.max( Math.abs( min[ 1 ] ), Math.abs( max[ 1 ] ) ) );
vector.setZ( Math.max( Math.abs( min[ 2 ] ), Math.abs( max[ 2 ] ) ) );
// Note: this assumes that the sum of all weights is at most 1. This isn't quite correct - it's more conservative
// to assume that each target can have a max weight of 1. However, for some use cases - notably, when morph targets
// are used to implement key-frame animations and as such only two are active at a time - this results in very large
// boxes. So for now we make a box that's sometimes a touch too small but is hopefully mostly of reasonable size.
maxDisplacement.max( vector );
} else {
console.warn( 'THREE.GLTFLoader: Missing min/max properties for accessor POSITION.' );
}
}
}
// As per comment above this box isn't conservative, but has a reasonable size for a very large number of morph targets.
box.expandByVector( maxDisplacement );
}
geometry.boundingBox = box;
var sphere = new Sphere();
box.getCenter( sphere.center );
sphere.radius = box.min.distanceTo( box.max ) / 2;
geometry.boundingSphere = sphere;
}
/**
* @param {BufferGeometry} geometry
* @param {GLTF.Primitive} primitiveDef
* @param {GLTFParser} parser
* @return {Promise<BufferGeometry>}
*/
function addPrimitiveAttributes( geometry, primitiveDef, parser ) {
var attributes = primitiveDef.attributes;
var pending = [];
function assignAttributeAccessor( accessorIndex, attributeName ) {
return parser.getDependency( 'accessor', accessorIndex )
.then( function ( accessor ) {
geometry.setAttribute( attributeName, accessor );
} );
}
for ( var gltfAttributeName in attributes ) {
var threeAttributeName = ATTRIBUTES[ gltfAttributeName ] || gltfAttributeName.toLowerCase();
// Skip attributes already provided by e.g. Draco extension.
if ( threeAttributeName in geometry.attributes ) continue;
pending.push( assignAttributeAccessor( attributes[ gltfAttributeName ], threeAttributeName ) );
}
if ( primitiveDef.indices !== undefined && ! geometry.index ) {
var accessor = parser.getDependency( 'accessor', primitiveDef.indices ).then( function ( accessor ) {
geometry.setIndex( accessor );
} );
pending.push( accessor );
}
assignExtrasToUserData( geometry, primitiveDef );
computeBounds( geometry, primitiveDef, parser );
return Promise.all( pending ).then( function () {
return primitiveDef.targets !== undefined
? addMorphTargets( geometry, primitiveDef.targets, parser )
: geometry;
} );
}
/**
* @param {BufferGeometry} geometry
* @param {Number} drawMode
* @return {BufferGeometry}
*/
function toTrianglesDrawMode( geometry, drawMode ) {
var index = geometry.getIndex();
// generate index if not present
if ( index === null ) {
var indices = [];
var position = geometry.getAttribute( 'position' );
if ( position !== undefined ) {
for ( var i = 0; i < position.count; i ++ ) {
indices.push( i );
}
geometry.setIndex( indices );
index = geometry.getIndex();
} else {
console.error( 'THREE.GLTFLoader.toTrianglesDrawMode(): Undefined position attribute. Processing not possible.' );
return geometry;
}
}
//
var numberOfTriangles = index.count - 2;
var newIndices = [];
if ( drawMode === TriangleFanDrawMode ) {
// gl.TRIANGLE_FAN
for ( var i = 1; i <= numberOfTriangles; i ++ ) {
newIndices.push( index.getX( 0 ) );
newIndices.push( index.getX( i ) );
newIndices.push( index.getX( i + 1 ) );
}
} else {
// gl.TRIANGLE_STRIP
for ( var i = 0; i < numberOfTriangles; i ++ ) {
if ( i % 2 === 0 ) {
newIndices.push( index.getX( i ) );
newIndices.push( index.getX( i + 1 ) );
newIndices.push( index.getX( i + 2 ) );
} else {
newIndices.push( index.getX( i + 2 ) );
newIndices.push( index.getX( i + 1 ) );
newIndices.push( index.getX( i ) );
}
}
}
if ( ( newIndices.length / 3 ) !== numberOfTriangles ) {
console.error( 'THREE.GLTFLoader.toTrianglesDrawMode(): Unable to generate correct amount of triangles.' );
}
// build final geometry
var newGeometry = geometry.clone();
newGeometry.setIndex( newIndices );
return newGeometry;
}
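// For illustration: a TRIANGLE_FAN index of [ a, b, c, d ] becomes [ a, b, c, a, c, d ], and a
// TRIANGLE_STRIP index of [ a, b, c, d ] becomes [ a, b, c, d, c, b ] (winding flipped on odd triangles).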
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#geometry
*
* Creates BufferGeometries from primitives.
*
* @param {Array<GLTF.Primitive>} primitives
* @return {Promise<Array<BufferGeometry>>}
*/
GLTFParser.prototype.loadGeometries = function ( primitives ) {
var parser = this;
var extensions = this.extensions;
var cache = this.primitiveCache;
function createDracoPrimitive( primitive ) {
return extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ]
.decodePrimitive( primitive, parser )
.then( function ( geometry ) {
return addPrimitiveAttributes( geometry, primitive, parser );
} );
}
var pending = [];
for ( var i = 0, il = primitives.length; i < il; i ++ ) {
var primitive = primitives[ i ];
var cacheKey = createPrimitiveKey( primitive );
// See if we've already created this geometry
var cached = cache[ cacheKey ];
if ( cached ) {
// Use the cached geometry if it exists
pending.push( cached.promise );
} else {
var geometryPromise;
if ( primitive.extensions && primitive.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ] ) {
// Use DRACO geometry if available
geometryPromise = createDracoPrimitive( primitive );
} else {
// Otherwise create a new geometry
geometryPromise = addPrimitiveAttributes( new BufferGeometry(), primitive, parser );
}
// Cache this geometry
cache[ cacheKey ] = { primitive: primitive, promise: geometryPromise };
pending.push( geometryPromise );
}
}
return Promise.all( pending );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#meshes
* @param {number} meshIndex
* @return {Promise<Group|Mesh|SkinnedMesh>}
*/
GLTFParser.prototype.loadMesh = function ( meshIndex ) {
var parser = this;
var json = this.json;
var meshDef = json.meshes[ meshIndex ];
var primitives = meshDef.primitives;
var pending = [];
for ( var i = 0, il = primitives.length; i < il; i ++ ) {
var material = primitives[ i ].material === undefined
? createDefaultMaterial( this.cache )
: this.getDependency( 'material', primitives[ i ].material );
pending.push( material );
}
pending.push( parser.loadGeometries( primitives ) );
return Promise.all( pending ).then( function ( results ) {
var materials = results.slice( 0, results.length - 1 );
var geometries = results[ results.length - 1 ];
var meshes = [];
for ( var i = 0, il = geometries.length; i < il; i ++ ) {
var geometry = geometries[ i ];
var primitive = primitives[ i ];
// 1. create Mesh
var mesh;
var material = materials[ i ];
if ( primitive.mode === WEBGL_CONSTANTS.TRIANGLES ||
primitive.mode === WEBGL_CONSTANTS.TRIANGLE_STRIP ||
primitive.mode === WEBGL_CONSTANTS.TRIANGLE_FAN ||
primitive.mode === undefined ) {
// .isSkinnedMesh isn't in glTF spec. See .markDefs()
mesh = meshDef.isSkinnedMesh === true
? new SkinnedMesh( geometry, material )
: new Mesh( geometry, material );
if ( mesh.isSkinnedMesh === true && ! mesh.geometry.attributes.skinWeight.normalized ) {
// we normalize floating point skin weight array to fix malformed assets (see #15319)
// it's important to skip this for non-float32 data since normalizeSkinWeights assumes non-normalized inputs
mesh.normalizeSkinWeights();
}
if ( primitive.mode === WEBGL_CONSTANTS.TRIANGLE_STRIP ) {
mesh.geometry = toTrianglesDrawMode( mesh.geometry, TriangleStripDrawMode );
} else if ( primitive.mode === WEBGL_CONSTANTS.TRIANGLE_FAN ) {
mesh.geometry = toTrianglesDrawMode( mesh.geometry, TriangleFanDrawMode );
}
} else if ( primitive.mode === WEBGL_CONSTANTS.LINES ) {
mesh = new LineSegments( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS.LINE_STRIP ) {
mesh = new Line( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS.LINE_LOOP ) {
mesh = new LineLoop( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS.POINTS ) {
mesh = new Points( geometry, material );
} else {
throw new Error( 'THREE.GLTFLoader: Primitive mode unsupported: ' + primitive.mode );
}
if ( Object.keys( mesh.geometry.morphAttributes ).length > 0 ) {
updateMorphTargets( mesh, meshDef );
}
mesh.name = meshDef.name || ( 'mesh_' + meshIndex );
if ( geometries.length > 1 ) mesh.name += '_' + i;
assignExtrasToUserData( mesh, meshDef );
parser.assignFinalMaterial( mesh );
meshes.push( mesh );
}
if ( meshes.length === 1 ) {
return meshes[ 0 ];
}
var group = new Group();
for ( var i = 0, il = meshes.length; i < il; i ++ ) {
group.add( meshes[ i ] );
}
return group;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#cameras
* @param {number} cameraIndex
* @return {Promise<THREE.Camera>}
*/
GLTFParser.prototype.loadCamera = function ( cameraIndex ) {
var camera;
var cameraDef = this.json.cameras[ cameraIndex ];
var params = cameraDef[ cameraDef.type ];
if ( ! params ) {
console.warn( 'THREE.GLTFLoader: Missing camera parameters.' );
return;
}
if ( cameraDef.type === 'perspective' ) {
camera = new PerspectiveCamera( MathUtils.radToDeg( params.yfov ), params.aspectRatio || 1, params.znear || 1, params.zfar || 2e6 );
} else if ( cameraDef.type === 'orthographic' ) {
camera = new OrthographicCamera( params.xmag / - 2, params.xmag / 2, params.ymag / 2, params.ymag / - 2, params.znear, params.zfar );
}
if ( cameraDef.name ) camera.name = cameraDef.name;
assignExtrasToUserData( camera, cameraDef );
return Promise.resolve( camera );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#skins
* @param {number} skinIndex
* @return {Promise<Object>}
*/
GLTFParser.prototype.loadSkin = function ( skinIndex ) {
var skinDef = this.json.skins[ skinIndex ];
var skinEntry = { joints: skinDef.joints };
if ( skinDef.inverseBindMatrices === undefined ) {
return Promise.resolve( skinEntry );
}
return this.getDependency( 'accessor', skinDef.inverseBindMatrices ).then( function ( accessor ) {
skinEntry.inverseBindMatrices = accessor;
return skinEntry;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#animations
* @param {number} animationIndex
* @return {Promise<AnimationClip>}
*/
GLTFParser.prototype.loadAnimation = function ( animationIndex ) {
var json = this.json;
var animationDef = json.animations[ animationIndex ];
var pendingNodes = [];
var pendingInputAccessors = [];
var pendingOutputAccessors = [];
var pendingSamplers = [];
var pendingTargets = [];
for ( var i = 0, il = animationDef.channels.length; i < il; i ++ ) {
var channel = animationDef.channels[ i ];
var sampler = animationDef.samplers[ channel.sampler ];
var target = channel.target;
var name = target.node !== undefined ? target.node : target.id; // NOTE: target.id is deprecated.
var input = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.input ] : sampler.input;
var output = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.output ] : sampler.output;
pendingNodes.push( this.getDependency( 'node', name ) );
pendingInputAccessors.push( this.getDependency( 'accessor', input ) );
pendingOutputAccessors.push( this.getDependency( 'accessor', output ) );
pendingSamplers.push( sampler );
pendingTargets.push( target );
}
return Promise.all( [
Promise.all( pendingNodes ),
Promise.all( pendingInputAccessors ),
Promise.all( pendingOutputAccessors ),
Promise.all( pendingSamplers ),
Promise.all( pendingTargets )
] ).then( function ( dependencies ) {
var nodes = dependencies[ 0 ];
var inputAccessors = dependencies[ 1 ];
var outputAccessors = dependencies[ 2 ];
var samplers = dependencies[ 3 ];
var targets = dependencies[ 4 ];
var tracks = [];
for ( var i = 0, il = nodes.length; i < il; i ++ ) {
var node = nodes[ i ];
var inputAccessor = inputAccessors[ i ];
var outputAccessor = outputAccessors[ i ];
var sampler = samplers[ i ];
var target = targets[ i ];
if ( node === undefined ) continue;
node.updateMatrix();
node.matrixAutoUpdate = true;
var TypedKeyframeTrack;
switch ( PATH_PROPERTIES[ target.path ] ) {
case PATH_PROPERTIES.weights:
TypedKeyframeTrack = NumberKeyframeTrack;
break;
case PATH_PROPERTIES.rotation:
TypedKeyframeTrack = QuaternionKeyframeTrack;
break;
case PATH_PROPERTIES.position:
case PATH_PROPERTIES.scale:
default:
TypedKeyframeTrack = VectorKeyframeTrack;
break;
}
var targetName = node.name ? node.name : node.uuid;
var interpolation = sampler.interpolation !== undefined ? INTERPOLATION[ sampler.interpolation ] : InterpolateLinear;
var targetNames = [];
if ( PATH_PROPERTIES[ target.path ] === PATH_PROPERTIES.weights ) {
// Node may be a Group (glTF mesh with several primitives) or a Mesh.
node.traverse( function ( object ) {
if ( object.isMesh === true && object.morphTargetInfluences ) {
targetNames.push( object.name ? object.name : object.uuid );
}
} );
} else {
targetNames.push( targetName );
}
var outputArray = outputAccessor.array;
if ( outputAccessor.normalized ) {
var scale;
if ( outputArray.constructor === Int8Array ) {
scale = 1 / 127;
} else if ( outputArray.constructor === Uint8Array ) {
scale = 1 / 255;
} else if ( outputArray.constructor === Int16Array ) {
scale = 1 / 32767;
} else if ( outputArray.constructor === Uint16Array ) {
scale = 1 / 65535;
} else {
throw new Error( 'THREE.GLTFLoader: Unsupported output accessor component type.' );
}
var scaled = new Float32Array( outputArray.length );
for ( var j = 0, jl = outputArray.length; j < jl; j ++ ) {
scaled[ j ] = outputArray[ j ] * scale;
}
outputArray = scaled;
}
for ( var j = 0, jl = targetNames.length; j < jl; j ++ ) {
var track = new TypedKeyframeTrack(
targetNames[ j ] + '.' + PATH_PROPERTIES[ target.path ],
inputAccessor.array,
outputArray,
interpolation
);
// Override interpolation with custom factory method.
if ( sampler.interpolation === 'CUBICSPLINE' ) {
track.createInterpolant = function InterpolantFactoryMethodGLTFCubicSpline( result ) {
// A CUBICSPLINE keyframe in glTF has three output values for each input value,
// representing inTangent, splineVertex, and outTangent. As a result, track.getValueSize()
// must be divided by three to get the interpolant's sampleSize argument.
return new GLTFCubicSplineInterpolant( this.times, this.values, this.getValueSize() / 3, result );
};
// Mark as CUBICSPLINE. `track.getInterpolation()` doesn't support custom interpolants.
track.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline = true;
}
tracks.push( track );
}
}
var name = animationDef.name ? animationDef.name : 'animation_' + animationIndex;
return new AnimationClip( name, undefined, tracks );
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#nodes-and-hierarchy
* @param {number} nodeIndex
* @return {Promise<Object3D>}
*/
GLTFParser.prototype.loadNode = function ( nodeIndex ) {
var json = this.json;
var extensions = this.extensions;
var parser = this;
var meshReferences = json.meshReferences;
var meshUses = json.meshUses;
var nodeDef = json.nodes[ nodeIndex ];
return ( function () {
var pending = [];
if ( nodeDef.mesh !== undefined ) {
pending.push( parser.getDependency( 'mesh', nodeDef.mesh ).then( function ( mesh ) {
var node;
if ( meshReferences[ nodeDef.mesh ] > 1 ) {
var instanceNum = meshUses[ nodeDef.mesh ] ++;
node = mesh.clone();
node.name += '_instance_' + instanceNum;
} else {
node = mesh;
}
// if weights are provided on the node, override weights on the mesh.
if ( nodeDef.weights !== undefined ) {
node.traverse( function ( o ) {
if ( ! o.isMesh ) return;
for ( var i = 0, il = nodeDef.weights.length; i < il; i ++ ) {
o.morphTargetInfluences[ i ] = nodeDef.weights[ i ];
}
} );
}
return node;
} ) );
}
if ( nodeDef.camera !== undefined ) {
pending.push( parser.getDependency( 'camera', nodeDef.camera ) );
}
if ( nodeDef.extensions
&& nodeDef.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ]
&& nodeDef.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ].light !== undefined ) {
pending.push( parser.getDependency( 'light', nodeDef.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ].light ) );
}
return Promise.all( pending );
}() ).then( function ( objects ) {
var node;
// .isBone isn't in glTF spec. See .markDefs
if ( nodeDef.isBone === true ) {
node = new Bone();
} else if ( objects.length > 1 ) {
node = new Group();
} else if ( objects.length === 1 ) {
node = objects[ 0 ];
} else {
node = new Object3D();
}
if ( node !== objects[ 0 ] ) {
for ( var i = 0, il = objects.length; i < il; i ++ ) {
node.add( objects[ i ] );
}
}
if ( nodeDef.name ) {
node.userData.name = nodeDef.name;
node.name = PropertyBinding.sanitizeNodeName( nodeDef.name );
}
assignExtrasToUserData( node, nodeDef );
if ( nodeDef.extensions ) addUnknownExtensionsToUserData( extensions, node, nodeDef );
if ( nodeDef.matrix !== undefined ) {
var matrix = new Matrix4();
matrix.fromArray( nodeDef.matrix );
node.applyMatrix4( matrix );
} else {
if ( nodeDef.translation !== undefined ) {
node.position.fromArray( nodeDef.translation );
}
if ( nodeDef.rotation !== undefined ) {
node.quaternion.fromArray( nodeDef.rotation );
}
if ( nodeDef.scale !== undefined ) {
node.scale.fromArray( nodeDef.scale );
}
}
return node;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#scenes
* @param {number} sceneIndex
* @return {Promise<Group>}
*/
GLTFParser.prototype.loadScene = function () {
// scene node hierarchy builder
function buildNodeHierarchy( nodeId, parentObject, json, parser ) {
var nodeDef = json.nodes[ nodeId ];
return parser.getDependency( 'node', nodeId ).then( function ( node ) {
if ( nodeDef.skin === undefined ) return node;
// build skeleton here as well
var skinEntry;
return parser.getDependency( 'skin', nodeDef.skin ).then( function ( skin ) {
skinEntry = skin;
var pendingJoints = [];
for ( var i = 0, il = skinEntry.joints.length; i < il; i ++ ) {
pendingJoints.push( parser.getDependency( 'node', skinEntry.joints[ i ] ) );
}
return Promise.all( pendingJoints );
} ).then( function ( jointNodes ) {
node.traverse( function ( mesh ) {
if ( ! mesh.isMesh ) return;
var bones = [];
var boneInverses = [];
for ( var j = 0, jl = jointNodes.length; j < jl; j ++ ) {
var jointNode = jointNodes[ j ];
if ( jointNode ) {
bones.push( jointNode );
var mat = new Matrix4();
if ( skinEntry.inverseBindMatrices !== undefined ) {
mat.fromArray( skinEntry.inverseBindMatrices.array, j * 16 );
}
boneInverses.push( mat );
} else {
console.warn( 'THREE.GLTFLoader: Joint "%s" could not be found.', skinEntry.joints[ j ] );
}
}
mesh.bind( new Skeleton( bones, boneInverses ), mesh.matrixWorld );
} );
return node;
} );
} ).then( function ( node ) {
// build node hierarchy
parentObject.add( node );
var pending = [];
if ( nodeDef.children ) {
var children = nodeDef.children;
for ( var i = 0, il = children.length; i < il; i ++ ) {
var child = children[ i ];
pending.push( buildNodeHierarchy( child, node, json, parser ) );
}
}
return Promise.all( pending );
} );
}
return function loadScene( sceneIndex ) {
var json = this.json;
var extensions = this.extensions;
var sceneDef = this.json.scenes[ sceneIndex ];
var parser = this;
// Loader returns Group, not Scene.
// See: https://github.com/mrdoob/three.js/issues/18342#issuecomment-578981172
var scene = new Group();
if ( sceneDef.name ) scene.name = sceneDef.name;
assignExtrasToUserData( scene, sceneDef );
if ( sceneDef.extensions ) addUnknownExtensionsToUserData( extensions, scene, sceneDef );
var nodeIds = sceneDef.nodes || [];
var pending = [];
for ( var i = 0, il = nodeIds.length; i < il; i ++ ) {
pending.push( buildNodeHierarchy( nodeIds[ i ], scene, json, parser ) );
}
return Promise.all( pending ).then( function () {
return scene;
} );
};
}();
return GLTFLoader;
} )()
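A minimal usage sketch for the loader above (the asset path, the error handler and the enclosing scene are assumptions rather than part of the example): when a glTF sampler leaves its filters unspecified, textures loaded this way fall back to LinearFilter magnification and LinearMipmapLinearFilter minification, which the snippet checks after loading.
// Usage sketch only. 'models/scene.gltf' is a hypothetical path, `scene` is assumed to exist,
// and GLTFLoader / LinearFilter are assumed to be imported as in the example above.
var loader = new GLTFLoader();
loader.load( 'models/scene.gltf', function ( gltf ) {
	gltf.scene.traverse( function ( object ) {
		if ( object.isMesh && object.material.map ) {
			// true when the sampler omitted magFilter or set it to LINEAR (9729)
			console.log( object.material.map.magFilter === LinearFilter );
		}
	} );
	scene.add( gltf.scene );
}, undefined, function ( error ) {
	console.error( 'THREE.GLTFLoader:', error );
} );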
Example #15
Source File: Water.js From canvas with Apache License 2.0 | 4 votes |
Water = function ( geometry, options ) {
Mesh.call( this, geometry );
var scope = this;
options = options || {};
var textureWidth = options.textureWidth !== undefined ? options.textureWidth : 512;
var textureHeight = options.textureHeight !== undefined ? options.textureHeight : 512;
var clipBias = options.clipBias !== undefined ? options.clipBias : 0.0;
var alpha = options.alpha !== undefined ? options.alpha : 1.0;
var time = options.time !== undefined ? options.time : 0.0;
var normalSampler = options.waterNormals !== undefined ? options.waterNormals : null;
var sunDirection = options.sunDirection !== undefined ? options.sunDirection : new Vector3( 0.70707, 0.70707, 0.0 );
var sunColor = new Color( options.sunColor !== undefined ? options.sunColor : 0xffffff );
var waterColor = new Color( options.waterColor !== undefined ? options.waterColor : 0x7F7F7F );
var eye = options.eye !== undefined ? options.eye : new Vector3( 0, 0, 0 );
var distortionScale = options.distortionScale !== undefined ? options.distortionScale : 20.0;
var side = options.side !== undefined ? options.side : FrontSide;
var fog = options.fog !== undefined ? options.fog : false;
//
var mirrorPlane = new Plane();
var normal = new Vector3();
var mirrorWorldPosition = new Vector3();
var cameraWorldPosition = new Vector3();
var rotationMatrix = new Matrix4();
var lookAtPosition = new Vector3( 0, 0, - 1 );
var clipPlane = new Vector4();
var view = new Vector3();
var target = new Vector3();
var q = new Vector4();
var textureMatrix = new Matrix4();
var mirrorCamera = new PerspectiveCamera();
var parameters = {
minFilter: LinearFilter,
magFilter: LinearFilter,
format: RGBFormat,
stencilBuffer: false
};
var renderTarget = new WebGLRenderTarget( textureWidth, textureHeight, parameters );
if ( ! MathUtils.isPowerOfTwo( textureWidth ) || ! MathUtils.isPowerOfTwo( textureHeight ) ) {
renderTarget.texture.generateMipmaps = false;
}
var mirrorShader = {
uniforms: UniformsUtils.merge( [
UniformsLib[ 'fog' ],
UniformsLib[ 'lights' ],
{
"normalSampler": { value: null },
"mirrorSampler": { value: null },
"alpha": { value: 1.0 },
"time": { value: 0.0 },
"size": { value: 1.0 },
"distortionScale": { value: 20.0 },
"textureMatrix": { value: new Matrix4() },
"sunColor": { value: new Color( 0x7F7F7F ) },
"sunDirection": { value: new Vector3( 0.70707, 0.70707, 0 ) },
"eye": { value: new Vector3() },
"waterColor": { value: new Color( 0x555555 ) }
}
] ),
vertexShader: [
'uniform mat4 textureMatrix;',
'uniform float time;',
'varying vec4 mirrorCoord;',
'varying vec4 worldPosition;',
'#include <common>',
'#include <fog_pars_vertex>',
'#include <shadowmap_pars_vertex>',
'#include <logdepthbuf_pars_vertex>',
'void main() {',
' mirrorCoord = modelMatrix * vec4( position, 1.0 );',
' worldPosition = mirrorCoord.xyzw;',
' mirrorCoord = textureMatrix * mirrorCoord;',
' vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );',
' gl_Position = projectionMatrix * mvPosition;',
'#include <logdepthbuf_vertex>',
'#include <fog_vertex>',
'#include <shadowmap_vertex>',
'}'
].join( '\n' ),
fragmentShader: [
'uniform sampler2D mirrorSampler;',
'uniform float alpha;',
'uniform float time;',
'uniform float size;',
'uniform float distortionScale;',
'uniform sampler2D normalSampler;',
'uniform vec3 sunColor;',
'uniform vec3 sunDirection;',
'uniform vec3 eye;',
'uniform vec3 waterColor;',
'varying vec4 mirrorCoord;',
'varying vec4 worldPosition;',
'vec4 getNoise( vec2 uv ) {',
' vec2 uv0 = ( uv / 103.0 ) + vec2(time / 17.0, time / 29.0);',
' vec2 uv1 = uv / 107.0-vec2( time / -19.0, time / 31.0 );',
' vec2 uv2 = uv / vec2( 8907.0, 9803.0 ) + vec2( time / 101.0, time / 97.0 );',
' vec2 uv3 = uv / vec2( 1091.0, 1027.0 ) - vec2( time / 109.0, time / -113.0 );',
' vec4 noise = texture2D( normalSampler, uv0 ) +',
' texture2D( normalSampler, uv1 ) +',
' texture2D( normalSampler, uv2 ) +',
' texture2D( normalSampler, uv3 );',
' return noise * 0.5 - 1.0;',
'}',
'void sunLight( const vec3 surfaceNormal, const vec3 eyeDirection, float shiny, float spec, float diffuse, inout vec3 diffuseColor, inout vec3 specularColor ) {',
' vec3 reflection = normalize( reflect( -sunDirection, surfaceNormal ) );',
' float direction = max( 0.0, dot( eyeDirection, reflection ) );',
' specularColor += pow( direction, shiny ) * sunColor * spec;',
' diffuseColor += max( dot( sunDirection, surfaceNormal ), 0.0 ) * sunColor * diffuse;',
'}',
'#include <common>',
'#include <packing>',
'#include <bsdfs>',
'#include <fog_pars_fragment>',
'#include <logdepthbuf_pars_fragment>',
'#include <lights_pars_begin>',
'#include <shadowmap_pars_fragment>',
'#include <shadowmask_pars_fragment>',
'void main() {',
'#include <logdepthbuf_fragment>',
' vec4 noise = getNoise( worldPosition.xz * size );',
' vec3 surfaceNormal = normalize( noise.xzy * vec3( 1.5, 1.0, 1.5 ) );',
' vec3 diffuseLight = vec3(0.0);',
' vec3 specularLight = vec3(0.0);',
' vec3 worldToEye = eye-worldPosition.xyz;',
' vec3 eyeDirection = normalize( worldToEye );',
' sunLight( surfaceNormal, eyeDirection, 100.0, 2.0, 0.5, diffuseLight, specularLight );',
' float distance = length(worldToEye);',
' vec2 distortion = surfaceNormal.xz * ( 0.001 + 1.0 / distance ) * distortionScale;',
' vec3 reflectionSample = vec3( texture2D( mirrorSampler, mirrorCoord.xy / mirrorCoord.w + distortion ) );',
' float theta = max( dot( eyeDirection, surfaceNormal ), 0.0 );',
' float rf0 = 0.3;',
' float reflectance = rf0 + ( 1.0 - rf0 ) * pow( ( 1.0 - theta ), 5.0 );',
' vec3 scatter = max( 0.0, dot( surfaceNormal, eyeDirection ) ) * waterColor;',
' vec3 albedo = mix( ( sunColor * diffuseLight * 0.3 + scatter ) * getShadowMask(), ( vec3( 0.1 ) + reflectionSample * 0.9 + reflectionSample * specularLight ), reflectance);',
' vec3 outgoingLight = albedo;',
' gl_FragColor = vec4( outgoingLight, alpha );',
'#include <tonemapping_fragment>',
'#include <fog_fragment>',
'}'
].join( '\n' )
};
var material = new ShaderMaterial( {
fragmentShader: mirrorShader.fragmentShader,
vertexShader: mirrorShader.vertexShader,
uniforms: UniformsUtils.clone( mirrorShader.uniforms ),
lights: true,
side: side,
fog: fog
} );
material.uniforms[ "mirrorSampler" ].value = renderTarget.texture;
material.uniforms[ "textureMatrix" ].value = textureMatrix;
material.uniforms[ "alpha" ].value = alpha;
material.uniforms[ "time" ].value = time;
material.uniforms[ "normalSampler" ].value = normalSampler;
material.uniforms[ "sunColor" ].value = sunColor;
material.uniforms[ "waterColor" ].value = waterColor;
material.uniforms[ "sunDirection" ].value = sunDirection;
material.uniforms[ "distortionScale" ].value = distortionScale;
material.uniforms[ "eye" ].value = eye;
scope.material = material;
scope.onBeforeRender = function ( renderer, scene, camera ) {
mirrorWorldPosition.setFromMatrixPosition( scope.matrixWorld );
cameraWorldPosition.setFromMatrixPosition( camera.matrixWorld );
rotationMatrix.extractRotation( scope.matrixWorld );
normal.set( 0, 0, 1 );
normal.applyMatrix4( rotationMatrix );
view.subVectors( mirrorWorldPosition, cameraWorldPosition );
// Avoid rendering when mirror is facing away
if ( view.dot( normal ) > 0 ) return;
view.reflect( normal ).negate();
view.add( mirrorWorldPosition );
rotationMatrix.extractRotation( camera.matrixWorld );
lookAtPosition.set( 0, 0, - 1 );
lookAtPosition.applyMatrix4( rotationMatrix );
lookAtPosition.add( cameraWorldPosition );
target.subVectors( mirrorWorldPosition, lookAtPosition );
target.reflect( normal ).negate();
target.add( mirrorWorldPosition );
mirrorCamera.position.copy( view );
mirrorCamera.up.set( 0, 1, 0 );
mirrorCamera.up.applyMatrix4( rotationMatrix );
mirrorCamera.up.reflect( normal );
mirrorCamera.lookAt( target );
mirrorCamera.far = camera.far; // Used in WebGLBackground
mirrorCamera.updateMatrixWorld();
mirrorCamera.projectionMatrix.copy( camera.projectionMatrix );
// Update the texture matrix
textureMatrix.set(
0.5, 0.0, 0.0, 0.5,
0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 0.5, 0.5,
0.0, 0.0, 0.0, 1.0
);
textureMatrix.multiply( mirrorCamera.projectionMatrix );
textureMatrix.multiply( mirrorCamera.matrixWorldInverse );
// Now update projection matrix with new clip plane, implementing code from: http://www.terathon.com/code/oblique.html
// Paper explaining this technique: http://www.terathon.com/lengyel/Lengyel-Oblique.pdf
mirrorPlane.setFromNormalAndCoplanarPoint( normal, mirrorWorldPosition );
mirrorPlane.applyMatrix4( mirrorCamera.matrixWorldInverse );
clipPlane.set( mirrorPlane.normal.x, mirrorPlane.normal.y, mirrorPlane.normal.z, mirrorPlane.constant );
var projectionMatrix = mirrorCamera.projectionMatrix;
q.x = ( Math.sign( clipPlane.x ) + projectionMatrix.elements[ 8 ] ) / projectionMatrix.elements[ 0 ];
q.y = ( Math.sign( clipPlane.y ) + projectionMatrix.elements[ 9 ] ) / projectionMatrix.elements[ 5 ];
q.z = - 1.0;
q.w = ( 1.0 + projectionMatrix.elements[ 10 ] ) / projectionMatrix.elements[ 14 ];
// Calculate the scaled plane vector
clipPlane.multiplyScalar( 2.0 / clipPlane.dot( q ) );
// Replacing the third row of the projection matrix
projectionMatrix.elements[ 2 ] = clipPlane.x;
projectionMatrix.elements[ 6 ] = clipPlane.y;
projectionMatrix.elements[ 10 ] = clipPlane.z + 1.0 - clipBias;
projectionMatrix.elements[ 14 ] = clipPlane.w;
eye.setFromMatrixPosition( camera.matrixWorld );
//
var currentRenderTarget = renderer.getRenderTarget();
var currentXrEnabled = renderer.xr.enabled;
var currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;
scope.visible = false;
renderer.xr.enabled = false; // Avoid camera modification and recursion
renderer.shadowMap.autoUpdate = false; // Avoid re-computing shadows
renderer.setRenderTarget( renderTarget );
renderer.state.buffers.depth.setMask( true ); // make sure the depth buffer is writable so it can be properly cleared, see #18897
if ( renderer.autoClear === false ) renderer.clear();
renderer.render( scene, mirrorCamera );
scope.visible = true;
renderer.xr.enabled = currentXrEnabled;
renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;
renderer.setRenderTarget( currentRenderTarget );
// Restore viewport
var viewport = camera.viewport;
if ( viewport !== undefined ) {
renderer.state.viewport( viewport );
}
};
}
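The reflection pass above renders into a render target that is created earlier in the source file and only referenced here. Below is a minimal sketch of how such a reflector target is commonly configured with LinearFilter; the textureWidth/textureHeight values are hypothetical and the format follows the older three.js revision used elsewhere on this page.
import { WebGLRenderTarget, LinearFilter, RGBFormat } from 'three';

const textureWidth = 512;   // hypothetical resolution
const textureHeight = 512;

const renderTarget = new WebGLRenderTarget( textureWidth, textureHeight, {
	minFilter: LinearFilter,
	magFilter: LinearFilter,
	format: RGBFormat,
	stencilBuffer: false
} );

// The target's texture then backs the shader uniform used above:
// material.uniforms[ 'mirrorSampler' ].value = renderTarget.texture;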
Example #16
Source File: OutlinePass.js From Computer-Graphics with MIT License | 4 votes |
constructor( resolution, scene, camera, selectedObjects ) {
super();
this.renderScene = scene;
this.renderCamera = camera;
this.selectedObjects = selectedObjects !== undefined ? selectedObjects : [];
this.visibleEdgeColor = new Color( 1, 1, 1 );
this.hiddenEdgeColor = new Color( 0.1, 0.04, 0.02 );
this.edgeGlow = 0.0;
this.usePatternTexture = false;
this.edgeThickness = 1.0;
this.edgeStrength = 3.0;
this.downSampleRatio = 2;
this.pulsePeriod = 0;
this._visibilityCache = new Map();
this.resolution = ( resolution !== undefined ) ? new Vector2( resolution.x, resolution.y ) : new Vector2( 256, 256 );
const pars = { minFilter: LinearFilter, magFilter: LinearFilter, format: RGBAFormat };
const resx = Math.round( this.resolution.x / this.downSampleRatio );
const resy = Math.round( this.resolution.y / this.downSampleRatio );
this.renderTargetMaskBuffer = new WebGLRenderTarget( this.resolution.x, this.resolution.y, pars );
this.renderTargetMaskBuffer.texture.name = 'OutlinePass.mask';
this.renderTargetMaskBuffer.texture.generateMipmaps = false;
this.depthMaterial = new MeshDepthMaterial();
this.depthMaterial.side = DoubleSide;
this.depthMaterial.depthPacking = RGBADepthPacking;
this.depthMaterial.blending = NoBlending;
this.prepareMaskMaterial = this.getPrepareMaskMaterial();
this.prepareMaskMaterial.side = DoubleSide;
this.prepareMaskMaterial.fragmentShader = replaceDepthToViewZ( this.prepareMaskMaterial.fragmentShader, this.renderCamera );
this.renderTargetDepthBuffer = new WebGLRenderTarget( this.resolution.x, this.resolution.y, pars );
this.renderTargetDepthBuffer.texture.name = 'OutlinePass.depth';
this.renderTargetDepthBuffer.texture.generateMipmaps = false;
this.renderTargetMaskDownSampleBuffer = new WebGLRenderTarget( resx, resy, pars );
this.renderTargetMaskDownSampleBuffer.texture.name = 'OutlinePass.depthDownSample';
this.renderTargetMaskDownSampleBuffer.texture.generateMipmaps = false;
this.renderTargetBlurBuffer1 = new WebGLRenderTarget( resx, resy, pars );
this.renderTargetBlurBuffer1.texture.name = 'OutlinePass.blur1';
this.renderTargetBlurBuffer1.texture.generateMipmaps = false;
this.renderTargetBlurBuffer2 = new WebGLRenderTarget( Math.round( resx / 2 ), Math.round( resy / 2 ), pars );
this.renderTargetBlurBuffer2.texture.name = 'OutlinePass.blur2';
this.renderTargetBlurBuffer2.texture.generateMipmaps = false;
this.edgeDetectionMaterial = this.getEdgeDetectionMaterial();
this.renderTargetEdgeBuffer1 = new WebGLRenderTarget( resx, resy, pars );
this.renderTargetEdgeBuffer1.texture.name = 'OutlinePass.edge1';
this.renderTargetEdgeBuffer1.texture.generateMipmaps = false;
this.renderTargetEdgeBuffer2 = new WebGLRenderTarget( Math.round( resx / 2 ), Math.round( resy / 2 ), pars );
this.renderTargetEdgeBuffer2.texture.name = 'OutlinePass.edge2';
this.renderTargetEdgeBuffer2.texture.generateMipmaps = false;
const MAX_EDGE_THICKNESS = 4;
const MAX_EDGE_GLOW = 4;
this.separableBlurMaterial1 = this.getSeperableBlurMaterial( MAX_EDGE_THICKNESS );
this.separableBlurMaterial1.uniforms[ 'texSize' ].value.set( resx, resy );
this.separableBlurMaterial1.uniforms[ 'kernelRadius' ].value = 1;
this.separableBlurMaterial2 = this.getSeperableBlurMaterial( MAX_EDGE_GLOW );
this.separableBlurMaterial2.uniforms[ 'texSize' ].value.set( Math.round( resx / 2 ), Math.round( resy / 2 ) );
this.separableBlurMaterial2.uniforms[ 'kernelRadius' ].value = MAX_EDGE_GLOW;
// Overlay material
this.overlayMaterial = this.getOverlayMaterial();
// copy material
if ( CopyShader === undefined ) console.error( 'THREE.OutlinePass relies on CopyShader' );
const copyShader = CopyShader;
this.copyUniforms = UniformsUtils.clone( copyShader.uniforms );
this.copyUniforms[ 'opacity' ].value = 1.0;
this.materialCopy = new ShaderMaterial( {
uniforms: this.copyUniforms,
vertexShader: copyShader.vertexShader,
fragmentShader: copyShader.fragmentShader,
blending: NoBlending,
depthTest: false,
depthWrite: false,
transparent: true
} );
this.enabled = true;
this.needsSwap = false;
this._oldClearColor = new Color();
this.oldClearAlpha = 1;
this.fsQuad = new FullScreenQuad( null );
this.tempPulseColor1 = new Color();
this.tempPulseColor2 = new Color();
this.textureMatrix = new Matrix4();
function replaceDepthToViewZ( string, camera ) {
const type = camera.isPerspectiveCamera ? 'perspective' : 'orthographic';
return string.replace( /DEPTH_TO_VIEW_Z/g, type + 'DepthToViewZ' );
}
}
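A hedged usage sketch, not taken from the Computer-Graphics project, showing how this OutlinePass is typically wired into an EffectComposer chain. The examples/jsm import paths are assumptions and may differ per build.
import { WebGLRenderer, Scene, PerspectiveCamera, Vector2 } from 'three';
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
import { OutlinePass } from 'three/examples/jsm/postprocessing/OutlinePass.js';

const renderer = new WebGLRenderer();
const scene = new Scene();
const camera = new PerspectiveCamera( 60, window.innerWidth / window.innerHeight, 0.1, 100 );

const composer = new EffectComposer( renderer );
composer.addPass( new RenderPass( scene, camera ) );

// resolution, scene, camera (selectedObjects defaults to [], matching the constructor above)
const outlinePass = new OutlinePass( new Vector2( window.innerWidth, window.innerHeight ), scene, camera );
outlinePass.selectedObjects = [ /* meshes to outline */ ];
outlinePass.edgeStrength = 3.0; // same default the constructor sets
composer.addPass( outlinePass );

composer.render();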
Example #17
Source File: SAOPass.js From Computer-Graphics with MIT License | 4 votes |
constructor( scene, camera, useDepthTexture = false, useNormals = false, resolution = new Vector2( 256, 256 ) ) {
super();
this.scene = scene;
this.camera = camera;
this.clear = true;
this.needsSwap = false;
this.supportsDepthTextureExtension = useDepthTexture;
this.supportsNormalTexture = useNormals;
this.originalClearColor = new Color();
this._oldClearColor = new Color();
this.oldClearAlpha = 1;
this.params = {
output: 0,
saoBias: 0.5,
saoIntensity: 0.18,
saoScale: 1,
saoKernelRadius: 100,
saoMinResolution: 0,
saoBlur: true,
saoBlurRadius: 8,
saoBlurStdDev: 4,
saoBlurDepthCutoff: 0.01
};
this.resolution = new Vector2( resolution.x, resolution.y );
this.saoRenderTarget = new WebGLRenderTarget( this.resolution.x, this.resolution.y, {
minFilter: LinearFilter,
magFilter: LinearFilter,
format: RGBAFormat
} );
this.blurIntermediateRenderTarget = this.saoRenderTarget.clone();
this.beautyRenderTarget = this.saoRenderTarget.clone();
this.normalRenderTarget = new WebGLRenderTarget( this.resolution.x, this.resolution.y, {
minFilter: NearestFilter,
magFilter: NearestFilter,
format: RGBAFormat
} );
this.depthRenderTarget = this.normalRenderTarget.clone();
let depthTexture;
if ( this.supportsDepthTextureExtension ) {
depthTexture = new DepthTexture();
depthTexture.type = UnsignedShortType;
this.beautyRenderTarget.depthTexture = depthTexture;
this.beautyRenderTarget.depthBuffer = true;
}
this.depthMaterial = new MeshDepthMaterial();
this.depthMaterial.depthPacking = RGBADepthPacking;
this.depthMaterial.blending = NoBlending;
this.normalMaterial = new MeshNormalMaterial();
this.normalMaterial.blending = NoBlending;
if ( SAOShader === undefined ) {
console.error( 'THREE.SAOPass relies on SAOShader' );
}
this.saoMaterial = new ShaderMaterial( {
defines: Object.assign( {}, SAOShader.defines ),
fragmentShader: SAOShader.fragmentShader,
vertexShader: SAOShader.vertexShader,
uniforms: UniformsUtils.clone( SAOShader.uniforms )
} );
this.saoMaterial.extensions.derivatives = true;
this.saoMaterial.defines[ 'DEPTH_PACKING' ] = this.supportsDepthTextureExtension ? 0 : 1;
this.saoMaterial.defines[ 'NORMAL_TEXTURE' ] = this.supportsNormalTexture ? 1 : 0;
this.saoMaterial.defines[ 'PERSPECTIVE_CAMERA' ] = this.camera.isPerspectiveCamera ? 1 : 0;
this.saoMaterial.uniforms[ 'tDepth' ].value = ( this.supportsDepthTextureExtension ) ? depthTexture : this.depthRenderTarget.texture;
this.saoMaterial.uniforms[ 'tNormal' ].value = this.normalRenderTarget.texture;
this.saoMaterial.uniforms[ 'size' ].value.set( this.resolution.x, this.resolution.y );
this.saoMaterial.uniforms[ 'cameraInverseProjectionMatrix' ].value.copy( this.camera.projectionMatrixInverse );
this.saoMaterial.uniforms[ 'cameraProjectionMatrix' ].value = this.camera.projectionMatrix;
this.saoMaterial.blending = NoBlending;
if ( DepthLimitedBlurShader === undefined ) {
console.error( 'THREE.SAOPass relies on DepthLimitedBlurShader' );
}
this.vBlurMaterial = new ShaderMaterial( {
uniforms: UniformsUtils.clone( DepthLimitedBlurShader.uniforms ),
defines: Object.assign( {}, DepthLimitedBlurShader.defines ),
vertexShader: DepthLimitedBlurShader.vertexShader,
fragmentShader: DepthLimitedBlurShader.fragmentShader
} );
this.vBlurMaterial.defines[ 'DEPTH_PACKING' ] = this.supportsDepthTextureExtension ? 0 : 1;
this.vBlurMaterial.defines[ 'PERSPECTIVE_CAMERA' ] = this.camera.isPerspectiveCamera ? 1 : 0;
this.vBlurMaterial.uniforms[ 'tDiffuse' ].value = this.saoRenderTarget.texture;
this.vBlurMaterial.uniforms[ 'tDepth' ].value = ( this.supportsDepthTextureExtension ) ? depthTexture : this.depthRenderTarget.texture;
this.vBlurMaterial.uniforms[ 'size' ].value.set( this.resolution.x, this.resolution.y );
this.vBlurMaterial.blending = NoBlending;
this.hBlurMaterial = new ShaderMaterial( {
uniforms: UniformsUtils.clone( DepthLimitedBlurShader.uniforms ),
defines: Object.assign( {}, DepthLimitedBlurShader.defines ),
vertexShader: DepthLimitedBlurShader.vertexShader,
fragmentShader: DepthLimitedBlurShader.fragmentShader
} );
this.hBlurMaterial.defines[ 'DEPTH_PACKING' ] = this.supportsDepthTextureExtension ? 0 : 1;
this.hBlurMaterial.defines[ 'PERSPECTIVE_CAMERA' ] = this.camera.isPerspectiveCamera ? 1 : 0;
this.hBlurMaterial.uniforms[ 'tDiffuse' ].value = this.blurIntermediateRenderTarget.texture;
this.hBlurMaterial.uniforms[ 'tDepth' ].value = ( this.supportsDepthTextureExtension ) ? depthTexture : this.depthRenderTarget.texture;
this.hBlurMaterial.uniforms[ 'size' ].value.set( this.resolution.x, this.resolution.y );
this.hBlurMaterial.blending = NoBlending;
if ( CopyShader === undefined ) {
console.error( 'THREE.SAOPass relies on CopyShader' );
}
this.materialCopy = new ShaderMaterial( {
uniforms: UniformsUtils.clone( CopyShader.uniforms ),
vertexShader: CopyShader.vertexShader,
fragmentShader: CopyShader.fragmentShader,
blending: NoBlending
} );
this.materialCopy.transparent = true;
this.materialCopy.depthTest = false;
this.materialCopy.depthWrite = false;
this.materialCopy.blending = CustomBlending;
this.materialCopy.blendSrc = DstColorFactor;
this.materialCopy.blendDst = ZeroFactor;
this.materialCopy.blendEquation = AddEquation;
this.materialCopy.blendSrcAlpha = DstAlphaFactor;
this.materialCopy.blendDstAlpha = ZeroFactor;
this.materialCopy.blendEquationAlpha = AddEquation;
if ( UnpackDepthRGBAShader === undefined ) {
console.error( 'THREE.SAOPass relies on UnpackDepthRGBAShader' );
}
this.depthCopy = new ShaderMaterial( {
uniforms: UniformsUtils.clone( UnpackDepthRGBAShader.uniforms ),
vertexShader: UnpackDepthRGBAShader.vertexShader,
fragmentShader: UnpackDepthRGBAShader.fragmentShader,
blending: NoBlending
} );
this.fsQuad = new FullScreenQuad( null );
}
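A hedged usage sketch, not from the source project: SAOPass is added after a RenderPass, and a couple of the this.params fields initialized above are tuned. Import paths and parameter values are assumptions.
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
import { SAOPass } from 'three/examples/jsm/postprocessing/SAOPass.js';

const composer = new EffectComposer( renderer ); // renderer, scene, camera assumed to exist
composer.addPass( new RenderPass( scene, camera ) );

// scene, camera, useDepthTexture, useNormals (matching the constructor above)
const saoPass = new SAOPass( scene, camera, false, true );
saoPass.params.saoIntensity = 0.02; // illustrative value, tune per scene
saoPass.params.saoBlurRadius = 8;
composer.addPass( saoPass );

composer.render();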
Example #18
Source File: SMAAPass.js From Computer-Graphics with MIT License | 4 votes |
constructor( width, height ) {
super();
// render targets
this.edgesRT = new WebGLRenderTarget( width, height, {
depthBuffer: false
} );
this.edgesRT.texture.name = 'SMAAPass.edges';
this.weightsRT = new WebGLRenderTarget( width, height, {
depthBuffer: false
} );
this.weightsRT.texture.name = 'SMAAPass.weights';
// textures
const scope = this;
const areaTextureImage = new Image();
areaTextureImage.src = this.getAreaTexture();
areaTextureImage.onload = function () {
// assigning data to HTMLImageElement.src is asynchronous (see #15162)
scope.areaTexture.needsUpdate = true;
};
this.areaTexture = new Texture();
this.areaTexture.name = 'SMAAPass.area';
this.areaTexture.image = areaTextureImage;
this.areaTexture.minFilter = LinearFilter;
this.areaTexture.generateMipmaps = false;
this.areaTexture.flipY = false;
const searchTextureImage = new Image();
searchTextureImage.src = this.getSearchTexture();
searchTextureImage.onload = function () {
// assigning data to HTMLImageElement.src is asynchronous (see #15162)
scope.searchTexture.needsUpdate = true;
};
this.searchTexture = new Texture();
this.searchTexture.name = 'SMAAPass.search';
this.searchTexture.image = searchTextureImage;
this.searchTexture.magFilter = NearestFilter;
this.searchTexture.minFilter = NearestFilter;
this.searchTexture.generateMipmaps = false;
this.searchTexture.flipY = false;
// materials - pass 1
if ( SMAAEdgesShader === undefined ) {
console.error( 'THREE.SMAAPass relies on SMAAShader' );
}
this.uniformsEdges = UniformsUtils.clone( SMAAEdgesShader.uniforms );
this.uniformsEdges[ 'resolution' ].value.set( 1 / width, 1 / height );
this.materialEdges = new ShaderMaterial( {
defines: Object.assign( {}, SMAAEdgesShader.defines ),
uniforms: this.uniformsEdges,
vertexShader: SMAAEdgesShader.vertexShader,
fragmentShader: SMAAEdgesShader.fragmentShader
} );
// materials - pass 2
this.uniformsWeights = UniformsUtils.clone( SMAAWeightsShader.uniforms );
this.uniformsWeights[ 'resolution' ].value.set( 1 / width, 1 / height );
this.uniformsWeights[ 'tDiffuse' ].value = this.edgesRT.texture;
this.uniformsWeights[ 'tArea' ].value = this.areaTexture;
this.uniformsWeights[ 'tSearch' ].value = this.searchTexture;
this.materialWeights = new ShaderMaterial( {
defines: Object.assign( {}, SMAAWeightsShader.defines ),
uniforms: this.uniformsWeights,
vertexShader: SMAAWeightsShader.vertexShader,
fragmentShader: SMAAWeightsShader.fragmentShader
} );
// materials - pass 3
this.uniformsBlend = UniformsUtils.clone( SMAABlendShader.uniforms );
this.uniformsBlend[ 'resolution' ].value.set( 1 / width, 1 / height );
this.uniformsBlend[ 'tDiffuse' ].value = this.weightsRT.texture;
this.materialBlend = new ShaderMaterial( {
uniforms: this.uniformsBlend,
vertexShader: SMAABlendShader.vertexShader,
fragmentShader: SMAABlendShader.fragmentShader
} );
this.needsSwap = false;
this.fsQuad = new FullScreenQuad( null );
}
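A hedged usage sketch, not from the source project: SMAAPass is constructed with drawing-buffer dimensions so the 1 / width and 1 / height resolution uniforms above stay correct on high-DPI displays. The import path is an assumption.
import { SMAAPass } from 'three/examples/jsm/postprocessing/SMAAPass.js';

const pixelRatio = renderer.getPixelRatio(); // renderer and composer assumed to exist
const smaaPass = new SMAAPass( window.innerWidth * pixelRatio, window.innerHeight * pixelRatio );
composer.addPass( smaaPass );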
Example #19
Source File: SSAARenderPass.js From Computer-Graphics with MIT License | 4 votes |
render( renderer, writeBuffer, readBuffer ) {
if ( ! this.sampleRenderTarget ) {
this.sampleRenderTarget = new WebGLRenderTarget( readBuffer.width, readBuffer.height, { minFilter: LinearFilter, magFilter: LinearFilter, format: RGBAFormat } );
this.sampleRenderTarget.texture.name = 'SSAARenderPass.sample';
}
const jitterOffsets = _JitterVectors[ Math.max( 0, Math.min( this.sampleLevel, 5 ) ) ];
const autoClear = renderer.autoClear;
renderer.autoClear = false;
renderer.getClearColor( this._oldClearColor );
const oldClearAlpha = renderer.getClearAlpha();
const baseSampleWeight = 1.0 / jitterOffsets.length;
const roundingRange = 1 / 32;
this.copyUniforms[ 'tDiffuse' ].value = this.sampleRenderTarget.texture;
const viewOffset = {
fullWidth: readBuffer.width,
fullHeight: readBuffer.height,
offsetX: 0,
offsetY: 0,
width: readBuffer.width,
height: readBuffer.height
};
const originalViewOffset = Object.assign( {}, this.camera.view );
if ( originalViewOffset.enabled ) Object.assign( viewOffset, originalViewOffset );
// render the scene multiple times, each slightly jitter offset from the last and accumulate the results.
for ( let i = 0; i < jitterOffsets.length; i ++ ) {
const jitterOffset = jitterOffsets[ i ];
if ( this.camera.setViewOffset ) {
this.camera.setViewOffset(
viewOffset.fullWidth, viewOffset.fullHeight,
viewOffset.offsetX + jitterOffset[ 0 ] * 0.0625, viewOffset.offsetY + jitterOffset[ 1 ] * 0.0625, // 0.0625 = 1 / 16
viewOffset.width, viewOffset.height
);
}
let sampleWeight = baseSampleWeight;
if ( this.unbiased ) {
// the theory is that equal weights for each sample lead to an accumulation of rounding errors.
// The following equation varies the sampleWeight per sample so that it is uniformly distributed
// across a range of values whose rounding errors cancel each other out.
const uniformCenteredDistribution = ( - 0.5 + ( i + 0.5 ) / jitterOffsets.length );
sampleWeight += roundingRange * uniformCenteredDistribution;
}
this.copyUniforms[ 'opacity' ].value = sampleWeight;
renderer.setClearColor( this.clearColor, this.clearAlpha );
renderer.setRenderTarget( this.sampleRenderTarget );
renderer.clear();
renderer.render( this.scene, this.camera );
renderer.setRenderTarget( this.renderToScreen ? null : writeBuffer );
if ( i === 0 ) {
renderer.setClearColor( 0x000000, 0.0 );
renderer.clear();
}
this.fsQuad.render( renderer );
}
if ( this.camera.setViewOffset && originalViewOffset.enabled ) {
this.camera.setViewOffset(
originalViewOffset.fullWidth, originalViewOffset.fullHeight,
originalViewOffset.offsetX, originalViewOffset.offsetY,
originalViewOffset.width, originalViewOffset.height
);
} else if ( this.camera.clearViewOffset ) {
this.camera.clearViewOffset();
}
renderer.autoClear = autoClear;
renderer.setClearColor( this._oldClearColor, oldClearAlpha );
}
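A hedged usage sketch, not from the source project: SSAARenderPass usually replaces the plain RenderPass at the start of a composer chain, and sampleLevel selects an entry in the _JitterVectors table referenced above. The import path is an assumption.
import { SSAARenderPass } from 'three/examples/jsm/postprocessing/SSAARenderPass.js';

const ssaaPass = new SSAARenderPass( scene, camera ); // scene, camera, composer assumed to exist
ssaaPass.unbiased = true;   // enables the per-sample weight adjustment shown above
ssaaPass.sampleLevel = 3;   // 8 jittered samples
composer.addPass( ssaaPass );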
Example #20
Source File: RGBELoader.js From canvas with Apache License 2.0 | 4 votes |
RGBELoader.prototype = Object.assign( Object.create( DataTextureLoader.prototype ), {
constructor: RGBELoader,
// adapted from http://www.graphics.cornell.edu/~bjw/rgbe.html
parse: function ( buffer ) {
var
/* return codes for rgbe routines */
//RGBE_RETURN_SUCCESS = 0,
RGBE_RETURN_FAILURE = - 1,
/* default error routine. change this to change error handling */
rgbe_read_error = 1,
rgbe_write_error = 2,
rgbe_format_error = 3,
rgbe_memory_error = 4,
rgbe_error = function ( rgbe_error_code, msg ) {
switch ( rgbe_error_code ) {
case rgbe_read_error: console.error( "RGBELoader Read Error: " + ( msg || '' ) );
break;
case rgbe_write_error: console.error( "RGBELoader Write Error: " + ( msg || '' ) );
break;
case rgbe_format_error: console.error( "RGBELoader Bad File Format: " + ( msg || '' ) );
break;
default:
case rgbe_memory_error: console.error( "RGBELoader: Error: " + ( msg || '' ) );
}
return RGBE_RETURN_FAILURE;
},
/* offsets to red, green, and blue components in a data (float) pixel */
//RGBE_DATA_RED = 0,
//RGBE_DATA_GREEN = 1,
//RGBE_DATA_BLUE = 2,
/* number of floats per pixel, use 4 since stored in rgba image format */
//RGBE_DATA_SIZE = 4,
/* flags indicating which fields in an rgbe_header_info are valid */
RGBE_VALID_PROGRAMTYPE = 1,
RGBE_VALID_FORMAT = 2,
RGBE_VALID_DIMENSIONS = 4,
NEWLINE = "\n",
fgets = function ( buffer, lineLimit, consume ) {
lineLimit = ! lineLimit ? 1024 : lineLimit;
var p = buffer.pos,
i = - 1, len = 0, s = '', chunkSize = 128,
chunk = String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) )
;
while ( ( 0 > ( i = chunk.indexOf( NEWLINE ) ) ) && ( len < lineLimit ) && ( p < buffer.byteLength ) ) {
s += chunk; len += chunk.length;
p += chunkSize;
chunk += String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
}
if ( - 1 < i ) {
/*for (i=l-1; i>=0; i--) {
byteCode = m.charCodeAt(i);
if (byteCode > 0x7f && byteCode <= 0x7ff) byteLen++;
else if (byteCode > 0x7ff && byteCode <= 0xffff) byteLen += 2;
if (byteCode >= 0xDC00 && byteCode <= 0xDFFF) i--; //trail surrogate
}*/
if ( false !== consume ) buffer.pos += len + i + 1;
return s + chunk.slice( 0, i );
}
return false;
},
/* minimal header reading. modify if you want to parse more information */
RGBE_ReadHeader = function ( buffer ) {
var line, match,
// regexes to parse header info fields
magic_token_re = /^#\?(\S+)$/,
gamma_re = /^\s*GAMMA\s*=\s*(\d+(\.\d+)?)\s*$/,
exposure_re = /^\s*EXPOSURE\s*=\s*(\d+(\.\d+)?)\s*$/,
format_re = /^\s*FORMAT=(\S+)\s*$/,
dimensions_re = /^\s*\-Y\s+(\d+)\s+\+X\s+(\d+)\s*$/,
// RGBE format header struct
header = {
valid: 0, /* indicate which fields are valid */
string: '', /* the actual header string */
comments: '', /* comments found in header */
programtype: 'RGBE', /* listed at beginning of file to identify it after "#?". defaults to "RGBE" */
format: '', /* RGBE format, default 32-bit_rle_rgbe */
gamma: 1.0, /* image has already been gamma corrected with given gamma. defaults to 1.0 (no correction) */
exposure: 1.0, /* a value of 1.0 in an image corresponds to <exposure> watts/steradian/m^2. defaults to 1.0 */
width: 0, height: 0 /* image dimensions, width/height */
};
if ( buffer.pos >= buffer.byteLength || ! ( line = fgets( buffer ) ) ) {
return rgbe_error( rgbe_read_error, "no header found" );
}
/* if you want to require the magic token then uncomment the next line */
if ( ! ( match = line.match( magic_token_re ) ) ) {
return rgbe_error( rgbe_format_error, "bad initial token" );
}
header.valid |= RGBE_VALID_PROGRAMTYPE;
header.programtype = match[ 1 ];
header.string += line + "\n";
while ( true ) {
line = fgets( buffer );
if ( false === line ) break;
header.string += line + "\n";
if ( '#' === line.charAt( 0 ) ) {
header.comments += line + "\n";
continue; // comment line
}
if ( match = line.match( gamma_re ) ) {
header.gamma = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( exposure_re ) ) {
header.exposure = parseFloat( match[ 1 ], 10 );
}
if ( match = line.match( format_re ) ) {
header.valid |= RGBE_VALID_FORMAT;
header.format = match[ 1 ];//'32-bit_rle_rgbe';
}
if ( match = line.match( dimensions_re ) ) {
header.valid |= RGBE_VALID_DIMENSIONS;
header.height = parseInt( match[ 1 ], 10 );
header.width = parseInt( match[ 2 ], 10 );
}
if ( ( header.valid & RGBE_VALID_FORMAT ) && ( header.valid & RGBE_VALID_DIMENSIONS ) ) break;
}
if ( ! ( header.valid & RGBE_VALID_FORMAT ) ) {
return rgbe_error( rgbe_format_error, "missing format specifier" );
}
if ( ! ( header.valid & RGBE_VALID_DIMENSIONS ) ) {
return rgbe_error( rgbe_format_error, "missing image size specifier" );
}
return header;
},
RGBE_ReadPixels_RLE = function ( buffer, w, h ) {
var data_rgba, offset, pos, count, byteValue,
scanline_buffer, ptr, ptr_end, i, l, off, isEncodedRun,
scanline_width = w, num_scanlines = h, rgbeStart
;
if (
// run length encoding is not allowed so read flat
( ( scanline_width < 8 ) || ( scanline_width > 0x7fff ) ) ||
// this file is not run length encoded
( ( 2 !== buffer[ 0 ] ) || ( 2 !== buffer[ 1 ] ) || ( buffer[ 2 ] & 0x80 ) )
) {
// return the flat buffer
return new Uint8Array( buffer );
}
if ( scanline_width !== ( ( buffer[ 2 ] << 8 ) | buffer[ 3 ] ) ) {
return rgbe_error( rgbe_format_error, "wrong scanline width" );
}
data_rgba = new Uint8Array( 4 * w * h );
if ( ! data_rgba.length ) {
return rgbe_error( rgbe_memory_error, "unable to allocate buffer space" );
}
offset = 0; pos = 0; ptr_end = 4 * scanline_width;
rgbeStart = new Uint8Array( 4 );
scanline_buffer = new Uint8Array( ptr_end );
// read in each successive scanline
while ( ( num_scanlines > 0 ) && ( pos < buffer.byteLength ) ) {
if ( pos + 4 > buffer.byteLength ) {
return rgbe_error( rgbe_read_error );
}
rgbeStart[ 0 ] = buffer[ pos ++ ];
rgbeStart[ 1 ] = buffer[ pos ++ ];
rgbeStart[ 2 ] = buffer[ pos ++ ];
rgbeStart[ 3 ] = buffer[ pos ++ ];
if ( ( 2 != rgbeStart[ 0 ] ) || ( 2 != rgbeStart[ 1 ] ) || ( ( ( rgbeStart[ 2 ] << 8 ) | rgbeStart[ 3 ] ) != scanline_width ) ) {
return rgbe_error( rgbe_format_error, "bad rgbe scanline format" );
}
// read each of the four channels for the scanline into the buffer
// first red, then green, then blue, then exponent
ptr = 0;
while ( ( ptr < ptr_end ) && ( pos < buffer.byteLength ) ) {
count = buffer[ pos ++ ];
isEncodedRun = count > 128;
if ( isEncodedRun ) count -= 128;
if ( ( 0 === count ) || ( ptr + count > ptr_end ) ) {
return rgbe_error( rgbe_format_error, "bad scanline data" );
}
if ( isEncodedRun ) {
// a (encoded) run of the same value
byteValue = buffer[ pos ++ ];
for ( i = 0; i < count; i ++ ) {
scanline_buffer[ ptr ++ ] = byteValue;
}
//ptr += count;
} else {
// a literal-run
scanline_buffer.set( buffer.subarray( pos, pos + count ), ptr );
ptr += count; pos += count;
}
}
// now convert data from buffer into rgba
// first red, then green, then blue, then exponent (alpha)
l = scanline_width; //scanline_buffer.byteLength;
for ( i = 0; i < l; i ++ ) {
off = 0;
data_rgba[ offset ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 1 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 2 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 3 ] = scanline_buffer[ i + off ];
offset += 4;
}
num_scanlines --;
}
return data_rgba;
};
var RGBEByteToRGBFloat = function ( sourceArray, sourceOffset, destArray, destOffset ) {
var e = sourceArray[ sourceOffset + 3 ];
var scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = sourceArray[ sourceOffset + 0 ] * scale;
destArray[ destOffset + 1 ] = sourceArray[ sourceOffset + 1 ] * scale;
destArray[ destOffset + 2 ] = sourceArray[ sourceOffset + 2 ] * scale;
};
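// Note (sketch, not part of the original file): RGBE stores a shared 8-bit
// exponent in the fourth byte, so each channel decodes as
//   value = byte * 2^( e - 128 ) / 255
// which is exactly the `scale` factor computed above.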
var RGBEByteToRGBHalf = ( function () {
// Source: http://gamedev.stackexchange.com/questions/17326/conversion-of-a-number-from-single-precision-floating-point-representation-to-a/17410#17410
var floatView = new Float32Array( 1 );
var int32View = new Int32Array( floatView.buffer );
/* This method is faster than the OpenEXR implementation (very often
* used, eg. in Ogre), with the additional benefit of rounding, inspired
* by James Tursa's half-precision code. */
function toHalf( val ) {
floatView[ 0 ] = val;
var x = int32View[ 0 ];
var bits = ( x >> 16 ) & 0x8000; /* Get the sign */
var m = ( x >> 12 ) & 0x07ff; /* Keep one extra bit for rounding */
var e = ( x >> 23 ) & 0xff; /* Using int is faster here */
/* If zero, or denormal, or exponent underflows too much for a denormal
* half, return signed zero. */
if ( e < 103 ) return bits;
/* If NaN, return NaN. If Inf or exponent overflow, return Inf. */
if ( e > 142 ) {
bits |= 0x7c00;
/* If exponent was 0xff and one mantissa bit was set, it means NaN,
* not Inf, so make sure we set one mantissa bit too. */
bits |= ( ( e == 255 ) ? 0 : 1 ) && ( x & 0x007fffff );
return bits;
}
/* If exponent underflows but not too much, return a denormal */
if ( e < 113 ) {
m |= 0x0800;
/* Extra rounding may overflow and set mantissa to 0 and exponent
* to 1, which is OK. */
bits |= ( m >> ( 114 - e ) ) + ( ( m >> ( 113 - e ) ) & 1 );
return bits;
}
bits |= ( ( e - 112 ) << 10 ) | ( m >> 1 );
/* Extra rounding. An overflow will set mantissa to 0 and increment
* the exponent, which is OK. */
bits += m & 1;
return bits;
}
return function ( sourceArray, sourceOffset, destArray, destOffset ) {
var e = sourceArray[ sourceOffset + 3 ];
var scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = toHalf( sourceArray[ sourceOffset + 0 ] * scale );
destArray[ destOffset + 1 ] = toHalf( sourceArray[ sourceOffset + 1 ] * scale );
destArray[ destOffset + 2 ] = toHalf( sourceArray[ sourceOffset + 2 ] * scale );
};
} )();
var byteArray = new Uint8Array( buffer );
byteArray.pos = 0;
var rgbe_header_info = RGBE_ReadHeader( byteArray );
if ( RGBE_RETURN_FAILURE !== rgbe_header_info ) {
var w = rgbe_header_info.width,
h = rgbe_header_info.height,
image_rgba_data = RGBE_ReadPixels_RLE( byteArray.subarray( byteArray.pos ), w, h );
if ( RGBE_RETURN_FAILURE !== image_rgba_data ) {
switch ( this.type ) {
case UnsignedByteType:
var data = image_rgba_data;
var format = RGBEFormat; // handled as THREE.RGBAFormat in shaders
var type = UnsignedByteType;
break;
case FloatType:
var numElements = ( image_rgba_data.length / 4 ) * 3;
var floatArray = new Float32Array( numElements );
for ( var j = 0; j < numElements; j ++ ) {
RGBEByteToRGBFloat( image_rgba_data, j * 4, floatArray, j * 3 );
}
var data = floatArray;
var format = RGBFormat;
var type = FloatType;
break;
case HalfFloatType:
var numElements = ( image_rgba_data.length / 4 ) * 3;
var halfArray = new Uint16Array( numElements );
for ( var j = 0; j < numElements; j ++ ) {
RGBEByteToRGBHalf( image_rgba_data, j * 4, halfArray, j * 3 );
}
var data = halfArray;
var format = RGBFormat;
var type = HalfFloatType;
break;
default:
console.error( 'THREE.RGBELoader: unsupported type: ', this.type );
break;
}
return {
width: w, height: h,
data: data,
header: rgbe_header_info.string,
gamma: rgbe_header_info.gamma,
exposure: rgbe_header_info.exposure,
format: format,
type: type
};
}
}
return null;
},
setDataType: function ( value ) {
this.type = value;
return this;
},
load: function ( url, onLoad, onProgress, onError ) {
function onLoadCallback( texture, texData ) {
switch ( texture.type ) {
case UnsignedByteType:
texture.encoding = RGBEEncoding;
texture.minFilter = NearestFilter;
texture.magFilter = NearestFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case FloatType:
texture.encoding = LinearEncoding;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
case HalfFloatType:
texture.encoding = LinearEncoding;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
}
if ( onLoad ) onLoad( texture, texData );
}
return DataTextureLoader.prototype.load.call( this, url, onLoadCallback, onProgress, onError );
}
} );
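A hedged usage sketch, not from the canvas project: loading an .hdr file with this loader. With FloatType or HalfFloatType the onLoad callback above switches the texture to LinearFilter. The file path and scene variable are illustrative.
import { HalfFloatType, EquirectangularReflectionMapping } from 'three';

new RGBELoader()
	.setDataType( HalfFloatType )
	.load( 'textures/studio.hdr', function ( texture, texData ) {
		// texture arrives with LinearFilter min/mag filters per the callback above
		texture.mapping = EquirectangularReflectionMapping;
		scene.environment = texture; // scene assumed to exist; PMREM prefiltering may be needed on older revisions
	} );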
Example #21
Source File: GLTFLoader.js From canvas with Apache License 2.0 | 4 votes |
GLTFLoader = ( function () {
function GLTFLoader( manager ) {
Loader.call( this, manager );
this.dracoLoader = null;
this.ddsLoader = null;
}
GLTFLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
constructor: GLTFLoader,
load: function ( url, onLoad, onProgress, onError ) {
var scope = this;
var resourcePath;
if ( this.resourcePath !== '' ) {
resourcePath = this.resourcePath;
} else if ( this.path !== '' ) {
resourcePath = this.path;
} else {
resourcePath = LoaderUtils.extractUrlBase( url );
}
// Tells the LoadingManager to track an extra item, which resolves after
// the model is fully loaded. This means the count of items loaded will
// be incorrect, but ensures manager.onLoad() does not fire early.
scope.manager.itemStart( url );
var _onError = function ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
scope.manager.itemEnd( url );
};
var loader = new FileLoader( scope.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
if ( scope.crossOrigin === 'use-credentials' ) {
loader.setWithCredentials( true );
}
loader.load( url, function ( data ) {
try {
scope.parse( data, resourcePath, function ( gltf ) {
onLoad( gltf );
scope.manager.itemEnd( url );
}, _onError );
} catch ( e ) {
_onError( e );
}
}, onProgress, _onError );
},
setDRACOLoader: function ( dracoLoader ) {
this.dracoLoader = dracoLoader;
return this;
},
setDDSLoader: function ( ddsLoader ) {
this.ddsLoader = ddsLoader;
return this;
},
parse: function ( data, path, onLoad, onError ) {
var content;
var extensions = {};
if ( typeof data === 'string' ) {
content = data;
} else {
var magic = LoaderUtils.decodeText( new Uint8Array( data, 0, 4 ) );
if ( magic === BINARY_EXTENSION_HEADER_MAGIC ) {
try {
extensions[ EXTENSIONS.KHR_BINARY_GLTF ] = new GLTFBinaryExtension( data );
} catch ( error ) {
if ( onError ) onError( error );
return;
}
content = extensions[ EXTENSIONS.KHR_BINARY_GLTF ].content;
} else {
content = LoaderUtils.decodeText( new Uint8Array( data ) );
}
}
var json = JSON.parse( content );
if ( json.asset === undefined || json.asset.version[ 0 ] < 2 ) {
if ( onError ) onError( new Error( 'THREE.GLTFLoader: Unsupported asset. glTF versions >=2.0 are supported.' ) );
return;
}
if ( json.extensionsUsed ) {
for ( var i = 0; i < json.extensionsUsed.length; ++ i ) {
var extensionName = json.extensionsUsed[ i ];
var extensionsRequired = json.extensionsRequired || [];
switch ( extensionName ) {
case EXTENSIONS.KHR_LIGHTS_PUNCTUAL:
extensions[ extensionName ] = new GLTFLightsExtension( json );
break;
case EXTENSIONS.KHR_MATERIALS_CLEARCOAT:
extensions[ extensionName ] = new GLTFMaterialsClearcoatExtension();
break;
case EXTENSIONS.KHR_MATERIALS_UNLIT:
extensions[ extensionName ] = new GLTFMaterialsUnlitExtension();
break;
case EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS:
extensions[ extensionName ] = new GLTFMaterialsPbrSpecularGlossinessExtension();
break;
case EXTENSIONS.KHR_DRACO_MESH_COMPRESSION:
extensions[ extensionName ] = new GLTFDracoMeshCompressionExtension( json, this.dracoLoader );
break;
case EXTENSIONS.MSFT_TEXTURE_DDS:
extensions[ extensionName ] = new GLTFTextureDDSExtension( this.ddsLoader );
break;
case EXTENSIONS.KHR_TEXTURE_TRANSFORM:
extensions[ extensionName ] = new GLTFTextureTransformExtension();
break;
case EXTENSIONS.KHR_MESH_QUANTIZATION:
extensions[ extensionName ] = new GLTFMeshQuantizationExtension();
break;
default:
if ( extensionsRequired.indexOf( extensionName ) >= 0 ) {
console.warn( 'THREE.GLTFLoader: Unknown extension "' + extensionName + '".' );
}
}
}
}
var parser = new GLTFParser( json, extensions, {
path: path || this.resourcePath || '',
crossOrigin: this.crossOrigin,
manager: this.manager
} );
parser.parse( onLoad, onError );
}
} );
/* GLTFREGISTRY */
function GLTFRegistry() {
var objects = {};
return {
get: function ( key ) {
return objects[ key ];
},
add: function ( key, object ) {
objects[ key ] = object;
},
remove: function ( key ) {
delete objects[ key ];
},
removeAll: function () {
objects = {};
}
};
}
/*********************************/
/********** EXTENSIONS ***********/
/*********************************/
var EXTENSIONS = {
KHR_BINARY_GLTF: 'KHR_binary_glTF',
KHR_DRACO_MESH_COMPRESSION: 'KHR_draco_mesh_compression',
KHR_LIGHTS_PUNCTUAL: 'KHR_lights_punctual',
KHR_MATERIALS_CLEARCOAT: 'KHR_materials_clearcoat',
KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS: 'KHR_materials_pbrSpecularGlossiness',
KHR_MATERIALS_UNLIT: 'KHR_materials_unlit',
KHR_TEXTURE_TRANSFORM: 'KHR_texture_transform',
KHR_MESH_QUANTIZATION: 'KHR_mesh_quantization',
MSFT_TEXTURE_DDS: 'MSFT_texture_dds'
};
/**
* DDS Texture Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Vendor/MSFT_texture_dds
*
*/
function GLTFTextureDDSExtension( ddsLoader ) {
if ( ! ddsLoader ) {
throw new Error( 'THREE.GLTFLoader: Attempting to load .dds texture without importing DDSLoader' );
}
this.name = EXTENSIONS.MSFT_TEXTURE_DDS;
this.ddsLoader = ddsLoader;
}
/**
* Punctual Lights Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_lights_punctual
*/
function GLTFLightsExtension( json ) {
this.name = EXTENSIONS.KHR_LIGHTS_PUNCTUAL;
var extension = ( json.extensions && json.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ] ) || {};
this.lightDefs = extension.lights || [];
}
GLTFLightsExtension.prototype.loadLight = function ( lightIndex ) {
var lightDef = this.lightDefs[ lightIndex ];
var lightNode;
var color = new Color( 0xffffff );
if ( lightDef.color !== undefined ) color.fromArray( lightDef.color );
var range = lightDef.range !== undefined ? lightDef.range : 0;
switch ( lightDef.type ) {
case 'directional':
lightNode = new DirectionalLight( color );
lightNode.target.position.set( 0, 0, - 1 );
lightNode.add( lightNode.target );
break;
case 'point':
lightNode = new PointLight( color );
lightNode.distance = range;
break;
case 'spot':
lightNode = new SpotLight( color );
lightNode.distance = range;
// Handle spotlight properties.
lightDef.spot = lightDef.spot || {};
lightDef.spot.innerConeAngle = lightDef.spot.innerConeAngle !== undefined ? lightDef.spot.innerConeAngle : 0;
lightDef.spot.outerConeAngle = lightDef.spot.outerConeAngle !== undefined ? lightDef.spot.outerConeAngle : Math.PI / 4.0;
lightNode.angle = lightDef.spot.outerConeAngle;
lightNode.penumbra = 1.0 - lightDef.spot.innerConeAngle / lightDef.spot.outerConeAngle;
lightNode.target.position.set( 0, 0, - 1 );
lightNode.add( lightNode.target );
break;
default:
throw new Error( 'THREE.GLTFLoader: Unexpected light type, "' + lightDef.type + '".' );
}
// Some lights (e.g. spot) default to a position other than the origin. Reset the position
// here, because node-level parsing will only override position if explicitly specified.
lightNode.position.set( 0, 0, 0 );
lightNode.decay = 2;
if ( lightDef.intensity !== undefined ) lightNode.intensity = lightDef.intensity;
lightNode.name = lightDef.name || ( 'light_' + lightIndex );
return Promise.resolve( lightNode );
};
/**
* Unlit Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_unlit
*/
function GLTFMaterialsUnlitExtension() {
this.name = EXTENSIONS.KHR_MATERIALS_UNLIT;
}
GLTFMaterialsUnlitExtension.prototype.getMaterialType = function () {
return MeshBasicMaterial;
};
GLTFMaterialsUnlitExtension.prototype.extendParams = function ( materialParams, materialDef, parser ) {
var pending = [];
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
var metallicRoughness = materialDef.pbrMetallicRoughness;
if ( metallicRoughness ) {
if ( Array.isArray( metallicRoughness.baseColorFactor ) ) {
var array = metallicRoughness.baseColorFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( metallicRoughness.baseColorTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', metallicRoughness.baseColorTexture ) );
}
}
return Promise.all( pending );
};
/**
* Clearcoat Materials Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_clearcoat
*/
function GLTFMaterialsClearcoatExtension() {
this.name = EXTENSIONS.KHR_MATERIALS_CLEARCOAT;
}
GLTFMaterialsClearcoatExtension.prototype.getMaterialType = function () {
return MeshPhysicalMaterial;
};
GLTFMaterialsClearcoatExtension.prototype.extendParams = function ( materialParams, materialDef, parser ) {
var pending = [];
var extension = materialDef.extensions[ this.name ];
if ( extension.clearcoatFactor !== undefined ) {
materialParams.clearcoat = extension.clearcoatFactor;
}
if ( extension.clearcoatTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatMap', extension.clearcoatTexture ) );
}
if ( extension.clearcoatRoughnessFactor !== undefined ) {
materialParams.clearcoatRoughness = extension.clearcoatRoughnessFactor;
}
if ( extension.clearcoatRoughnessTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatRoughnessMap', extension.clearcoatRoughnessTexture ) );
}
if ( extension.clearcoatNormalTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'clearcoatNormalMap', extension.clearcoatNormalTexture ) );
if ( extension.clearcoatNormalTexture.scale !== undefined ) {
var scale = extension.clearcoatNormalTexture.scale;
materialParams.clearcoatNormalScale = new Vector2( scale, scale );
}
}
return Promise.all( pending );
};
/* BINARY EXTENSION */
var BINARY_EXTENSION_HEADER_MAGIC = 'glTF';
var BINARY_EXTENSION_HEADER_LENGTH = 12;
var BINARY_EXTENSION_CHUNK_TYPES = { JSON: 0x4E4F534A, BIN: 0x004E4942 };
function GLTFBinaryExtension( data ) {
this.name = EXTENSIONS.KHR_BINARY_GLTF;
this.content = null;
this.body = null;
var headerView = new DataView( data, 0, BINARY_EXTENSION_HEADER_LENGTH );
this.header = {
magic: LoaderUtils.decodeText( new Uint8Array( data.slice( 0, 4 ) ) ),
version: headerView.getUint32( 4, true ),
length: headerView.getUint32( 8, true )
};
if ( this.header.magic !== BINARY_EXTENSION_HEADER_MAGIC ) {
throw new Error( 'THREE.GLTFLoader: Unsupported glTF-Binary header.' );
} else if ( this.header.version < 2.0 ) {
throw new Error( 'THREE.GLTFLoader: Legacy binary file detected.' );
}
var chunkView = new DataView( data, BINARY_EXTENSION_HEADER_LENGTH );
var chunkIndex = 0;
while ( chunkIndex < chunkView.byteLength ) {
var chunkLength = chunkView.getUint32( chunkIndex, true );
chunkIndex += 4;
var chunkType = chunkView.getUint32( chunkIndex, true );
chunkIndex += 4;
if ( chunkType === BINARY_EXTENSION_CHUNK_TYPES.JSON ) {
var contentArray = new Uint8Array( data, BINARY_EXTENSION_HEADER_LENGTH + chunkIndex, chunkLength );
this.content = LoaderUtils.decodeText( contentArray );
} else if ( chunkType === BINARY_EXTENSION_CHUNK_TYPES.BIN ) {
var byteOffset = BINARY_EXTENSION_HEADER_LENGTH + chunkIndex;
this.body = data.slice( byteOffset, byteOffset + chunkLength );
}
// Clients must ignore chunks with unknown types.
chunkIndex += chunkLength;
}
if ( this.content === null ) {
throw new Error( 'THREE.GLTFLoader: JSON content not found.' );
}
}
/**
* DRACO Mesh Compression Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_draco_mesh_compression
*/
function GLTFDracoMeshCompressionExtension( json, dracoLoader ) {
if ( ! dracoLoader ) {
throw new Error( 'THREE.GLTFLoader: No DRACOLoader instance provided.' );
}
this.name = EXTENSIONS.KHR_DRACO_MESH_COMPRESSION;
this.json = json;
this.dracoLoader = dracoLoader;
this.dracoLoader.preload();
}
GLTFDracoMeshCompressionExtension.prototype.decodePrimitive = function ( primitive, parser ) {
var json = this.json;
var dracoLoader = this.dracoLoader;
var bufferViewIndex = primitive.extensions[ this.name ].bufferView;
var gltfAttributeMap = primitive.extensions[ this.name ].attributes;
var threeAttributeMap = {};
var attributeNormalizedMap = {};
var attributeTypeMap = {};
for ( var attributeName in gltfAttributeMap ) {
var threeAttributeName = ATTRIBUTES[ attributeName ] || attributeName.toLowerCase();
threeAttributeMap[ threeAttributeName ] = gltfAttributeMap[ attributeName ];
}
for ( attributeName in primitive.attributes ) {
var threeAttributeName = ATTRIBUTES[ attributeName ] || attributeName.toLowerCase();
if ( gltfAttributeMap[ attributeName ] !== undefined ) {
var accessorDef = json.accessors[ primitive.attributes[ attributeName ] ];
var componentType = WEBGL_COMPONENT_TYPES[ accessorDef.componentType ];
attributeTypeMap[ threeAttributeName ] = componentType;
attributeNormalizedMap[ threeAttributeName ] = accessorDef.normalized === true;
}
}
return parser.getDependency( 'bufferView', bufferViewIndex ).then( function ( bufferView ) {
return new Promise( function ( resolve ) {
dracoLoader.decodeDracoFile( bufferView, function ( geometry ) {
for ( var attributeName in geometry.attributes ) {
var attribute = geometry.attributes[ attributeName ];
var normalized = attributeNormalizedMap[ attributeName ];
if ( normalized !== undefined ) attribute.normalized = normalized;
}
resolve( geometry );
}, threeAttributeMap, attributeTypeMap );
} );
} );
};
/**
* Texture Transform Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_texture_transform
*/
function GLTFTextureTransformExtension() {
this.name = EXTENSIONS.KHR_TEXTURE_TRANSFORM;
}
GLTFTextureTransformExtension.prototype.extendTexture = function ( texture, transform ) {
texture = texture.clone();
if ( transform.offset !== undefined ) {
texture.offset.fromArray( transform.offset );
}
if ( transform.rotation !== undefined ) {
texture.rotation = transform.rotation;
}
if ( transform.scale !== undefined ) {
texture.repeat.fromArray( transform.scale );
}
if ( transform.texCoord !== undefined ) {
console.warn( 'THREE.GLTFLoader: Custom UV sets in "' + this.name + '" extension not yet supported.' );
}
texture.needsUpdate = true;
return texture;
};
/**
* Specular-Glossiness Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_pbrSpecularGlossiness
*/
/**
* A sub class of StandardMaterial with some of the functionality
* changed via the `onBeforeCompile` callback
* @pailhead
*/
function GLTFMeshStandardSGMaterial( params ) {
MeshStandardMaterial.call( this );
this.isGLTFSpecularGlossinessMaterial = true;
//various chunks that need replacing
var specularMapParsFragmentChunk = [
'#ifdef USE_SPECULARMAP',
' uniform sampler2D specularMap;',
'#endif'
].join( '\n' );
var glossinessMapParsFragmentChunk = [
'#ifdef USE_GLOSSINESSMAP',
' uniform sampler2D glossinessMap;',
'#endif'
].join( '\n' );
var specularMapFragmentChunk = [
'vec3 specularFactor = specular;',
'#ifdef USE_SPECULARMAP',
' vec4 texelSpecular = texture2D( specularMap, vUv );',
' texelSpecular = sRGBToLinear( texelSpecular );',
' // reads channel RGB, compatible with a glTF Specular-Glossiness (RGBA) texture',
' specularFactor *= texelSpecular.rgb;',
'#endif'
].join( '\n' );
var glossinessMapFragmentChunk = [
'float glossinessFactor = glossiness;',
'#ifdef USE_GLOSSINESSMAP',
' vec4 texelGlossiness = texture2D( glossinessMap, vUv );',
' // reads channel A, compatible with a glTF Specular-Glossiness (RGBA) texture',
' glossinessFactor *= texelGlossiness.a;',
'#endif'
].join( '\n' );
var lightPhysicalFragmentChunk = [
'PhysicalMaterial material;',
'material.diffuseColor = diffuseColor.rgb;',
'vec3 dxy = max( abs( dFdx( geometryNormal ) ), abs( dFdy( geometryNormal ) ) );',
'float geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );',
'material.specularRoughness = max( 1.0 - glossinessFactor, 0.0525 );// 0.0525 corresponds to the base mip of a 256 cubemap.',
'material.specularRoughness += geometryRoughness;',
'material.specularRoughness = min( material.specularRoughness, 1.0 );',
'material.specularColor = specularFactor.rgb;',
].join( '\n' );
var uniforms = {
specular: { value: new Color().setHex( 0xffffff ) },
glossiness: { value: 1 },
specularMap: { value: null },
glossinessMap: { value: null }
};
this._extraUniforms = uniforms;
// please see #14031 or #13198 for an alternate approach
this.onBeforeCompile = function ( shader ) {
for ( var uniformName in uniforms ) {
shader.uniforms[ uniformName ] = uniforms[ uniformName ];
}
shader.fragmentShader = shader.fragmentShader.replace( 'uniform float roughness;', 'uniform vec3 specular;' );
shader.fragmentShader = shader.fragmentShader.replace( 'uniform float metalness;', 'uniform float glossiness;' );
shader.fragmentShader = shader.fragmentShader.replace( '#include <roughnessmap_pars_fragment>', specularMapParsFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <metalnessmap_pars_fragment>', glossinessMapParsFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <roughnessmap_fragment>', specularMapFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <metalnessmap_fragment>', glossinessMapFragmentChunk );
shader.fragmentShader = shader.fragmentShader.replace( '#include <lights_physical_fragment>', lightPhysicalFragmentChunk );
};
/*eslint-disable*/
Object.defineProperties(
this,
{
specular: {
get: function () { return uniforms.specular.value; },
set: function ( v ) { uniforms.specular.value = v; }
},
specularMap: {
get: function () { return uniforms.specularMap.value; },
set: function ( v ) { uniforms.specularMap.value = v; }
},
glossiness: {
get: function () { return uniforms.glossiness.value; },
set: function ( v ) { uniforms.glossiness.value = v; }
},
glossinessMap: {
get: function () { return uniforms.glossinessMap.value; },
set: function ( v ) {
uniforms.glossinessMap.value = v;
//how about something like this - @pailhead
if ( v ) {
this.defines.USE_GLOSSINESSMAP = '';
// set USE_ROUGHNESSMAP to enable vUv
this.defines.USE_ROUGHNESSMAP = '';
} else {
delete this.defines.USE_ROUGHNESSMAP;
delete this.defines.USE_GLOSSINESSMAP;
}
}
}
}
);
/*eslint-enable*/
delete this.metalness;
delete this.roughness;
delete this.metalnessMap;
delete this.roughnessMap;
this.setValues( params );
}
GLTFMeshStandardSGMaterial.prototype = Object.create( MeshStandardMaterial.prototype );
GLTFMeshStandardSGMaterial.prototype.constructor = GLTFMeshStandardSGMaterial;
GLTFMeshStandardSGMaterial.prototype.copy = function ( source ) {
MeshStandardMaterial.prototype.copy.call( this, source );
this.specularMap = source.specularMap;
this.specular.copy( source.specular );
this.glossinessMap = source.glossinessMap;
this.glossiness = source.glossiness;
delete this.metalness;
delete this.roughness;
delete this.metalnessMap;
delete this.roughnessMap;
return this;
};
function GLTFMaterialsPbrSpecularGlossinessExtension() {
return {
name: EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS,
specularGlossinessParams: [
'color',
'map',
'lightMap',
'lightMapIntensity',
'aoMap',
'aoMapIntensity',
'emissive',
'emissiveIntensity',
'emissiveMap',
'bumpMap',
'bumpScale',
'normalMap',
'normalMapType',
'displacementMap',
'displacementScale',
'displacementBias',
'specularMap',
'specular',
'glossinessMap',
'glossiness',
'alphaMap',
'envMap',
'envMapIntensity',
'refractionRatio',
],
getMaterialType: function () {
return GLTFMeshStandardSGMaterial;
},
extendParams: function ( materialParams, materialDef, parser ) {
var pbrSpecularGlossiness = materialDef.extensions[ this.name ];
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
var pending = [];
if ( Array.isArray( pbrSpecularGlossiness.diffuseFactor ) ) {
var array = pbrSpecularGlossiness.diffuseFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( pbrSpecularGlossiness.diffuseTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', pbrSpecularGlossiness.diffuseTexture ) );
}
materialParams.emissive = new Color( 0.0, 0.0, 0.0 );
materialParams.glossiness = pbrSpecularGlossiness.glossinessFactor !== undefined ? pbrSpecularGlossiness.glossinessFactor : 1.0;
materialParams.specular = new Color( 1.0, 1.0, 1.0 );
if ( Array.isArray( pbrSpecularGlossiness.specularFactor ) ) {
materialParams.specular.fromArray( pbrSpecularGlossiness.specularFactor );
}
if ( pbrSpecularGlossiness.specularGlossinessTexture !== undefined ) {
var specGlossMapDef = pbrSpecularGlossiness.specularGlossinessTexture;
pending.push( parser.assignTexture( materialParams, 'glossinessMap', specGlossMapDef ) );
pending.push( parser.assignTexture( materialParams, 'specularMap', specGlossMapDef ) );
}
return Promise.all( pending );
},
createMaterial: function ( materialParams ) {
var material = new GLTFMeshStandardSGMaterial( materialParams );
material.fog = true;
material.color = materialParams.color;
material.map = materialParams.map === undefined ? null : materialParams.map;
material.lightMap = null;
material.lightMapIntensity = 1.0;
material.aoMap = materialParams.aoMap === undefined ? null : materialParams.aoMap;
material.aoMapIntensity = 1.0;
material.emissive = materialParams.emissive;
material.emissiveIntensity = 1.0;
material.emissiveMap = materialParams.emissiveMap === undefined ? null : materialParams.emissiveMap;
material.bumpMap = materialParams.bumpMap === undefined ? null : materialParams.bumpMap;
material.bumpScale = 1;
material.normalMap = materialParams.normalMap === undefined ? null : materialParams.normalMap;
material.normalMapType = TangentSpaceNormalMap;
if ( materialParams.normalScale ) material.normalScale = materialParams.normalScale;
material.displacementMap = null;
material.displacementScale = 1;
material.displacementBias = 0;
material.specularMap = materialParams.specularMap === undefined ? null : materialParams.specularMap;
material.specular = materialParams.specular;
material.glossinessMap = materialParams.glossinessMap === undefined ? null : materialParams.glossinessMap;
material.glossiness = materialParams.glossiness;
material.alphaMap = null;
material.envMap = materialParams.envMap === undefined ? null : materialParams.envMap;
material.envMapIntensity = 1.0;
material.refractionRatio = 0.98;
return material;
},
};
}
/**
* Mesh Quantization Extension
*
* Specification: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_mesh_quantization
*/
function GLTFMeshQuantizationExtension() {
this.name = EXTENSIONS.KHR_MESH_QUANTIZATION;
}
/*********************************/
/********** INTERPOLATION ********/
/*********************************/
// Spline Interpolation
// Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#appendix-c-spline-interpolation
function GLTFCubicSplineInterpolant( parameterPositions, sampleValues, sampleSize, resultBuffer ) {
Interpolant.call( this, parameterPositions, sampleValues, sampleSize, resultBuffer );
}
GLTFCubicSplineInterpolant.prototype = Object.create( Interpolant.prototype );
GLTFCubicSplineInterpolant.prototype.constructor = GLTFCubicSplineInterpolant;
GLTFCubicSplineInterpolant.prototype.copySampleValue_ = function ( index ) {
// Copies a sample value to the result buffer. See description of glTF
// CUBICSPLINE values layout in interpolate_() function below.
var result = this.resultBuffer,
values = this.sampleValues,
valueSize = this.valueSize,
offset = index * valueSize * 3 + valueSize;
for ( var i = 0; i !== valueSize; i ++ ) {
result[ i ] = values[ offset + i ];
}
return result;
};
GLTFCubicSplineInterpolant.prototype.beforeStart_ = GLTFCubicSplineInterpolant.prototype.copySampleValue_;
GLTFCubicSplineInterpolant.prototype.afterEnd_ = GLTFCubicSplineInterpolant.prototype.copySampleValue_;
GLTFCubicSplineInterpolant.prototype.interpolate_ = function ( i1, t0, t, t1 ) {
var result = this.resultBuffer;
var values = this.sampleValues;
var stride = this.valueSize;
var stride2 = stride * 2;
var stride3 = stride * 3;
var td = t1 - t0;
var p = ( t - t0 ) / td;
var pp = p * p;
var ppp = pp * p;
var offset1 = i1 * stride3;
var offset0 = offset1 - stride3;
var s2 = - 2 * ppp + 3 * pp;
var s3 = ppp - pp;
var s0 = 1 - s2;
var s1 = s3 - pp + p;
// Layout of keyframe output values for CUBICSPLINE animations:
// [ inTangent_1, splineVertex_1, outTangent_1, inTangent_2, splineVertex_2, ... ]
for ( var i = 0; i !== stride; i ++ ) {
var p0 = values[ offset0 + i + stride ]; // splineVertex_k
var m0 = values[ offset0 + i + stride2 ] * td; // outTangent_k * (t_k+1 - t_k)
var p1 = values[ offset1 + i + stride ]; // splineVertex_k+1
var m1 = values[ offset1 + i ] * td; // inTangent_k+1 * (t_k+1 - t_k)
result[ i ] = s0 * p0 + s1 * m0 + s2 * p1 + s3 * m1;
}
return result;
};
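// Note (sketch, not part of the original file): s0..s3 above are the cubic
// Hermite basis functions evaluated at p = ( t - t0 ) / ( t1 - t0 ):
//   s0 = 2p^3 - 3p^2 + 1,   s1 = p^3 - 2p^2 + p,
//   s2 = -2p^3 + 3p^2,      s3 = p^3 - p^2,
// so the loop computes the standard Hermite interpolation
//   s0 * p_k + s1 * m_k + s2 * p_(k+1) + s3 * m_(k+1),
// with tangents pre-scaled by the keyframe spacing td, as glTF specifies.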
/*********************************/
/********** INTERNALS ************/
/*********************************/
/* CONSTANTS */
var WEBGL_CONSTANTS = {
FLOAT: 5126,
//FLOAT_MAT2: 35674,
FLOAT_MAT3: 35675,
FLOAT_MAT4: 35676,
FLOAT_VEC2: 35664,
FLOAT_VEC3: 35665,
FLOAT_VEC4: 35666,
LINEAR: 9729,
REPEAT: 10497,
SAMPLER_2D: 35678,
POINTS: 0,
LINES: 1,
LINE_LOOP: 2,
LINE_STRIP: 3,
TRIANGLES: 4,
TRIANGLE_STRIP: 5,
TRIANGLE_FAN: 6,
UNSIGNED_BYTE: 5121,
UNSIGNED_SHORT: 5123
};
var WEBGL_COMPONENT_TYPES = {
5120: Int8Array,
5121: Uint8Array,
5122: Int16Array,
5123: Uint16Array,
5125: Uint32Array,
5126: Float32Array
};
var WEBGL_FILTERS = {
9728: NearestFilter,
9729: LinearFilter,
9984: NearestMipmapNearestFilter,
9985: LinearMipmapNearestFilter,
9986: NearestMipmapLinearFilter,
9987: LinearMipmapLinearFilter
};
var WEBGL_WRAPPINGS = {
33071: ClampToEdgeWrapping,
33648: MirroredRepeatWrapping,
10497: RepeatWrapping
};
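// Note (sketch, not part of the original file): during texture parsing these
// tables are typically applied along these lines, so glTF samplers without an
// explicit filter fall back to LinearFilter / LinearMipmapLinearFilter:
//   texture.magFilter = WEBGL_FILTERS[ sampler.magFilter ] || LinearFilter;
//   texture.minFilter = WEBGL_FILTERS[ sampler.minFilter ] || LinearMipmapLinearFilter;
//   texture.wrapS = WEBGL_WRAPPINGS[ sampler.wrapS ] || RepeatWrapping;
//   texture.wrapT = WEBGL_WRAPPINGS[ sampler.wrapT ] || RepeatWrapping;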
var WEBGL_TYPE_SIZES = {
'SCALAR': 1,
'VEC2': 2,
'VEC3': 3,
'VEC4': 4,
'MAT2': 4,
'MAT3': 9,
'MAT4': 16
};
var ATTRIBUTES = {
POSITION: 'position',
NORMAL: 'normal',
TANGENT: 'tangent',
TEXCOORD_0: 'uv',
TEXCOORD_1: 'uv2',
COLOR_0: 'color',
WEIGHTS_0: 'skinWeight',
JOINTS_0: 'skinIndex',
};
var PATH_PROPERTIES = {
scale: 'scale',
translation: 'position',
rotation: 'quaternion',
weights: 'morphTargetInfluences'
};
var INTERPOLATION = {
CUBICSPLINE: undefined, // We use a custom interpolant (GLTFCubicSplineInterpolation) for CUBICSPLINE tracks. Each
// keyframe track will be initialized with a default interpolation type, then modified.
LINEAR: InterpolateLinear,
STEP: InterpolateDiscrete
};
var ALPHA_MODES = {
OPAQUE: 'OPAQUE',
MASK: 'MASK',
BLEND: 'BLEND'
};
var MIME_TYPE_FORMATS = {
'image/png': RGBAFormat,
'image/jpeg': RGBFormat
};
/* UTILITY FUNCTIONS */
function resolveURL( url, path ) {
// Invalid URL
if ( typeof url !== 'string' || url === '' ) return '';
// Host Relative URL
if ( /^https?:\/\//i.test( path ) && /^\//.test( url ) ) {
path = path.replace( /(^https?:\/\/[^\/]+).*/i, '$1' );
}
// Absolute URL http://,https://,//
if ( /^(https?:)?\/\//i.test( url ) ) return url;
// Data URI
if ( /^data:.*,.*$/i.test( url ) ) return url;
// Blob URL
if ( /^blob:.*$/i.test( url ) ) return url;
// Relative URL
return path + url;
}
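// Note (sketch, not part of the original file): illustrative inputs and outputs
// for resolveURL, assuming a resource path of 'https://host/models/':
//   resolveURL( 'a.png', 'https://host/models/' )         -> 'https://host/models/a.png'
//   resolveURL( '/shared/a.png', 'https://host/models/' ) -> 'https://host/shared/a.png'  (host-relative)
//   resolveURL( 'data:image/png;base64,AAAA', anyPath )   -> returned unchanged (data URI)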
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#default-material
*/
function createDefaultMaterial( cache ) {
if ( cache[ 'DefaultMaterial' ] === undefined ) {
cache[ 'DefaultMaterial' ] = new MeshStandardMaterial( {
color: 0xFFFFFF,
emissive: 0x000000,
metalness: 1,
roughness: 1,
transparent: false,
depthTest: true,
side: FrontSide
} );
}
return cache[ 'DefaultMaterial' ];
}
function addUnknownExtensionsToUserData( knownExtensions, object, objectDef ) {
// Add unknown glTF extensions to an object's userData.
for ( var name in objectDef.extensions ) {
if ( knownExtensions[ name ] === undefined ) {
object.userData.gltfExtensions = object.userData.gltfExtensions || {};
object.userData.gltfExtensions[ name ] = objectDef.extensions[ name ];
}
}
}
/**
* @param {Object3D|Material|BufferGeometry} object
* @param {GLTF.definition} gltfDef
*/
function assignExtrasToUserData( object, gltfDef ) {
if ( gltfDef.extras !== undefined ) {
if ( typeof gltfDef.extras === 'object' ) {
Object.assign( object.userData, gltfDef.extras );
} else {
console.warn( 'THREE.GLTFLoader: Ignoring primitive type .extras, ' + gltfDef.extras );
}
}
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#morph-targets
*
* @param {BufferGeometry} geometry
* @param {Array<GLTF.Target>} targets
* @param {GLTFParser} parser
* @return {Promise<BufferGeometry>}
*/
function addMorphTargets( geometry, targets, parser ) {
var hasMorphPosition = false;
var hasMorphNormal = false;
for ( var i = 0, il = targets.length; i < il; i ++ ) {
var target = targets[ i ];
if ( target.POSITION !== undefined ) hasMorphPosition = true;
if ( target.NORMAL !== undefined ) hasMorphNormal = true;
if ( hasMorphPosition && hasMorphNormal ) break;
}
if ( ! hasMorphPosition && ! hasMorphNormal ) return Promise.resolve( geometry );
var pendingPositionAccessors = [];
var pendingNormalAccessors = [];
for ( var i = 0, il = targets.length; i < il; i ++ ) {
var target = targets[ i ];
if ( hasMorphPosition ) {
var pendingAccessor = target.POSITION !== undefined
? parser.getDependency( 'accessor', target.POSITION )
: geometry.attributes.position;
pendingPositionAccessors.push( pendingAccessor );
}
if ( hasMorphNormal ) {
var pendingAccessor = target.NORMAL !== undefined
? parser.getDependency( 'accessor', target.NORMAL )
: geometry.attributes.normal;
pendingNormalAccessors.push( pendingAccessor );
}
}
return Promise.all( [
Promise.all( pendingPositionAccessors ),
Promise.all( pendingNormalAccessors )
] ).then( function ( accessors ) {
var morphPositions = accessors[ 0 ];
var morphNormals = accessors[ 1 ];
if ( hasMorphPosition ) geometry.morphAttributes.position = morphPositions;
if ( hasMorphNormal ) geometry.morphAttributes.normal = morphNormals;
geometry.morphTargetsRelative = true;
return geometry;
} );
}
/**
* @param {Mesh} mesh
* @param {GLTF.Mesh} meshDef
*/
function updateMorphTargets( mesh, meshDef ) {
mesh.updateMorphTargets();
if ( meshDef.weights !== undefined ) {
for ( var i = 0, il = meshDef.weights.length; i < il; i ++ ) {
mesh.morphTargetInfluences[ i ] = meshDef.weights[ i ];
}
}
// .extras has user-defined data, so check that .extras.targetNames is an array.
if ( meshDef.extras && Array.isArray( meshDef.extras.targetNames ) ) {
var targetNames = meshDef.extras.targetNames;
if ( mesh.morphTargetInfluences.length === targetNames.length ) {
mesh.morphTargetDictionary = {};
for ( var i = 0, il = targetNames.length; i < il; i ++ ) {
mesh.morphTargetDictionary[ targetNames[ i ] ] = i;
}
} else {
console.warn( 'THREE.GLTFLoader: Invalid extras.targetNames length. Ignoring names.' );
}
}
}
function createPrimitiveKey( primitiveDef ) {
var dracoExtension = primitiveDef.extensions && primitiveDef.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ];
var geometryKey;
if ( dracoExtension ) {
geometryKey = 'draco:' + dracoExtension.bufferView
+ ':' + dracoExtension.indices
+ ':' + createAttributesKey( dracoExtension.attributes );
} else {
geometryKey = primitiveDef.indices + ':' + createAttributesKey( primitiveDef.attributes ) + ':' + primitiveDef.mode;
}
return geometryKey;
}
function createAttributesKey( attributes ) {
var attributesKey = '';
var keys = Object.keys( attributes ).sort();
for ( var i = 0, il = keys.length; i < il; i ++ ) {
attributesKey += keys[ i ] + ':' + attributes[ keys[ i ] ] + ';';
}
return attributesKey;
}
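// e.g. the attributes { POSITION: 0, NORMAL: 1, TEXCOORD_0: 2 } produce the key
// 'NORMAL:1;POSITION:0;TEXCOORD_0:2;' (keys are sorted so equivalent primitives
// map to the same cache entry).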
/* GLTF PARSER */
function GLTFParser( json, extensions, options ) {
this.json = json || {};
this.extensions = extensions || {};
this.options = options || {};
// loader object cache
this.cache = new GLTFRegistry();
// BufferGeometry caching
this.primitiveCache = {};
this.textureLoader = new TextureLoader( this.options.manager );
this.textureLoader.setCrossOrigin( this.options.crossOrigin );
this.fileLoader = new FileLoader( this.options.manager );
this.fileLoader.setResponseType( 'arraybuffer' );
if ( this.options.crossOrigin === 'use-credentials' ) {
this.fileLoader.setWithCredentials( true );
}
}
GLTFParser.prototype.parse = function ( onLoad, onError ) {
var parser = this;
var json = this.json;
var extensions = this.extensions;
// Clear the loader cache
this.cache.removeAll();
// Mark the special nodes/meshes in json for efficient parse
this.markDefs();
Promise.all( [
this.getDependencies( 'scene' ),
this.getDependencies( 'animation' ),
this.getDependencies( 'camera' ),
] ).then( function ( dependencies ) {
var result = {
scene: dependencies[ 0 ][ json.scene || 0 ],
scenes: dependencies[ 0 ],
animations: dependencies[ 1 ],
cameras: dependencies[ 2 ],
asset: json.asset,
parser: parser,
userData: {}
};
addUnknownExtensionsToUserData( extensions, result, json );
assignExtrasToUserData( result, json );
onLoad( result );
} ).catch( onError );
};
/**
* Marks the special nodes/meshes in json for efficient parse.
*/
GLTFParser.prototype.markDefs = function () {
var nodeDefs = this.json.nodes || [];
var skinDefs = this.json.skins || [];
var meshDefs = this.json.meshes || [];
var meshReferences = {};
var meshUses = {};
// Nothing in the node definition indicates whether it is a Bone or an
// Object3D. Use the skins' joint references to mark bones.
for ( var skinIndex = 0, skinLength = skinDefs.length; skinIndex < skinLength; skinIndex ++ ) {
var joints = skinDefs[ skinIndex ].joints;
for ( var i = 0, il = joints.length; i < il; i ++ ) {
nodeDefs[ joints[ i ] ].isBone = true;
}
}
// Meshes can (and should) be reused by multiple nodes in a glTF asset. To
// avoid having more than one Mesh with the same name, count
// references and rename instances below.
//
// Example: CesiumMilkTruck sample model reuses "Wheel" meshes.
for ( var nodeIndex = 0, nodeLength = nodeDefs.length; nodeIndex < nodeLength; nodeIndex ++ ) {
var nodeDef = nodeDefs[ nodeIndex ];
if ( nodeDef.mesh !== undefined ) {
if ( meshReferences[ nodeDef.mesh ] === undefined ) {
meshReferences[ nodeDef.mesh ] = meshUses[ nodeDef.mesh ] = 0;
}
meshReferences[ nodeDef.mesh ] ++;
// Nothing in the mesh definition indicates whether it is
// a SkinnedMesh or Mesh. Use the node's mesh reference
// to mark SkinnedMesh if node has skin.
if ( nodeDef.skin !== undefined ) {
meshDefs[ nodeDef.mesh ].isSkinnedMesh = true;
}
}
}
this.json.meshReferences = meshReferences;
this.json.meshUses = meshUses;
};
/**
* Requests the specified dependency asynchronously, with caching.
* @param {string} type
* @param {number} index
* @return {Promise<Object3D|Material|THREE.Texture|AnimationClip|ArrayBuffer|Object>}
*/
GLTFParser.prototype.getDependency = function ( type, index ) {
var cacheKey = type + ':' + index;
var dependency = this.cache.get( cacheKey );
if ( ! dependency ) {
switch ( type ) {
case 'scene':
dependency = this.loadScene( index );
break;
case 'node':
dependency = this.loadNode( index );
break;
case 'mesh':
dependency = this.loadMesh( index );
break;
case 'accessor':
dependency = this.loadAccessor( index );
break;
case 'bufferView':
dependency = this.loadBufferView( index );
break;
case 'buffer':
dependency = this.loadBuffer( index );
break;
case 'material':
dependency = this.loadMaterial( index );
break;
case 'texture':
dependency = this.loadTexture( index );
break;
case 'skin':
dependency = this.loadSkin( index );
break;
case 'animation':
dependency = this.loadAnimation( index );
break;
case 'camera':
dependency = this.loadCamera( index );
break;
case 'light':
dependency = this.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ].loadLight( index );
break;
default:
throw new Error( 'Unknown type: ' + type );
}
this.cache.add( cacheKey, dependency );
}
return dependency;
};
/**
* Requests all dependencies of the specified type asynchronously, with caching.
* @param {string} type
* @return {Promise<Array<Object>>}
*/
GLTFParser.prototype.getDependencies = function ( type ) {
var dependencies = this.cache.get( type );
if ( ! dependencies ) {
var parser = this;
var defs = this.json[ type + ( type === 'mesh' ? 'es' : 's' ) ] || [];
dependencies = Promise.all( defs.map( function ( def, index ) {
return parser.getDependency( type, index );
} ) );
this.cache.add( type, dependencies );
}
return dependencies;
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#buffers-and-buffer-views
* @param {number} bufferIndex
* @return {Promise<ArrayBuffer>}
*/
GLTFParser.prototype.loadBuffer = function ( bufferIndex ) {
var bufferDef = this.json.buffers[ bufferIndex ];
var loader = this.fileLoader;
if ( bufferDef.type && bufferDef.type !== 'arraybuffer' ) {
throw new Error( 'THREE.GLTFLoader: ' + bufferDef.type + ' buffer type is not supported.' );
}
// If present, GLB container is required to be the first buffer.
if ( bufferDef.uri === undefined && bufferIndex === 0 ) {
return Promise.resolve( this.extensions[ EXTENSIONS.KHR_BINARY_GLTF ].body );
}
var options = this.options;
return new Promise( function ( resolve, reject ) {
loader.load( resolveURL( bufferDef.uri, options.path ), resolve, undefined, function () {
reject( new Error( 'THREE.GLTFLoader: Failed to load buffer "' + bufferDef.uri + '".' ) );
} );
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#buffers-and-buffer-views
* @param {number} bufferViewIndex
* @return {Promise<ArrayBuffer>}
*/
GLTFParser.prototype.loadBufferView = function ( bufferViewIndex ) {
var bufferViewDef = this.json.bufferViews[ bufferViewIndex ];
return this.getDependency( 'buffer', bufferViewDef.buffer ).then( function ( buffer ) {
var byteLength = bufferViewDef.byteLength || 0;
var byteOffset = bufferViewDef.byteOffset || 0;
return buffer.slice( byteOffset, byteOffset + byteLength );
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#accessors
* @param {number} accessorIndex
* @return {Promise<BufferAttribute|InterleavedBufferAttribute>}
*/
GLTFParser.prototype.loadAccessor = function ( accessorIndex ) {
var parser = this;
var json = this.json;
var accessorDef = this.json.accessors[ accessorIndex ];
if ( accessorDef.bufferView === undefined && accessorDef.sparse === undefined ) {
// Ignore empty accessors, which may be used to declare runtime
// information about attributes coming from another source (e.g. Draco
// compression extension).
return Promise.resolve( null );
}
var pendingBufferViews = [];
if ( accessorDef.bufferView !== undefined ) {
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.bufferView ) );
} else {
pendingBufferViews.push( null );
}
if ( accessorDef.sparse !== undefined ) {
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.sparse.indices.bufferView ) );
pendingBufferViews.push( this.getDependency( 'bufferView', accessorDef.sparse.values.bufferView ) );
}
return Promise.all( pendingBufferViews ).then( function ( bufferViews ) {
var bufferView = bufferViews[ 0 ];
var itemSize = WEBGL_TYPE_SIZES[ accessorDef.type ];
var TypedArray = WEBGL_COMPONENT_TYPES[ accessorDef.componentType ];
// For VEC3: itemSize is 3, elementBytes is 4, itemBytes is 12.
var elementBytes = TypedArray.BYTES_PER_ELEMENT;
var itemBytes = elementBytes * itemSize;
var byteOffset = accessorDef.byteOffset || 0;
var byteStride = accessorDef.bufferView !== undefined ? json.bufferViews[ accessorDef.bufferView ].byteStride : undefined;
var normalized = accessorDef.normalized === true;
var array, bufferAttribute;
// The buffer is not interleaved if the stride is the item size in bytes.
if ( byteStride && byteStride !== itemBytes ) {
// Each "slice" of the buffer, as defined by 'count' elements of 'byteStride' bytes, gets its own InterleavedBuffer
// This makes sure that IBA.count reflects accessor.count properly
var ibSlice = Math.floor( byteOffset / byteStride );
var ibCacheKey = 'InterleavedBuffer:' + accessorDef.bufferView + ':' + accessorDef.componentType + ':' + ibSlice + ':' + accessorDef.count;
var ib = parser.cache.get( ibCacheKey );
if ( ! ib ) {
array = new TypedArray( bufferView, ibSlice * byteStride, accessorDef.count * byteStride / elementBytes );
// Integer parameters to IB/IBA are in array elements, not bytes.
ib = new InterleavedBuffer( array, byteStride / elementBytes );
parser.cache.add( ibCacheKey, ib );
}
bufferAttribute = new InterleavedBufferAttribute( ib, itemSize, ( byteOffset % byteStride ) / elementBytes, normalized );
} else {
if ( bufferView === null ) {
array = new TypedArray( accessorDef.count * itemSize );
} else {
array = new TypedArray( bufferView, byteOffset, accessorDef.count * itemSize );
}
bufferAttribute = new BufferAttribute( array, itemSize, normalized );
}
// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#sparse-accessors
if ( accessorDef.sparse !== undefined ) {
var itemSizeIndices = WEBGL_TYPE_SIZES.SCALAR;
var TypedArrayIndices = WEBGL_COMPONENT_TYPES[ accessorDef.sparse.indices.componentType ];
var byteOffsetIndices = accessorDef.sparse.indices.byteOffset || 0;
var byteOffsetValues = accessorDef.sparse.values.byteOffset || 0;
var sparseIndices = new TypedArrayIndices( bufferViews[ 1 ], byteOffsetIndices, accessorDef.sparse.count * itemSizeIndices );
var sparseValues = new TypedArray( bufferViews[ 2 ], byteOffsetValues, accessorDef.sparse.count * itemSize );
if ( bufferView !== null ) {
// Avoid modifying the original ArrayBuffer, if the bufferView wasn't initialized with zeroes.
bufferAttribute = new BufferAttribute( bufferAttribute.array.slice(), bufferAttribute.itemSize, bufferAttribute.normalized );
}
for ( var i = 0, il = sparseIndices.length; i < il; i ++ ) {
var index = sparseIndices[ i ];
bufferAttribute.setX( index, sparseValues[ i * itemSize ] );
if ( itemSize >= 2 ) bufferAttribute.setY( index, sparseValues[ i * itemSize + 1 ] );
if ( itemSize >= 3 ) bufferAttribute.setZ( index, sparseValues[ i * itemSize + 2 ] );
if ( itemSize >= 4 ) bufferAttribute.setW( index, sparseValues[ i * itemSize + 3 ] );
if ( itemSize >= 5 ) throw new Error( 'THREE.GLTFLoader: Unsupported itemSize in sparse BufferAttribute.' );
}
}
return bufferAttribute;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#textures
* @param {number} textureIndex
* @return {Promise<THREE.Texture>}
*/
GLTFParser.prototype.loadTexture = function ( textureIndex ) {
var parser = this;
var json = this.json;
var options = this.options;
var textureLoader = this.textureLoader;
var URL = self.URL || self.webkitURL;
var textureDef = json.textures[ textureIndex ];
var textureExtensions = textureDef.extensions || {};
var source;
if ( textureExtensions[ EXTENSIONS.MSFT_TEXTURE_DDS ] ) {
source = json.images[ textureExtensions[ EXTENSIONS.MSFT_TEXTURE_DDS ].source ];
} else {
source = json.images[ textureDef.source ];
}
var sourceURI = source.uri;
var isObjectURL = false;
if ( source.bufferView !== undefined ) {
// Load binary image data from bufferView, if provided.
sourceURI = parser.getDependency( 'bufferView', source.bufferView ).then( function ( bufferView ) {
isObjectURL = true;
var blob = new Blob( [ bufferView ], { type: source.mimeType } );
sourceURI = URL.createObjectURL( blob );
return sourceURI;
} );
}
return Promise.resolve( sourceURI ).then( function ( sourceURI ) {
// Load Texture resource.
var loader = options.manager.getHandler( sourceURI );
if ( ! loader ) {
loader = textureExtensions[ EXTENSIONS.MSFT_TEXTURE_DDS ]
? parser.extensions[ EXTENSIONS.MSFT_TEXTURE_DDS ].ddsLoader
: textureLoader;
}
return new Promise( function ( resolve, reject ) {
loader.load( resolveURL( sourceURI, options.path ), resolve, undefined, reject );
} );
} ).then( function ( texture ) {
// Clean up resources and configure Texture.
if ( isObjectURL === true ) {
URL.revokeObjectURL( sourceURI );
}
texture.flipY = false;
if ( textureDef.name ) texture.name = textureDef.name;
// Ignore unknown mime types, like DDS files.
if ( source.mimeType in MIME_TYPE_FORMATS ) {
texture.format = MIME_TYPE_FORMATS[ source.mimeType ];
}
var samplers = json.samplers || {};
var sampler = samplers[ textureDef.sampler ] || {};
texture.magFilter = WEBGL_FILTERS[ sampler.magFilter ] || LinearFilter;
texture.minFilter = WEBGL_FILTERS[ sampler.minFilter ] || LinearMipmapLinearFilter;
texture.wrapS = WEBGL_WRAPPINGS[ sampler.wrapS ] || RepeatWrapping;
texture.wrapT = WEBGL_WRAPPINGS[ sampler.wrapT ] || RepeatWrapping;
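// When the asset defines no sampler (or omits filter/wrap values), the fallbacks
// above give LinearFilter magnification, LinearMipmapLinearFilter minification and
// RepeatWrapping. REPEAT is the glTF default for wrapping; the spec leaves the
// filter choice to the implementation, and this loader picks linear filtering.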
return texture;
} );
};
/**
* Asynchronously assigns a texture to the given material parameters.
* @param {Object} materialParams
* @param {string} mapName
* @param {Object} mapDef
* @return {Promise}
*/
GLTFParser.prototype.assignTexture = function ( materialParams, mapName, mapDef ) {
var parser = this;
return this.getDependency( 'texture', mapDef.index ).then( function ( texture ) {
if ( ! texture.isCompressedTexture ) {
switch ( mapName ) {
case 'aoMap':
case 'emissiveMap':
case 'metalnessMap':
case 'normalMap':
case 'roughnessMap':
texture.format = RGBFormat;
break;
}
}
// Materials sample aoMap from UV set 1 and other maps from UV set 0 - this can't be configured
// However, we will copy UV set 0 to UV set 1 on demand for aoMap
if ( mapDef.texCoord !== undefined && mapDef.texCoord != 0 && ! ( mapName === 'aoMap' && mapDef.texCoord == 1 ) ) {
console.warn( 'THREE.GLTFLoader: Custom UV set ' + mapDef.texCoord + ' for texture ' + mapName + ' not yet supported.' );
}
if ( parser.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ] ) {
var transform = mapDef.extensions !== undefined ? mapDef.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ] : undefined;
if ( transform ) {
texture = parser.extensions[ EXTENSIONS.KHR_TEXTURE_TRANSFORM ].extendTexture( texture, transform );
}
}
materialParams[ mapName ] = texture;
} );
};
/**
* Assigns final material to a Mesh, Line, or Points instance. The instance
* already has a material (generated from the glTF material options alone)
* but reuse of the same glTF material may require multiple threejs materials
* to accommodate different primitive types, defines, etc. New materials will
* be created if necessary, and reused from a cache.
* @param {Object3D} mesh Mesh, Line, or Points instance.
*/
GLTFParser.prototype.assignFinalMaterial = function ( mesh ) {
var geometry = mesh.geometry;
var material = mesh.material;
var useVertexTangents = geometry.attributes.tangent !== undefined;
var useVertexColors = geometry.attributes.color !== undefined;
var useFlatShading = geometry.attributes.normal === undefined;
var useSkinning = mesh.isSkinnedMesh === true;
var useMorphTargets = Object.keys( geometry.morphAttributes ).length > 0;
var useMorphNormals = useMorphTargets && geometry.morphAttributes.normal !== undefined;
if ( mesh.isPoints ) {
var cacheKey = 'PointsMaterial:' + material.uuid;
var pointsMaterial = this.cache.get( cacheKey );
if ( ! pointsMaterial ) {
pointsMaterial = new PointsMaterial();
Material.prototype.copy.call( pointsMaterial, material );
pointsMaterial.color.copy( material.color );
pointsMaterial.map = material.map;
pointsMaterial.sizeAttenuation = false; // glTF spec says points should be 1px
this.cache.add( cacheKey, pointsMaterial );
}
material = pointsMaterial;
} else if ( mesh.isLine ) {
var cacheKey = 'LineBasicMaterial:' + material.uuid;
var lineMaterial = this.cache.get( cacheKey );
if ( ! lineMaterial ) {
lineMaterial = new LineBasicMaterial();
Material.prototype.copy.call( lineMaterial, material );
lineMaterial.color.copy( material.color );
this.cache.add( cacheKey, lineMaterial );
}
material = lineMaterial;
}
// Clone the material if it will be modified
if ( useVertexTangents || useVertexColors || useFlatShading || useSkinning || useMorphTargets ) {
var cacheKey = 'ClonedMaterial:' + material.uuid + ':';
if ( material.isGLTFSpecularGlossinessMaterial ) cacheKey += 'specular-glossiness:';
if ( useSkinning ) cacheKey += 'skinning:';
if ( useVertexTangents ) cacheKey += 'vertex-tangents:';
if ( useVertexColors ) cacheKey += 'vertex-colors:';
if ( useFlatShading ) cacheKey += 'flat-shading:';
if ( useMorphTargets ) cacheKey += 'morph-targets:';
if ( useMorphNormals ) cacheKey += 'morph-normals:';
var cachedMaterial = this.cache.get( cacheKey );
if ( ! cachedMaterial ) {
cachedMaterial = material.clone();
if ( useSkinning ) cachedMaterial.skinning = true;
if ( useVertexTangents ) cachedMaterial.vertexTangents = true;
if ( useVertexColors ) cachedMaterial.vertexColors = true;
if ( useFlatShading ) cachedMaterial.flatShading = true;
if ( useMorphTargets ) cachedMaterial.morphTargets = true;
if ( useMorphNormals ) cachedMaterial.morphNormals = true;
this.cache.add( cacheKey, cachedMaterial );
}
material = cachedMaterial;
}
// workarounds for mesh and geometry
if ( material.aoMap && geometry.attributes.uv2 === undefined && geometry.attributes.uv !== undefined ) {
geometry.setAttribute( 'uv2', geometry.attributes.uv );
}
// https://github.com/mrdoob/three.js/issues/11438#issuecomment-507003995
if ( material.normalScale && ! useVertexTangents ) {
material.normalScale.y = - material.normalScale.y;
}
if ( material.clearcoatNormalScale && ! useVertexTangents ) {
material.clearcoatNormalScale.y = - material.clearcoatNormalScale.y;
}
mesh.material = material;
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#materials
* @param {number} materialIndex
* @return {Promise<Material>}
*/
GLTFParser.prototype.loadMaterial = function ( materialIndex ) {
var parser = this;
var json = this.json;
var extensions = this.extensions;
var materialDef = json.materials[ materialIndex ];
var materialType;
var materialParams = {};
var materialExtensions = materialDef.extensions || {};
var pending = [];
if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ] ) {
var sgExtension = extensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ];
materialType = sgExtension.getMaterialType();
pending.push( sgExtension.extendParams( materialParams, materialDef, parser ) );
} else if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_UNLIT ] ) {
var kmuExtension = extensions[ EXTENSIONS.KHR_MATERIALS_UNLIT ];
materialType = kmuExtension.getMaterialType();
pending.push( kmuExtension.extendParams( materialParams, materialDef, parser ) );
} else {
// Specification:
// https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#metallic-roughness-material
materialType = MeshStandardMaterial;
var metallicRoughness = materialDef.pbrMetallicRoughness || {};
materialParams.color = new Color( 1.0, 1.0, 1.0 );
materialParams.opacity = 1.0;
if ( Array.isArray( metallicRoughness.baseColorFactor ) ) {
var array = metallicRoughness.baseColorFactor;
materialParams.color.fromArray( array );
materialParams.opacity = array[ 3 ];
}
if ( metallicRoughness.baseColorTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'map', metallicRoughness.baseColorTexture ) );
}
materialParams.metalness = metallicRoughness.metallicFactor !== undefined ? metallicRoughness.metallicFactor : 1.0;
materialParams.roughness = metallicRoughness.roughnessFactor !== undefined ? metallicRoughness.roughnessFactor : 1.0;
if ( metallicRoughness.metallicRoughnessTexture !== undefined ) {
pending.push( parser.assignTexture( materialParams, 'metalnessMap', metallicRoughness.metallicRoughnessTexture ) );
pending.push( parser.assignTexture( materialParams, 'roughnessMap', metallicRoughness.metallicRoughnessTexture ) );
}
}
if ( materialDef.doubleSided === true ) {
materialParams.side = DoubleSide;
}
var alphaMode = materialDef.alphaMode || ALPHA_MODES.OPAQUE;
if ( alphaMode === ALPHA_MODES.BLEND ) {
materialParams.transparent = true;
// See: https://github.com/mrdoob/three.js/issues/17706
materialParams.depthWrite = false;
} else {
materialParams.transparent = false;
if ( alphaMode === ALPHA_MODES.MASK ) {
materialParams.alphaTest = materialDef.alphaCutoff !== undefined ? materialDef.alphaCutoff : 0.5;
}
}
if ( materialDef.normalTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'normalMap', materialDef.normalTexture ) );
materialParams.normalScale = new Vector2( 1, 1 );
if ( materialDef.normalTexture.scale !== undefined ) {
materialParams.normalScale.set( materialDef.normalTexture.scale, materialDef.normalTexture.scale );
}
}
if ( materialDef.occlusionTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'aoMap', materialDef.occlusionTexture ) );
if ( materialDef.occlusionTexture.strength !== undefined ) {
materialParams.aoMapIntensity = materialDef.occlusionTexture.strength;
}
}
if ( materialDef.emissiveFactor !== undefined && materialType !== MeshBasicMaterial ) {
materialParams.emissive = new Color().fromArray( materialDef.emissiveFactor );
}
if ( materialDef.emissiveTexture !== undefined && materialType !== MeshBasicMaterial ) {
pending.push( parser.assignTexture( materialParams, 'emissiveMap', materialDef.emissiveTexture ) );
}
if ( materialExtensions[ EXTENSIONS.KHR_MATERIALS_CLEARCOAT ] ) {
var clearcoatExtension = extensions[ EXTENSIONS.KHR_MATERIALS_CLEARCOAT ];
materialType = clearcoatExtension.getMaterialType();
pending.push( clearcoatExtension.extendParams( materialParams, { extensions: materialExtensions }, parser ) );
}
return Promise.all( pending ).then( function () {
var material;
if ( materialType === GLTFMeshStandardSGMaterial ) {
material = extensions[ EXTENSIONS.KHR_MATERIALS_PBR_SPECULAR_GLOSSINESS ].createMaterial( materialParams );
} else {
material = new materialType( materialParams );
}
if ( materialDef.name ) material.name = materialDef.name;
// baseColorTexture, emissiveTexture, and specularGlossinessTexture use sRGB encoding.
if ( material.map ) material.map.encoding = sRGBEncoding;
if ( material.emissiveMap ) material.emissiveMap.encoding = sRGBEncoding;
assignExtrasToUserData( material, materialDef );
if ( materialDef.extensions ) addUnknownExtensionsToUserData( extensions, material, materialDef );
return material;
} );
};
/**
* @param {BufferGeometry} geometry
* @param {GLTF.Primitive} primitiveDef
* @param {GLTFParser} parser
*/
function computeBounds( geometry, primitiveDef, parser ) {
var attributes = primitiveDef.attributes;
var box = new Box3();
if ( attributes.POSITION !== undefined ) {
var accessor = parser.json.accessors[ attributes.POSITION ];
var min = accessor.min;
var max = accessor.max;
// glTF requires 'min' and 'max', but VRM (which extends glTF) currently ignores that requirement.
if ( min !== undefined && max !== undefined ) {
box.set(
new Vector3( min[ 0 ], min[ 1 ], min[ 2 ] ),
new Vector3( max[ 0 ], max[ 1 ], max[ 2 ] ) );
} else {
console.warn( 'THREE.GLTFLoader: Missing min/max properties for accessor POSITION.' );
return;
}
} else {
return;
}
var targets = primitiveDef.targets;
if ( targets !== undefined ) {
var maxDisplacement = new Vector3();
var vector = new Vector3();
for ( var i = 0, il = targets.length; i < il; i ++ ) {
var target = targets[ i ];
if ( target.POSITION !== undefined ) {
var accessor = parser.json.accessors[ target.POSITION ];
var min = accessor.min;
var max = accessor.max;
// glTF requires 'min' and 'max', but VRM (which extends glTF) currently ignores that requirement.
if ( min !== undefined && max !== undefined ) {
// we need to get max of absolute components because target weight is [-1,1]
vector.setX( Math.max( Math.abs( min[ 0 ] ), Math.abs( max[ 0 ] ) ) );
vector.setY( Math.max( Math.abs( min[ 1 ] ), Math.abs( max[ 1 ] ) ) );
vector.setZ( Math.max( Math.abs( min[ 2 ] ), Math.abs( max[ 2 ] ) ) );
// Note: this assumes that the sum of all weights is at most 1. This isn't quite correct - it's more conservative
// to assume that each target can have a max weight of 1. However, for some use cases - notably, when morph targets
// are used to implement key-frame animations and as such only two are active at a time - this results in very large
// boxes. So for now we make a box that's sometimes a touch too small but is hopefully mostly of reasonable size.
maxDisplacement.max( vector );
} else {
console.warn( 'THREE.GLTFLoader: Missing min/max properties for accessor POSITION.' );
}
}
}
// As per comment above this box isn't conservative, but has a reasonable size for a very large number of morph targets.
box.expandByVector( maxDisplacement );
}
geometry.boundingBox = box;
var sphere = new Sphere();
box.getCenter( sphere.center );
sphere.radius = box.min.distanceTo( box.max ) / 2;
geometry.boundingSphere = sphere;
}
/**
* @param {BufferGeometry} geometry
* @param {GLTF.Primitive} primitiveDef
* @param {GLTFParser} parser
* @return {Promise<BufferGeometry>}
*/
function addPrimitiveAttributes( geometry, primitiveDef, parser ) {
var attributes = primitiveDef.attributes;
var pending = [];
function assignAttributeAccessor( accessorIndex, attributeName ) {
return parser.getDependency( 'accessor', accessorIndex )
.then( function ( accessor ) {
geometry.setAttribute( attributeName, accessor );
} );
}
for ( var gltfAttributeName in attributes ) {
var threeAttributeName = ATTRIBUTES[ gltfAttributeName ] || gltfAttributeName.toLowerCase();
// Skip attributes already provided by e.g. Draco extension.
if ( threeAttributeName in geometry.attributes ) continue;
pending.push( assignAttributeAccessor( attributes[ gltfAttributeName ], threeAttributeName ) );
}
if ( primitiveDef.indices !== undefined && ! geometry.index ) {
var accessor = parser.getDependency( 'accessor', primitiveDef.indices ).then( function ( accessor ) {
geometry.setIndex( accessor );
} );
pending.push( accessor );
}
assignExtrasToUserData( geometry, primitiveDef );
computeBounds( geometry, primitiveDef, parser );
return Promise.all( pending ).then( function () {
return primitiveDef.targets !== undefined
? addMorphTargets( geometry, primitiveDef.targets, parser )
: geometry;
} );
}
/**
* @param {BufferGeometry} geometry
* @param {Number} drawMode
* @return {BufferGeometry}
*/
function toTrianglesDrawMode( geometry, drawMode ) {
var index = geometry.getIndex();
// generate index if not present
if ( index === null ) {
var indices = [];
var position = geometry.getAttribute( 'position' );
if ( position !== undefined ) {
for ( var i = 0; i < position.count; i ++ ) {
indices.push( i );
}
geometry.setIndex( indices );
index = geometry.getIndex();
} else {
console.error( 'THREE.GLTFLoader.toTrianglesDrawMode(): Undefined position attribute. Processing not possible.' );
return geometry;
}
}
//
var numberOfTriangles = index.count - 2;
var newIndices = [];
if ( drawMode === TriangleFanDrawMode ) {
// gl.TRIANGLE_FAN
for ( var i = 1; i <= numberOfTriangles; i ++ ) {
newIndices.push( index.getX( 0 ) );
newIndices.push( index.getX( i ) );
newIndices.push( index.getX( i + 1 ) );
}
} else {
// gl.TRIANGLE_STRIP
for ( var i = 0; i < numberOfTriangles; i ++ ) {
if ( i % 2 === 0 ) {
newIndices.push( index.getX( i ) );
newIndices.push( index.getX( i + 1 ) );
newIndices.push( index.getX( i + 2 ) );
} else {
newIndices.push( index.getX( i + 2 ) );
newIndices.push( index.getX( i + 1 ) );
newIndices.push( index.getX( i ) );
}
}
}
if ( ( newIndices.length / 3 ) !== numberOfTriangles ) {
console.error( 'THREE.GLTFLoader.toTrianglesDrawMode(): Unable to generate correct amount of triangles.' );
}
// build final geometry
var newGeometry = geometry.clone();
newGeometry.setIndex( newIndices );
return newGeometry;
}
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#geometry
*
* Creates BufferGeometries from primitives.
*
* @param {Array<GLTF.Primitive>} primitives
* @return {Promise<Array<BufferGeometry>>}
*/
GLTFParser.prototype.loadGeometries = function ( primitives ) {
var parser = this;
var extensions = this.extensions;
var cache = this.primitiveCache;
function createDracoPrimitive( primitive ) {
return extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ]
.decodePrimitive( primitive, parser )
.then( function ( geometry ) {
return addPrimitiveAttributes( geometry, primitive, parser );
} );
}
var pending = [];
for ( var i = 0, il = primitives.length; i < il; i ++ ) {
var primitive = primitives[ i ];
var cacheKey = createPrimitiveKey( primitive );
// See if we've already created this geometry
var cached = cache[ cacheKey ];
if ( cached ) {
// Use the cached geometry if it exists
pending.push( cached.promise );
} else {
var geometryPromise;
if ( primitive.extensions && primitive.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ] ) {
// Use DRACO geometry if available
geometryPromise = createDracoPrimitive( primitive );
} else {
// Otherwise create a new geometry
geometryPromise = addPrimitiveAttributes( new BufferGeometry(), primitive, parser );
}
// Cache this geometry
cache[ cacheKey ] = { primitive: primitive, promise: geometryPromise };
pending.push( geometryPromise );
}
}
return Promise.all( pending );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#meshes
* @param {number} meshIndex
* @return {Promise<Group|Mesh|SkinnedMesh>}
*/
GLTFParser.prototype.loadMesh = function ( meshIndex ) {
var parser = this;
var json = this.json;
var meshDef = json.meshes[ meshIndex ];
var primitives = meshDef.primitives;
var pending = [];
for ( var i = 0, il = primitives.length; i < il; i ++ ) {
var material = primitives[ i ].material === undefined
? createDefaultMaterial( this.cache )
: this.getDependency( 'material', primitives[ i ].material );
pending.push( material );
}
pending.push( parser.loadGeometries( primitives ) );
return Promise.all( pending ).then( function ( results ) {
var materials = results.slice( 0, results.length - 1 );
var geometries = results[ results.length - 1 ];
var meshes = [];
for ( var i = 0, il = geometries.length; i < il; i ++ ) {
var geometry = geometries[ i ];
var primitive = primitives[ i ];
// 1. create Mesh
var mesh;
var material = materials[ i ];
if ( primitive.mode === WEBGL_CONSTANTS.TRIANGLES ||
primitive.mode === WEBGL_CONSTANTS.TRIANGLE_STRIP ||
primitive.mode === WEBGL_CONSTANTS.TRIANGLE_FAN ||
primitive.mode === undefined ) {
// .isSkinnedMesh isn't in glTF spec. See .markDefs()
mesh = meshDef.isSkinnedMesh === true
? new SkinnedMesh( geometry, material )
: new Mesh( geometry, material );
if ( mesh.isSkinnedMesh === true && ! mesh.geometry.attributes.skinWeight.normalized ) {
// we normalize floating point skin weight array to fix malformed assets (see #15319)
// it's important to skip this for non-float32 data since normalizeSkinWeights assumes non-normalized inputs
mesh.normalizeSkinWeights();
}
if ( primitive.mode === WEBGL_CONSTANTS.TRIANGLE_STRIP ) {
mesh.geometry = toTrianglesDrawMode( mesh.geometry, TriangleStripDrawMode );
} else if ( primitive.mode === WEBGL_CONSTANTS.TRIANGLE_FAN ) {
mesh.geometry = toTrianglesDrawMode( mesh.geometry, TriangleFanDrawMode );
}
} else if ( primitive.mode === WEBGL_CONSTANTS.LINES ) {
mesh = new LineSegments( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS.LINE_STRIP ) {
mesh = new Line( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS.LINE_LOOP ) {
mesh = new LineLoop( geometry, material );
} else if ( primitive.mode === WEBGL_CONSTANTS.POINTS ) {
mesh = new Points( geometry, material );
} else {
throw new Error( 'THREE.GLTFLoader: Primitive mode unsupported: ' + primitive.mode );
}
if ( Object.keys( mesh.geometry.morphAttributes ).length > 0 ) {
updateMorphTargets( mesh, meshDef );
}
mesh.name = meshDef.name || ( 'mesh_' + meshIndex );
if ( geometries.length > 1 ) mesh.name += '_' + i;
assignExtrasToUserData( mesh, meshDef );
parser.assignFinalMaterial( mesh );
meshes.push( mesh );
}
if ( meshes.length === 1 ) {
return meshes[ 0 ];
}
var group = new Group();
for ( var i = 0, il = meshes.length; i < il; i ++ ) {
group.add( meshes[ i ] );
}
return group;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#cameras
* @param {number} cameraIndex
* @return {Promise<THREE.Camera>}
*/
GLTFParser.prototype.loadCamera = function ( cameraIndex ) {
var camera;
var cameraDef = this.json.cameras[ cameraIndex ];
var params = cameraDef[ cameraDef.type ];
if ( ! params ) {
console.warn( 'THREE.GLTFLoader: Missing camera parameters.' );
return;
}
if ( cameraDef.type === 'perspective' ) {
camera = new PerspectiveCamera( MathUtils.radToDeg( params.yfov ), params.aspectRatio || 1, params.znear || 1, params.zfar || 2e6 );
} else if ( cameraDef.type === 'orthographic' ) {
camera = new OrthographicCamera( - params.xmag, params.xmag, params.ymag, - params.ymag, params.znear, params.zfar );
}
if ( cameraDef.name ) camera.name = cameraDef.name;
assignExtrasToUserData( camera, cameraDef );
return Promise.resolve( camera );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#skins
* @param {number} skinIndex
* @return {Promise<Object>}
*/
GLTFParser.prototype.loadSkin = function ( skinIndex ) {
var skinDef = this.json.skins[ skinIndex ];
var skinEntry = { joints: skinDef.joints };
if ( skinDef.inverseBindMatrices === undefined ) {
return Promise.resolve( skinEntry );
}
return this.getDependency( 'accessor', skinDef.inverseBindMatrices ).then( function ( accessor ) {
skinEntry.inverseBindMatrices = accessor;
return skinEntry;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#animations
* @param {number} animationIndex
* @return {Promise<AnimationClip>}
*/
GLTFParser.prototype.loadAnimation = function ( animationIndex ) {
var json = this.json;
var animationDef = json.animations[ animationIndex ];
var pendingNodes = [];
var pendingInputAccessors = [];
var pendingOutputAccessors = [];
var pendingSamplers = [];
var pendingTargets = [];
for ( var i = 0, il = animationDef.channels.length; i < il; i ++ ) {
var channel = animationDef.channels[ i ];
var sampler = animationDef.samplers[ channel.sampler ];
var target = channel.target;
var name = target.node !== undefined ? target.node : target.id; // NOTE: target.id is deprecated.
var input = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.input ] : sampler.input;
var output = animationDef.parameters !== undefined ? animationDef.parameters[ sampler.output ] : sampler.output;
pendingNodes.push( this.getDependency( 'node', name ) );
pendingInputAccessors.push( this.getDependency( 'accessor', input ) );
pendingOutputAccessors.push( this.getDependency( 'accessor', output ) );
pendingSamplers.push( sampler );
pendingTargets.push( target );
}
return Promise.all( [
Promise.all( pendingNodes ),
Promise.all( pendingInputAccessors ),
Promise.all( pendingOutputAccessors ),
Promise.all( pendingSamplers ),
Promise.all( pendingTargets )
] ).then( function ( dependencies ) {
var nodes = dependencies[ 0 ];
var inputAccessors = dependencies[ 1 ];
var outputAccessors = dependencies[ 2 ];
var samplers = dependencies[ 3 ];
var targets = dependencies[ 4 ];
var tracks = [];
for ( var i = 0, il = nodes.length; i < il; i ++ ) {
var node = nodes[ i ];
var inputAccessor = inputAccessors[ i ];
var outputAccessor = outputAccessors[ i ];
var sampler = samplers[ i ];
var target = targets[ i ];
if ( node === undefined ) continue;
node.updateMatrix();
node.matrixAutoUpdate = true;
var TypedKeyframeTrack;
switch ( PATH_PROPERTIES[ target.path ] ) {
case PATH_PROPERTIES.weights:
TypedKeyframeTrack = NumberKeyframeTrack;
break;
case PATH_PROPERTIES.rotation:
TypedKeyframeTrack = QuaternionKeyframeTrack;
break;
case PATH_PROPERTIES.position:
case PATH_PROPERTIES.scale:
default:
TypedKeyframeTrack = VectorKeyframeTrack;
break;
}
var targetName = node.name ? node.name : node.uuid;
var interpolation = sampler.interpolation !== undefined ? INTERPOLATION[ sampler.interpolation ] : InterpolateLinear;
var targetNames = [];
if ( PATH_PROPERTIES[ target.path ] === PATH_PROPERTIES.weights ) {
// Node may be a Group (glTF mesh with several primitives) or a Mesh.
node.traverse( function ( object ) {
if ( object.isMesh === true && object.morphTargetInfluences ) {
targetNames.push( object.name ? object.name : object.uuid );
}
} );
} else {
targetNames.push( targetName );
}
var outputArray = outputAccessor.array;
if ( outputAccessor.normalized ) {
var scale;
if ( outputArray.constructor === Int8Array ) {
scale = 1 / 127;
} else if ( outputArray.constructor === Uint8Array ) {
scale = 1 / 255;
} else if ( outputArray.constructor === Int16Array ) {
scale = 1 / 32767;
} else if ( outputArray.constructor === Uint16Array ) {
scale = 1 / 65535;
} else {
throw new Error( 'THREE.GLTFLoader: Unsupported output accessor component type.' );
}
var scaled = new Float32Array( outputArray.length );
for ( var j = 0, jl = outputArray.length; j < jl; j ++ ) {
scaled[ j ] = outputArray[ j ] * scale;
}
outputArray = scaled;
}
for ( var j = 0, jl = targetNames.length; j < jl; j ++ ) {
var track = new TypedKeyframeTrack(
targetNames[ j ] + '.' + PATH_PROPERTIES[ target.path ],
inputAccessor.array,
outputArray,
interpolation
);
// Override interpolation with custom factory method.
if ( sampler.interpolation === 'CUBICSPLINE' ) {
track.createInterpolant = function InterpolantFactoryMethodGLTFCubicSpline( result ) {
// A CUBICSPLINE keyframe in glTF has three output values for each input value,
// representing inTangent, splineVertex, and outTangent. As a result, track.getValueSize()
// must be divided by three to get the interpolant's sampleSize argument.
return new GLTFCubicSplineInterpolant( this.times, this.values, this.getValueSize() / 3, result );
};
// Mark as CUBICSPLINE. `track.getInterpolation()` doesn't support custom interpolants.
track.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline = true;
}
tracks.push( track );
}
}
var name = animationDef.name ? animationDef.name : 'animation_' + animationIndex;
return new AnimationClip( name, undefined, tracks );
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#nodes-and-hierarchy
* @param {number} nodeIndex
* @return {Promise<Object3D>}
*/
GLTFParser.prototype.loadNode = function ( nodeIndex ) {
var json = this.json;
var extensions = this.extensions;
var parser = this;
var meshReferences = json.meshReferences;
var meshUses = json.meshUses;
var nodeDef = json.nodes[ nodeIndex ];
return ( function () {
var pending = [];
if ( nodeDef.mesh !== undefined ) {
pending.push( parser.getDependency( 'mesh', nodeDef.mesh ).then( function ( mesh ) {
var node;
if ( meshReferences[ nodeDef.mesh ] > 1 ) {
var instanceNum = meshUses[ nodeDef.mesh ] ++;
node = mesh.clone();
node.name += '_instance_' + instanceNum;
} else {
node = mesh;
}
// if weights are provided on the node, override weights on the mesh.
if ( nodeDef.weights !== undefined ) {
node.traverse( function ( o ) {
if ( ! o.isMesh ) return;
for ( var i = 0, il = nodeDef.weights.length; i < il; i ++ ) {
o.morphTargetInfluences[ i ] = nodeDef.weights[ i ];
}
} );
}
return node;
} ) );
}
if ( nodeDef.camera !== undefined ) {
pending.push( parser.getDependency( 'camera', nodeDef.camera ) );
}
if ( nodeDef.extensions
&& nodeDef.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ]
&& nodeDef.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ].light !== undefined ) {
pending.push( parser.getDependency( 'light', nodeDef.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ].light ) );
}
return Promise.all( pending );
}() ).then( function ( objects ) {
var node;
// .isBone isn't in glTF spec. See .markDefs
if ( nodeDef.isBone === true ) {
node = new Bone();
} else if ( objects.length > 1 ) {
node = new Group();
} else if ( objects.length === 1 ) {
node = objects[ 0 ];
} else {
node = new Object3D();
}
if ( node !== objects[ 0 ] ) {
for ( var i = 0, il = objects.length; i < il; i ++ ) {
node.add( objects[ i ] );
}
}
if ( nodeDef.name ) {
node.userData.name = nodeDef.name;
node.name = PropertyBinding.sanitizeNodeName( nodeDef.name );
}
assignExtrasToUserData( node, nodeDef );
if ( nodeDef.extensions ) addUnknownExtensionsToUserData( extensions, node, nodeDef );
if ( nodeDef.matrix !== undefined ) {
var matrix = new Matrix4();
matrix.fromArray( nodeDef.matrix );
node.applyMatrix4( matrix );
} else {
if ( nodeDef.translation !== undefined ) {
node.position.fromArray( nodeDef.translation );
}
if ( nodeDef.rotation !== undefined ) {
node.quaternion.fromArray( nodeDef.rotation );
}
if ( nodeDef.scale !== undefined ) {
node.scale.fromArray( nodeDef.scale );
}
}
return node;
} );
};
/**
* Specification: https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#scenes
* @param {number} sceneIndex
* @return {Promise<Group>}
*/
GLTFParser.prototype.loadScene = function () {
// scene node hierarchy builder
function buildNodeHierachy( nodeId, parentObject, json, parser ) {
var nodeDef = json.nodes[ nodeId ];
return parser.getDependency( 'node', nodeId ).then( function ( node ) {
if ( nodeDef.skin === undefined ) return node;
// build skeleton here as well
var skinEntry;
return parser.getDependency( 'skin', nodeDef.skin ).then( function ( skin ) {
skinEntry = skin;
var pendingJoints = [];
for ( var i = 0, il = skinEntry.joints.length; i < il; i ++ ) {
pendingJoints.push( parser.getDependency( 'node', skinEntry.joints[ i ] ) );
}
return Promise.all( pendingJoints );
} ).then( function ( jointNodes ) {
node.traverse( function ( mesh ) {
if ( ! mesh.isMesh ) return;
var bones = [];
var boneInverses = [];
for ( var j = 0, jl = jointNodes.length; j < jl; j ++ ) {
var jointNode = jointNodes[ j ];
if ( jointNode ) {
bones.push( jointNode );
var mat = new Matrix4();
if ( skinEntry.inverseBindMatrices !== undefined ) {
mat.fromArray( skinEntry.inverseBindMatrices.array, j * 16 );
}
boneInverses.push( mat );
} else {
console.warn( 'THREE.GLTFLoader: Joint "%s" could not be found.', skinEntry.joints[ j ] );
}
}
mesh.bind( new Skeleton( bones, boneInverses ), mesh.matrixWorld );
} );
return node;
} );
} ).then( function ( node ) {
// build node hierarchy
parentObject.add( node );
var pending = [];
if ( nodeDef.children ) {
var children = nodeDef.children;
for ( var i = 0, il = children.length; i < il; i ++ ) {
var child = children[ i ];
pending.push( buildNodeHierachy( child, node, json, parser ) );
}
}
return Promise.all( pending );
} );
}
return function loadScene( sceneIndex ) {
var json = this.json;
var extensions = this.extensions;
var sceneDef = this.json.scenes[ sceneIndex ];
var parser = this;
// Loader returns Group, not Scene.
// See: https://github.com/mrdoob/three.js/issues/18342#issuecomment-578981172
var scene = new Group();
if ( sceneDef.name ) scene.name = sceneDef.name;
assignExtrasToUserData( scene, sceneDef );
if ( sceneDef.extensions ) addUnknownExtensionsToUserData( extensions, scene, sceneDef );
var nodeIds = sceneDef.nodes || [];
var pending = [];
for ( var i = 0, il = nodeIds.length; i < il; i ++ ) {
pending.push( buildNodeHierachy( nodeIds[ i ], scene, json, parser ) );
}
return Promise.all( pending ).then( function () {
return scene;
} );
};
}();
return GLTFLoader;
} )()
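A minimal usage sketch for the loader defined above; the asset path, the onProgress slot (undefined) and the surrounding scene object are placeholders rather than part of the source file:
var loader = new GLTFLoader();
loader.load( 'models/scene.gltf', function ( gltf ) {
// gltf.scene is a Group; textures created by loadTexture() fall back to
// LinearFilter / LinearMipmapLinearFilter when the asset defines no sampler filters.
scene.add( gltf.scene );
}, undefined, function ( error ) {
console.error( 'Failed to load glTF asset:', error );
} );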
Example #22
Source File: BlurPass.js From three-viewer with MIT License | 4 votes |
/**
* Constructs a new blur pass.
*
* @param {Object} [options] - The options.
* @param {Number} [options.resolutionScale=0.5] - Deprecated. Adjust the height or width instead for consistent results.
* @param {Number} [options.width=Resizer.AUTO_SIZE] - The blur render width.
* @param {Number} [options.height=Resizer.AUTO_SIZE] - The blur render height.
* @param {KernelSize} [options.kernelSize=KernelSize.LARGE] - The blur kernel size.
*/
constructor({
resolutionScale = 0.5,
width = Resizer.AUTO_SIZE,
height = Resizer.AUTO_SIZE,
kernelSize = KernelSize.LARGE
} = {}) {
super("BlurPass");
/**
* A render target.
*
* @type {WebGLRenderTarget}
* @private
*/
this.renderTargetA = new WebGLRenderTarget(1, 1, {
minFilter: LinearFilter,
magFilter: LinearFilter,
stencilBuffer: false,
depthBuffer: false
});
this.renderTargetA.texture.name = "Blur.Target.A";
/**
* A second render target.
*
* @type {WebGLRenderTarget}
* @private
*/
this.renderTargetB = this.renderTargetA.clone();
this.renderTargetB.texture.name = "Blur.Target.B";
/**
* The desired render resolution.
*
* It's recommended to set the height or the width to an absolute value for
* consistent results across different devices and resolutions.
*
* Use {@link Resizer.AUTO_SIZE} for the width or height to automatically
* calculate it based on its counterpart and the original aspect ratio.
*
* @type {Resizer}
*/
this.resolution = new Resizer(this, width, height);
this.resolution.scale = resolutionScale;
/**
* A convolution shader material.
*
* @type {ConvolutionMaterial}
* @private
*/
this.convolutionMaterial = new ConvolutionMaterial();
/**
* A convolution shader material that uses dithering.
*
* @type {ConvolutionMaterial}
* @private
*/
this.ditheredConvolutionMaterial = new ConvolutionMaterial();
this.ditheredConvolutionMaterial.dithering = true;
/**
* Whether the blurred result should also be dithered using noise.
*
* @type {Boolean}
* @deprecated Set the frameBufferType of the EffectComposer to HalfFloatType instead.
*/
this.dithering = false;
this.kernelSize = kernelSize;
}
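A minimal sketch of how a pass like this is typically wired into an EffectComposer-style pipeline (the EffectComposer, RenderPass, renderer, scene and camera are assumed from the surrounding postprocessing setup, not taken from the file above):
var composer = new EffectComposer( renderer );
composer.addPass( new RenderPass( scene, camera ) );
composer.addPass( new BlurPass( { width: 480 } ) );
composer.render();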
Example #23
Source File: 3MFLoader.js From canvas with Apache License 2.0 | 4 votes |
ThreeMFLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
constructor: ThreeMFLoader,
load: function ( url, onLoad, onProgress, onError ) {
var scope = this;
var loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.load( url, function ( buffer ) {
try {
onLoad( scope.parse( buffer ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
},
parse: function ( data ) {
var scope = this;
var textureLoader = new TextureLoader( this.manager );
function loadDocument( data ) {
var zip = null;
var file = null;
var relsName;
var modelRelsName;
var modelPartNames = [];
var printTicketPartNames = [];
var texturesPartNames = [];
var otherPartNames = [];
var rels;
var modelRels;
var modelParts = {};
var printTicketParts = {};
var texturesParts = {};
var otherParts = {};
try {
zip = new JSZip( data );
} catch ( e ) {
if ( e instanceof ReferenceError ) {
console.error( 'THREE.3MFLoader: jszip missing and file is compressed.' );
return null;
}
}
for ( file in zip.files ) {
if ( file.match( /\_rels\/.rels$/ ) ) {
relsName = file;
} else if ( file.match( /3D\/_rels\/.*\.model\.rels$/ ) ) {
modelRelsName = file;
} else if ( file.match( /^3D\/.*\.model$/ ) ) {
modelPartNames.push( file );
} else if ( file.match( /^3D\/Metadata\/.*\.xml$/ ) ) {
printTicketPartNames.push( file );
} else if ( file.match( /^3D\/Textures?\/.*/ ) ) {
texturesPartNames.push( file );
} else if ( file.match( /^3D\/Other\/.*/ ) ) {
otherPartNames.push( file );
}
}
//
var relsView = new Uint8Array( zip.file( relsName ).asArrayBuffer() );
var relsFileText = LoaderUtils.decodeText( relsView );
rels = parseRelsXml( relsFileText );
//
if ( modelRelsName ) {
var relsView = new Uint8Array( zip.file( modelRelsName ).asArrayBuffer() );
var relsFileText = LoaderUtils.decodeText( relsView );
modelRels = parseRelsXml( relsFileText );
}
//
for ( var i = 0; i < modelPartNames.length; i ++ ) {
var modelPart = modelPartNames[ i ];
var view = new Uint8Array( zip.file( modelPart ).asArrayBuffer() );
var fileText = LoaderUtils.decodeText( view );
var xmlData = new DOMParser().parseFromString( fileText, 'application/xml' );
if ( xmlData.documentElement.nodeName.toLowerCase() !== 'model' ) {
console.error( 'THREE.3MFLoader: Error loading 3MF - no 3MF document found: ', modelPart );
}
var modelNode = xmlData.querySelector( 'model' );
var extensions = {};
for ( var i = 0; i < modelNode.attributes.length; i ++ ) {
var attr = modelNode.attributes[ i ];
if ( attr.name.match( /^xmlns:(.+)$/ ) ) {
extensions[ attr.value ] = RegExp.$1;
}
}
var modelData = parseModelNode( modelNode );
modelData[ 'xml' ] = modelNode;
if ( 0 < Object.keys( extensions ).length ) {
modelData[ 'extensions' ] = extensions;
}
modelParts[ modelPart ] = modelData;
}
//
for ( var i = 0; i < texturesPartNames.length; i ++ ) {
var texturesPartName = texturesPartNames[ i ];
texturesParts[ texturesPartName ] = zip.file( texturesPartName ).asArrayBuffer();
}
return {
rels: rels,
modelRels: modelRels,
model: modelParts,
printTicket: printTicketParts,
texture: texturesParts,
other: otherParts
};
}
function parseRelsXml( relsFileText ) {
var relationships = [];
var relsXmlData = new DOMParser().parseFromString( relsFileText, 'application/xml' );
var relsNodes = relsXmlData.querySelectorAll( 'Relationship' );
for ( var i = 0; i < relsNodes.length; i ++ ) {
var relsNode = relsNodes[ i ];
var relationship = {
target: relsNode.getAttribute( 'Target' ), //required
id: relsNode.getAttribute( 'Id' ), //required
type: relsNode.getAttribute( 'Type' ) //required
};
relationships.push( relationship );
}
return relationships;
}
function parseMetadataNodes( metadataNodes ) {
var metadataData = {};
for ( var i = 0; i < metadataNodes.length; i ++ ) {
var metadataNode = metadataNodes[ i ];
var name = metadataNode.getAttribute( 'name' );
var validNames = [
'Title',
'Designer',
'Description',
'Copyright',
'LicenseTerms',
'Rating',
'CreationDate',
'ModificationDate'
];
if ( 0 <= validNames.indexOf( name ) ) {
metadataData[ name ] = metadataNode.textContent;
}
}
return metadataData;
}
function parseBasematerialsNode( basematerialsNode ) {
var basematerialsData = {
id: basematerialsNode.getAttribute( 'id' ), // required
basematerials: []
};
var basematerialNodes = basematerialsNode.querySelectorAll( 'base' );
for ( var i = 0; i < basematerialNodes.length; i ++ ) {
var basematerialNode = basematerialNodes[ i ];
var basematerialData = parseBasematerialNode( basematerialNode );
basematerialData.index = i; // the order and count of the material nodes form an implicit 0-based index
basematerialsData.basematerials.push( basematerialData );
}
return basematerialsData;
}
function parseTexture2DNode( texture2DNode ) {
var texture2dData = {
id: texture2DNode.getAttribute( 'id' ), // required
path: texture2DNode.getAttribute( 'path' ), // required
contenttype: texture2DNode.getAttribute( 'contenttype' ), // required
tilestyleu: texture2DNode.getAttribute( 'tilestyleu' ),
tilestylev: texture2DNode.getAttribute( 'tilestylev' ),
filter: texture2DNode.getAttribute( 'filter' ),
};
return texture2dData;
}
function parseTextures2DGroupNode( texture2DGroupNode ) {
var texture2DGroupData = {
id: texture2DGroupNode.getAttribute( 'id' ), // required
texid: texture2DGroupNode.getAttribute( 'texid' ), // required
displaypropertiesid: texture2DGroupNode.getAttribute( 'displaypropertiesid' )
};
var tex2coordNodes = texture2DGroupNode.querySelectorAll( 'tex2coord' );
var uvs = [];
for ( var i = 0; i < tex2coordNodes.length; i ++ ) {
var tex2coordNode = tex2coordNodes[ i ];
var u = tex2coordNode.getAttribute( 'u' );
var v = tex2coordNode.getAttribute( 'v' );
uvs.push( parseFloat( u ), parseFloat( v ) );
}
texture2DGroupData[ 'uvs' ] = new Float32Array( uvs );
return texture2DGroupData;
}
function parseColorGroupNode( colorGroupNode ) {
var colorGroupData = {
id: colorGroupNode.getAttribute( 'id' ), // required
displaypropertiesid: colorGroupNode.getAttribute( 'displaypropertiesid' )
};
var colorNodes = colorGroupNode.querySelectorAll( 'color' );
var colors = [];
var colorObject = new Color();
for ( var i = 0; i < colorNodes.length; i ++ ) {
var colorNode = colorNodes[ i ];
var color = colorNode.getAttribute( 'color' );
colorObject.setStyle( color.substring( 0, 7 ) );
colorObject.convertSRGBToLinear(); // color is in sRGB
colors.push( colorObject.r, colorObject.g, colorObject.b );
}
colorGroupData[ 'colors' ] = new Float32Array( colors );
return colorGroupData;
}
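// e.g. <color color="#FF0000" /> yields the linear-space triple [ 1, 0, 0 ];
// only the first seven characters are read, so a trailing alpha byte (#RRGGBBAA) is ignored.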
function parseMetallicDisplaypropertiesNode( metallicDisplaypropetiesNode ) {
var metallicDisplaypropertiesData = {
id: metallicDisplaypropetiesNode.getAttribute( 'id' ) // required
};
var metallicNodes = metallicDisplaypropetiesNode.querySelectorAll( 'pbmetallic' );
var metallicData = [];
for ( var i = 0; i < metallicNodes.length; i ++ ) {
var metallicNode = metallicNodes[ i ];
metallicData.push( {
name: metallicNode.getAttribute( 'name' ), // required
metallicness: parseFloat( metallicNode.getAttribute( 'metallicness' ) ), // required
roughness: parseFloat( metallicNode.getAttribute( 'roughness' ) ) // required
} );
}
metallicDisplaypropertiesData.data = metallicData;
return metallicDisplaypropertiesData;
}
function parseBasematerialNode( basematerialNode ) {
var basematerialData = {};
basematerialData[ 'name' ] = basematerialNode.getAttribute( 'name' ); // required
basematerialData[ 'displaycolor' ] = basematerialNode.getAttribute( 'displaycolor' ); // required
basematerialData[ 'displaypropertiesid' ] = basematerialNode.getAttribute( 'displaypropertiesid' );
return basematerialData;
}
function parseMeshNode( meshNode ) {
var meshData = {};
var vertices = [];
var vertexNodes = meshNode.querySelectorAll( 'vertices vertex' );
for ( var i = 0; i < vertexNodes.length; i ++ ) {
var vertexNode = vertexNodes[ i ];
var x = vertexNode.getAttribute( 'x' );
var y = vertexNode.getAttribute( 'y' );
var z = vertexNode.getAttribute( 'z' );
vertices.push( parseFloat( x ), parseFloat( y ), parseFloat( z ) );
}
meshData[ 'vertices' ] = new Float32Array( vertices );
var triangleProperties = [];
var triangles = [];
var triangleNodes = meshNode.querySelectorAll( 'triangles triangle' );
for ( var i = 0; i < triangleNodes.length; i ++ ) {
var triangleNode = triangleNodes[ i ];
var v1 = triangleNode.getAttribute( 'v1' );
var v2 = triangleNode.getAttribute( 'v2' );
var v3 = triangleNode.getAttribute( 'v3' );
var p1 = triangleNode.getAttribute( 'p1' );
var p2 = triangleNode.getAttribute( 'p2' );
var p3 = triangleNode.getAttribute( 'p3' );
var pid = triangleNode.getAttribute( 'pid' );
var triangleProperty = {};
triangleProperty[ 'v1' ] = parseInt( v1, 10 );
triangleProperty[ 'v2' ] = parseInt( v2, 10 );
triangleProperty[ 'v3' ] = parseInt( v3, 10 );
triangles.push( triangleProperty[ 'v1' ], triangleProperty[ 'v2' ], triangleProperty[ 'v3' ] );
// optional
if ( p1 ) {
triangleProperty[ 'p1' ] = parseInt( p1, 10 );
}
if ( p2 ) {
triangleProperty[ 'p2' ] = parseInt( p2, 10 );
}
if ( p3 ) {
triangleProperty[ 'p3' ] = parseInt( p3, 10 );
}
if ( pid ) {
triangleProperty[ 'pid' ] = pid;
}
if ( 0 < Object.keys( triangleProperty ).length ) {
triangleProperties.push( triangleProperty );
}
}
meshData[ 'triangleProperties' ] = triangleProperties;
meshData[ 'triangles' ] = new Uint32Array( triangles );
return meshData;
}
function parseComponentsNode( componentsNode ) {
var components = [];
var componentNodes = componentsNode.querySelectorAll( 'component' );
for ( var i = 0; i < componentNodes.length; i ++ ) {
var componentNode = componentNodes[ i ];
var componentData = parseComponentNode( componentNode );
components.push( componentData );
}
return components;
}
function parseComponentNode( componentNode ) {
var componentData = {};
componentData[ 'objectId' ] = componentNode.getAttribute( 'objectid' ); // required
var transform = componentNode.getAttribute( 'transform' );
if ( transform ) {
componentData[ 'transform' ] = parseTransform( transform );
}
return componentData;
}
function parseTransform( transform ) {
var t = [];
transform.split( ' ' ).forEach( function ( s ) {
t.push( parseFloat( s ) );
} );
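// the 3MF 'transform' attribute holds 12 values: a 3x3 rotation/scale part followed by a translation,
// and the fixed homogeneous bottom row ( 0 0 0 1 ) is appended here.
// e.g. transform="1 0 0 0 1 0 0 0 1 10 20 30" yields an identity rotation with a ( 10, 20, 30 ) translation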
var matrix = new Matrix4();
matrix.set(
t[ 0 ], t[ 3 ], t[ 6 ], t[ 9 ],
t[ 1 ], t[ 4 ], t[ 7 ], t[ 10 ],
t[ 2 ], t[ 5 ], t[ 8 ], t[ 11 ],
0.0, 0.0, 0.0, 1.0
);
return matrix;
}
function parseObjectNode( objectNode ) {
var objectData = {
type: objectNode.getAttribute( 'type' )
};
var id = objectNode.getAttribute( 'id' );
if ( id ) {
objectData[ 'id' ] = id;
}
var pid = objectNode.getAttribute( 'pid' );
if ( pid ) {
objectData[ 'pid' ] = pid;
}
var pindex = objectNode.getAttribute( 'pindex' );
if ( pindex ) {
objectData[ 'pindex' ] = pindex;
}
var thumbnail = objectNode.getAttribute( 'thumbnail' );
if ( thumbnail ) {
objectData[ 'thumbnail' ] = thumbnail;
}
var partnumber = objectNode.getAttribute( 'partnumber' );
if ( partnumber ) {
objectData[ 'partnumber' ] = partnumber;
}
var name = objectNode.getAttribute( 'name' );
if ( name ) {
objectData[ 'name' ] = name;
}
var meshNode = objectNode.querySelector( 'mesh' );
if ( meshNode ) {
objectData[ 'mesh' ] = parseMeshNode( meshNode );
}
var componentsNode = objectNode.querySelector( 'components' );
if ( componentsNode ) {
objectData[ 'components' ] = parseComponentsNode( componentsNode );
}
return objectData;
}
function parseResourcesNode( resourcesNode ) {
var resourcesData = {};
resourcesData[ 'basematerials' ] = {};
var basematerialsNodes = resourcesNode.querySelectorAll( 'basematerials' );
for ( var i = 0; i < basematerialsNodes.length; i ++ ) {
var basematerialsNode = basematerialsNodes[ i ];
var basematerialsData = parseBasematerialsNode( basematerialsNode );
resourcesData[ 'basematerials' ][ basematerialsData[ 'id' ] ] = basematerialsData;
}
//
resourcesData[ 'texture2d' ] = {};
var textures2DNodes = resourcesNode.querySelectorAll( 'texture2d' );
for ( var i = 0; i < textures2DNodes.length; i ++ ) {
var textures2DNode = textures2DNodes[ i ];
var texture2DData = parseTexture2DNode( textures2DNode );
resourcesData[ 'texture2d' ][ texture2DData[ 'id' ] ] = texture2DData;
}
//
resourcesData[ 'colorgroup' ] = {};
var colorGroupNodes = resourcesNode.querySelectorAll( 'colorgroup' );
for ( var i = 0; i < colorGroupNodes.length; i ++ ) {
var colorGroupNode = colorGroupNodes[ i ];
var colorGroupData = parseColorGroupNode( colorGroupNode );
resourcesData[ 'colorgroup' ][ colorGroupData[ 'id' ] ] = colorGroupData;
}
//
resourcesData[ 'pbmetallicdisplayproperties' ] = {};
var pbmetallicdisplaypropertiesNodes = resourcesNode.querySelectorAll( 'pbmetallicdisplayproperties' );
for ( var i = 0; i < pbmetallicdisplaypropertiesNodes.length; i ++ ) {
var pbmetallicdisplaypropertiesNode = pbmetallicdisplaypropertiesNodes[ i ];
var pbmetallicdisplaypropertiesData = parseMetallicDisplaypropertiesNode( pbmetallicdisplaypropertiesNode );
resourcesData[ 'pbmetallicdisplayproperties' ][ pbmetallicdisplaypropertiesData[ 'id' ] ] = pbmetallicdisplaypropertiesData;
}
//
resourcesData[ 'texture2dgroup' ] = {};
var textures2DGroupNodes = resourcesNode.querySelectorAll( 'texture2dgroup' );
for ( var i = 0; i < textures2DGroupNodes.length; i ++ ) {
var textures2DGroupNode = textures2DGroupNodes[ i ];
var textures2DGroupData = parseTextures2DGroupNode( textures2DGroupNode );
resourcesData[ 'texture2dgroup' ][ textures2DGroupData[ 'id' ] ] = textures2DGroupData;
}
//
resourcesData[ 'object' ] = {};
var objectNodes = resourcesNode.querySelectorAll( 'object' );
for ( var i = 0; i < objectNodes.length; i ++ ) {
var objectNode = objectNodes[ i ];
var objectData = parseObjectNode( objectNode );
resourcesData[ 'object' ][ objectData[ 'id' ] ] = objectData;
}
return resourcesData;
}
function parseBuildNode( buildNode ) {
var buildData = [];
var itemNodes = buildNode.querySelectorAll( 'item' );
for ( var i = 0; i < itemNodes.length; i ++ ) {
var itemNode = itemNodes[ i ];
var buildItem = {
objectId: itemNode.getAttribute( 'objectid' )
};
var transform = itemNode.getAttribute( 'transform' );
if ( transform ) {
buildItem[ 'transform' ] = parseTransform( transform );
}
buildData.push( buildItem );
}
return buildData;
}
function parseModelNode( modelNode ) {
var modelData = { unit: modelNode.getAttribute( 'unit' ) || 'millimeter' };
var metadataNodes = modelNode.querySelectorAll( 'metadata' );
if ( metadataNodes ) {
modelData[ 'metadata' ] = parseMetadataNodes( metadataNodes );
}
var resourcesNode = modelNode.querySelector( 'resources' );
if ( resourcesNode ) {
modelData[ 'resources' ] = parseResourcesNode( resourcesNode );
}
var buildNode = modelNode.querySelector( 'build' );
if ( buildNode ) {
modelData[ 'build' ] = parseBuildNode( buildNode );
}
return modelData;
}
function buildTexture( texture2dgroup, objects, modelData, textureData ) {
var texid = texture2dgroup.texid;
var texture2ds = modelData.resources.texture2d;
var texture2d = texture2ds[ texid ];
if ( texture2d ) {
var data = textureData[ texture2d.path ];
var type = texture2d.contenttype;
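// the texture payload is a binary part inside the 3MF archive; wrap it in a Blob and load it
// through a temporary object URL that is revoked once loading completes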
var blob = new Blob( [ data ], { type: type } );
var sourceURI = URL.createObjectURL( blob );
var texture = textureLoader.load( sourceURI, function () {
URL.revokeObjectURL( sourceURI );
} );
texture.encoding = sRGBEncoding;
// texture parameters
switch ( texture2d.tilestyleu ) {
case 'wrap':
texture.wrapS = RepeatWrapping;
break;
case 'mirror':
texture.wrapS = MirroredRepeatWrapping;
break;
case 'none':
case 'clamp':
texture.wrapS = ClampToEdgeWrapping;
break;
default:
texture.wrapS = RepeatWrapping;
}
switch ( texture2d.tilestylev ) {
case 'wrap':
texture.wrapT = RepeatWrapping;
break;
case 'mirror':
texture.wrapT = MirroredRepeatWrapping;
break;
case 'none':
case 'clamp':
texture.wrapT = ClampToEdgeWrapping;
break;
default:
texture.wrapT = RepeatWrapping;
}
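// map the 3MF 'filter' attribute to three.js filters; 'auto' (and a missing attribute) falls back to
// trilinear filtering: LinearFilter for magnification, LinearMipmapLinearFilter for minification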
switch ( texture2d.filter ) {
case 'auto':
texture.magFilter = LinearFilter;
texture.minFilter = LinearMipmapLinearFilter;
break;
case 'linear':
texture.magFilter = LinearFilter;
texture.minFilter = LinearFilter;
break;
case 'nearest':
texture.magFilter = NearestFilter;
texture.minFilter = NearestFilter;
break;
default:
texture.magFilter = LinearFilter;
texture.minFilter = LinearMipmapLinearFilter;
}
return texture;
} else {
return null;
}
}
function buildBasematerialsMeshes( basematerials, triangleProperties, modelData, meshData, textureData, objectData ) {
var objectPindex = objectData.pindex;
var materialMap = {};
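// group the triangles by material index (p1, falling back to the object-level pindex)
// so that one mesh is built per base material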
for ( var i = 0, l = triangleProperties.length; i < l; i ++ ) {
var triangleProperty = triangleProperties[ i ];
var pindex = ( triangleProperty.p1 !== undefined ) ? triangleProperty.p1 : objectPindex;
if ( materialMap[ pindex ] === undefined ) materialMap[ pindex ] = [];
materialMap[ pindex ].push( triangleProperty );
}
//
var keys = Object.keys( materialMap );
var meshes = [];
for ( var i = 0, l = keys.length; i < l; i ++ ) {
var materialIndex = keys[ i ];
var trianglePropertiesProps = materialMap[ materialIndex ];
var basematerialData = basematerials.basematerials[ materialIndex ];
var material = getBuild( basematerialData, objects, modelData, textureData, objectData, buildBasematerial );
//
var geometry = new BufferGeometry();
var positionData = [];
var vertices = meshData.vertices;
for ( var j = 0, jl = trianglePropertiesProps.length; j < jl; j ++ ) {
var triangleProperty = trianglePropertiesProps[ j ];
positionData.push( vertices[ ( triangleProperty.v1 * 3 ) + 0 ] );
positionData.push( vertices[ ( triangleProperty.v1 * 3 ) + 1 ] );
positionData.push( vertices[ ( triangleProperty.v1 * 3 ) + 2 ] );
positionData.push( vertices[ ( triangleProperty.v2 * 3 ) + 0 ] );
positionData.push( vertices[ ( triangleProperty.v2 * 3 ) + 1 ] );
positionData.push( vertices[ ( triangleProperty.v2 * 3 ) + 2 ] );
positionData.push( vertices[ ( triangleProperty.v3 * 3 ) + 0 ] );
positionData.push( vertices[ ( triangleProperty.v3 * 3 ) + 1 ] );
positionData.push( vertices[ ( triangleProperty.v3 * 3 ) + 2 ] );
}
geometry.setAttribute( 'position', new Float32BufferAttribute( positionData, 3 ) );
//
var mesh = new Mesh( geometry, material );
meshes.push( mesh );
}
return meshes;
}
function buildTexturedMesh( texture2dgroup, triangleProperties, modelData, meshData, textureData, objectData ) {
// geometry
var geometry = new BufferGeometry();
var positionData = [];
var uvData = [];
var vertices = meshData.vertices;
var uvs = texture2dgroup.uvs;
for ( var i = 0, l = triangleProperties.length; i < l; i ++ ) {
var triangleProperty = triangleProperties[ i ];
positionData.push( vertices[ ( triangleProperty.v1 * 3 ) + 0 ] );
positionData.push( vertices[ ( triangleProperty.v1 * 3 ) + 1 ] );
positionData.push( vertices[ ( triangleProperty.v1 * 3 ) + 2 ] );
positionData.push( vertices[ ( triangleProperty.v2 * 3 ) + 0 ] );
positionData.push( vertices[ ( triangleProperty.v2 * 3 ) + 1 ] );
positionData.push( vertices[ ( triangleProperty.v2 * 3 ) + 2 ] );
positionData.push( vertices[ ( triangleProperty.v3 * 3 ) + 0 ] );
positionData.push( vertices[ ( triangleProperty.v3 * 3 ) + 1 ] );
positionData.push( vertices[ ( triangleProperty.v3 * 3 ) + 2 ] );
//
uvData.push( uvs[ ( triangleProperty.p1 * 2 ) + 0 ] );
uvData.push( uvs[ ( triangleProperty.p1 * 2 ) + 1 ] );
uvData.push( uvs[ ( triangleProperty.p2 * 2 ) + 0 ] );
uvData.push( uvs[ ( triangleProperty.p2 * 2 ) + 1 ] );
uvData.push( uvs[ ( triangleProperty.p3 * 2 ) + 0 ] );
uvData.push( uvs[ ( triangleProperty.p3 * 2 ) + 1 ] );
}
geometry.setAttribute( 'position', new Float32BufferAttribute( positionData, 3 ) );
geometry.setAttribute( 'uv', new Float32BufferAttribute( uvData, 2 ) );
// material
var texture = getBuild( texture2dgroup, objects, modelData, textureData, objectData, buildTexture );
var material = new MeshPhongMaterial( { map: texture, flatShading: true } );
// mesh
var mesh = new Mesh( geometry, material );
return mesh;
}
function buildVertexColorMesh( colorgroup, triangleProperties, modelData, meshData ) {
// geometry
var geometry = new BufferGeometry();
var positionData = [];
var colorData = [];
var vertices = meshData.vertices;
var colors = colorgroup.colors;
for ( var i = 0, l = triangleProperties.length; i < l; i ++ ) {
var triangleProperty = triangleProperties[ i ];
var v1 = triangleProperty.v1;
var v2 = triangleProperty.v2;
var v3 = triangleProperty.v3;
positionData.push( vertices[ ( v1 * 3 ) + 0 ] );
positionData.push( vertices[ ( v1 * 3 ) + 1 ] );
positionData.push( vertices[ ( v1 * 3 ) + 2 ] );
positionData.push( vertices[ ( v2 * 3 ) + 0 ] );
positionData.push( vertices[ ( v2 * 3 ) + 1 ] );
positionData.push( vertices[ ( v2 * 3 ) + 2 ] );
positionData.push( vertices[ ( v3 * 3 ) + 0 ] );
positionData.push( vertices[ ( v3 * 3 ) + 1 ] );
positionData.push( vertices[ ( v3 * 3 ) + 2 ] );
//
var p1 = triangleProperty.p1;
var p2 = triangleProperty.p2;
var p3 = triangleProperty.p3;
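// p2 and p3 are optional; the code falls back to p1's color when they are not set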
colorData.push( colors[ ( p1 * 3 ) + 0 ] );
colorData.push( colors[ ( p1 * 3 ) + 1 ] );
colorData.push( colors[ ( p1 * 3 ) + 2 ] );
colorData.push( colors[ ( ( p2 || p1 ) * 3 ) + 0 ] );
colorData.push( colors[ ( ( p2 || p1 ) * 3 ) + 1 ] );
colorData.push( colors[ ( ( p2 || p1 ) * 3 ) + 2 ] );
colorData.push( colors[ ( ( p3 || p1 ) * 3 ) + 0 ] );
colorData.push( colors[ ( ( p3 || p1 ) * 3 ) + 1 ] );
colorData.push( colors[ ( ( p3 || p1 ) * 3 ) + 2 ] );
}
geometry.setAttribute( 'position', new Float32BufferAttribute( positionData, 3 ) );
geometry.setAttribute( 'color', new Float32BufferAttribute( colorData, 3 ) );
// material
var material = new MeshPhongMaterial( { vertexColors: true, flatShading: true } );
// mesh
var mesh = new Mesh( geometry, material );
return mesh;
}
function buildDefaultMesh( meshData ) {
var geometry = new BufferGeometry();
geometry.setIndex( new BufferAttribute( meshData[ 'triangles' ], 1 ) );
geometry.setAttribute( 'position', new BufferAttribute( meshData[ 'vertices' ], 3 ) );
var material = new MeshPhongMaterial( { color: 0xaaaaff, flatShading: true } );
var mesh = new Mesh( geometry, material );
return mesh;
}
function buildMeshes( resourceMap, modelData, meshData, textureData, objectData ) {
var keys = Object.keys( resourceMap );
var meshes = [];
for ( var i = 0, il = keys.length; i < il; i ++ ) {
var resourceId = keys[ i ];
var triangleProperties = resourceMap[ resourceId ];
var resourceType = getResourceType( resourceId, modelData );
switch ( resourceType ) {
case 'material':
var basematerials = modelData.resources.basematerials[ resourceId ];
var newMeshes = buildBasematerialsMeshes( basematerials, triangleProperties, modelData, meshData, textureData, objectData );
for ( var j = 0, jl = newMeshes.length; j < jl; j ++ ) {
meshes.push( newMeshes[ j ] );
}
break;
case 'texture':
var texture2dgroup = modelData.resources.texture2dgroup[ resourceId ];
meshes.push( buildTexturedMesh( texture2dgroup, triangleProperties, modelData, meshData, textureData, objectData ) );
break;
case 'vertexColors':
var colorgroup = modelData.resources.colorgroup[ resourceId ];
meshes.push( buildVertexColorMesh( colorgroup, triangleProperties, modelData, meshData ) );
break;
case 'default':
meshes.push( buildDefaultMesh( meshData ) );
break;
default:
console.error( 'THREE.3MFLoader: Unsupported resource type.' );
}
}
return meshes;
}
function getResourceType( pid, modelData ) {
if ( modelData.resources.texture2dgroup[ pid ] !== undefined ) {
return 'texture';
} else if ( modelData.resources.basematerials[ pid ] !== undefined ) {
return 'material';
} else if ( modelData.resources.colorgroup[ pid ] !== undefined ) {
return 'vertexColors';
} else if ( pid === 'default' ) {
return 'default';
} else {
return undefined;
}
}
function analyzeObject( modelData, meshData, objectData ) {
var resourceMap = {};
var triangleProperties = meshData[ 'triangleProperties' ];
var objectPid = objectData.pid;
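// group triangles by property group id: a per-triangle pid overrides the object-level pid,
// and 'default' is used when neither is present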
for ( var i = 0, l = triangleProperties.length; i < l; i ++ ) {
var triangleProperty = triangleProperties[ i ];
var pid = ( triangleProperty.pid !== undefined ) ? triangleProperty.pid : objectPid;
if ( pid === undefined ) pid = 'default';
if ( resourceMap[ pid ] === undefined ) resourceMap[ pid ] = [];
resourceMap[ pid ].push( triangleProperty );
}
return resourceMap;
}
function buildGroup( meshData, objects, modelData, textureData, objectData ) {
var group = new Group();
var resourceMap = analyzeObject( modelData, meshData, objectData );
var meshes = buildMeshes( resourceMap, modelData, meshData, textureData, objectData );
for ( var i = 0, l = meshes.length; i < l; i ++ ) {
group.add( meshes[ i ] );
}
return group;
}
function applyExtensions( extensions, meshData, modelXml ) {
if ( ! extensions ) {
return;
}
var availableExtensions = [];
var keys = Object.keys( extensions );
for ( var i = 0; i < keys.length; i ++ ) {
var ns = keys[ i ];
for ( var j = 0; j < scope.availableExtensions.length; j ++ ) {
var extension = scope.availableExtensions[ j ];
if ( extension.ns === ns ) {
availableExtensions.push( extension );
}
}
}
for ( var i = 0; i < availableExtensions.length; i ++ ) {
var extension = availableExtensions[ i ];
extension.apply( modelXml, extensions[ extension[ 'ns' ] ], meshData );
}
}
function getBuild( data, objects, modelData, textureData, objectData, builder ) {
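// lazily build the resource and cache the result on the data object so each resource is built only once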
if ( data.build !== undefined ) return data.build;
data.build = builder( data, objects, modelData, textureData, objectData );
return data.build;
}
function buildBasematerial( materialData, objects, modelData ) {
var material;
var displaypropertiesid = materialData.displaypropertiesid;
var pbmetallicdisplayproperties = modelData.resources.pbmetallicdisplayproperties;
if ( displaypropertiesid !== null && pbmetallicdisplayproperties[ displaypropertiesid ] !== undefined ) {
// metallic display property, use MeshStandardMaterial
var pbmetallicdisplayproperty = pbmetallicdisplayproperties[ displaypropertiesid ];
var metallicData = pbmetallicdisplayproperty.data[ materialData.index ];
material = new MeshStandardMaterial( { flatShading: true, roughness: metallicData.roughness, metalness: metallicData.metallicness } );
} else {
// otherwise use MeshPhongMaterial
material = new MeshPhongMaterial( { flatShading: true } );
}
material.name = materialData.name;
// displaycolor MUST be specified with a value of a 6 or 8 digit hexadecimal number, e.g. "#RRGGBB" or "#RRGGBBAA"
var displaycolor = materialData.displaycolor;
var color = displaycolor.substring( 0, 7 );
material.color.setStyle( color );
material.color.convertSRGBToLinear(); // displaycolor is in sRGB
// process alpha if set
if ( displaycolor.length === 9 ) {
material.opacity = parseInt( displaycolor.charAt( 7 ) + displaycolor.charAt( 8 ), 16 ) / 255;
}
return material;
}
function buildComposite( compositeData, objects, modelData, textureData ) {
var composite = new Group();
for ( var j = 0; j < compositeData.length; j ++ ) {
var component = compositeData[ j ];
var build = objects[ component.objectId ];
if ( build === undefined ) {
buildObject( component.objectId, objects, modelData, textureData );
build = objects[ component.objectId ];
}
var object3D = build.clone();
// apply component transform
var transform = component.transform;
if ( transform ) {
object3D.applyMatrix4( transform );
}
composite.add( object3D );
}
return composite;
}
function buildObject( objectId, objects, modelData, textureData ) {
var objectData = modelData[ 'resources' ][ 'object' ][ objectId ];
if ( objectData[ 'mesh' ] ) {
var meshData = objectData[ 'mesh' ];
var extensions = modelData[ 'extensions' ];
var modelXml = modelData[ 'xml' ];
applyExtensions( extensions, meshData, modelXml );
objects[ objectData.id ] = getBuild( meshData, objects, modelData, textureData, objectData, buildGroup );
} else {
var compositeData = objectData[ 'components' ];
objects[ objectData.id ] = getBuild( compositeData, objects, modelData, textureData, objectData, buildComposite );
}
}
function buildObjects( data3mf ) {
var modelsData = data3mf.model;
var modelRels = data3mf.modelRels;
var objects = {};
var modelsKeys = Object.keys( modelsData );
var textureData = {};
// evaluate model relationships to textures
if ( modelRels ) {
for ( var i = 0, l = modelRels.length; i < l; i ++ ) {
var modelRel = modelRels[ i ];
var textureKey = modelRel.target.substring( 1 );
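// relationship targets are absolute archive paths (leading '/'), while the extracted texture entries
// are keyed without it; textureData is indexed by the absolute path so it matches texture2d.path in buildTexture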
if ( data3mf.texture[ textureKey ] ) {
textureData[ modelRel.target ] = data3mf.texture[ textureKey ];
}
}
}
// start build
for ( var i = 0; i < modelsKeys.length; i ++ ) {
var modelsKey = modelsKeys[ i ];
var modelData = modelsData[ modelsKey ];
var objectIds = Object.keys( modelData[ 'resources' ][ 'object' ] );
for ( var j = 0; j < objectIds.length; j ++ ) {
var objectId = objectIds[ j ];
buildObject( objectId, objects, modelData, textureData );
}
}
return objects;
}
function build( objects, data3mf ) {
var group = new Group();
var relationship = data3mf[ 'rels' ][ 0 ];
var buildData = data3mf.model[ relationship[ 'target' ].substring( 1 ) ][ 'build' ];
for ( var i = 0; i < buildData.length; i ++ ) {
var buildItem = buildData[ i ];
var object3D = objects[ buildItem[ 'objectId' ] ];
// apply transform
var transform = buildItem[ 'transform' ];
if ( transform ) {
object3D.applyMatrix4( transform );
}
group.add( object3D );
}
return group;
}
var data3mf = loadDocument( data );
var objects = buildObjects( data3mf );
return build( objects, data3mf );
},
addExtension: function ( extension ) {
this.availableExtensions.push( extension );
}
} );
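A minimal usage sketch, assuming the loader above corresponds to the ThreeMFLoader exported by the three.js examples build and that 'model.3mf' is a placeholder path; textures embedded in the archive receive the LinearFilter-based settings chosen in buildTexture:
import { Scene } from 'three';
import { ThreeMFLoader } from 'three/examples/jsm/loaders/3MFLoader.js';
const scene = new Scene();
const loader = new ThreeMFLoader();
// 'model.3mf' is a placeholder path; onLoad receives the Group assembled by build()
loader.load( 'model.3mf', function ( group ) {
scene.add( group );
} );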