WebGL shader attribute is not being passed accurately - three.js

I'm attempting to adapt the three.js wireframe example ( https://threejs.org/examples/?q=wire#webgl_materials_wireframe ) to use WebGL instancing.
This simple reproduction codepen ( https://codepen.io/ubermario/pen/gzByjP?editors=1000 ) shows that a wireframed cube is rendered when a 'center' attribute is passed to the vertex/fragment shaders with THREE.BufferAttribute.
However, it is not rendered as wireframe when the 'center' attribute is passed to the same shaders with THREE.InstancedBufferAttribute.
No errors are reported. Any ideas?
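For context (this sketch is not part of the codepen): a THREE.BufferAttribute supplies one value per vertex, whereas a THREE.InstancedBufferAttribute supplies one value per drawn instance; its buffer advances once per instance rather than once per vertex. The counts below (vertexCount, instanceCount) are illustrative placeholders. The full reproduction page follows:
// one vec3 per vertex of the geometry
var perVertexCenters = new THREE.BufferAttribute( new Float32Array( vertexCount * 3 ), 3 );
// one vec3 per drawn instance (a vertex-attribute divisor of 1 under the hood)
var perInstanceCenters = new THREE.InstancedBufferAttribute( new Float32Array( instanceCount * 3 ), 3 );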
<html lang="en">
<head>
<title>Adopted from three.js webgl - materials - wireframe</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
margin: 0px;
background-color: #000000;
overflow: hidden;
}
</style>
</head>
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r83/three.js"></script>
<script type="x-shader/x-vertex" id="vertexShader">
attribute vec3 center;
varying vec3 vCenter;
void main() {
vCenter = center;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script type="x-shader/x-fragment" id="fragmentShader">
varying vec3 vCenter;
float edgeFactorTri() {
vec3 d = fwidth( vCenter.xyz );
vec3 a3 = smoothstep( vec3( 0.0 ), d * 1.5, vCenter.xyz );
return min( min( a3.x, a3.y ), a3.z );
}
void main() {
gl_FragColor.rgb = mix( vec3( 1.0 ), vec3( 0.0 ), edgeFactorTri() );
gl_FragColor.a = 1.0;
}
</script>
<script>
var camera, scene, renderer;
init();
renderer.render( scene, camera );
function init() {
var bufferGeometry, material, mesh;
camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 2000 );
camera.position.z = 700;
scene = new THREE.Scene();
var geometry1 = new THREE.BoxBufferGeometry( 100, 100,100 );
geometry1.addAttribute( 'center', new THREE.BufferAttribute( fnGetFloat32ArrayCenters( geometry1 ), 3 ) );
var material_1 = new THREE.ShaderMaterial( {
uniforms: {},
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
} );
material_1.extensions.derivatives = true;
mesh1 = new THREE.Mesh( geometry1, material_1 );
mesh1.position.x = -100;
scene.add( mesh1 );
var bufferGeometry = new THREE.BoxBufferGeometry( 100, 100,100 );
var geometry2 = new THREE.InstancedBufferGeometry();
geometry2.index = bufferGeometry.index;
geometry2.attributes.position = bufferGeometry.attributes.position;
geometry2.attributes.uv = bufferGeometry.attributes.uv;
//Now with instancing
geometry2.addAttribute( 'center', new THREE.InstancedBufferAttribute( fnGetFloat32ArrayCenters( geometry2 ), 3 ) );
var material_2 = new THREE.ShaderMaterial( {
uniforms: {},
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
} );
material_2.extensions.derivatives = true;
mesh2 = new THREE.Mesh( geometry2, material_2 );
mesh2.position.x = 100;
scene.add( mesh2 );
// renderer
renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
// events
window.addEventListener( 'resize', onWindowResize, false );
function fnGetFloat32ArrayCenters( geometry ) {
var vectors = [
new THREE.Vector3( 1, 0, 0 ),
new THREE.Vector3( 0, 1, 0 ),
new THREE.Vector3( 0, 0, 1 )
];
var position = geometry.attributes.position;
var centers = new Float32Array( position.count * 3 );
for ( var i = 0, l = position.count; i < l; i ++ ) {
vectors[ i % 3 ].toArray( centers, i * 3 );
}
return centers;
} //fnGetFloat32ArrayCenters
} //init
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
</script>
</body>
</html>

Solution: The problem appears to be a bug in three.js r83 with meshes created from BoxBufferGeometry vs. InstancedBufferGeometry; see the updated codepen linked in the comments, which illustrates this.
The wireframe property of material_1 is not applied when its value is false. It appears to be 'stuck' on true; forcing it to false has no effect on the first cube.
This led to the misperception that the cube based on material_2 was in error, when it was right all along: it was simply respecting the default wireframe value of false. When set to true, it displays as a wireframed cube.
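In terms of the original codepen, the minimal change is a one-liner (the full corrected page follows):
material_2.wireframe = true; // ShaderMaterial.wireframe defaults to false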
<html lang="en">
<head>
<title>Adopted from three.js webgl - materials - wireframe</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
margin: 0px;
background-color: #000000;
overflow: hidden;
}
</style>
</head>
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r83/three.js"></script>
<script type="x-shader/x-vertex" id="vertexShader">
attribute vec3 center;
varying vec3 vCenter;
void main() {
vCenter = center;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script type="x-shader/x-fragment" id="fragmentShader">
varying vec3 vCenter;
float edgeFactorTri() {
vec3 d = fwidth( vCenter.xyz );
vec3 a3 = smoothstep( vec3( 0.0 ), d * 1.5, vCenter.xyz );
return min( min( a3.x, a3.y ), a3.z );
}
void main() {
gl_FragColor.rgb = mix( vec3( 1.0 ), vec3( 0.0 ), edgeFactorTri() );
gl_FragColor.a = 1.0;
}
</script>
<script>
var camera, scene, renderer;
init();
renderer.render( scene, camera );
function init() {
var bufferGeometry, material, mesh;
camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 2000 );
camera.position.z = 700;
scene = new THREE.Scene();
var geometry1 = new THREE.BoxBufferGeometry( 100, 100,100 );
geometry1.addAttribute( 'center', new THREE.BufferAttribute( fnGetFloat32ArrayCenters( geometry1 ), 3 ) );
var material_1 = new THREE.ShaderMaterial( {
uniforms: {},
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
wireframe: false //bug:For r83/three.js, property is not applied when the THREE.Mesh constructor is invoked. https://threejs.org/docs/index.html#api/materials/ShaderMaterial.wireframe
} );
material_1.extensions.derivatives = true;
mesh1 = new THREE.Mesh( geometry1, material_1 ); //bug: material_1's wireframe:false property is not applied for r83/three.js
mesh1.position.x = -100;
scene.add( mesh1 );
var bufferGeometry = new THREE.BoxBufferGeometry( 100, 100,100 );
var geometry2 = new THREE.InstancedBufferGeometry();
geometry2.index = bufferGeometry.index;
geometry2.attributes.position = bufferGeometry.attributes.position;
geometry2.attributes.uv = bufferGeometry.attributes.uv;
//Now with instancing
geometry2.addAttribute( 'center', new THREE.InstancedBufferAttribute( fnGetFloat32ArrayCenters( geometry2 ), 3 ) );
var material_2 = new THREE.ShaderMaterial( {
uniforms: {},
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
wireframe:true //See: documentation: https://threejs.org/docs/index.html#api/materials/ShaderMaterial.wireframe
} );
material_2.extensions.derivatives = true;
mesh2 = new THREE.Mesh( geometry2, material_2 );
mesh2.position.x = 100;
scene.add( mesh2 );
// renderer
renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
// events
window.addEventListener( 'resize', onWindowResize, false );
function fnGetFloat32ArrayCenters( geometry ) {
var vectors = [
new THREE.Vector3( 1, 0, 0 ),
new THREE.Vector3( 0, 1, 0 ),
new THREE.Vector3( 0, 0, 1 )
];
var position = geometry.attributes.position;
var centers = new Float32Array( position.count * 3 );
for ( var i = 0, l = position.count; i < l; i ++ ) {
vectors[ i % 3 ].toArray( centers, i * 3 );
}
return centers;
} //fnGetFloat32ArrayCenters
} //init
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
</script>
</body>
</html>

Related

How to apply WebGL instancing to Three.js Wireframe example

The Three.js WebGL-based wireframe example uses the BufferGeometry with BufferAttribute design pattern.
How would you translate the example to InstancedBufferGeometry with InstancedBufferAttribute, say with two instances (copies) of the wireframes?
Note: This question is a clarification/simplification of a previous question that will hopefully benefit Three.js/WebGL programmers working on performance optimization techniques.
See this codepen for a simplified version of the wireframe example with instancing; animation from the original example was removed for simplicity.
<!DOCTYPE html>
<html lang="en">
<head>
<title>Adopted from three.js webgl - materials - wireframe</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
margin: 0px;
background-color: #000000;
overflow: hidden;
}
</style>
</head>
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r83/three.js"></script>
<script type="x-shader/x-vertex" id="vertexShader">
attribute vec3 offset;
void main() {
vec3 newPosition = position + offset;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
</script>
<script type="x-shader/x-fragment" id="fragmentShader">
void main() {
gl_FragColor.rgb = vec3(1.0);
gl_FragColor.a = 1.0;
}
</script>
<script>
var camera, scene, renderer;
init();
renderer.render( scene, camera );
function init() {
var material, mesh;
camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 2000 );
camera.position.z = 700;
scene = new THREE.Scene();
var instances = 3;
var size = 100;
var spacing = 50;
var offsets = [];
var geometry = new THREE.InstancedBufferGeometry().copy(new THREE.BoxBufferGeometry( size, size,size ))
for (var i = 0; i < instances; i++) {
offsets = offsets.concat( fnGetOffsets( geometry, (-1 *(size + spacing)) + (i * (size + spacing)) ) );
}
geometry.addAttribute( 'offset', new THREE.InstancedBufferAttribute(new Float32Array( offsets ) , 3 ) );
var material = new THREE.ShaderMaterial( {
uniforms: {},
wireframe: true,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
} );
material.extensions.derivatives = true;
var mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
// renderer
renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
// events
window.addEventListener( 'resize', onWindowResize, false );
function fnGetOffsets( geometry,offsetX ) {
var offset = new THREE.Vector3( offsetX, 0, 0 );
var position = geometry.attributes.position;
var offsets = [];
for ( var i = 0; i < position.count; i ++ ) {
offsets.push(offsetX,0,0)
}
return offsets;
} //fnGetOffsets
} //init
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
</script>
</body>
</html>

three.js - webgl ocean demo

If you are familiar with the three.js examples, there is an example called the webgl ocean demo.
I can't see the actual materials when I run it locally, even after changing the paths to the local resources. Has anyone else resolved this problem? I want to experiment with a different image instead of the globe floating in the ocean.
Here is the code:
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - geometry - terrain</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
color: #000;
font-family:Monospace;
font-size:13px;
margin: 0px;
overflow: hidden;
}
#info {
position: absolute;
top: 0px; width: 100%;
text-align:center;
padding: 5px;
}
a {
color: #a06851;
}
</style>
</head>
<body>
<div id="info">three.js - webgl ocean demo</div>
<script src="../build/three.min.js"></script>
<script src="js/controls/OrbitControls.js"></script>
<script src="js/Mirror.js"></script>
<script src="js/WaterShader.js"></script>
<script src="js/Detector.js"></script>
<script src="js/libs/stats.min.js"></script>
<script>
if ( ! Detector.webgl ) {
Detector.addGetWebGLMessage();
document.getElementById( 'container' ).innerHTML = "";
}
var container, stats;
var camera, scene, renderer;
var sphere;
var parameters = {
width: 2000,
height: 2000,
widthSegments: 250,
heightSegments: 250,
depth: 1500,
param: 4,
filterparam: 1
};
var waterNormals;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera( 55, window.innerWidth / window.innerHeight, 0.5, 3000000 );
camera.position.set( 2000, 750, 2000 );
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enablePan = false;
controls.minDistance = 1000.0;
controls.maxDistance = 5000.0;
controls.maxPolarAngle = Math.PI * 0.495;
controls.center.set( 0, 500, 0 );
scene.add( new THREE.AmbientLight( 0x444444 ) );
var light = new THREE.DirectionalLight( 0xffffbb, 1 );
light.position.set( - 1, 1, - 1 );
scene.add( light );
waterNormals = new THREE.ImageUtils.loadTexture( 'textures/waternormals.jpg' );
waterNormals.wrapS = waterNormals.wrapT = THREE.RepeatWrapping;
water = new THREE.Water( renderer, camera, scene, {
textureWidth: 512,
textureHeight: 512,
waterNormals: waterNormals,
alpha: 1.0,
sunDirection: light.position.clone().normalize(),
sunColor: 0xffffff,
waterColor: 0x001e0f,
distortionScale: 50.0,
} );
mirrorMesh = new THREE.Mesh(
new THREE.PlaneBufferGeometry( parameters.width * 500, parameters.height * 500 ),
water.material
);
mirrorMesh.add( water );
mirrorMesh.rotation.x = - Math.PI * 0.5;
scene.add( mirrorMesh );
// load skybox
var cubeMap = new THREE.CubeTexture( [] );
cubeMap.format = THREE.RGBFormat;
var loader = new THREE.ImageLoader();
loader.load( 'textures/skyboxsun25degtest.png', function ( image ) {
var getSide = function ( x, y ) {
var size = 1024;
var canvas = document.createElement( 'canvas' );
canvas.width = size;
canvas.height = size;
var context = canvas.getContext( '2d' );
context.drawImage( image, - x * size, - y * size );
return canvas;
};
cubeMap.images[ 0 ] = getSide( 2, 1 ); // px
cubeMap.images[ 1 ] = getSide( 0, 1 ); // nx
cubeMap.images[ 2 ] = getSide( 1, 0 ); // py
cubeMap.images[ 3 ] = getSide( 1, 2 ); // ny
cubeMap.images[ 4 ] = getSide( 1, 1 ); // pz
cubeMap.images[ 5 ] = getSide( 3, 1 ); // nz
cubeMap.needsUpdate = true;
} );
var cubeShader = THREE.ShaderLib[ 'cube' ];
cubeShader.uniforms[ 'tCube' ].value = cubeMap;
var skyBoxMaterial = new THREE.ShaderMaterial( {
fragmentShader: cubeShader.fragmentShader,
vertexShader: cubeShader.vertexShader,
uniforms: cubeShader.uniforms,
depthWrite: false,
side: THREE.BackSide
} );
var skyBox = new THREE.Mesh(
new THREE.BoxGeometry( 1000000, 1000000, 1000000 ),
skyBoxMaterial
);
scene.add( skyBox );
var geometry = new THREE.IcosahedronGeometry( 400, 4 );
for ( var i = 0, j = geometry.faces.length; i < j; i ++ ) {
geometry.faces[ i ].color.setHex( Math.random() * 0xffffff );
}
var material = new THREE.MeshPhongMaterial( {
vertexColors: THREE.FaceColors,
shininess: 100,
envMap: cubeMap
} );
sphere = new THREE.Mesh( geometry, material );
scene.add( sphere );
}
//
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
var time = performance.now() * 0.001;
sphere.position.y = Math.sin( time ) * 500 + 250;
sphere.rotation.x = time * 0.5;
sphere.rotation.z = time * 0.51;
water.material.uniforms.time.value += 1.0 / 60.0;
controls.update();
water.render();
renderer.render( scene, camera );
}
</script>
</body>
</html>
Here is my code; I only changed the relative paths:
<!DOCTYPE html>
<html lang="en">
<head>
<title>Test</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
color: #000;
font-family:Monospace;
font-size:13px;
margin: 0px;
overflow: hidden;
}
#info {
position: absolute;
top: 0px; width: 100%;
text-align:center;
padding: 5px;
}
a {
color: #a06851;
}
</style>
</head>
<body>
<div id="info">AA ocean demo</div>
<script src="js/three.min.js"></script>
<script src="js/controls/OrbitControls.js"></script>
<script src="js/Mirror.js"></script>
<script src="js/WaterShader.js"></script>
<script src="js/Detector.js"></script>
<script src="js/libs/stats.min.js"></script>
<script>
if ( ! Detector.webgl ) {
Detector.addGetWebGLMessage();
document.getElementById( 'container' ).innerHTML = "";
}
var container, stats;
var camera, scene, renderer;
var sphere;
var parameters = {
width: 2000,
height: 2000,
widthSegments: 250,
heightSegments: 250,
depth: 1500,
param: 4,
filterparam: 1
};
var waterNormals;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera( 55, window.innerWidth / window.innerHeight, 0.5, 3000000 );
camera.position.set( 2000, 750, 2000 );
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enablePan = false;
controls.minDistance = 1000.0;
controls.maxDistance = 5000.0;
controls.maxPolarAngle = Math.PI * 0.495;
controls.center.set( 0, 500, 0 );
scene.add( new THREE.AmbientLight( 0x444444 ) );
var light = new THREE.DirectionalLight( 0xffffbb, 1 );
light.position.set( - 1, 1, - 1 );
scene.add( light );
waterNormals = new THREE.ImageUtils.loadTexture( 'js/waternormals.jpg' );
waterNormals.wrapS = waterNormals.wrapT = THREE.RepeatWrapping;
water = new THREE.Water( renderer, camera, scene, {
textureWidth: 512,
textureHeight: 512,
waterNormals: waterNormals,
alpha: 1.0,
sunDirection: light.position.clone().normalize(),
sunColor: 0xffffff,
waterColor: 0x001e0f,
distortionScale: 50.0,
} );
mirrorMesh = new THREE.Mesh(
new THREE.PlaneBufferGeometry( parameters.width * 500, parameters.height * 500 ),
water.material
);
mirrorMesh.add( water );
mirrorMesh.rotation.x = - Math.PI * 0.5;
scene.add( mirrorMesh );
// load skybox
var cubeMap = new THREE.CubeTexture( [] );
cubeMap.format = THREE.RGBFormat;
var loader = new THREE.ImageLoader();
loader.load( 'js/skyboxsun25degtest.png', function ( image ) {
var getSide = function ( x, y ) {
var size = 1024;
var canvas = document.createElement( 'canvas' );
canvas.width = size;
canvas.height = size;
var context = canvas.getContext( '2d' );
context.drawImage( image, - x * size, - y * size );
return canvas;
};
cubeMap.images[ 0 ] = getSide( 2, 1 ); // px
cubeMap.images[ 1 ] = getSide( 0, 1 ); // nx
cubeMap.images[ 2 ] = getSide( 1, 0 ); // py
cubeMap.images[ 3 ] = getSide( 1, 2 ); // ny
cubeMap.images[ 4 ] = getSide( 1, 1 ); // pz
cubeMap.images[ 5 ] = getSide( 3, 1 ); // nz
cubeMap.needsUpdate = true;
} );
var cubeShader = THREE.ShaderLib[ 'cube' ];
cubeShader.uniforms[ 'tCube' ].value = cubeMap;
var skyBoxMaterial = new THREE.ShaderMaterial( {
fragmentShader: cubeShader.fragmentShader,
vertexShader: cubeShader.vertexShader,
uniforms: cubeShader.uniforms,
depthWrite: false,
side: THREE.BackSide
} );
var skyBox = new THREE.Mesh(
new THREE.BoxGeometry( 1000000, 1000000, 1000000 ),
skyBoxMaterial
);
scene.add( skyBox );
var geometry = new THREE.IcosahedronGeometry( 400, 4 );
for ( var i = 0, j = geometry.faces.length; i < j; i ++ ) {
geometry.faces[ i ].color.setHex( Math.random() * 0xffffff );
}
var material = new THREE.MeshPhongMaterial( {
vertexColors: THREE.FaceColors,
shininess: 100,
envMap: cubeMap
} );
sphere = new THREE.Mesh( geometry, material );
scene.add( sphere );
}
//
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
var time = performance.now() * 0.001;
sphere.position.y = Math.sin( time ) * 500 + 250;
sphere.rotation.x = time * 0.5;
sphere.rotation.z = time * 0.51;
water.material.uniforms.time.value += 1.0 / 60.0;
controls.update();
water.render();
renderer.render( scene, camera );
}
</script>
</body>
</html>
If you check your browser's console, you'll see error messages about local file access: by default, modern browsers do not allow it. This problem isn't related to three.js at all; it is a common browser security policy.
Basically, there are two workarounds:
run a local server or put your code online
modify the browser settings to allow local file access.
Since a lot of people trying their hand at three.js run into this issue when loading textures, there is a dedicated Three.js wiki page on the topic.
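For example, a throwaway static file server is enough. Below is a minimal Node.js sketch (any equivalent, such as Python 3's built-in http.server module, works just as well); run it from the folder containing the example and open http://localhost:8000/ in the browser:
var http = require( 'http' );
var fs = require( 'fs' );
var path = require( 'path' );
var types = { '.html': 'text/html', '.js': 'text/javascript', '.png': 'image/png', '.jpg': 'image/jpeg' };
http.createServer( function ( req, res ) {
    // map the requested URL onto a file in the current folder
    var file = path.join( __dirname, req.url === '/' ? 'index.html' : req.url.split( '?' )[ 0 ] );
    fs.readFile( file, function ( err, data ) {
        if ( err ) { res.writeHead( 404 ); res.end( 'Not found' ); return; }
        res.writeHead( 200, { 'Content-Type': types[ path.extname( file ) ] || 'application/octet-stream' } );
        res.end( data );
    } );
} ).listen( 8000 );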

Threejs, using ShaderMaterial to fake a hole in geometries

In release 53 of three.js I could fake a hole in a geometry with another geometry, using a shader material with this vertex shader:
void main() {
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
and this fragment shader:
void main() {
    gl_FragColor = vec4( 1.0, 0.0, 1.0, 0.0 ); // alpha is zero
}
I could peer through the 'hole' and see the objects behind it. Since release 54 I just see the inner object as white and can't see through anymore.
How can I get it to work again?
My complete sample:
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - geometry - cube</title>
<meta charset="utf-8">
<style>
body {
margin: 0px;
background-color: #DDFFDD;
overflow: hidden;
}
</style>
</head>
<body>
<script src="../build/old/three_53.js"></script>
<script src="../build/old/controls/TrackballControls_53.js"></script>
<script>
var camera, scene, renderer, controls, pointLight;
init();
animate();
function init() {
renderer = new THREE.WebGLRenderer({antialias: true, alpha: true});
//renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.sortObjects = false;
document.body.appendChild( renderer.domElement );
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.z = 400;
controls = new THREE.TrackballControls( camera );
controls.rotateSpeed = 1.0;
controls.zoomSpeed = 1.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
scene = new THREE.Scene();
scene.add( new THREE.AmbientLight( 0x505050 ) );
pointLight = new THREE.PointLight(0xFFFFFF, 0.9);
scene.add(pointLight);
var mainGroup = new THREE.Object3D();
var geometry = new THREE.CubeGeometry( 100, 100, 10 );
var mesh = new THREE.Mesh( geometry, new THREE.MeshLambertMaterial( { color: 0xaaaaaa } ) );
//mesh.renderOrder = 2;
var geometry2 = new THREE.CubeGeometry( 50, 50, 11 );
var material2 = new THREE.ShaderMaterial({vertexShader:'void main() { gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);}', fragmentShader:'void main() { gl_FragColor = vec4(1.0, 0.0, 1.0, 0.0);}'});
var innerGroup = new THREE.Object3D();
var mesh2 = new THREE.Mesh( geometry2, material2 );
//mesh2.renderOrder = 1;
mainGroup.add( mesh );
innerGroup.add(mesh2);
mainGroup.add( innerGroup );
//
var geometry3 = new THREE.SphereGeometry( 50);
var mesh3 = new THREE.Mesh( geometry3, new THREE.MeshLambertMaterial( { color: 0x00ff00 } ) );
mesh3.position.z = -200;
//mesh2.renderOrder = 3;
mainGroup.add( mesh3 );
scene.add(mainGroup);
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
controls.update();
pointLight.position.set(camera.position.x, camera.position.y, camera.position.z);
renderer.render( scene, camera );
}
</script>
</body>
It only works if the 'hole' object with the shader material is in a group.
If this worked before, it was merely by accident. But in r71 you can get this to work by using renderOrder:
holeObj.renderOrder = 1;
bgObj.renderOrder = 2;
Now, if holeObj is in front of bgObj, you will normally see through bgObj. This is because holeObj still writes to the Z-buffer when it draws its transparent pixels, so bgObj is masked at those locations. But this will only work for a particular view direction without some careful management of the sorting.
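Applied to the code in the question, that amounts to something like the following sketch (mesh2 is the 'hole' cube with the alpha-zero shader material, mesh is the grey plate, mesh3 is the green sphere behind it); as noted above, the masking only holds while the hole is actually in front from the camera's point of view:
mesh2.renderOrder = 1; // drawn first: fills the depth buffer where the hole should be
mesh.renderOrder = 2;  // the surrounding plate, masked where the hole covers it
mesh3.renderOrder = 3; // the sphere behind the plate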

Sprite transparency sorting error in three.js

I've just downloaded the latest Three.js master, "mrdoob-three.js-d6384d2", and modified "webgl_nearestneighbour.html" to show a transparent image; this is the result: http://i.share.pho.to/8cccac74_l.jpeg
I can't tell whether it's by design, a WebGL error, or a three.js error, but as you can see on the bigger ball, near sprites are clipped while far sprites aren't.
Any information is much appreciated (I'm new to WebGL).
Edit: here's the code.
<html>
<head>
<meta charset="utf-8">
<title>three.js webgl - nearest neighbour</title>
<style>
html, body {
width: 100%;
height: 100%;
}
body {
background-color: #ffffff;
margin: 0;
overflow: hidden;
font-family: arial;
}
#info {
text-align: center;
padding: 5px;
position: absolute;
width: 100%;
color: white;
}
</style>
</head>
<body>
<div id="info">three.js webgl - typed arrays - nearest neighbour for 500,000 sprites</div>
<script src="../build/three.min.js"></script>
<script src="js/TypedArrayUtils.js"></script>
<script src="js/controls/FirstPersonControls.js"></script>
<script type="x-shader/x-vertex" id="vertexshader">
//uniform float zoom;
attribute float alpha;
varying float vAlpha;
void main() {
vAlpha = 1.0 - alpha;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = 4.0 * ( 300.0 / length( mvPosition.xyz ) );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script type="x-shader/x-fragment" id="fragmentshader">
uniform sampler2D tex1;
varying float vAlpha;
void main() {
gl_FragColor = texture2D(tex1, gl_PointCoord);
gl_FragColor.r = (1.0 - gl_FragColor.r) * vAlpha + gl_FragColor.r;
}
</script>
<script>
var camera, scene, renderer;
var geometry, material, mesh;
var controls;
var objects = [];
var amountOfParticles = 500000, maxDistance = Math.pow(120, 2);
var positions, alphas, particles, _particleGeom
var clock = new THREE.Clock();
var blocker = document.getElementById( 'blocker' );
var instructions = document.getElementById( 'instructions' );
function init() {
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 1000000);
scene = new THREE.Scene();
controls = new THREE.FirstPersonControls( camera );
controls.movementSpeed = 100;
controls.lookSpeed = 0.1;
var materials = [
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/px.jpg' ) } ), // right
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/nx.jpg' ) } ), // left
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/py.jpg' ) } ), // top
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/ny.jpg' ) } ), // bottom
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/pz.jpg' ) } ), // back
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/nz.jpg' ) } ) // front
];
mesh = new THREE.Mesh( new THREE.BoxGeometry( 10000, 10000, 10000, 7, 7, 7 ), new THREE.MeshFaceMaterial( materials ) );
mesh.scale.x = - 1;
scene.add(mesh);
//
renderer = new THREE.WebGLRenderer(); // Detector.webgl? new THREE.WebGLRenderer(): new THREE.CanvasRenderer()
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
// create the custom shader
var imagePreviewTexture = THREE.ImageUtils.loadTexture( 'textures/football.png');
imagePreviewTexture.minFilter = THREE.LinearMipMapLinearFilter;
imagePreviewTexture.magFilter = THREE.LinearFilter;
pointShaderMaterial = new THREE.ShaderMaterial( {
uniforms: {
tex1: { type: "t", value: imagePreviewTexture },
zoom: { type: 'f', value: 9.0 },
},
attributes: {
alpha: { type: 'f', value: null },
},
vertexShader: document.getElementById( 'vertexshader' ).textContent,
fragmentShader: document.getElementById( 'fragmentshader' ).textContent,
transparent: true
});
//create particles with buffer geometry
var distanceFunction = function(a, b){
return Math.pow(a[0] - b[0], 2) + Math.pow(a[1] - b[1], 2) + Math.pow(a[2] - b[2], 2);
};
positions = new Float32Array( amountOfParticles * 3 );
alphas = new Float32Array( amountOfParticles );
_particleGeom = new THREE.BufferGeometry();
_particleGeom.addAttribute( 'position', new THREE.BufferAttribute( positions, 3 ) );
_particleGeom.addAttribute( 'alpha', new THREE.BufferAttribute( alphas, 1 ) );
particles = new THREE.PointCloud( _particleGeom, pointShaderMaterial );
for (var x = 0; x < amountOfParticles; x++) {
positions[ x * 3 + 0 ] = Math.random() * 1000;
positions[ x * 3 + 1 ] = Math.random() * 1000;
positions[ x * 3 + 2 ] = Math.random() * 1000;
alphas[x] = 1.0;
}
var measureStart = new Date().getTime();
// creating the kdtree takes a lot of time to execute, in turn the nearest neighbour search will be much faster
kdtree = new THREE.TypedArrayUtils.Kdtree( positions, distanceFunction, 3 );
console.log('TIME building kdtree', new Date().getTime() - measureStart);
// display particles after the kd-tree was generated and the sorting of the positions-array is done
scene.add(particles);
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
controls.handleResize();
}
function animate() {
requestAnimationFrame( animate );
//
displayNearest(camera.position);
controls.update( clock.getDelta() )
renderer.render( scene, camera );
}
function displayNearest(position) {
// take the nearest 200 around him. distance^2 'cause we use the manhattan distance and no square is applied in the distance function
var imagePositionsInRange = kdtree.nearest([position.x, position.y, position.z], 100, maxDistance);
// We combine the nearest neighbour with a view frustum. Doesn't make sense if we change the sprites not in our view... well maybe it does. Whatever you want.
var _frustum = new THREE.Frustum();
var _projScreenMatrix = new THREE.Matrix4();
camera.matrixWorldInverse.getInverse( camera.matrixWorld );
_projScreenMatrix.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse );
_frustum.setFromMatrix( _projScreenMatrix );
for ( i = 0, il = imagePositionsInRange.length; i < il; i ++ ) {
var object = imagePositionsInRange[i];
var objectPoint = new THREE.Vector3().fromArray( object[ 0 ].obj );
if (_frustum.containsPoint(objectPoint)){
var objectIndex = object[0].pos;
// set the alpha according to distance
alphas[ objectIndex ] = 1.0 / maxDistance * object[1];
// update the attribute
_particleGeom.attributes.alpha.needsUpdate = true;
}
}
}
init();
animate();
</script>
</body>
</html>
To recap what has been said in the comments, the solution to this problem is to disable alpha blending and, in the shader, discard fragments based on the alpha value of the input texture.
So this...
void main() {
gl_FragColor = texture2D(tex1, gl_PointCoord);
gl_FragColor.r = (1.0 - gl_FragColor.r) * vAlpha + gl_FragColor.r;
}
...becomes this
void main() {
gl_FragColor = texture2D(tex1, gl_PointCoord);
gl_FragColor.r = (1.0 - gl_FragColor.r) * vAlpha + gl_FragColor.r;
if ( gl_FragColor.a < 0.5 ) discard;
}
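In terms of the question's code, the material-side half of that change is simply not to request alpha blending when creating the material; the discard in the fragment shader then handles the cut-out. A sketch reusing the question's variables:
pointShaderMaterial = new THREE.ShaderMaterial( {
    uniforms: {
        tex1: { type: "t", value: imagePreviewTexture },
        zoom: { type: 'f', value: 9.0 }
    },
    attributes: {
        alpha: { type: 'f', value: null }
    },
    vertexShader: document.getElementById( 'vertexshader' ).textContent,
    fragmentShader: document.getElementById( 'fragmentshader' ).textContent,
    transparent: false // the default; with blending off, the draw order of the points no longer matters
} );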

Can't send a texture to a custom shader (ShaderPass/EffectComposer)

I'm currently playing with this great library, but I'm having some difficulties with the EffectComposer.
I can't send a texture to a post-processing shader introduced by a ShaderPass.
I think this is a bug... or I'm doing something stupid (needsUpdate, wrap, ...?).
(r54, W7, Nv9700mGT, FF 17.0.1 and Chrome 24.0.1312.52)
I used the "webgl_postprocessing.html" example to reproduce the phenomenon, just by adding these shaders:
<script id="vertexShaderBasic" type="x-shader/x-vertex">
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
this at the beginning of the main script, to be sure the sprite is loaded:
var composer2;
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
this in the composer setup, after:
composer.addPass( new THREE.RenderPass( scene, camera ) );
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 }, // <- something wrong here
previousRender: { type: "t", value: null }
};
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
// link with the previous render
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
// the custom shader
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
I also comment this out, because it's not relevant any more:
//effect.renderToScreen = true;
and I add this at the end:
composer2.render();
The link between the two passes works well, but the sprite never appears on the EffectComposer quad that covers the screen...
Thanks, and sorry for my English.
EDIT to recap:
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - postprocessing</title>
<meta charset="utf-8">
<style>
body {
margin: 0px;
background-color: #000000;
overflow: hidden;
}
</style>
</head>
<body>
<script src="../build/three.min.js"></script>
<script src="js/shaders/CopyShader.js"></script>
<script src="js/shaders/DotScreenShader.js"></script>
<script src="js/shaders/RGBShiftShader.js"></script>
<script src="js/postprocessing/EffectComposer.js"></script>
<script src="js/postprocessing/RenderPass.js"></script>
<script src="js/postprocessing/MaskPass.js"></script>
<script src="js/postprocessing/ShaderPass.js"></script>
<script id="vertexShaderBasic" type="x-shader/x-vertex">
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
<script>
var camera, scene, renderer, composer;
var composer2;
var object, light;
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
//var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start ); // change anything
function init() {
renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
//
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.z = 400;
scene = new THREE.Scene();
scene.fog = new THREE.Fog( 0x000000, 1, 1000 );
object = new THREE.Object3D();
scene.add( object );
var geometry = new THREE.SphereGeometry( 1, 4, 4 );
var material = new THREE.MeshPhongMaterial( { color: 0xffffff, shading: THREE.FlatShading } );
for ( var i = 0; i < 100; i ++ ) {
var mesh = new THREE.Mesh( geometry, material );
mesh.position.set( Math.random() - 0.5, Math.random() - 0.5, Math.random() - 0.5 ).normalize();
mesh.position.multiplyScalar( Math.random() * 400 );
mesh.rotation.set( Math.random() * 2, Math.random() * 2, Math.random() * 2 );
mesh.scale.x = mesh.scale.y = mesh.scale.z = Math.random() * 50;
object.add( mesh );
}
scene.add( new THREE.AmbientLight( 0x222222 ) );
light = new THREE.DirectionalLight( 0xffffff );
light.position.set( 1, 1, 1 );
scene.add( light );
// postprocessing
composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
/////////////////////////////////////
/////////////////////////////////////
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 },
previousRender: { type: "t", value: null }
};
//uniforms1.sprite1.value.wrapS = uniforms1.sprite1.value.wrapT = THREE.RepeatWrapping;
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
/////////////////////////////////////
/////////////////////////////////////
var effect = new THREE.ShaderPass( THREE.DotScreenShader );
effect.uniforms[ 'scale' ].value = 4;
composer.addPass( effect );
var effect = new THREE.ShaderPass( THREE.RGBShiftShader );
effect.uniforms[ 'amount' ].value = 0.0015;
//effect.renderToScreen = true;
composer.addPass( effect );
//
window.addEventListener( 'resize', onWindowResize, false );
}
function $( id ) {
return document.getElementById( id );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
var time = Date.now();
object.rotation.x += 0.005;
object.rotation.y += 0.01;
composer.render();
composer2.render();
}
</script>
</body>
</html>
What I want: (see the "Good" screenshot)
What I get: (see the "NotGood" screenshot)
I had this issue too, and found a workaround.
I debugged it and determined that the texture ID the extra texture ends up with in the shader pass is different from what was expected, which is bad. If you look at the ShaderPass constructor, you'll see it clones the uniforms; that seems to be the cause. If I edit that code so the cloned uniforms keep the original texture object, it works as expected, so that should work for you too.
I'm trying to get some kind of (proper) bug fix integrated into an actual release.
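Until that lands, you can apply the workaround from user code: after constructing the pass, point the cloned uniform back at the original texture object. A sketch, in terms of the variables from the question:
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.uniforms.sprite1.value = sprite1; // the ShaderPass cloned the uniforms, so re-attach the original texture
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );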
Try this
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start );
three.js r.54
