Multiple textures overlapping WebGL - three.js

I have two objects that I need to render in two different textures using WebGLRenderTarget. After mapping both textures to a plane using shaders and then adding that plane to the main scene, I am having issues drawing the closer object.
I have created a jsfiddle to show my issue: https://jsfiddle.net/11qb7ay7/82/
The real magic occurs here:
void main() {
    vec4 color = texture2D(tex_sphere, vUv);
    vec4 color_cube = texture2D(tex_cube, vUv);
    gl_FragColor = vec4(color.rgb * color_cube.rgb, 1.0);
}
The sphere is placed in front of the cube relative to the camera. How can I draw the sphere pixels instead of the cubes when they overlap?
To be clear, I am trying to find a way to compute each pixel's distance from the camera and draw whichever object is closer.

In general, if tex_sphere has an alpha channel, then you can mix the colors by that alpha channel:
void main()
{
    vec4 color = texture2D(tex_sphere, vUv);
    vec4 color_cube = texture2D(tex_cube, vUv);
    vec3 mixCol = mix(color_cube.rgb, color.rgb, color.a);
    gl_FragColor = vec4(mixCol.rgb, 1.0);
}
If tex_sphere has a black background that should be omitted, then you have to check whether the color from tex_sphere is not black:
void main()
{
    vec4 color = texture2D(tex_sphere, vUv);
    vec4 color_cube = texture2D(tex_cube, vUv);
    vec3 test = step(1.0/512.0, color.rgb);
    float a = max(max(test.r, test.g), test.b);
    vec3 mixCol = mix(color_cube.rgb, color.rgb, a);
    gl_FragColor = vec4(mixCol.rgb, 1.0);
}
Note that mix interpolates between two values according to a floating-point interpolation value a in the range [0.0, 1.0]. If a is 0.0, the first value is returned; if a is 1.0, the second value is returned.
step tests whether a value is less than an edge value: if it is, 0.0 is returned, otherwise 1.0 is returned.
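For reference, a minimal GLSL sketch of what those two built-ins reduce to (the variable names here are purely illustrative and not part of the original shaders):
void main() {
    float t  = 0.25;
    vec3  a  = vec3(1.0, 0.0, 0.0);
    vec3  b  = vec3(0.0, 0.0, 1.0);
    vec3  m1 = mix(a, b, t);             // built-in linear interpolation
    vec3  m2 = a * (1.0 - t) + b * t;    // identical result, written out by hand
    float s1 = step(0.5, t);             // 0.0 here, because t < 0.5
    float s2 = (t < 0.5) ? 0.0 : 1.0;    // identical result, written out by hand
    gl_FragColor = vec4(m2, s2);         // just to consume the values
}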
To get a black background you have to set a black "clear" color when you render the sphere to the render target:
function render() {
    controls.update();
    renderer.setClearColor(0x000000);
    renderer.render(sphereScene, camera, renderTargets.sphere, true);
    renderer.setClearColor(0xccccff);
    renderer.render(cubeScene, camera, renderTargets.cube, true);
    renderer.setClearColor(0xccccff);
    renderer.render(scene, camera);
}
If you want to use an alpha channel, then you have to call setClearAlpha before you render to the render target:
function render() {
    controls.update();
    renderer.setClearAlpha(0);
    renderer.render(sphereScene, camera, renderTargets.sphere, true);
    renderer.setClearAlpha(1);
    renderer.render(cubeScene, camera, renderTargets.cube, true);
    renderer.render(scene, camera);
}
var scene, renderer, camera, controls;
var cubeScene, sphereScene;
var renderTargets;
init();
animate();
function init() {
scene = new THREE.Scene();
cubeScene = new THREE.Scene();
sphereScene = new THREE.Scene();
renderer = new THREE.WebGLRenderer( { antialias: false, alpha: true } );
renderer.setClearColor(0xccccff);
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 4000 );
camera.position.set(0, 0, 200);
renderer.setSize( window.innerWidth, window.innerHeight );
controls = new THREE.OrbitControls(camera, renderer.domElement);
camera.lookAt( scene.position );
var light = new THREE.HemisphereLight( 0xffffff, 0x444444 );
scene.add( light );
container = document.createElement('div');
document.body.appendChild(container);
container.appendChild(renderer.domElement);
initObjects();
initRenderTargets(window.innerWidth, window.innerHeight);
window.addEventListener('resize', onResize, false); // wire up the resize handler defined below
}
function onResize() {
renderer.setSize( window.innerWidth, window.innerHeight );
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderTargets.sphere.setSize( window.innerWidth, window.innerHeight );
renderTargets.cube.setSize( window.innerWidth, window.innerHeight );
}
function initObjects() {
var cGeometry = new THREE.BoxGeometry( 100, 100, 100 );
var cMaterial = new THREE.MeshBasicMaterial( {color: 0x00ff00} );
var cube = new THREE.Mesh( cGeometry, cMaterial );
cube.position.z = -210;
cube.position.y = 100;
var sGeometry = new THREE.SphereGeometry( 75, 32, 32 );
var sMaterial = new THREE.MeshBasicMaterial({
color: 0xff0000
});
var sphere = new THREE.Mesh( sGeometry, sMaterial );
sphere.position.z = -100;
sphereScene.add( sphere );
cubeScene.add( cube );
}
function initRenderTargets(width, height){
renderTargets = createRenderTargets(width, height);
var uniforms = {
"tex_cube": { type: "t", value: renderTargets.cube.texture },
"tex_sphere": { type: "t", value: renderTargets.sphere.texture }
}
material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: document.getElementById('vs_rt').textContent,
fragmentShader: document.getElementById('fs_rt').textContent
});
var plane = new THREE.PlaneGeometry(width, height);
quad = new THREE.Mesh(plane, material);
scene.add(quad);
}
function createRenderTargets(width, height) {
var parameters = {
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter,
};
return {
cube: new THREE.WebGLRenderTarget( width, height, parameters ),
sphere: new THREE.WebGLRenderTarget( width, height, parameters )
};
}
function animate() {
requestAnimationFrame(animate);
render();
}
//------------------------------------------
// Main rendering
//------------------------------------------
function render() {
controls.update();
renderer.setClearAlpha(0);
renderer.render(sphereScene, camera, renderTargets.sphere, true);
renderer.setClearAlpha(1);
renderer.render(cubeScene, camera, renderTargets.cube, true);
renderer.render(scene, camera);
}
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/three.js/89/three.min.js"></script>
<script type="text/javascript" src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<script id="vs_rt" type="x-shader/x-vertex">
uniform sampler2D tex_cube;
uniform sampler2D tex_sphere;
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id="fs_rt" type="x-shader/x-fragment">
uniform sampler2D tex_cube;
uniform sampler2D tex_sphere;
varying vec2 vUv;
void main() {
vec4 color = texture2D(tex_sphere, vUv);
vec4 color_cube = texture2D(tex_cube, vUv);
vec3 mixCol = mix(color_cube.rgb, color.rgb, color.a);
gl_FragColor = vec4(mixCol.rgb, 1.0);
}
</script>
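If you really need a per-pixel depth comparison rather than the alpha/black test above, one option with the r89 build used in the snippet is to attach a THREE.DepthTexture to each render target and compare the two depths in the composite shader (this relies on the WEBGL_depth_texture extension). A minimal sketch, not part of the original fiddle; the uniform names depth_sphere and depth_cube are made up for illustration. Inside initRenderTargets, something like:
// JavaScript side: give each existing render target a depth texture
renderTargets.sphere.depthTexture = new THREE.DepthTexture(width, height);
renderTargets.cube.depthTexture = new THREE.DepthTexture(width, height);
// expose them to the composite material next to tex_sphere / tex_cube
uniforms["depth_sphere"] = { type: "t", value: renderTargets.sphere.depthTexture };
uniforms["depth_cube"] = { type: "t", value: renderTargets.cube.depthTexture };
and on the GLSL side:
// fragment shader: keep whichever texel has the smaller (closer) depth
uniform sampler2D tex_cube;
uniform sampler2D tex_sphere;
uniform sampler2D depth_cube;
uniform sampler2D depth_sphere;
varying vec2 vUv;
void main() {
    float dSphere = texture2D(depth_sphere, vUv).x;
    float dCube   = texture2D(depth_cube, vUv).x;
    gl_FragColor  = (dSphere < dCube)
        ? texture2D(tex_sphere, vUv)
        : texture2D(tex_cube, vUv);
}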

Related

Not getting expected result from THREE.js/GLSL code

Apologies for the vague question but I wasn't sure how to phrase this. I'm trying to write some THREE.js/GLSL code that produces a circular gradient (for some SDF stuff). With the code below I would expect to see the gradient on the plane, but the plane remains white and nothing else renders on it. Can anyone tell me where I'm going wrong?
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>My first three.js app</title>
<style>
body { margin: 0; }
canvas { width: 100%; height: 100% }
</style>
</head>
<body>
<script type="x-shader/x-vertex" id="sdfVS">
varying vec2 vUv; // pass the uv coordinates of each pixel to the frag shader
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script type="x-shader/x-fragment" id="sdfFS">
precision mediump float;
uniform vec2 u_resolution;
varying vec2 vUv;
float circle(vec2 pos, float radius)
{
return distance(pos, vec2(radius));
}
void main()
{
vec2 pos = (gl_FragCoord.xy / vUv) * 2.0 - 1.0;
float circle1 = circle(pos, 0.5);
vec3 color = vec3(circle1);
gl_FragColor = vec4(color, 1.0);
}
</script>
<script src="../../js/three.min.js"></script>
<script>
var scene, camera, renderer, aspect, geometry, material, plane;
var container;
var frustumSize = 2;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
scene = new THREE.Scene();
scene.background = new THREE.Color(0x0000ff);
aspect = window.innerWidth / window.innerHeight;
camera = new THREE.OrthographicCamera( 0.5 * frustumSize * aspect / - 2, 0.5 * frustumSize * aspect / 2, frustumSize / 2, frustumSize / - 2, 0.1, 1 );
cameraOrthoHelper = new THREE.CameraHelper( camera );
scene.add( cameraOrthoHelper );
var width = 1;
var height = 1;
geometry = new THREE.PlaneGeometry(width, height);
material = new THREE.ShaderMaterial( {
vertexShader: document.getElementById('sdfVS').textContent,
fragmentShader: document.getElementById('sdfFS').textContent,
side: THREE.DoubleSide
} );
plane = new THREE.Mesh( geometry, material );
plane.rotation.x = 0;
plane.rotation.y = THREE.Math.degToRad( -90 );
plane.rotation.z = 0;
scene.add( plane )
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
camera.position.x = -1;
camera.position.y = 0;
camera.position.z = 0;
camera.lookAt( scene.position );
camera.updateMatrixWorld();
renderer.render( scene, camera );
}
</script>
</body>
</html>
I can't follow this line:
vec2 pos = (gl_FragCoord.xy / vUv) * 2.0 - 1.0;
It seems like you want to calculate UV coordinates, but if so, you already have them (passed in as vUv).
You can do it this way, as an option:
var scene, camera, renderer, aspect, geometry, material, plane;
var container;
var clock = new THREE.Clock();
var frustumSize = 2;
init();
animate();
function init() {
container = document.createElement('div');
document.body.appendChild(container);
scene = new THREE.Scene();
scene.background = new THREE.Color(0x0000ff);
aspect = window.innerWidth / window.innerHeight;
camera = new THREE.OrthographicCamera(0.5 * frustumSize * aspect / -2, 0.5 * frustumSize * aspect / 2, frustumSize / 2, frustumSize / -2, 0.1, 1);
cameraOrthoHelper = new THREE.CameraHelper(camera);
scene.add(cameraOrthoHelper);
var width = 1;
var height = 1;
geometry = new THREE.PlaneGeometry(width, height);
material = new THREE.ShaderMaterial({
uniforms: {time: {value: 0}},
vertexShader: document.getElementById('sdfVS').textContent,
fragmentShader: document.getElementById('sdfFS').textContent,
side: THREE.DoubleSide
});
plane = new THREE.Mesh(geometry, material);
plane.rotation.x = 0;
plane.rotation.y = THREE.Math.degToRad(-90);
plane.rotation.z = 0;
scene.add(plane)
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement);
window.addEventListener('resize', onWindowResize, false);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function animate() {
requestAnimationFrame(animate);
render();
}
function render() {
camera.position.x = -1;
camera.position.y = 0;
camera.position.z = 0;
camera.lookAt(scene.position);
camera.updateMatrixWorld();
var t = clock.getElapsedTime();
material.uniforms.time.value = t;
renderer.render(scene, camera);
}
body {
overflow: hidden;
margin: 0;
}
<script src="https://threejs.org/build/three.min.js"></script>
<script type="x-shader/x-vertex" id="sdfVS">
varying vec2 vUv; // pass the uv coordinates of each pixel to the frag shader
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script type="x-shader/x-fragment" id="sdfFS">
precision mediump float;
uniform float time;
uniform vec2 u_resolution;
varying vec2 vUv;
float circle(vec2 uv, vec2 pos, float radius) {
return smoothstep(radius, 0., length(uv - pos));
}
void main()
{
vec2 uv = vUv * 2. - 1.;
vec2 pos = vec2(cos(time), sin(time));
float circle1 = circle(uv, pos, 1.0);
vec3 color = vec3(circle1);
gl_FragColor = vec4(color, 1.0);
}
</script>

ThreeJS - Material which shows surface structure in a scene without light

In my scene I render complex objects, which have a complex surface structure. Furthermore I am not using light in my scene and I am trying to avoid it.
For now I am using the MeshNormalMaterial which shows perfectly the surface structures of my objects.
object with MeshNormalMaterial:
But I want to render certain objects with a unique color (e.g. from dark red to light red based on the surface structure/ similar to the MeshNormalMaterial).
I tried MeshDepthMaterial for one object, but it rendered the whole object in almost a single color (no/sparse color gradation), not as expected like in this example, independent of the camera position.
Same object from above with MeshDepthMaterial:
I am using a THREE.PerspectiveCamera with THREE.OrbitControls. Camera properties:
//camera attributes
public fieldOfView: number = 60;
public nearClippingPane: number = 0.1;
public farClippingPane: number = 50000;
Does MeshNormalMaterial require light, or why is this the case? Can I somehow amplify the depth effect of MeshNormalMaterial?
Is it possible to restrict the RGB colors of MeshNormalMaterial, or do I have to use another material for my purpose?
I just slightly modified the code of the fragment shader from this SO answer, so all credits to Rabbid76:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 0.01, 1000);
camera.position.set(0, 0, 10);
var renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var controls = new THREE.OrbitControls(camera, renderer.domElement);
var colors = {
color1: "#ff0000",
color2: "#ffaaaa"
}
var geometry = new THREE.TorusKnotBufferGeometry(2, 0.5, 100, 16);
var material = new THREE.ShaderMaterial({
uniforms: {
color1: {
value: new THREE.Color(colors.color1)
},
color2: {
value: new THREE.Color(colors.color2)
}
},
vertexShader: vertShader,
fragmentShader: fragShader
});
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
var gui = new dat.GUI();
gui.addColor(colors, "color1").onChange(function(value) {
material.uniforms.color1.value.set(value);
});
gui.addColor(colors, "color2").onChange(function(value) {
material.uniforms.color2.value.set(value);
});
render();
function render() {
requestAnimationFrame(render);
renderer.render(scene, camera);
}
body {
overflow: hidden;
margin: 0;
}
<script src="https://threejs.org/build/three.min.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<script src="https://threejs.org/examples/js/libs/dat.gui.min.js"></script>
<script>
var vertShader = `
varying vec3 vNormal;
void main(void)
{
vNormal = normalMatrix * normalize(normal);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`;
var fragShader = `
uniform vec3 color1;
uniform vec3 color2;
varying vec3 vNormal;
void main(void)
{
vec3 view_nv = normalize(vNormal);
vec3 nv_color = view_nv * 0.5 + 0.5;
vec3 c = mix(color1, color2, nv_color.r);
gl_FragColor = vec4(c, 1.0);
}
`;
</script>

Three.js Object3D custom vertex shader

I have a bunch of Object3d in the scene comprised of different meshes, e.g:
const object3d = new Object3D();
const bodyMaterial = new MeshLambertMaterial();
const bezelMaterial = new MeshBasicMaterial({ color: "0xffffff" });
const mesh = new Mesh(MeshFactory.mesh1, [ bodyMaterial, bezelMaterial ]);
const textLabel = TextFactory.createText("mesh1");
object3d.add(mesh, textLabel);
I'd like to move the position of the object3d on each frame; currently I'm doing that on the CPU:
render() {
object3D.position = calcPosition(); // new Vector3
}
This is slow since I have hundreds of Object3d objects and each one should move separately.
So I'd like to write a vertex shader that moves the xyz on each frame, but how do I write a shader for an Object3D (i.e. change the position for all of its meshes)?
Sounds like you might be going about things the wrong way...
But..
Here is a basic example of vertex displacement in a shader in three.js.
Rotation is done in the render loop, but offset values are sent to the vertex shader.
var camera, scene, renderer, mesh, material;
init();
animate();
function init() {
// Renderer.
renderer = new THREE.WebGLRenderer();
//renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
// Add renderer to page
document.body.appendChild(renderer.domElement);
// Create camera.
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.z = 400;
// Create scene.
scene = new THREE.Scene();
// Create material
var uniforms = {
xOffset: { type: "f", value: 0.0 },
yOffset: { type: "f", value: 0.0 },
zOffset: { type: "f", value: 0.0 }
};
var vertexShader = document.getElementById('vertexShader').text;
var fragmentShader = document.getElementById('fragmentShader').text;
material = new THREE.ShaderMaterial(
{
uniforms : uniforms,
vertexShader : vertexShader,
fragmentShader : fragmentShader
});
// Create cube and add to scene.
var geometry = new THREE.BoxGeometry(200, 200, 200);
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
// Create ambient light and add to scene.
var light = new THREE.AmbientLight(0x404040); // soft white light
scene.add(light);
// Create directional light and add to scene.
var directionalLight = new THREE.DirectionalLight(0xffffff);
directionalLight.position.set(1, 1, 1).normalize();
scene.add(directionalLight);
// Add listener for window resize.
window.addEventListener('resize', onWindowResize, false);
// set up gui.
var gui = new dat.GUI();
gui.add(uniforms.xOffset, 'value', -500, 500).name('X Offset');
gui.add(uniforms.yOffset, 'value', -500, 500).name('Y Offset');
gui.add(uniforms.zOffset, 'value', -500, 500).name('Z Offset');
}
function animate() {
requestAnimationFrame(animate);
mesh.rotation.x += 0.005;
mesh.rotation.y += 0.01;
renderer.render(scene, camera);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
body {
padding: 0;
margin: 0;
}
canvas {
display: block;
}
<script src="https://rawgit.com/mrdoob/three.js/r89/build/three.min.js"></script>
<script src="https://cdn.rawgit.com/dataarts/dat.gui/v0.6.2/build/dat.gui.min.js"></script>
<script id="vertexShader" type="x-shader/x-vertex">
uniform float xOffset;
uniform float yOffset;
uniform float zOffset;
void main( void ) {
gl_Position = projectionMatrix * modelViewMatrix * vec4(position + vec3(xOffset, yOffset, zOffset),1.0);
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
void main() {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}
</script>

THREE.JS Shader texture

I'm just looking to create a simple fragment shader that draws a specified texture on the mesh, but I can't display it. I can display the texture file on a basic geometry, but it does not work with shaders; the only result is a black sphere.
I don't know what is wrong. Any help would be appreciated!
I am using three.js THREE.WebGLRenderer r81.
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix *
modelViewMatrix *
vec4(position,1.0);
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
varying vec2 vUv;
uniform sampler2D texture1;
void main() {
gl_FragColor = texture2D(texture1, vUv); // Displays black sphere
//gl_FragColor = vec4(0.5, 0.2, 1.0, 1.0); // Works; Displays Color
}
</script>
<script>
// Initialize WebGL Renderer
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
//var canvas = document.getElementById('canvas').appendChild(renderer.domElement);
document.body.appendChild( renderer.domElement );
renderer.setClearColor(0x888888, 1.0);
// Initialize Scenes
var scene = new THREE.Scene();
// Initialize Camera
var camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 100);
camera.position.z = 10;
// Create Light
var light = new THREE.PointLight(0xFFFFFF);
light.position.set(0, 0, 500);
scene.add(light);
// LIGHT
var light = new THREE.PointLight(0xffffff);
light.position.set(100,250,100);
scene.add(light);
// Create Ball
var vertShader = document.getElementById('vertexShader').textContent;
var fragShader = document.getElementById('fragmentShader').textContent;
var textura = new THREE.TextureLoader().load('./funkyGround.jpg');
var uniforms = {
texture1: { type: 't', value: textura }
};
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: vertShader,
fragmentShader: fragShader
});
var ball = new THREE.Mesh(new THREE.SphereGeometry(1, 50, 50), material);
scene.add(ball);
// Render the Scene
renderer.render(scene, camera);
</script>
</body>
After a review, I found the solution: adding a render() loop. The original code calls renderer.render only once, before the texture has finished loading, so the sphere stays black; rendering continuously (or re-rendering once the texture is loaded) makes the texture show up:
function render() {
requestAnimationFrame( render );
renderer.render( scene, camera );
}
render();
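If you only need a single render, a minimal alternative sketch is to draw from the TextureLoader onLoad callback (same funkyGround.jpg path as above), so the render happens after the texture has finished loading:
var textura = new THREE.TextureLoader().load('./funkyGround.jpg', function () {
    // the texture data is available now, so one render is enough to show it
    renderer.render(scene, camera);
});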

Can't send a texture to a custom shader (ShaderPass/EffectComposer)

I'm currently playing with this great library, but I have some difficulties with the EffectComposer.
I can't send a texture to a postprocess shader introduced by a ShaderPass.
I think this is a bug... or I'm doing something stupid (needsUpdate, wrap, ...?).
(r54, W7, Nv9700mGT, FF 17.0.1 and Chrome 24.0.1312.52)
I used the "webgl_postprocessing.html" example to reproduce the phenomenon just by adding these shaders :
<script id="vertexShaderBasic" type="x-shader/x-vertex">
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
This at the beginning of the main script, to be sure the sprite is loaded:
var composer2;
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
This in the composer section, right after:
composer.addPass( new THREE.RenderPass( scene, camera ) );
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 }, // <- something wrong here
previousRender: { type: "t", value: null }
};
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
// link with the previous render
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
// the custom shader
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
I also comment this out, because it's not relevant any more:
//effect.renderToScreen = true;
and I add this at the end:
composer2.render();
The link between the two passes works well, but the sprite never appears on the EffectComposer quad that covers the screen...
Thanks, and sorry for my English.
EDIT to recap:
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - postprocessing</title>
<meta charset="utf-8">
<style>
body {
margin: 0px;
background-color: #000000;
overflow: hidden;
}
</style>
</head>
<body>
<script src="../build/three.min.js"></script>
<script src="js/shaders/CopyShader.js"></script>
<script src="js/shaders/DotScreenShader.js"></script>
<script src="js/shaders/RGBShiftShader.js"></script>
<script src="js/postprocessing/EffectComposer.js"></script>
<script src="js/postprocessing/RenderPass.js"></script>
<script src="js/postprocessing/MaskPass.js"></script>
<script src="js/postprocessing/ShaderPass.js"></script>
<script id="vertexShaderBasic" type="x-shader/x-vertex">
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
<script>
var camera, scene, renderer, composer;
var composer2;
var object, light;
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
//var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start ); // change anything
function init() {
renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
//
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.z = 400;
scene = new THREE.Scene();
scene.fog = new THREE.Fog( 0x000000, 1, 1000 );
object = new THREE.Object3D();
scene.add( object );
var geometry = new THREE.SphereGeometry( 1, 4, 4 );
var material = new THREE.MeshPhongMaterial( { color: 0xffffff, shading: THREE.FlatShading } );
for ( var i = 0; i < 100; i ++ ) {
var mesh = new THREE.Mesh( geometry, material );
mesh.position.set( Math.random() - 0.5, Math.random() - 0.5, Math.random() - 0.5 ).normalize();
mesh.position.multiplyScalar( Math.random() * 400 );
mesh.rotation.set( Math.random() * 2, Math.random() * 2, Math.random() * 2 );
mesh.scale.x = mesh.scale.y = mesh.scale.z = Math.random() * 50;
object.add( mesh );
}
scene.add( new THREE.AmbientLight( 0x222222 ) );
light = new THREE.DirectionalLight( 0xffffff );
light.position.set( 1, 1, 1 );
scene.add( light );
// postprocessing
composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
/////////////////////////////////////
/////////////////////////////////////
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 },
previousRender: { type: "t", value: null }
};
//uniforms1.sprite1.value.wrapS = uniforms1.sprite1.value.wrapT = THREE.RepeatWrapping;
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
/////////////////////////////////////
/////////////////////////////////////
var effect = new THREE.ShaderPass( THREE.DotScreenShader );
effect.uniforms[ 'scale' ].value = 4;
composer.addPass( effect );
var effect = new THREE.ShaderPass( THREE.RGBShiftShader );
effect.uniforms[ 'amount' ].value = 0.0015;
//effect.renderToScreen = true;
composer.addPass( effect );
//
window.addEventListener( 'resize', onWindowResize, false );
}
function $( id ) {
return document.getElementById( id );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
var time = Date.now();
object.rotation.x += 0.005;
object.rotation.y += 0.01;
composer.render();
composer2.render();
}
</script>
</body>
</html>
What I want: Good
What I get: NotGood
I had this issue too, and found a workaround.
I debugged it and determined that the texture ID for the extra texture is different in the shader pass than expected, which is bad. If you look in the ShaderPass constructor, you'll see it clones the uniforms; that seems to be the cause. If I edit that code to restore the original texture object in the cloned uniforms, it works as expected, so that should work for you too (see the sketch below).
I'm trying to get some kind of (proper) bug fix integrated into the actual release.
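A minimal sketch of that workaround applied from your own code, rather than by editing ShaderPass itself (using the uniform names from the question; pass.uniforms is the cloned set that the pass's material actually uses):
// after constructing the pass, point the cloned uniform back at the
// original texture object so the expected texture gets bound
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.uniforms.sprite1.value = sprite1;
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );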
Try this
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start );
three.js r.54