I've just downloaded the latest three.js master ("mrdoob-three.js-d6384d2"), modified "webgl_nearestneighbour.html" to show a transparent image, and this is the result: http://i.share.pho.to/8cccac74_l.jpeg
I can't tell whether this is by design, a WebGL error, or a three.js error, but as you can see on the bigger ball, near sprites are clipped while far sprites aren't.
Any information is much appreciated (I'm new to WebGL).
Edit: here's the code.
<html>
<head>
<meta charset="utf-8">
<title>three.js webgl - nearest neighbour</title>
<style>
html, body {
width: 100%;
height: 100%;
}
body {
background-color: #ffffff;
margin: 0;
overflow: hidden;
font-family: arial;
}
#info {
text-align: center;
padding: 5px;
position: absolute;
width: 100%;
color: white;
}
</style>
</head>
<body>
<div id="info">three.js webgl - typed arrays - nearest neighbour for 500,000 sprites</div>
<script src="../build/three.min.js"></script>
<script src="js/TypedArrayUtils.js"></script>
<script src="js/controls/FirstPersonControls.js"></script>
<script type="x-shader/x-vertex" id="vertexshader">
//uniform float zoom;
attribute float alpha;
varying float vAlpha;
void main() {
vAlpha = 1.0 - alpha;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = 4.0 * ( 300.0 / length( mvPosition.xyz ) );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script type="x-shader/x-fragment" id="fragmentshader">
uniform sampler2D tex1;
varying float vAlpha;
void main() {
gl_FragColor = texture2D(tex1, gl_PointCoord);
gl_FragColor.r = (1.0 - gl_FragColor.r) * vAlpha + gl_FragColor.r;
}
</script>
<script>
var camera, scene, renderer;
var geometry, material, mesh;
var controls;
var objects = [];
var amountOfParticles = 500000, maxDistance = Math.pow(120, 2);
var positions, alphas, particles, _particleGeom;
var clock = new THREE.Clock();
var blocker = document.getElementById( 'blocker' );
var instructions = document.getElementById( 'instructions' );
function init() {
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 1000000);
scene = new THREE.Scene();
controls = new THREE.FirstPersonControls( camera );
controls.movementSpeed = 100;
controls.lookSpeed = 0.1;
var materials = [
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/px.jpg' ) } ), // right
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/nx.jpg' ) } ), // left
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/py.jpg' ) } ), // top
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/ny.jpg' ) } ), // bottom
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/pz.jpg' ) } ), // back
new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'textures/cube/skybox/nz.jpg' ) } ) // front
];
mesh = new THREE.Mesh( new THREE.BoxGeometry( 10000, 10000, 10000, 7, 7, 7 ), new THREE.MeshFaceMaterial( materials ) );
mesh.scale.x = - 1;
scene.add(mesh);
//
renderer = new THREE.WebGLRenderer(); // Detector.webgl? new THREE.WebGLRenderer(): new THREE.CanvasRenderer()
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
// create the custom shader
var imagePreviewTexture = THREE.ImageUtils.loadTexture( 'textures/football.png');
imagePreviewTexture.minFilter = THREE.LinearMipMapLinearFilter;
imagePreviewTexture.magFilter = THREE.LinearFilter;
pointShaderMaterial = new THREE.ShaderMaterial( {
uniforms: {
tex1: { type: "t", value: imagePreviewTexture },
zoom: { type: 'f', value: 9.0 },
},
attributes: {
alpha: { type: 'f', value: null },
},
vertexShader: document.getElementById( 'vertexshader' ).textContent,
fragmentShader: document.getElementById( 'fragmentshader' ).textContent,
transparent: true
});
//create particles with buffer geometry
var distanceFunction = function(a, b){
return Math.pow(a[0] - b[0], 2) + Math.pow(a[1] - b[1], 2) + Math.pow(a[2] - b[2], 2);
};
positions = new Float32Array( amountOfParticles * 3 );
alphas = new Float32Array( amountOfParticles );
_particleGeom = new THREE.BufferGeometry();
_particleGeom.addAttribute( 'position', new THREE.BufferAttribute( positions, 3 ) );
_particleGeom.addAttribute( 'alpha', new THREE.BufferAttribute( alphas, 1 ) );
particles = new THREE.PointCloud( _particleGeom, pointShaderMaterial );
for (var x = 0; x < amountOfParticles; x++) {
positions[ x * 3 + 0 ] = Math.random() * 1000;
positions[ x * 3 + 1 ] = Math.random() * 1000;
positions[ x * 3 + 2 ] = Math.random() * 1000;
alphas[x] = 1.0;
}
var measureStart = new Date().getTime();
// creating the kdtree takes a lot of time to execute, in turn the nearest neighbour search will be much faster
kdtree = new THREE.TypedArrayUtils.Kdtree( positions, distanceFunction, 3 );
console.log('TIME building kdtree', new Date().getTime() - measureStart);
// display particles after the kd-tree was generated and the sorting of the positions-array is done
scene.add(particles);
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
controls.handleResize();
}
function animate() {
requestAnimationFrame( animate );
//
displayNearest(camera.position);
controls.update( clock.getDelta() );
renderer.render( scene, camera );
}
function displayNearest(position) {
// take the nearest 100 around the camera. maxDistance is squared because the distance function returns squared distances (no sqrt is applied)
var imagePositionsInRange = kdtree.nearest([position.x, position.y, position.z], 100, maxDistance);
// We combine the nearest neighbour with a view frustum. Doesn't make sense if we change the sprites not in our view... well maybe it does. Whatever you want.
var _frustum = new THREE.Frustum();
var _projScreenMatrix = new THREE.Matrix4();
camera.matrixWorldInverse.getInverse( camera.matrixWorld );
_projScreenMatrix.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse );
_frustum.setFromMatrix( _projScreenMatrix );
for ( i = 0, il = imagePositionsInRange.length; i < il; i ++ ) {
var object = imagePositionsInRange[i];
var objectPoint = new THREE.Vector3().fromArray( object[ 0 ].obj );
if (_frustum.containsPoint(objectPoint)){
var objectIndex = object[0].pos;
// set the alpha according to distance
alphas[ objectIndex ] = 1.0 / maxDistance * object[1];
// update the attribute
_particleGeom.attributes.alpha.needsUpdate = true;
}
}
}
init();
animate();
</script>
</body>
</html>
To recap what was said in the comments: the solution is to disable alpha blending and, in the fragment shader, discard fragments based on the alpha value of the input texture.
So that this...
void main() {
gl_FragColor = texture2D(tex1, gl_PointCoord);
gl_FragColor.r = (1.0 - gl_FragColor.r) * vAlpha + gl_FragColor.r;
}
...becomes this
void main() {
gl_FragColor = texture2D(tex1, gl_PointCoord);
gl_FragColor.r = (1.0 - gl_FragColor.r) * vAlpha + gl_FragColor.r;
if ( gl_FragColor.a < 0.5 ) discard;
}
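In three.js terms, "disable alpha blending" is a one-line change on the material from the question (a sketch, assuming the pointShaderMaterial defined above):
// keep the custom shaders, but turn alpha blending off and rely on discard instead
pointShaderMaterial.transparent = false;
// or, more explicitly:
// pointShaderMaterial.blending = THREE.NoBlending;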
Related
The title makes it look easy, but I'm struggling to get this pie chart shader onto my 3D model. For this I'm using three.js. Here is my code so far:
index.html:
<!DOCTYPE html>
<html>
<head>
<meta charset='utf-8'>
<meta http-equiv='X-UA-Compatible' content='IE=edge'>
<title>Site Prof Matheus</title>
<meta name='viewport' content='width=device-width, initial-scale=1'>
<link rel='stylesheet' type='text/css' media='screen' href='./styles/main.css'>
<script type='module' src='./src/main.js'></script>
</head>
<body>
</body>
</html>
main.js:
import * as THREE from 'https://cdn.skypack.dev/three'
import { OrbitControls } from 'https://cdn.skypack.dev/three-stdlib/controls/OrbitControls'
import { GLTFLoader } from 'https://cdn.skypack.dev/three-stdlib/loaders/GLTFLoader'
import { vertexShader } from '../shaders/vertex.glsl.js'
import { fragmentShader } from '../shaders/fragment.glsl.js'
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
const loader = new GLTFLoader();
const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setClearColor(0x002653, 0.5)
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
renderer.setPixelRatio(window.devicePixelRatio);
const r_material = new THREE.ShaderMaterial({ //Roulette material
uniforms: {
iTime: { value: 1.0 },
resolution: { value: new THREE.Vector2 }
},
vertexShader,
fragmentShader
})
loader.load(
'../roulette.glb',
function (gltf) {
gltf.scene.traverse((o) => {
if (o.isMesh) o.material = r_material;
});
scene.add(gltf.scene);
},
function () { },
function (err) {
console.log('error: ' + err);
}
)
camera.position.z = 5;
const controls = new OrbitControls(camera, renderer.domElement);
controls.enableZoom = true;
controls.enableDamping = true;
function animate() {
requestAnimationFrame(animate);
controls.update();
renderer.render(scene, camera);
};
animate();
vertex.glsl.js:
const vertexShader = /* glsl */`
void main()
{
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`
// uniform vec3 iResolution;
// uniform int SEGMENTCOUNT;
export { vertexShader };
fragment.glsl.js:
const fragmentShader = /* glsl */`
void main() {
gl_FragColor = vec4(0, 1, 0, 1);
}
`
export { fragmentShader };
and here is my roulette.glb model
The intended result is to have a shader with colors that I choose, all the parts equal; the colors can repeat, and the shader should cover the whole mesh except the bottom: intended result
P.S. I see my object looks like a flat plane in the image; I guess I just need to add some proper lighting. Here is my object's geometry, just for the sake of curiosity: my mesh geometry
P.P.S. Some antialiasing on the pie chart shader would be very welcome.
This is how you can do it, modifying a material with .onBeforeCompile():
body{
overflow: hidden;
margin: 0;
}
<script type="module">
import * as THREE from "https://cdn.skypack.dev/three@0.136.0";
import {
OrbitControls
} from "https://cdn.skypack.dev/three#0.136.0/examples/jsm/controls/OrbitControls";
let scene = new THREE.Scene();
let camera = new THREE.PerspectiveCamera(60, innerWidth / innerHeight, 1, 2000);
camera.position.set(0, 1, 1).setLength(12);
let renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setSize(innerWidth, innerHeight);
document.body.appendChild(renderer.domElement);
window.addEventListener("resize", onWindowResize);
let controls = new OrbitControls(camera, renderer.domElement);
scene.add(new THREE.GridHelper());
let light = new THREE.DirectionalLight(0xffffff, 0.5);
light.position.setScalar(1);
scene.add(light, new THREE.AmbientLight(0xffffff, 0.5));
let path = new THREE.Path();
path.moveTo(0, -1);
path.lineTo(4, -1);
path.absarc(4, -0.5, 0.5, Math.PI * 1.5, 0);
path.absarc(4, 0.5, 0.5, 0, Math.PI * 0.5);
path.lineTo(0, -0.5);
let g = new THREE.LatheGeometry(path.getPoints(50), 72);
let m = new THREE.MeshLambertMaterial({
color: 0x00ff7f,
//wireframe: true,
onBeforeCompile: shader => {
shader.vertexShader = `
varying vec3 vPos;
${shader.vertexShader}
`.replace(
`#include <begin_vertex>`,
`#include <begin_vertex>
vPos = position;
`
);
//console.log(shader.vertexShader);
shader.fragmentShader = `
#define ss(a, b, c) smoothstep(a, b, c)
varying vec3 vPos;
${shader.fragmentShader}
`.replace(
`vec4 diffuseColor = vec4( diffuse, opacity );`,
`
vec3 col = diffuse;
int N = 37;
float a = atan(vPos.x,-vPos.z)+PI;
float r = PI2/float(N);
float cId = floor(a/r);
vec3 br = mod(cId, 2.) == 0. ? vec3(0) : vec3(1, 0, 0); // black / red
br = cId == 0. ? vec3(0, 0.75, 0) : br; // green
float d = length(vPos.xz);
float fw = length(fwidth(vPos.xz));
col = mix(col, br, ss(3. - fw, 3., d) - ss(4., 4. + fw, d));
col = mix(diffuse, col, clamp(sign(vPos.y), 0., 1.));
vec4 diffuseColor = vec4( col, opacity );
`
);
//console.log(shader.fragmentShader);
}
})
let o = new THREE.Mesh(g, m);
o.position.y = 0.5;
o.rotation.y = Math.PI;
scene.add(o);
renderer.setAnimationLoop(() => {
renderer.render(scene, camera);
});
function onWindowResize() {
camera.aspect = innerWidth / innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(innerWidth, innerHeight);
}
</script>
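If you later want to drive the segment count from JavaScript instead of hard-coding N = 37, the same onBeforeCompile hook can also add uniforms. A rough sketch (not part of the snippet above; uSegments is a made-up name, and the black/red striping is kept from the original):
let m2 = new THREE.MeshLambertMaterial({
  color: 0x00ff7f,
  onBeforeCompile: shader => {
    // expose the segment count as a uniform instead of the hard-coded N
    shader.uniforms.uSegments = { value: 37 };
    shader.vertexShader = `
    varying vec3 vPos;
    ${shader.vertexShader}
    `.replace(
      `#include <begin_vertex>`,
      `#include <begin_vertex>
      vPos = position;
      `
    );
    shader.fragmentShader = `
    uniform float uSegments;
    varying vec3 vPos;
    ${shader.fragmentShader}
    `.replace(
      `vec4 diffuseColor = vec4( diffuse, opacity );`,
      `
      float a = atan(vPos.x, -vPos.z) + PI;       // angle around the Y axis
      float cId = floor(a / (PI2 / uSegments));   // segment index
      vec3 col = mod(cId, 2.) == 0. ? vec3(0) : vec3(1, 0, 0);
      vec4 diffuseColor = vec4( col, opacity );
      `
    );
  }
});
// Built-in materials don't expose a .uniforms property; to change uSegments later,
// keep a reference to "shader" from inside onBeforeCompile.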
I'm trying to apply a texture to a very large plane (1000x1000, which is also scaled 10 times) using RepeatWrapping. It looks good when I use MeshBasicMaterial, but it flickers when I use ShaderMaterial. Below is my code.
<!DOCTYPE html>
<html>
<head>
<title>MeshShaderMaterialExample</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0, shrink-to-fit=no">
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/99/three.min.js"></script>
<script src="https://unpkg.com/three#0.85.0/examples/js/controls/TrackballControls.js"></script>
<script src="js/TWEEN.js"></script>
<style type="text/css">
body {
width: 100%;
height: 100%;
background-color: #000;
color: #fff;
margin: 0px;
padding: 0;
overflow: hidden;
}
</style>
</head>
<body>
<script>
var camera, scene, renderer;
var container, mesh, geometry;
var controls, effect;
var tweenUpdate="false";
var tweenOver="true";
var textureData=
{
"texture_0":
{
"img":"gman.png"
},
"texture_1":
{
"img":"gman.png"
}}
var magicPosition = { magicTrans:0 };
var magicTarget = { magicTrans:1 };
var magicTween = new TWEEN.Tween(magicPosition).to(magicTarget, 1000);
magicTween.easing(TWEEN.Easing.Linear.None);
var currentTexture=0;
var nextTexture=0;
var uniforms = {
textures: {
value: []
},
repeat: {
type: 'f',
value: 100
},
transition: {
value: 0
},
currentUniform: {
value: 0
},
nextUniform: {
value: 0
}
};
var textureLoader = new THREE.TextureLoader();
var pics=[];
for (var i = 0; i < Object.keys(textureData).length; i++) {
var ass="texture_"+i;
pics[i]= textureData[ass].img;
console.log(pics[i]);
}
pics.forEach((p, idx)=>{
textureLoader.load(p, function(tex){
tex.needsUpdate = true;
uniforms.textures.value[idx] = tex;
uniforms.textures.value[idx].needsUpdate = true;
// console.log(tex);
uniforms.textures.value[idx].minFilter = THREE.LinearFilter;
})
});
var vertShader = `
varying vec2 vUv;
uniform float repeat;
void main()
{
vUv = repeat * uv;
vec4 mvPosition = modelViewMatrix * vec4(position, 1 );
gl_Position = projectionMatrix * mvPosition;
}
`;
var fragShader = `
uniform sampler2D textures[` + pics.length + `];
uniform float transition;
uniform float currentUniform;
uniform float nextUniform;
varying vec2 vUv;
vec4 getTexture(int index){
for(int i = 0; i < ` + pics.length + `; i++){
if (i == index){ return texture2D(textures[i],vUv); }
}
}
void main()
{
float chunk = 1. / ` + 1 + `.; // amount of transitions = 1
float t = floor(transition / chunk);
int idx0 = int(currentUniform);
int idx1 = int(nextUniform);
gl_FragColor = mix(
getTexture(idx0),
getTexture(idx1),
(transition - (float(t) * chunk)) * ` + 1 + `.
);
}
`;
window.onload=function()
{
init();
animate();
}
function init(){
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild(renderer.domElement);
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 10000 );
controls = new THREE.TrackballControls( camera,renderer.domElement );
camera.position.z = 500;
console.log(camera.fov);
scene = new THREE.Scene();
scene.add(camera);
var magicPlaneMaterial = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: vertShader,
fragmentShader: fragShader,
side: THREE.DoubleSide
});
for (var i = 0; i < Object.keys(textureData).length; i++) {
uniforms.textures.value[i].wrapS = uniforms.textures.value[i].wrapT = THREE.RepeatWrapping;
uniforms.textures.value[i].needsUpdate = true;
}
// for (var i = 0; i < Object.keys(textureData).length; i++) {
// uniforms.textures.value[i].wrapS = uniforms.textures.value[i].wrapT = THREE.RepeatWrapping;
// uniforms.textures.value[i].needsUpdate = true;
// }
var magicPlaneGeometry = new THREE.PlaneBufferGeometry(1000, 1000, 16, 16);
var magicPlaneMesh = new THREE.Mesh(magicPlaneGeometry, magicPlaneMaterial);
magicPlaneMesh.position.y = -500;
magicPlaneMesh.rotation.x = Math.PI / 2;
magicPlaneMesh.scale.x=10;
magicPlaneMesh.scale.y=10;
scene.add(magicPlaneMesh);
changeMagicPlane(currentTexture);
document.addEventListener( 'wheel', onDocumentMouseWheel, false );
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function onDocumentMouseWheel( event ) {
var fov = camera.fov + event.deltaY * 0.05;
camera.fov = THREE.Math.clamp( fov, 10, 75 );
console.log(camera.fov);
camera.updateProjectionMatrix();
}
function animate() {
if(tweenUpdate=="true")
{
TWEEN.update();
}
renderer.render( scene, camera );
controls.update();
requestAnimationFrame( animate );
}
function changeMagicPlane(asset){
var assNum= parseInt(asset);
nextTexture = assNum;
uniforms.nextUniform.value = nextTexture;
console.log("Cuurent: "+currentTexture);
console.log("Next: "+nextTexture);
magicTween.start();
tweenUpdate="true";
tweenOver="false";
}
magicTween.onUpdate(function(){
uniforms.transition.value = magicPosition.magicTrans;
});
magicTween.onComplete(function(){
tweenUpdate="false";
tweenOver="true";
clicked="false";
//console.log("Am i complete?");
magicPosition.magicTrans=0;
currentTexture=nextTexture;
uniforms.currentUniform.value = currentTexture;
console.log("Current: "+currentTexture);
});
</script>
</body>
</html>
I'm trying to use ShaderMaterial for a crossfading effect. My texture image is 256x256 pixels.
Working snippet. Tween.js is used from here (http://learningthreejs.com/blog/2011/08/17/tweenjs-for-smooth-animation/). gman.png is from here (https://i.imgur.com/ZKMnXce.png)
You've disabled trilinear texture filtering (mipmaps) by setting the texture minifying function (.minFilter) to THREE.LinearFilter:
uniforms.textures.value[idx].minFilter = THREE.LinearFilter;
This causes Moiré effects.
Activate trilinear texture filtering with THREE.LinearMipMapLinearFilter (which is the default):
uniforms.textures.value[idx].minFilter = THREE.LinearMipMapLinearFilter;
Apart from that, your fragment shader code has undefined behavior, and mip-mapping won't work:
vec4 getTexture(int index){
for(int i = 0; i < ` + pics.length + `; i++){
if (i == index){ return texture2D(textures[i],vUv); }
}
}
void main()
{
// ....
gl_FragColor = mix(
getTexture(idx0),
getTexture(idx1),
(transition - (float(t) * chunk)) * ` + 1 + `.
);
}
See the OpenGL ES Shading Language 1.00 Specification, Appendix A (Limitations for ES 2.0), page 107:
5 Indexing of Arrays, Vectors and Matrices
[...]
Samplers
GLSL ES 1.00 supports both arrays of samplers and arrays of structures which contain samplers. In both these cases, for ES 2.0, support for indexing with a constant-index-expression is mandated but support for indexing with other values is not mandated.
[...]
6 Texture Accesses
Accessing mip-mapped textures within the body of a non-uniform conditional block gives an undefined value. A non-uniform conditional block is a block whose execution cannot be determined at compile time.
Do the texture lookups in the block scope of main, and use a constant-index-expression for the index into the texture sampler array:
e.g.
float a = transition - float(t) * chunk;
gl_FragColor = mix(texture2D(textures[0], vUv), texture2D(textures[1], vUv), a);
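Putting it together for the two-texture case in the question, the rewritten fragment shader could look like this (a sketch that cross-fades directly on transition and drops the chunk/currentUniform bookkeeping, so it is not a drop-in replacement):
var fragShader = `
    uniform sampler2D textures[2];
    uniform float transition;
    varying vec2 vUv;
    void main()
    {
        // both lookups use constant indices and run unconditionally in main,
        // so mip-mapping stays well defined
        vec4 texA = texture2D( textures[0], vUv );
        vec4 texB = texture2D( textures[1], vUv );
        gl_FragColor = mix( texA, texB, transition );
    }
`;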
If you are familiar with the examples for three.js, there is an example called the webgl ocean demo.
I can't see the actual materials when I run it locally or when I change the paths to the local resources. Has anyone else resolved this problem? I want to experiment with a different image instead of the globe that is floating in the ocean.
Here is the code:
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - geometry - terrain</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
color: #000;
font-family:Monospace;
font-size:13px;
margin: 0px;
overflow: hidden;
}
#info {
position: absolute;
top: 0px; width: 100%;
text-align:center;
padding: 5px;
}
a {
color: #a06851;
}
</style>
</head>
<body>
<div id="info">three.js - webgl ocean demo</div>
<script src="../build/three.min.js"></script>
<script src="js/controls/OrbitControls.js"></script>
<script src="js/Mirror.js"></script>
<script src="js/WaterShader.js"></script>
<script src="js/Detector.js"></script>
<script src="js/libs/stats.min.js"></script>
<script>
if ( ! Detector.webgl ) {
Detector.addGetWebGLMessage();
document.getElementById( 'container' ).innerHTML = "";
}
var container, stats;
var camera, scene, renderer;
var sphere;
var parameters = {
width: 2000,
height: 2000,
widthSegments: 250,
heightSegments: 250,
depth: 1500,
param: 4,
filterparam: 1
};
var waterNormals;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera( 55, window.innerWidth / window.innerHeight, 0.5, 3000000 );
camera.position.set( 2000, 750, 2000 );
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enablePan = false;
controls.minDistance = 1000.0;
controls.maxDistance = 5000.0;
controls.maxPolarAngle = Math.PI * 0.495;
controls.center.set( 0, 500, 0 );
scene.add( new THREE.AmbientLight( 0x444444 ) );
var light = new THREE.DirectionalLight( 0xffffbb, 1 );
light.position.set( - 1, 1, - 1 );
scene.add( light );
waterNormals = new THREE.ImageUtils.loadTexture( 'textures/waternormals.jpg' );
waterNormals.wrapS = waterNormals.wrapT = THREE.RepeatWrapping;
water = new THREE.Water( renderer, camera, scene, {
textureWidth: 512,
textureHeight: 512,
waterNormals: waterNormals,
alpha: 1.0,
sunDirection: light.position.clone().normalize(),
sunColor: 0xffffff,
waterColor: 0x001e0f,
distortionScale: 50.0,
} );
mirrorMesh = new THREE.Mesh(
new THREE.PlaneBufferGeometry( parameters.width * 500, parameters.height * 500 ),
water.material
);
mirrorMesh.add( water );
mirrorMesh.rotation.x = - Math.PI * 0.5;
scene.add( mirrorMesh );
// load skybox
var cubeMap = new THREE.CubeTexture( [] );
cubeMap.format = THREE.RGBFormat;
var loader = new THREE.ImageLoader();
loader.load( 'textures/skyboxsun25degtest.png', function ( image ) {
var getSide = function ( x, y ) {
var size = 1024;
var canvas = document.createElement( 'canvas' );
canvas.width = size;
canvas.height = size;
var context = canvas.getContext( '2d' );
context.drawImage( image, - x * size, - y * size );
return canvas;
};
cubeMap.images[ 0 ] = getSide( 2, 1 ); // px
cubeMap.images[ 1 ] = getSide( 0, 1 ); // nx
cubeMap.images[ 2 ] = getSide( 1, 0 ); // py
cubeMap.images[ 3 ] = getSide( 1, 2 ); // ny
cubeMap.images[ 4 ] = getSide( 1, 1 ); // pz
cubeMap.images[ 5 ] = getSide( 3, 1 ); // nz
cubeMap.needsUpdate = true;
} );
var cubeShader = THREE.ShaderLib[ 'cube' ];
cubeShader.uniforms[ 'tCube' ].value = cubeMap;
var skyBoxMaterial = new THREE.ShaderMaterial( {
fragmentShader: cubeShader.fragmentShader,
vertexShader: cubeShader.vertexShader,
uniforms: cubeShader.uniforms,
depthWrite: false,
side: THREE.BackSide
} );
var skyBox = new THREE.Mesh(
new THREE.BoxGeometry( 1000000, 1000000, 1000000 ),
skyBoxMaterial
);
scene.add( skyBox );
var geometry = new THREE.IcosahedronGeometry( 400, 4 );
for ( var i = 0, j = geometry.faces.length; i < j; i ++ ) {
geometry.faces[ i ].color.setHex( Math.random() * 0xffffff );
}
var material = new THREE.MeshPhongMaterial( {
vertexColors: THREE.FaceColors,
shininess: 100,
envMap: cubeMap
} );
sphere = new THREE.Mesh( geometry, material );
scene.add( sphere );
}
//
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
var time = performance.now() * 0.001;
sphere.position.y = Math.sin( time ) * 500 + 250;
sphere.rotation.x = time * 0.5;
sphere.rotation.z = time * 0.51;
water.material.uniforms.time.value += 1.0 / 60.0;
controls.update();
water.render();
renderer.render( scene, camera );
}
</script>
</body>
</html>
Here is my code (I just changed the relative paths):
<!DOCTYPE html>
<html lang="en">
<head>
<title>Test</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
color: #000;
font-family:Monospace;
font-size:13px;
margin: 0px;
overflow: hidden;
}
#info {
position: absolute;
top: 0px; width: 100%;
text-align:center;
padding: 5px;
}
a {
color: #a06851;
}
</style>
</head>
<body>
<div id="info">AA ocean demo</div>
<script src="js/three.min.js"></script>
<script src="js/controls/OrbitControls.js"></script>
<script src="js/Mirror.js"></script>
<script src="js/WaterShader.js"></script>
<script src="js/Detector.js"></script>
<script src="js/libs/stats.min.js"></script>
<script>
if ( ! Detector.webgl ) {
Detector.addGetWebGLMessage();
document.getElementById( 'container' ).innerHTML = "";
}
var container, stats;
var camera, scene, renderer;
var sphere;
var parameters = {
width: 2000,
height: 2000,
widthSegments: 250,
heightSegments: 250,
depth: 1500,
param: 4,
filterparam: 1
};
var waterNormals;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera( 55, window.innerWidth / window.innerHeight, 0.5, 3000000 );
camera.position.set( 2000, 750, 2000 );
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.enablePan = false;
controls.minDistance = 1000.0;
controls.maxDistance = 5000.0;
controls.maxPolarAngle = Math.PI * 0.495;
controls.center.set( 0, 500, 0 );
scene.add( new THREE.AmbientLight( 0x444444 ) );
var light = new THREE.DirectionalLight( 0xffffbb, 1 );
light.position.set( - 1, 1, - 1 );
scene.add( light );
waterNormals = new THREE.ImageUtils.loadTexture( 'js/waternormals.jpg' );
waterNormals.wrapS = waterNormals.wrapT = THREE.RepeatWrapping;
water = new THREE.Water( renderer, camera, scene, {
textureWidth: 512,
textureHeight: 512,
waterNormals: waterNormals,
alpha: 1.0,
sunDirection: light.position.clone().normalize(),
sunColor: 0xffffff,
waterColor: 0x001e0f,
distortionScale: 50.0,
} );
mirrorMesh = new THREE.Mesh(
new THREE.PlaneBufferGeometry( parameters.width * 500, parameters.height * 500 ),
water.material
);
mirrorMesh.add( water );
mirrorMesh.rotation.x = - Math.PI * 0.5;
scene.add( mirrorMesh );
// load skybox
var cubeMap = new THREE.CubeTexture( [] );
cubeMap.format = THREE.RGBFormat;
var loader = new THREE.ImageLoader();
loader.load( 'js/skyboxsun25degtest.png', function ( image ) {
var getSide = function ( x, y ) {
var size = 1024;
var canvas = document.createElement( 'canvas' );
canvas.width = size;
canvas.height = size;
var context = canvas.getContext( '2d' );
context.drawImage( image, - x * size, - y * size );
return canvas;
};
cubeMap.images[ 0 ] = getSide( 2, 1 ); // px
cubeMap.images[ 1 ] = getSide( 0, 1 ); // nx
cubeMap.images[ 2 ] = getSide( 1, 0 ); // py
cubeMap.images[ 3 ] = getSide( 1, 2 ); // ny
cubeMap.images[ 4 ] = getSide( 1, 1 ); // pz
cubeMap.images[ 5 ] = getSide( 3, 1 ); // nz
cubeMap.needsUpdate = true;
} );
var cubeShader = THREE.ShaderLib[ 'cube' ];
cubeShader.uniforms[ 'tCube' ].value = cubeMap;
var skyBoxMaterial = new THREE.ShaderMaterial( {
fragmentShader: cubeShader.fragmentShader,
vertexShader: cubeShader.vertexShader,
uniforms: cubeShader.uniforms,
depthWrite: false,
side: THREE.BackSide
} );
var skyBox = new THREE.Mesh(
new THREE.BoxGeometry( 1000000, 1000000, 1000000 ),
skyBoxMaterial
);
scene.add( skyBox );
var geometry = new THREE.IcosahedronGeometry( 400, 4 );
for ( var i = 0, j = geometry.faces.length; i < j; i ++ ) {
geometry.faces[ i ].color.setHex( Math.random() * 0xffffff );
}
var material = new THREE.MeshPhongMaterial( {
vertexColors: THREE.FaceColors,
shininess: 100,
envMap: cubeMap
} );
sphere = new THREE.Mesh( geometry, material );
scene.add( sphere );
}
//
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
var time = performance.now() * 0.001;
sphere.position.y = Math.sin( time ) * 500 + 250;
sphere.rotation.x = time * 0.5;
sphere.rotation.z = time * 0.51;
water.material.uniforms.time.value += 1.0 / 60.0;
controls.update();
water.render();
renderer.render( scene, camera );
}
</script>
</body>
</html>
If you check your browser's console, you'll see error messages regarding local file access: by default, modern browsers do not allow it. This problem isn't related to three.js at all; it is a common browser security policy.
Basically, there are two workarounds:
run a local server or put your code online
modify the browser settings to allow local file access.
As a lot of people trying their hand at three.js run into this issue when loading textures, there is a dedicated three.js wiki page on the topic.
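If you go the local-server route, anything that can serve static files will do. As a rough sketch (not from the original answer), a few lines of Node.js are enough; save this as server.js next to your HTML file and run node server.js:
// very naive static file server, just for local experiments
const http = require('http');
const fs = require('fs');
const path = require('path');
http.createServer((req, res) => {
  const url = req.url === '/' ? '/index.html' : req.url;
  const file = path.join(__dirname, decodeURIComponent(url.split('?')[0]));
  fs.readFile(file, (err, data) => {
    if (err) { res.writeHead(404); res.end('Not found'); return; }
    res.writeHead(200);
    res.end(data);
  });
}).listen(8080, () => console.log('Serving on http://localhost:8080'));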
I'm rendering a picking scene that contains sprites. As my cursor gets close to a sprite, it registers as a color and gets "picked". This invisible border gets larger the closer you zoom into the sprites.
Open up your console to see the IDs printed in real time. Move your cursor closer to and further away from large and small sprites. You'll see that sprites get selected on an invisible border. This behavior does not happen with regular geometry, only with sprites.
It's weird because I'm rendering out what renderer.readRenderTargetPixels actually sees.
How can I get rid of the invisible borders for more accurate picking?
var renderer, scene, camera, controls;
var particles, uniforms;
var PARTICLE_SIZE = 50;
var raycaster, intersects;
var mouse, INTERSECTED;
var pickingTexture;
var numOfVertices;
init();
animate();
function init() {
container = document.getElementById('container');
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 10000);
camera.position.z = 150;
//
var geometry1 = new THREE.BoxGeometry(200, 200, 200, 4, 4, 4);
var vertices = geometry1.vertices;
numOfVertices = vertices.length;
var positions = new Float32Array(vertices.length * 3);
var colors = new Float32Array(vertices.length * 3);
var sizes = new Float32Array(vertices.length);
var vertex;
var color = new THREE.Color();
for (var i = 0, l = vertices.length; i < l; i++) {
vertex = vertices[i];
vertex.toArray(positions, i * 3);
color.setHex(i + 1);
color.toArray(colors, i * 3);
sizes[i] = PARTICLE_SIZE * 0.5;
}
var geometry = new THREE.BufferGeometry();
geometry.addAttribute('position', new THREE.BufferAttribute(positions, 3));
geometry.addAttribute('customColor', new THREE.BufferAttribute(colors, 3));
geometry.addAttribute('size', new THREE.BufferAttribute(sizes, 1));
//
var material = new THREE.ShaderMaterial({
uniforms: {
// texture: {type: "t", value: THREE.ImageUtils.loadTexture("../textures/circle.png")}
texture: {type: "t", value: THREE.ImageUtils.loadTexture("../textures/disc.png")}
},
vertexShader: document.getElementById('vertexshader').textContent,
fragmentShader: document.getElementById('fragmentshader').textContent,
depthTest: false,
transparent: false
// alphaTest: 0.9
});
//
particles = new THREE.Points(geometry, material);
scene.add(particles);
//
renderer = new THREE.WebGLRenderer({
antialias: true,
alpha: true
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setClearColor(0xffffff);
container.appendChild(renderer.domElement);
//
raycaster = new THREE.Raycaster();
mouse = new THREE.Vector2();
//
//
window.addEventListener('resize', onWindowResize, false);
document.addEventListener('mousemove', onDocumentMouseMove, false);
// defaults are on the right (except minFilter)
var options = {
format: THREE.RGBAFormat, // THREE.RGBAFormat
type: THREE.UnsignedByteType, // THREE.UnsignedByteType
anisotropy: 1, // 1
magFilter: THREE.LinearFilter, // THREE.LinearFilter
minFilter: THREE.LinearFilter, // THREE.LinearFilter
depthBuffer: true, // true
stencilBuffer: true // true
};
pickingTexture = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight, options);
pickingTexture.texture.generateMipmaps = false;
controls = new THREE.OrbitControls(camera, container);
controls.damping = 0.2;
controls.enableDamping = false;
}
function onDocumentMouseMove(e) {
// event.preventDefault();
mouse.x = e.clientX;
mouse.y = e.clientY;
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function animate() {
requestAnimationFrame(animate);
controls.update();
render();
}
function render() {
pick();
renderer.render(scene, camera);
}
function pick() {
renderer.render(scene, camera, pickingTexture);
//create buffer for reading single pixel
var pixelBuffer = new Uint8Array(4);
//read the pixel under the mouse from the texture
renderer.readRenderTargetPixels(pickingTexture, mouse.x, pickingTexture.height - mouse.y, 1, 1, pixelBuffer);
//interpret the pixel as an ID
var id = ( pixelBuffer[0] << 16 ) | ( pixelBuffer[1] << 8 ) | ( pixelBuffer[2] );
if (id <= numOfVertices) console.log(id);
}
body {
color: #ffffff;
background-color: #000000;
margin: 0px;
overflow: hidden;
}
<script src="http://threejs.org/build/three.min.js"></script>
<script src="http://threejs.org/examples/js/controls/OrbitControls.js"></script>
<script type="x-shader/x-fragment" id="fragmentshader">
uniform sampler2D texture;
varying vec3 vColor;
void main() {
// solid squares of color
gl_FragColor = vec4( vColor, 1.0 );
}
</script>
<script type="x-shader/x-vertex" id="vertexshader">
attribute float size;
attribute vec3 customColor;
varying vec3 vColor;
void main() {
vColor = customColor;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = size * ( 300.0 / length( mvPosition.xyz ) );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<div id="container"></div>
The problem is that you're on a device with a devicePixelRatio != 1.0, and three.js is lying about the size.
Because you called renderer.setPixelRatio, magic now happens behind the scenes: your canvas is not the size you requested, but some other size based on a formula hidden in the three.js code.
So what happens? Your canvas is one size, but your render target is a different size. Your shader uses gl_PointSize to draw its points, and that size is in device pixels. Because your render target is a different size, the points end up a different size in your render target than they are on screen.
Remove the call to renderer.setPixelRatio and it will start working.
IMO the correct way to fix this is to use devicePixelRatio yourself, because that way everything that is happening is 100% visible to you. No magic happening behind the scenes.
So,
Get rid of the container and use a canvas directly
<canvas id="c"></canvas>
Set the canvas to use 100vw for width and 100vh for height, and give the body margin: 0.
canvas { width: 100vw; height: 100vh; display: block; }
body { margin: 0; }
This will make your canvas stretch automatically to fill the window.
Use the size the browser stretched the canvas to in order to choose the size its drawing buffer should be, and multiply by devicePixelRatio. That assumes you actually want to support the device pixel ratio. There's no need to do this twice, so following D.R.Y., just do it in onWindowResize.
canvas = document.getElementById("c");
renderer = new THREE.WebGLRenderer({
antialias: true,
alpha: true,
canvas: canvas,
});
pickingTexture = new THREE.WebGLRenderTarget(1, 1, options);
onWindowResize();
...
function onWindowResize() {
var width = canvas.clientWidth * window.devicePixelRatio;
var height = canvas.clientHeight * window.devicePixelRatio;
camera.aspect = width / height;
camera.updateProjectionMatrix();
renderer.setSize(width, height, false); // YOU MUST PASS FALSE HERE otherwise three.js will muck with the CSS
pickingTexture.setSize(width, height);
}
Convert the mouse coordinates into device coordinates
renderer.readRenderTargetPixels(
pickingTexture,
mouse.x * window.devicePixelRatio,
pickingTexture.height - mouse.y * window.devicePixelRatio,
1, 1, pixelBuffer);
Here's that solution
var renderer, scene, camera, controls;
var particles, uniforms;
var PARTICLE_SIZE = 50;
var raycaster, intersects;
var mouse, INTERSECTED;
var pickingTexture;
var numOfVertices;
var info = document.querySelector('#info');
init();
animate();
function init() {
canvas = document.getElementById('c');
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, 1, 1, 10000);
camera.position.z = 150;
//
var geometry1 = new THREE.BoxGeometry(200, 200, 200, 4, 4, 4);
var vertices = geometry1.vertices;
numOfVertices = vertices.length;
var positions = new Float32Array(vertices.length * 3);
var colors = new Float32Array(vertices.length * 3);
var sizes = new Float32Array(vertices.length);
var vertex;
var color = new THREE.Color();
for (var i = 0, l = vertices.length; i < l; i++) {
vertex = vertices[i];
vertex.toArray(positions, i * 3);
color.setHex(i + 1);
color.toArray(colors, i * 3);
sizes[i] = PARTICLE_SIZE * 0.5;
}
var geometry = new THREE.BufferGeometry();
geometry.setAttribute('position', new THREE.BufferAttribute(positions, 3));
geometry.setAttribute('customColor', new THREE.BufferAttribute(colors, 3));
geometry.setAttribute('size', new THREE.BufferAttribute(sizes, 1));
//
var loader = new THREE.TextureLoader();
var material = new THREE.ShaderMaterial({
uniforms: {
// texture: {type: "t", value: THREE.ImageUtils.loadTexture("../textures/circle.png")}
texture: {value: loader.load("https://i.imgur.com/iXT97XR.png")}
},
vertexShader: document.getElementById('vertexshader').textContent,
fragmentShader: document.getElementById('fragmentshader').textContent,
depthTest: false,
transparent: false
// alphaTest: 0.9
});
//
particles = new THREE.Points(geometry, material);
scene.add(particles);
//
renderer = new THREE.WebGLRenderer({
antialias: true,
alpha: true,
canvas: canvas,
});
renderer.setClearColor(0xffffff);
//
raycaster = new THREE.Raycaster();
mouse = new THREE.Vector2();
//
//
window.addEventListener('resize', onWindowResize, false);
document.addEventListener('mousemove', onDocumentMouseMove, false);
// defaults are on the right (except minFilter)
var options = {
format: THREE.RGBAFormat, // THREE.RGBAFormat
type: THREE.UnsignedByteType, // THREE.UnsignedByteType
anisotropy: 1, // 1
magFilter: THREE.LinearFilter, // THREE.LinearFilter
minFilter: THREE.LinearFilter, // THREE.LinearFilter
depthBuffer: true, // true
stencilBuffer: true // true
};
pickingTexture = new THREE.WebGLRenderTarget(1, 1, options);
pickingTexture.texture.generateMipmaps = false;
controls = new THREE.OrbitControls(camera, canvas);
controls.damping = 0.2;
controls.enableDamping = false;
onWindowResize();
}
function onDocumentMouseMove(e) {
// event.preventDefault();
mouse.x = e.clientX;
mouse.y = e.clientY;
}
function onWindowResize() {
var width = canvas.clientWidth * window.devicePixelRatio;
var height = canvas.clientHeight * window.devicePixelRatio;
camera.aspect = width / height;
camera.updateProjectionMatrix();
renderer.setSize(width, height, false); // YOU MUST PASS FALSE HERE!
pickingTexture.setSize(width, height);
}
function animate() {
requestAnimationFrame(animate);
controls.update();
render();
}
function render() {
pick();
renderer.render(scene, camera);
}
function pick() {
renderer.setRenderTarget(pickingTexture);
renderer.setClearColor(0);
renderer.render(scene, camera);
renderer.setClearColor(0xFFFFFF);
renderer.setRenderTarget(null)
//create buffer for reading single pixel
var pixelBuffer = new Uint8Array(4);
//read the pixel under the mouse from the texture
renderer.readRenderTargetPixels(pickingTexture, mouse.x * window.devicePixelRatio, pickingTexture.height - mouse.y * window.devicePixelRatio, 1, 1, pixelBuffer);
//interpret the pixel as an ID
var id = ( pixelBuffer[0] << 16 ) | ( pixelBuffer[1] << 8 ) | ( pixelBuffer[2] );
//if (id > 0) console.log(id);
info.textContent = id;
}
body {
color: #ffffff;
background-color: #000000;
margin: 0;
}
canvas { width: 100vw; height: 100vh; display: block; }
#info { position: absolute; left: 0; top: 0; color: red; background: black; padding: 0.5em; font-family: monospace; }
<script src="https://threejs.org/build/three.min.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<script type="x-shader/x-fragment" id="fragmentshader">
uniform sampler2D texture;
varying vec3 vColor;
void main() {
// solid squares of color
gl_FragColor = vec4( vColor, 1.0 );
}
</script>
<script type="x-shader/x-vertex" id="vertexshader">
attribute float size;
attribute vec3 customColor;
varying vec3 vColor;
void main() {
vColor = customColor;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = size * ( 300.0 / length( mvPosition.xyz ) );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<canvas id="c"></canvas>
<div id="info"></div>
Note a few other things.
I'd guess you really want to clear the picking texture to zero instead of white. That way 0 = nothing there, anything else = something there.
renderer.setClearColor(0);
renderer.render(scene, camera, pickingTexture);
renderer.setClearColor(0xFFFFFF);
No idea what the id <= numOfVertices check was meant to do.
So, given that it's clearing to zero now, the code is just
if (id) console.log(id);
I don't set the renderer size, the pickingTexture size, or the camera aspect at init time.
Why repeat myself? onWindowResize already sets them.
You need to resize the pickingTexture render target when the canvas is resized, so that the two match in size.
I removed most references to window.innerWidth and window.innerHeight.
I would have removed all of them, but I didn't want to change even more code for this example. Using window.innerWidth ties the code to the window: if you ever want to use the code in something that's not the full size of the window (say you make an editor), you'll have to change it.
It's not any harder to write the code in a way that works in more situations, so why make more work for yourself later?
Other solutions I didn't choose
You could call render.setPixelRatio and then set the pickingTexture render target's size with window.devicePixelRatio
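For reference, a sketch of what that alternative would look like (this is the approach the rest of this section argues against):
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
// the render target gets no automatic pixelRatio treatment, so scale it yourself
pickingTexture.setSize(
    window.innerWidth * window.devicePixelRatio,
    window.innerHeight * window.devicePixelRatio);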
I didn't pick this solution because you have to guess what three.js is doing behind the scenes. Your guess might be correct today but wrong tomorrow. It seems better that if you tell three.js to make something width by height, it just makes it width by height and not something else. Similarly, you'd have to guess when three.js is going to apply pixelRatio and when it's not. As you noticed above, it doesn't apply it to the size of the render target, and it can't, because it doesn't know what your purpose is. Are you making a render target for picking? For a fullscreen effect? For capture? For a non-fullscreen effect? Since it can't know, it can't apply the pixelRatio for you. This happens all over the three.js code: some places apply pixelRatio, other places don't, and you're left guessing. If you never set pixelRatio, that problem disappears.
You could pass in devicePixelRatio into your shader
<script type="x-shader/x-vertex" id="vertexshader">
attribute float size;
attribute vec3 customColor;
varying vec3 vColor;
uniform float devicePixelRatio; // added
void main() {
vColor = customColor;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = size * ( 300.0 / length( mvPosition.xyz ) ) * devicePixelRatio;
gl_Position = projectionMatrix * mvPosition;
}
</script>
and of course you'd need to set devicePixelRatio in your uniforms.
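For example (a sketch reusing the ShaderMaterial from the snippet above, with the uniform name matching the shader):
var material = new THREE.ShaderMaterial({
  uniforms: {
    texture: {value: loader.load("https://i.imgur.com/iXT97XR.png")},
    devicePixelRatio: {value: window.devicePixelRatio}
  },
  vertexShader: document.getElementById('vertexshader').textContent,
  fragmentShader: document.getElementById('fragmentshader').textContent,
  depthTest: false,
  transparent: false
});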
I might pick this solution. The minor problem is that if the pickingTexture is not the same resolution as the canvas's backbuffer, you can get off-by-one errors. In this case, if the canvas were 2x the pickingTexture, then 3 of every 4 pixels in the canvas wouldn't exist in the pickingTexture. Depending on your application that might be okay, though; you can't pick half pixels, at least not with the mouse.
Another reason I would probably not pick this solution is that it just leaves the issue to pop up in other places. lineWidth is one, gl_FragCoord is another, and so are the viewport and scissor settings. It seems better to make the render target size match the canvas so that everything is the same, rather than add more and more workarounds and have to remember where to use one size versus another. Say tomorrow I start using PointsMaterial; it also has issues with devicePixelRatio. By not calling renderer.setPixelRatio, those problems go away.
I'm currently playing with this great library, but I have some difficulties with the EffectComposer.
I can't send a texture to a postprocess shader introduced by a ShaderPass.
I think this is a bug... or I'm doing something stupid (needsUpdate, wrap, ...?).
(r54, W7, Nv9700mGT, FF 17.0.1 and Chrome 24.0.1312.52)
I used the "webgl_postprocessing.html" example to reproduce the phenomenon just by adding these shaders :
<script id="vertexShaderBasic" type="x-shader/x-vertex">
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
This at the beginning of the main script, to be sure the sprite is loaded:
var composer2;
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
This in the composer setup, after:
composer.addPass( new THREE.RenderPass( scene, camera ) );
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 }, // <- something wrong here
previousRender: { type: "t", value: null }
};
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
// link with the previous render
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
// the custom shader
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
I also comment this out, because it's not relevant any more:
//effect.renderToScreen = true;
and I add this at the end:
composer2.render();
The link between the two passes works well, but the sprite never appears on the EffectComposer quad that covers the screen...
Thanks, and sorry for my English.
EDIT to recap:
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - postprocessing</title>
<meta charset="utf-8">
<style>
body {
margin: 0px;
background-color: #000000;
overflow: hidden;
}
</style>
</head>
<body>
<script src="../build/three.min.js"></script>
<script src="js/shaders/CopyShader.js"></script>
<script src="js/shaders/DotScreenShader.js"></script>
<script src="js/shaders/RGBShiftShader.js"></script>
<script src="js/postprocessing/EffectComposer.js"></script>
<script src="js/postprocessing/RenderPass.js"></script>
<script src="js/postprocessing/MaskPass.js"></script>
<script src="js/postprocessing/ShaderPass.js"></script>
<script id="vertexShaderBasic" type="x-shader/x-vertex">
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
<script>
var camera, scene, renderer, composer;
var composer2;
var object, light;
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
//var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start ); // doesn't change anything
function init() {
renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
//
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.z = 400;
scene = new THREE.Scene();
scene.fog = new THREE.Fog( 0x000000, 1, 1000 );
object = new THREE.Object3D();
scene.add( object );
var geometry = new THREE.SphereGeometry( 1, 4, 4 );
var material = new THREE.MeshPhongMaterial( { color: 0xffffff, shading: THREE.FlatShading } );
for ( var i = 0; i < 100; i ++ ) {
var mesh = new THREE.Mesh( geometry, material );
mesh.position.set( Math.random() - 0.5, Math.random() - 0.5, Math.random() - 0.5 ).normalize();
mesh.position.multiplyScalar( Math.random() * 400 );
mesh.rotation.set( Math.random() * 2, Math.random() * 2, Math.random() * 2 );
mesh.scale.x = mesh.scale.y = mesh.scale.z = Math.random() * 50;
object.add( mesh );
}
scene.add( new THREE.AmbientLight( 0x222222 ) );
light = new THREE.DirectionalLight( 0xffffff );
light.position.set( 1, 1, 1 );
scene.add( light );
// postprocessing
composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
/////////////////////////////////////
/////////////////////////////////////
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 },
previousRender: { type: "t", value: null }
};
//uniforms1.sprite1.value.wrapS = uniforms1.sprite1.value.wrapT = THREE.RepeatWrapping;
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
/////////////////////////////////////
/////////////////////////////////////
var effect = new THREE.ShaderPass( THREE.DotScreenShader );
effect.uniforms[ 'scale' ].value = 4;
composer.addPass( effect );
var effect = new THREE.ShaderPass( THREE.RGBShiftShader );
effect.uniforms[ 'amount' ].value = 0.0015;
//effect.renderToScreen = true;
composer.addPass( effect );
//
window.addEventListener( 'resize', onWindowResize, false );
}
function $( id ) {
return document.getElementById( id );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
var time = Date.now();
object.rotation.x += 0.005;
object.rotation.y += 0.01;
composer.render();
composer2.render();
}
</script>
</body>
</html>
What I want: Good
What I get: NotGood
I had this issue too, and found a workaround.
I debugged it to determine that the texture ID for the extra texture is different in the shader pass than expected, which is bad. If you look in the ShaderPass constructor, you'll see it clones the uniforms. That seems to be the cause. If I edit that code to restore the original texture object in the cloned uniforms, it works as expected. So that should work for you too.
I'm trying to get some kind of (proper) bug fix integrated into the actual release.
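In the meantime, the same workaround can be applied from user code without editing ShaderPass itself: after the pass has cloned the uniforms, point the cloned uniform back at the original texture object. Something like this (a sketch based on the code in the question):
var blenderPass = new THREE.ShaderPass( blenderShader );
// ShaderPass cloned the uniforms, so restore the real texture object on the clone
blenderPass.uniforms.sprite1.value = sprite1;
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );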
Try this
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start );
three.js r.54