Three.JS Cannot change HDR RGBELoader Offset.y Value

I have an object that lets me change the texture's offset.y value when a normal texture is applied; however, when I use RGBELoader with an HDR file I can no longer change offset.y.
My code is as follows:
var loader3 = new THREE.ObjectLoader();
loader3.load( "model/dome/dome2.json", function ( obj ) {
    obj.scale.x = 7;
    obj.scale.y = 7;
    obj.scale.z = 7;
    obj.position.x = 0;
    obj.position.z = 0;
    obj.position.y = 0;
    var loader = new THREE.RGBELoader();
    var texture = loader.load( "maps/scene2.hdr", function ( texture, textureData ) {
        materialHDR = new THREE.ShaderMaterial( {
            uniforms: {
                tDiffuse: { type: "t", value: texture },
                exposure: { type: "f", value: textureData.exposure },
                brightMax: { type: "f", value: textureData.gamma }
            },
            vertexShader: getText( 'vs-hdr' ),
            fragmentShader: getText( 'fs-hdr' )
        } );
        texture.offset.y = 0.5; // HERE LIES THE PROBLEM
        texture.flipY = true;
        obj.traverse( function ( child ) {
            if ( child instanceof THREE.Mesh ) {
                child.material = materialHDR;
                child.receiveShadow = true;
                //child.material.side = THREE.BackSide;
                child.material.side = THREE.DoubleSide;
            }
        } );
        scene.add( obj );
    } );
} );
The HDR image loads just fine and is applied to the object just as it is when I use a normal texture; however, I just cannot move the texture around the model at all.
I have tried wrapping with repeat and all sorts of combinations, but the stubborn offset will not budge!
I would also like to add that I am currently learning three.js (awesome!), so excuse the code above if it has any additional errors.
Thanks in advance for any help; this is driving me nuts!
Shader code below
<script id="fs-hdr" type="x-shader/x-fragment">
uniform sampler2D tDiffuse;
uniform float exposure;
uniform float brightMax;
varying vec2 vUv;
vec3 decode_pnghdr( const in vec4 color ) {
vec4 rgbcolor = vec4( 0.0, 0.0, 0.0, 0.0 );
if ( color.w > 0.0 ) {
float f = pow(2.0, 127.0*(color.w-0.5));
rgbcolor.xyz = color.xyz * f;
}
return rgbcolor.xyz;
/*
// remove gamma correction
vec4 res = color * color;
// decoded RI
float ri = pow( 2.0, res.w * 32.0 - 16.0 );
// decoded HDR pixel
res.xyz = res.xyz * ri;
return res.xyz;
*/
}
void main() {
vec4 color = texture2D( tDiffuse, vUv );
color.xyz = decode_pnghdr( color );
// apply gamma correction and exposure
//gl_FragColor = vec4( pow( exposure * color.xyz, vec3( 0.474 ) ), 1.0 );
// Perform tone-mapping
float Y = dot(vec4(0.30, 0.59, 0.11, 0.0), color);
float YD = exposure * (exposure/brightMax + 1.0) / (exposure + 1.0);
color *= YD;
gl_FragColor = vec4( color.xyz, 1.0 );
}
</script>
<!-- HDR vertex shader -->
<script id="vs-hdr" type="x-shader/x-vertex">
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 3 );
}
</script>

Your custom shader must handle the offset/repeat itself.
Add offsetRepeat to your uniforms:
offsetRepeat: { type: "v4", value: new THREE.Vector4( 0, 0.5, 1, 1 ) }
And modify your vertex shader:
uniform vec4 offsetRepeat;
varying vec2 vUv;
void main() {
    vUv = uv * offsetRepeat.zw + offsetRepeat.xy;
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
Your texture must be power-of-two, and set
texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
three.js r.75
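To make that concrete, here is a minimal sketch of how the material from the question could wire up the offsetRepeat uniform; it reuses the getText helper and uniform names from the question, and the offset values are only illustrative. The texture is then moved by changing the uniform rather than texture.offset:
var loader = new THREE.RGBELoader();
loader.load( "maps/scene2.hdr", function ( texture, textureData ) {
    // the texture must be power-of-two for RepeatWrapping to work
    texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
    materialHDR = new THREE.ShaderMaterial( {
        uniforms: {
            tDiffuse: { type: "t", value: texture },
            exposure: { type: "f", value: textureData.exposure },
            brightMax: { type: "f", value: textureData.gamma },
            offsetRepeat: { type: "v4", value: new THREE.Vector4( 0, 0.5, 1, 1 ) }
        },
        vertexShader: getText( 'vs-hdr' ),
        fragmentShader: getText( 'fs-hdr' )
    } );
    // later, move the texture by updating the uniform instead of texture.offset:
    materialHDR.uniforms.offsetRepeat.value.y = 0.25;
} );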

Related

Preventing Texture Border Stretching in WebGL when Applying a Wave Effect with a Fragment Shader

I'm using GLSL via Three.js in my project.
I've got a flag texture like so:
When I apply a Fragment Shader to give it a wavy effect, the borders of the texture seem to stretch. However, I don't want that; I want the background color to be seen (in this case, clear) so that it just looks like a flag that's waving. You can see the problem in the CodePen here.
For reference (and if the CodePen for some reason doesn't work in the future), here's the code:
The Vertex Shader
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
The Fragment Shader
uniform vec2 u_resolution;
uniform float u_time;
uniform sampler2D image;
uniform vec2 strength;
uniform float speed;
varying vec2 vUv;
vec2 sineWave( vec2 p ) {
float x = strength.x * sin(-1.0 * p.y + 50.0 * p.x + u_time * speed);
float y = strength.y * sin(p.y + 10.0 * p.x + u_time * speed);
return vec2(p.x, p.y + y);
}
void main() {
vec4 color = texture2D(image, sineWave(vUv));
gl_FragColor = color;
}
Three.js Code
const CANVAS_WIDTH = 250;
const CANVAS_HEIGHT = 250;
const imageDataUri = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAPoAAAD6CAYAAACI7Fo9AAAQOElEQVR4nO2dTWtcyRWGT2v0QSwUZqVh0EbeBbQek0A2Yf5AdlkNjLchf6LzC2YgOEtlK6+8SJYhmwEbeaFJwJCdjbE3zlK0pWmBOlS729Nu9e2+VfdU3apTzwMemK/X95br1aPbfVQ9+ONvfv1SRI4FAKzyaltEhoP9vdPP//qNDPb37tznT//6r4z+8k/Z/9PXsve7XwWtg2bGL//8e9k+Oer9OrgXMkrImDHcevT02d8mo59eXf/jP0XcuKViUHIyYmZsHR64v33lOr41+3fD67//Wyajn/gDIIMMAxn3Hv5WJqOx+0dD95dp0ZetzkYgg4yyMybvx07cU5vLvOgzplZ3v9gIZJBRbsbug/vTHs9t7hgs/sfzV+DZCGSQUW7G1ePncnV27mx+f/7fbC39P+4V+OlXBEs3TgYZtWS419mWbS7LRV/3CnzNi0cGGaVkuO4uPpvPWTa6rHoFvuQbJ4OMWjKabC6riu5rdf4AyCAjj4wmm0uD0aWt1dkIZJCRR8Y6m0tT0dtYnY1ABhn5ZKyzuawxuqyzOhuBDDLyydhkc1lX9CarsxHIICOvjE02lw1Gl2WrsxHIICOvjDY2d3y27l8+f/Pmx6+++PLbwe7257fvLtkIZJCRWcb1kwu5uXjtbP5wXdZ2i8t2Vj91XznYCGSQkU9GW5tLi2/dPz6ru9+MjUAGGflktHk2n7Ox6DOG4/OXXtNyvhdNBhlktM/wsbm0LXroDDx/iGSQESfDx+biYXTxnYFnI5BBRpwMX5uLT9F9rM5GIIOMeBm+NhdPo0sbq7MRyCAjXkaIzcW36JuszkYgg4y4GSE2lwCjS5PV2QhkkBE3I9TmElL0VVZnI5BBRvyMUJtLoNFl0epsBDLIiJ/RxeayfAqsD+7E2J2To+ObF2/ZCGSQETlj1cmuPoQa3TF0JXefCMFGIIOMeBldbS5dij57TnjlPhEiBDYTGWS0o8uz+ZwuRpeQE2OFjUAGGa3RsLl0LXrIDDwbgQwy2qNhc1EwuvhYnY1ABhnt0bK5aBS9rdXZCGSQ4YeWzUXJ6LLJ6mwEMsjwX5Ors3PRsLlsOjOuLYtny+2cHN25YDYCGWT4r4l7V2vTWXBt0TK6rLI6G4EMMsLWZIaKzUXL6LLC6mwEMsgIW5OtwwOZjMZqNhdlo8vc6u4XG4EMMvzXxE2aTkbTITQ1m0uXWfcm3Ay8iByzEcggw39Nbv932WmmvQltozuGg/092X0Qdp1sJjJqyVheE9cZrffNl1EveuiJscJGIKOijFVrovm++TIxjC4hM/BsBDJqyVi1JppTcKuIUnRfq7MRyKglo2lNYtpcIhpd2lqdjUBGLRlNaxLb5hKz6G2szkYgo5aMdWsS2+YS2eiyzupsBDJqyVi3JilsLrGL3mR1NgIZtWRsWpMUNpcERpdlq7MRyKglY9OapLK5aM66N7E4A3/77pKNQEYVGW3W5PrJhdxcvFadaW9iO/ZvMMNZ/dR9BWMjkGE9o82apLS5JPrW/eOzurthNgIZljParkmqZ/M5SYo+Yzg+f+l9YqywmcgoJKPt/aS2uaQseugMPMUgo4QMn/tJbXNJbHTxnYGnGGSUkOFzP33YXFIX3cfqFIOMEjJ876cPm0sPRpc2VqcYZJSQ4Xs/fdlc+ij6JqtTDDJKyAi5n75sLj0ZXZqsTjHIKCEj5H76tLn0VfRVVqcYZJSQEXo/fdpcejS6LFqdYpBRQkbo/fRtc4lxCqwP7sTYnZOj45sXbykGGVlndLmfq8fPo5zs6kOfRnd870ruzrKmGGTkmtHlfnKwufRd9EdPn33nPl9q8n4c9P9TLjJiZ3S9lr6fzef0bXQJOTFWKAYZCTK6XksuNpccih4yA08xyIidoXEtudhcMjG6+FidYpARO0PjWnKyueRS9LZWpxhkxM7QupacbC4ZGV02WZ1ikBE7Q+tacrO55FT0dVanGGTEztC6FsnQ5pKZ0WWV1SkGGbEztK5lnnN1di452VxSnALrw+KJsTsnRxSDjOgZWteymONmQ1Kc7OpDqlNgfZieGDu4tyvvT3+gGGREy9C6Fvm05JKbzaXvWfcm3Ay8iBxTDDJiZWhdy2LO1uGB3L677HWmvYncntHnDAf7e7L7IGy9KBcZKdZkMcf9vMZkNB3lzs7mkmvRQ0+MFYpBRqI1Wc5xP6+R2yvti+RqdAmZgacYZKRYk+Uc951nbu+bL5Nt0X2tTjHISLEmq3JyfN98mZyNLm2tTjHISLEmq3JynIJbRdZFb2N1ikFGijVpyinB5lKA0WWd1SkGGSnWpCmnFJtLCUVvsjrFICPFmqzLKcXmUojRZdnqFIOMFGuyLqckm0tus+5NLM7A3767pBhkRF+TTTnXTy7k5uJ1djPtTeQ4697EdAbefSWlGGTEXJNNOaXZXAr61v3js7pbdIpBRqw1aZNT0rP5nGKKPmM4Pn/pfWKsUAzTGVpr0ianRJtLaUUPnYGnGHYztNakbU6JNpcCjS6+M/AUw26G1pq0zSnV5lJi0X2sTjHsZmitiU9OqTaXQo0ubaxOMexmaK2JT07JNpdSi77J6hTDbobWmvjmlGxzKdjo0mR1imE3Q2tNfHNKt7mUXPRVVqcYdjO01iQkp3SbS+FGl0WrUwy7GVprEpJjweaS6ymwPrgTY3dOjo5vXrylGAYztNYkNOfq8XP3gQxZnuzqQ+lGdwxdyd0pnBTDVobW/YTmWLG5WCj67LnplTuFMwTKlWeG1v10ybHwbD7HgtEl5MRYoVzZZmjdT5ccSzYXK0UPmYGnXHlmaN1P1xxLNhdDRhcfq1OuPDO07qdrjjWbi6Wit7U65cozQ+t+NHKs2VyMGV02WZ1y5ZmhdT8aORZtLtaKvs7qlCvPDK370cqxaHMxaHRZZXXKlWeG1v1o5Vi1uVgs+rLVKVeeGVr3o5lj1eZS2CmwPkxPjB3c25X3pz9QrswytO5HO+fq7Fws2lwszLo34WbgReSYcuWVoXU/MXLchGXpM+1NWHxGnzMc7O9NP7s6BAqqn6F1LTFyZpi0uZTySS0hLH66y87JkVcCBdXP0LqWGDlbhwcyGY2L+dSVECwbXUJm4CmofobWtcTIcT/1OBlNfyDKrM3FetF9Z+ApqH6G1rXEynE/9Wj1lfZFrBtd2lqdgupnaF1LrBz3+o3V982XMV/0NlanoPoZWtcSM8fy++bL1GB0WWd1CqqfoXUtMXMsT8GtooqiN1mdgupnaF1L7JyabC4VGV2WrU5B9TO0riV2Tm02F8vvoy+z+L767btLCqqcoXUtKXKun1zIzcVr0++bL2N11r2J6Qy8+4pOQfUytK4lRU6NNpfKvnX/+Kzu/uApaH0llwqfzedUVfQZw/H5S+8TY4WSF59Tq82lxqKHnBgrlNxETq02l0qNLr4z8JS8/JyabS61Ft3H6pTcRk7NNpeKjS5trE7JbeTUbnO
pueibrE7J7eTUbnOp3OjSZHVKbicHm3+g6qKvsjolt5WDzT9Qu9Fl0eqU3FYONv8Zs6fA+uBOjN05OTq+efGWkhvKuXr83B3hbPZkVx8w+geGruTu/DBKbiMHm38KRZ89q7szvd35YSFQ8vxyeDb/FIr+M94nxgolzzIHm9+Fos8ImYGn5HnmYPO7UPRPaW11Sp5nDjZfDUVfoK3VKXm+Odh8NRT9LmutTsnzzcHmzVD0JdZZnZLnnYPNm6Hoq7ljdUqedw42Xw9FX8Gy1Sl5/jnYfD21nQLrw/TE2MG9XXl/+gMlzzgHm2+GWfc1uBl4ETmm5HnnMNO+Gb51X89wsL83/dTNECh5mpyrs3PB5uup5pNaQlj8dJedkyOvBEqeLsf9nEJNn7oSAkbfjPcMPCVPlzMDm28Ao2/A1+qUPF3O1uGBTEZjbN4CjN6OVlan5Oly3NkBk9H0x4qxeQsoegvazMBT8rQ57uwA3jdvD0VvT6PVKXnaHPcuCO+b+0HRW9JkdUqePocpOH8ouh+fWJ2Sp89hCi4MXnX3YPEV+Nt3l5S8h5zrJxdyc/GaV9o9Ydbdn+kMvDMLJU+bg83D4Vt3T+bP6m7zUfK0OTybh0PRwxiOz196nxgrlDw4B5t3g6IHEHJirFDyTjnYvBsUPRyvGXhKHp6DzbtD0QPxsTol75aDzbtD0bux0eqUvFsONteBondgk9UpefccbK4DRe/OSqtT8u452FwPit6RVVan5Do52FwPiq7DR6vXXk6tHGyuC6fAKuFOjN05OTq+efGWkivkcLKrLhhdj6EruTv5hJJ3y8Hm+lB0JWbPka/cySehUPIP8GyuD0XXxfvE2DmU/APYPA4UXRFm4LvnYPM4UHR9mIEPzMHm8aDoyjADH56DzeNB0ePADLxnDjaPC0WPADPw/jnYPC4UPR7MwLcEm8eHokeCGfj2YPP4UPS4MAO/AWyeBmbdI8MM/HqYaU8DRo8PM/ANYPN0UPTIMAPfDM/m6aDoaWAGfkXO1dm5YPM08NlrCVj8zLadk6PWv6H1F/Dcdzp8hloaMHo6mIH/tOSCzdOB0RPhY3XrJd86PJDJaIzNE4LR01L9DLx792Eymr4wic0TQtETwgz81+LefeCV9vRQ9PRUOwO/++A+75v3BEVPTM0z8Lxv3h8UvR+qm4FnCq5fmHXvidpm4Jlp7xeM3h/f1zIDj837h6L3xKOnz76rZQaeZ/P+oej9Yn4GHpvnAUXvkRrOgcfmeUDR+8fsDDw2zweK3jOWz4HH5vlA0fPA3Aw8Ns8Lip4BFmfgsXleUPR8MDMDj83zg6JngqUZeGyeHxQ9L4qfgcfmecKse2aUPgPPTHueYPT8KHYGHpvnC0XPjJJn4Hk2zxeKnifFzcBj87yh6BlS4gw8Ns8bip4vxczAY/P8oeiZUtIMPDbPH4qeN9nPwGPzMqDoGVPCDDw2LwOKnj/ZzsBj83Kg6JmT8ww8Ni8Hil4G2c3AY/OyYNa9EHKbgWemvSwwejkMc5mBx+blQdELYfYcnMUMPM/m5UHRyyKLGfirs3PB5mXxWe0LUBLP37z58asvvvx2sLv9+c7JUesr134Bz31n8ejps4emFtc4GL08ep2Bn4HNCwOjF4aP1bVLvnV4IJPRGJsXCEYvk+Qz8O7V/slo+kIgNi8Qil4gfczAu1f7eaW9XCh6uSSbgd99cJ/3zQuHohdKyhl43jcvH4peNtFn4JmCswGz7oUTewaemXYbYPTyiTYDj83tQNELJ+YMPM/mdqDoNlCfgcfmtqDoBohxDjw2twVFt4PaDDw2twdFN4LmOfDY3B4U3RadZ+CxuU0ouiE0ZuCxuU0ouj2CZ+CxuV0oujG6zMBjc7tQdJt4z8Bjc9sw624U3xl4Ztptg9Ht0noGHpvbh6IbxWcGnmdz+1B022x8Xx2b1wFFN0ybaTlsXgcU3T6NVsfm9UDRjbPO6ti8Hih6HdyxOjavC4peAausjs3rgqLXw0erY/P6YDKuIty03C/+8ODY3TFTcHWxXfsCVIaz+unslrE5gFWc1d0v/oDrAqPXByavDRH5P5j6oOB59pHPAAAAAElFTkSuQmCC';
const vertexShader = () => {
return `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`
};
const fragmentShader = () => {
return `
uniform vec2 u_resolution;
uniform float u_time;
uniform sampler2D image;
uniform vec2 strength;
uniform float speed;
varying vec2 vUv;
vec2 sineWave( vec2 p ) {
float x = strength.x * sin(-1.0 * p.y + 50.0 * p.x + u_time * speed);
float y = strength.y * sin(p.y + 10.0 * p.x + u_time * speed);
return vec2(p.x, p.y + y);
}
void main() {
vec4 color = texture2D(image, sineWave(vUv));
gl_FragColor = color;
}
`
};
function init() {
let loader = new THREE.TextureLoader();
loader.load(
imageDataUri,
(texture) => {
let scene = new THREE.Scene();
let camera =
new THREE.PerspectiveCamera(75, CANVAS_WIDTH / CANVAS_HEIGHT, 0.1, 1000);
let renderer = new THREE.WebGLRenderer({ alpha: true });
renderer.setClearColor(0x000000, 0);
renderer.setSize(CANVAS_WIDTH, CANVAS_HEIGHT);
document.body.appendChild(renderer.domElement);
let uniforms = {
u_time: { type: 'f', value: 0.1 },
u_resolution: { type: 'v2', value: new THREE.Vector2() },
image: { type: 't', value: texture },
strength: { type: 'v2', value: new THREE.Vector2(0.01, 0.025) },
speed: { type: 'f', value: 8.0 }
};
let material = new THREE.ShaderMaterial({
uniforms,
vertexShader: vertexShader(),
fragmentShader: fragmentShader()
});
let triangleMesh = new THREE.Mesh(new THREE.PlaneGeometry(CANVAS_WIDTH, CANVAS_HEIGHT, 1, 1), material);
scene.add(triangleMesh);
camera.position.z = 250;
let clock = new THREE.Clock();
let animate = () => {
requestAnimationFrame(animate);
triangleMesh.material.uniforms.u_time.value += clock.getDelta();
renderer.render(scene, camera);
};
animate();
},
undefined,
(err) => { console.error('Could not load texture', err) }
);
}
init();
body {background: orange;}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/110/three.min.js"></script>
What you can do is scale the wave effect by the v coordinate of the texture. If you scale by (1.0 - p.y), the wave effect is zero at the top border and has its maximum at the bottom:
float y = strength.y * sin(p.y + 10.0 * p.x + u_time * speed) * (1.0 - p.y);
For a wave effect that is less attenuated near the top of the flag, scale by the square of the v coordinate, (1.0 - p.y*p.y), instead:
float y = strength.y * sin(p.y + 10.0 * p.x + u_time * speed) * (1.0 - p.y*p.y);
const CANVAS_WIDTH = 250;
const CANVAS_HEIGHT = 250;
const imageDataUri = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAPoAAAD6CAYAAACI7Fo9AAAQOElEQVR4nO2dTWtcyRWGT2v0QSwUZqVh0EbeBbQek0A2Yf5AdlkNjLchf6LzC2YgOEtlK6+8SJYhmwEbeaFJwJCdjbE3zlK0pWmBOlS729Nu9e2+VfdU3apTzwMemK/X95br1aPbfVQ9+ONvfv1SRI4FAKzyaltEhoP9vdPP//qNDPb37tznT//6r4z+8k/Z/9PXsve7XwWtg2bGL//8e9k+Oer9OrgXMkrImDHcevT02d8mo59eXf/jP0XcuKViUHIyYmZsHR64v33lOr41+3fD67//Wyajn/gDIIMMAxn3Hv5WJqOx+0dD95dp0ZetzkYgg4yyMybvx07cU5vLvOgzplZ3v9gIZJBRbsbug/vTHs9t7hgs/sfzV+DZCGSQUW7G1ePncnV27mx+f/7fbC39P+4V+OlXBEs3TgYZtWS419mWbS7LRV/3CnzNi0cGGaVkuO4uPpvPWTa6rHoFvuQbJ4OMWjKabC6riu5rdf4AyCAjj4wmm0uD0aWt1dkIZJCRR8Y6m0tT0dtYnY1ABhn5ZKyzuawxuqyzOhuBDDLyydhkc1lX9CarsxHIICOvjE02lw1Gl2WrsxHIICOvjDY2d3y27l8+f/Pmx6+++PLbwe7257fvLtkIZJCRWcb1kwu5uXjtbP5wXdZ2i8t2Vj91XznYCGSQkU9GW5tLi2/dPz6ru9+MjUAGGflktHk2n7Ox6DOG4/OXXtNyvhdNBhlktM/wsbm0LXroDDx/iGSQESfDx+biYXTxnYFnI5BBRpwMX5uLT9F9rM5GIIOMeBm+NhdPo0sbq7MRyCAjXkaIzcW36JuszkYgg4y4GSE2lwCjS5PV2QhkkBE3I9TmElL0VVZnI5BBRvyMUJtLoNFl0epsBDLIiJ/RxeayfAqsD+7E2J2To+ObF2/ZCGSQETlj1cmuPoQa3TF0JXefCMFGIIOMeBldbS5dij57TnjlPhEiBDYTGWS0o8uz+ZwuRpeQE2OFjUAGGa3RsLl0LXrIDDwbgQwy2qNhc1EwuvhYnY1ABhnt0bK5aBS9rdXZCGSQ4YeWzUXJ6LLJ6mwEMsjwX5Ors3PRsLlsOjOuLYtny+2cHN25YDYCGWT4r4l7V2vTWXBt0TK6rLI6G4EMMsLWZIaKzUXL6LLC6mwEMsgIW5OtwwOZjMZqNhdlo8vc6u4XG4EMMvzXxE2aTkbTITQ1m0uXWfcm3Ay8iByzEcggw39Nbv932WmmvQltozuGg/092X0Qdp1sJjJqyVheE9cZrffNl1EveuiJscJGIKOijFVrovm++TIxjC4hM/BsBDJqyVi1JppTcKuIUnRfq7MRyKglo2lNYtpcIhpd2lqdjUBGLRlNaxLb5hKz6G2szkYgo5aMdWsS2+YS2eiyzupsBDJqyVi3JilsLrGL3mR1NgIZtWRsWpMUNpcERpdlq7MRyKglY9OapLK5aM66N7E4A3/77pKNQEYVGW3W5PrJhdxcvFadaW9iO/ZvMMNZ/dR9BWMjkGE9o82apLS5JPrW/eOzurthNgIZljParkmqZ/M5SYo+Yzg+f+l9YqywmcgoJKPt/aS2uaQseugMPMUgo4QMn/tJbXNJbHTxnYGnGGSUkOFzP33YXFIX3cfqFIOMEjJ876cPm0sPRpc2VqcYZJSQ4Xs/fdlc+ij6JqtTDDJKyAi5n75sLj0ZXZqsTjHIKCEj5H76tLn0VfRVVqcYZJSQEXo/fdpcejS6LFqdYpBRQkbo/fRtc4lxCqwP7sTYnZOj45sXbykGGVlndLmfq8fPo5zs6kOfRnd870ruzrKmGGTkmtHlfnKwufRd9EdPn33nPl9q8n4c9P9TLjJiZ3S9lr6fzef0bXQJOTFWKAYZCTK6XksuNpccih4yA08xyIidoXEtudhcMjG6+FidYpARO0PjWnKyueRS9LZWpxhkxM7QupacbC4ZGV02WZ1ikBE7Q+tacrO55FT0dVanGGTEztC6FsnQ5pKZ0WWV1SkGGbEztK5lnnN1di452VxSnALrw+KJsTsnRxSDjOgZWteymONmQ1Kc7OpDqlNgfZieGDu4tyvvT3+gGGREy9C6Fvm05JKbzaXvWfcm3Ay8iBxTDDJiZWhdy2LO1uGB3L677HWmvYncntHnDAf7e7L7IGy9KBcZKdZkMcf9vMZkNB3lzs7mkmvRQ0+MFYpBRqI1Wc5xP6+R2yvti+RqdAmZgacYZKRYk+Uc951nbu+bL5Nt0X2tTjHISLEmq3JyfN98mZyNLm2tTjHISLEmq3JynIJbRdZFb2N1ikFGijVpyinB5lKA0WWd1SkGGSnWpCmnFJtLCUVvsjrFICPFmqzLKcXmUojRZdnqFIOMFGuyLqckm0tus+5NLM7A3767pBhkRF+TTTnXTy7k5uJ1djPtTeQ4697EdAbefSWlGGTEXJNNOaXZXAr61v3js7pbdIpBRqw1aZNT0rP5nGKKPmM4Pn/pfWKsUAzTGVpr0ianRJtLaUUPnYGnGHYztNakbU6JNpcCjS6+M/AUw26G1pq0zSnV5lJi0X2sTjHsZmitiU9OqTaXQo0ubaxOMexmaK2JT07JNpdSi77J6hTDbobWmvjmlGxzKdjo0mR1imE3Q2tNfHNKt7mUXPRVVqcYdjO01iQkp3SbS+FGl0WrUwy7GVprEpJjweaS6ymwPrgTY3dOjo5vXrylGAYztNYkNOfq8XP3gQxZnuzqQ+lGdwxdyd0pnBTDVobW/YTmWLG5WCj67LnplTuFMwTKlWeG1v10ybHwbD7HgtEl5MRYoVzZZmjdT5ccSzYXK0UPmYGnXHlmaN1P1xxLNhdDRhcfq1OuPDO07qdrjjWbi6Wit7U65cozQ+t+NHKs2VyMGV02WZ1y5ZmhdT8aORZtLtaKvs7qlCvPDK370cqxaHMxaHRZZXXKlWeG1v1o5Vi1uVgs+rLVKVeeGVr3o5lj1eZS2CmwPkxPjB3c25X3pz9QrswytO5HO+fq7Fws2lwszLo34WbgReSYcuWVoXU/MXLchGXpM+1NWHxGnzMc7O9NP7s6BAqqn6F1LTFyZpi0uZTySS0hLH66y87JkVcCBdXP0LqWGDlbhwcyGY2L+dSVECwbXUJm4CmofobWtcTIcT/1OBlNfyDKrM3FetF9Z+ApqH6G1rXEynE/9Wj1lfZFrBtd2lqdgupnaF1LrBz3+o3V982XMV/0NlanoPoZWtcSM8fy++bL1GB0WWd1CqqfoXUtMXMsT8GtooqiN1mdgupnaF1L7JyabC4VGV2WrU5B9TO0riV2Tm02F8vvoy+z+L767btLCqqcoXUtKXKun1zIzcVr0++bL2N11r2J6Qy8+4pOQfUytK4lRU6NNpfKvnX/+Kzu/uApaH0llwqfzedUVfQZw/H5S+8TY4WSF59Tq82lxqKHnBgrlNxETq02l0qNLr4z8JS8/JyabS61Ft3H6pTcRk7NNpeKjS5trE7JbeTUbnO
pueibrE7J7eTUbnOp3OjSZHVKbicHm3+g6qKvsjolt5WDzT9Qu9Fl0eqU3FYONv8Zs6fA+uBOjN05OTq+efGWkhvKuXr83B3hbPZkVx8w+geGruTu/DBKbiMHm38KRZ89q7szvd35YSFQ8vxyeDb/FIr+M94nxgolzzIHm9+Fos8ImYGn5HnmYPO7UPRPaW11Sp5nDjZfDUVfoK3VKXm+Odh8NRT9LmutTsnzzcHmzVD0JdZZnZLnnYPNm6Hoq7ljdUqedw42Xw9FX8Gy1Sl5/jnYfD21nQLrw/TE2MG9XXl/+gMlzzgHm2+GWfc1uBl4ETmm5HnnMNO+Gb51X89wsL83/dTNECh5mpyrs3PB5uup5pNaQlj8dJedkyOvBEqeLsf9nEJNn7oSAkbfjPcMPCVPlzMDm28Ao2/A1+qUPF3O1uGBTEZjbN4CjN6OVlan5Oly3NkBk9H0x4qxeQsoegvazMBT8rQ57uwA3jdvD0VvT6PVKXnaHPcuCO+b+0HRW9JkdUqePocpOH8ouh+fWJ2Sp89hCi4MXnX3YPEV+Nt3l5S8h5zrJxdyc/GaV9o9Ydbdn+kMvDMLJU+bg83D4Vt3T+bP6m7zUfK0OTybh0PRwxiOz196nxgrlDw4B5t3g6IHEHJirFDyTjnYvBsUPRyvGXhKHp6DzbtD0QPxsTol75aDzbtD0bux0eqUvFsONteBondgk9UpefccbK4DRe/OSqtT8u452FwPit6RVVan5Do52FwPiq7DR6vXXk6tHGyuC6fAKuFOjN05OTq+efGWkivkcLKrLhhdj6EruTv5hJJ3y8Hm+lB0JWbPka/cySehUPIP8GyuD0XXxfvE2DmU/APYPA4UXRFm4LvnYPM4UHR9mIEPzMHm8aDoyjADH56DzeNB0ePADLxnDjaPC0WPADPw/jnYPC4UPR7MwLcEm8eHokeCGfj2YPP4UPS4MAO/AWyeBmbdI8MM/HqYaU8DRo8PM/ANYPN0UPTIMAPfDM/m6aDoaWAGfkXO1dm5YPM08NlrCVj8zLadk6PWv6H1F/Dcdzp8hloaMHo6mIH/tOSCzdOB0RPhY3XrJd86PJDJaIzNE4LR01L9DLx792Eymr4wic0TQtETwgz81+LefeCV9vRQ9PRUOwO/++A+75v3BEVPTM0z8Lxv3h8UvR+qm4FnCq5fmHXvidpm4Jlp7xeM3h/f1zIDj837h6L3xKOnz76rZQaeZ/P+oej9Yn4GHpvnAUXvkRrOgcfmeUDR+8fsDDw2zweK3jOWz4HH5vlA0fPA3Aw8Ns8Lip4BFmfgsXleUPR8MDMDj83zg6JngqUZeGyeHxQ9L4qfgcfmecKse2aUPgPPTHueYPT8KHYGHpvnC0XPjJJn4Hk2zxeKnifFzcBj87yh6BlS4gw8Ns8bip4vxczAY/P8oeiZUtIMPDbPH4qeN9nPwGPzMqDoGVPCDDw2LwOKnj/ZzsBj83Kg6JmT8ww8Ni8Hil4G2c3AY/OyYNa9EHKbgWemvSwwejkMc5mBx+blQdELYfYcnMUMPM/m5UHRyyKLGfirs3PB5mXxWe0LUBLP37z58asvvvx2sLv9+c7JUesr134Bz31n8ejps4emFtc4GL08ep2Bn4HNCwOjF4aP1bVLvnV4IJPRGJsXCEYvk+Qz8O7V/slo+kIgNi8Qil4gfczAu1f7eaW9XCh6uSSbgd99cJ/3zQuHohdKyhl43jcvH4peNtFn4JmCswGz7oUTewaemXYbYPTyiTYDj83tQNELJ+YMPM/mdqDoNlCfgcfmtqDoBohxDjw2twVFt4PaDDw2twdFN4LmOfDY3B4U3RadZ+CxuU0ouiE0ZuCxuU0ouj2CZ+CxuV0oujG6zMBjc7tQdJt4z8Bjc9sw624U3xl4Ztptg9Ht0noGHpvbh6IbxWcGnmdz+1B022x8Xx2b1wFFN0ybaTlsXgcU3T6NVsfm9UDRjbPO6ti8Hih6HdyxOjavC4peAausjs3rgqLXw0erY/P6YDKuIty03C/+8ODY3TFTcHWxXfsCVIaz+unslrE5gFWc1d0v/oDrAqPXByavDRH5P5j6oOB59pHPAAAAAElFTkSuQmCC';
const vertexShader = () => {
return `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`
};
const fragmentShader = () => {
return `
uniform vec2 u_resolution;
uniform float u_time;
uniform sampler2D image;
uniform vec2 strength;
uniform float speed;
varying vec2 vUv;
vec2 sineWave( vec2 p ) {
float x = strength.x * sin(-1.0 * p.y + 50.0 * p.x + u_time * speed);
float y = strength.y * sin(p.y + 10.0 * p.x + u_time * speed) * (1.0-p.y*p.y);
return vec2(p.x, p.y + y);
}
void main() {
vec4 color = texture2D(image, sineWave(vUv));
gl_FragColor = color;
}
`
};
function init() {
let loader = new THREE.TextureLoader();
loader.load(
imageDataUri,
(texture) => {
let scene = new THREE.Scene();
let camera =
new THREE.PerspectiveCamera(75, CANVAS_WIDTH / CANVAS_HEIGHT, 0.1, 1000);
let renderer = new THREE.WebGLRenderer({ alpha: true });
renderer.setClearColor(0x000000, 0);
renderer.setSize(CANVAS_WIDTH, CANVAS_HEIGHT);
document.body.appendChild(renderer.domElement);
let uniforms = {
u_time: { type: 'f', value: 0.1 },
u_resolution: { type: 'v2', value: new THREE.Vector2() },
image: { type: 't', value: texture },
strength: { type: 'v2', value: new THREE.Vector2(0.01, 0.025) },
speed: { type: 'f', value: 8.0 }
};
let material = new THREE.ShaderMaterial({
uniforms,
vertexShader: vertexShader(),
fragmentShader: fragmentShader()
});
let triangleMesh = new THREE.Mesh(new THREE.PlaneGeometry(CANVAS_WIDTH, CANVAS_HEIGHT, 1, 1), material);
scene.add(triangleMesh);
camera.position.z = 250;
let clock = new THREE.Clock();
let animate = () => {
requestAnimationFrame(animate);
triangleMesh.material.uniforms.u_time.value += clock.getDelta();
renderer.render(scene, camera);
};
animate();
},
undefined,
(err) => { console.error('Could not load texture', err) }
);
}
init();
body {background: orange;}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/110/three.min.js"></script>

three.js texture across InstanceGeometry

I'm using InstancedBufferGeometry to render thousands of base geometries (boxes) in a scene. It's efficient and uses only one material/texture, but the image texture repeats on every instance.
I'm trying to figure out how to have the texture spread out over some number of instances. Say, for example, there are 8 box instances; I'd like 1/8 of the texture to appear on each box.
I think the transformUV function on THREE.Texture is what I'd want to use, but I'm not sure how to use it in this context. Or would the texture mapping happen in the shader itself?
UPDATE
My own code is pretty involved and uses the built-in three.js materials adapted for instances, so let's just use one of the three.js examples as a starting point: https://github.com/mrdoob/three.js/blob/master/examples/webgl_buffergeometry_instancing_dynamic.html
also pasted in brief below..
Vertex shader:
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
attribute vec3 position;
attribute vec3 offset;
attribute vec2 uv;
attribute vec4 orientation;
varying vec2 vUv;
// http://www.geeks3d.com/20141201/how-to-rotate-a-vertex-by-a-quaternion-in-glsl/
vec3 applyQuaternionToVector( vec4 q, vec3 v ){
return v + 2.0 * cross( q.xyz, cross( q.xyz, v ) + q.w * v );
}
void main() {
vec3 vPosition = applyQuaternionToVector( orientation, position );
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( offset + vPosition, 1.0 );
}
Fragment shader
precision highp float;
uniform sampler2D map;
varying vec2 vUv;
void main() {
gl_FragColor = texture2D( map, vUv );
}
JS:
var instances = 50;
var bufferGeometry = new THREE.BoxBufferGeometry( 2, 2, 2 );
var geometry = new THREE.InstancedBufferGeometry();
geometry.index = bufferGeometry.index;
geometry.attributes.position = bufferGeometry.attributes.position;
geometry.attributes.uv = bufferGeometry.attributes.uv;
// per instance data
var offsets = [];
var orientations = [];
var vector = new THREE.Vector4();
var x, y, z, w;
for ( var i = 0; i < instances; i ++ ) {
// offsets
x = Math.random() * 100 - 50;
y = Math.random() * 100 - 50;
z = Math.random() * 100 - 50;
vector.set( x, y, z, 0 ).normalize();
vector.multiplyScalar( 5 );
offsets.push( x + vector.x, y + vector.y, z + vector.z );
// orientations
x = Math.random() * 2 - 1;
y = Math.random() * 2 - 1;
z = Math.random() * 2 - 1;
w = Math.random() * 2 - 1;
vector.set( x, y, z, w ).normalize();
orientations.push( vector.x, vector.y, vector.z, vector.w );
}
offsetAttribute = new THREE.InstancedBufferAttribute( new Float32Array( offsets ), 3 );
orientationAttribute = new THREE.InstancedBufferAttribute( new Float32Array( orientations ), 4 ).setDynamic( true );
geometry.addAttribute( 'offset', offsetAttribute );
geometry.addAttribute( 'orientation', orientationAttribute );
// material
var material = new THREE.ShaderMaterial( {
uniforms: {
map: { value: new THREE.TextureLoader().load( 'textures/crate.gif' ) } },
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
} );
mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
You're going to have to create an additional custom attribute that holds the offset of UVs, just like you're creating an attribute that holds the x, y, z offsets, but with u, v.
First, you add it in JavaScript:
var uvOffsets = [];
var u, v;
for ( var i = 0; i < instances; i ++ ) {
//... inside the loop
u = Math.random(); // I'm assigning random, but you can do the math...
v = Math.random(); // ... to make it discrete 1/8th amounts
uvOffsets.push(u, v);
}
// Add new attribute to BufferGeometry
var uvOffsetAttribute = new THREE.InstancedBufferAttribute( new Float32Array( uvOffsets ), 2 );
geometry.addAttribute( 'uvOffset', uvOffsetAttribute );
Then, in your Vertex shader:
// [...]
attribute vec2 uv;
attribute vec2 uvOffset;
varying vec2 vUv;
void main() {
vec3 vPosition = applyQuaternionToVector( orientation, position );
// Divide uvs by 8, and add assigned offsets
vUv = (uv / 8.0) + uvOffset;
gl_Position = projectionMatrix * modelViewMatrix * vec4( offset + vPosition, 1.0 );
}
Finally, your fragment shader can stay as it is; the UVs arriving in vUv have already been transformed by the vertex shader:
precision highp float;
uniform sampler2D map;
varying vec2 vUv; // <- these UVs have been transformed by the vertex shader
void main() {
gl_FragColor = texture2D( map, vUv ); // <- the transformation carries through to the texture lookup
}
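As a follow-up to the comment in the loop above: one way (a sketch, not part of the original answer) to make the per-instance offsets discrete rather than random is to lay the instances out on the 8x8 grid implied by the uv / 8.0 scaling in the vertex shader:
var uvOffsets = [];
var u, v;
for ( var i = 0; i < instances; i ++ ) {
    u = ( i % 8 ) / 8;                   // column of the 8x8 grid
    v = ( Math.floor( i / 8 ) % 8 ) / 8; // row of the 8x8 grid
    uvOffsets.push( u, v );
}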

Threejs globe glow issue

We are trying to achieve a glow around a sphere/globe using this code:
var sphereGeom = new THREE.SphereGeometry(3, 40, 40);
var moonTexture = new THREE.TextureLoader().load(
"../../assets/moon-map.png"
);
var moonMaterial = new THREE.MeshPhongMaterial({ map: moonTexture });
var moon = new THREE.Mesh(sphereGeom, moonMaterial);
moon.position.set(0, 0, 0);
this.add(moon);
var customMaterial = new THREE.ShaderMaterial({
uniforms: {
c: { type: "f", value: 0.4 },
p: { type: "f", value: 2.3 },
glowColor: { type: "c", value: new THREE.Color(0xffffff) },
viewVector: { type: "v3", value: new THREE.Vector3(10, 10, 10) }
},
vertexShader: `uniform vec3 viewVector;
uniform float c;
uniform float p;
varying float intensity;
void main()
{
vec3 vNormal = normalize( normalMatrix * normal );
vec3 vNormel = normalize( normalMatrix * viewVector );
intensity = pow( c - dot(vNormal, vNormel), p );
// intensity = 0;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: `uniform vec3 glowColor;
varying float intensity;
void main()
{
vec3 glow = glowColor * intensity;
gl_FragColor = vec4( glow, 1.0 );
}`,
side: THREE.BackSide,
blending: THREE.AdditiveBlending,
transparent: true
});
moonGlow = new THREE.Mesh(sphereGeom.clone(), customMaterial.clone());
moonGlow.position.set(moon.position.x, moon.position.y, moon.position.z);
moonGlow.scale.multiplyScalar(1.1);
this.add(moonGlow);
This all looks good and we are getting the desired result. BUT when we set the WebGLRenderer's alpha to true (to get a gradient in the background body element), the whole glow becomes surrounded by a black border.
Is there some way to get a gradient color in the background while still showing this sphere with its glow in front?
You can assign a texture to scene.background to achieve the desired effect. In that case, it is not necessary to set alpha: true on the WebGLRenderer.
var loader = new THREE.TextureLoader();
var texture = loader.load( 'textures/gradient.jpg' );
scene.background = texture;
Demo: https://jsfiddle.net/f2Lommf5/4946/
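If you would rather not load an image at all, a gradient texture can also be generated at runtime with a canvas and assigned to scene.background. This is a sketch, not part of the original answer; the colors and canvas size are arbitrary:
var canvas = document.createElement( 'canvas' );
canvas.width = 2;
canvas.height = 256;
var ctx = canvas.getContext( '2d' );
var gradient = ctx.createLinearGradient( 0, 0, 0, canvas.height );
gradient.addColorStop( 0, '#1e3c72' ); // top color (arbitrary)
gradient.addColorStop( 1, '#2a5298' ); // bottom color (arbitrary)
ctx.fillStyle = gradient;
ctx.fillRect( 0, 0, canvas.width, canvas.height );
scene.background = new THREE.CanvasTexture( canvas );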

How can textures with transparent spots be correctly applied to multiple stacked plane instances in threejs?

I'm creating 512 instances of the same 1x1 plane with a texture that has transparent areas. The planes are randomly spread around the origin, as in the image below.
How can the planes in front be drawn after the planes behind, so that the transparency of the planes in front takes into account the output of the planes behind them?
(with depthTest disabled)
(with depthTest normal)
For reference, here is the version of the instanced geometry with transparency disabled, which shows that the planes are correctly positioned.
Update:
Adding code as asked:
import {
Mesh,
ShaderMaterial,
Vector3,
PlaneBufferGeometry,
EdgesGeometry,
LineBasicMaterial,
LineSegments,
InstancedBufferAttribute,
UniformsLib,
BufferAttribute,
TextureLoader,
InstancedBufferGeometry,
DoubleSide,
} from 'three'
import path from 'path'
import fs from 'fs'
import {
randomValueBetween,
} from '../../utils'
const vertexShader = fs.readFileSync(path.resolve(__dirname, './assets/vertex.glsl'), 'utf8')
const fragmentShader = fs.readFileSync(path.resolve(__dirname, './assets/fragment.glsl'), 'utf8')
const createInstancedAtrributes = (geometry, instanceCount) => {
const startseed = new InstancedBufferAttribute(new Float32Array(instanceCount * 1), 1)
const scale = new InstancedBufferAttribute(new Float32Array(instanceCount * 3), 3)
const offset = new InstancedBufferAttribute(new Float32Array(instanceCount * 2), 2)
const orientationY = new InstancedBufferAttribute(new Float32Array(instanceCount), 1)
const baseScale = 0.5
for (let i = 0; i < instanceCount; i += 1) {
scale.setXYZ(i,
baseScale * randomValueBetween(0.8, 1.3, 1),
baseScale * randomValueBetween(0.8, 1.3, 1),
baseScale * randomValueBetween(0.8, 1.3, 1),
)
orientationY.setX(i, randomValueBetween(0.0, 1.0, 3))
startseed.setX(i, randomValueBetween(1, 3, 1))
}
for (let i = 0; i < instanceCount / 4; i += 4) {
const randomX = randomValueBetween(-3.5, 3.5, 1)
const randomY = randomValueBetween(-3.5, 3.5, 1)
offset.setXY(i, randomX, randomY)
}
geometry.addAttribute('scale', scale)
geometry.addAttribute('offset', offset)
geometry.addAttribute('startseed', startseed)
geometry.addAttribute('orientationY', orientationY)
return { scale, offset }
}
const createInstancedGeometry = (instancePerUnitCount) => {
const geometry = new InstancedBufferGeometry()
geometry.maxInstancedCount = instancePerUnitCount
const shape = new PlaneBufferGeometry(1, 1, 1, 3)
const data = shape.attributes
geometry.addAttribute('position', new BufferAttribute(new Float32Array(data.position.array), 3))
geometry.addAttribute('uv', new BufferAttribute(new Float32Array(data.uv.array), 2))
geometry.addAttribute('normal', new BufferAttribute(new Float32Array(data.normal.array), 3))
geometry.setIndex(new BufferAttribute(new Uint16Array(shape.index.array), 1))
shape.dispose()
createInstancedAtrributes(geometry, instancePerUnitCount)
return geometry
}
export default class GrassDeform extends Mesh {
constructor() {
const geometry = createInstancedGeometry(8 * 256)
const uniforms = {
uTime: {
type: 'f',
value: 0,
},
uMap: {
type: 't',
value: null,
},
}
const textureLoader = new TextureLoader()
textureLoader.load(path.resolve(__dirname, './assets/grass-texture-01.png'), (t) => {
uniforms.uMap.value = t
})
const material = new ShaderMaterial({
uniforms: Object.assign({},
UniformsLib.ambient,
UniformsLib.lights,
uniforms,
),
vertexShader,
fragmentShader,
lights: true,
transparent: true,
side: DoubleSide,
})
super(geometry, material)
this.geometry = geometry
this.material = material
this.up = new Vector3(0, 0, 1)
const lineGeo = new EdgesGeometry(geometry) // or WireframeGeometry
const mat = new LineBasicMaterial({ color: 0xffffff, linewidth: 2 })
const wireframe = new LineSegments(lineGeo, mat)
this.add(wireframe)
this.frustumCulled = false
}
update({ ellapsedTime }) {
this.material.uniforms.uTime.value = ellapsedTime
}
}
And the object is added to the scene like this:
const grass2 = new GrassDeform2()
grass2.position.set(-1, 0, 0.50)
grass2.rotateX(Math.PI / 2)
scene.add(grass2)
dirLight.target = grass2
const animate = (ellapsedTime = 0) => {
stats.begin()
grass2.update({ ellapsedTime })
/// other scene stuff
renderer.render(scene, playerController.camera)
requestAnimationFrame(animate)
}
animate()
The vertex shader:
#if NUM_DIR_LIGHTS > 0
struct DirectionalLight {
vec3 direction;
vec3 color;
int shadow;
float shadowBias;
float shadowRadius;
vec2 shadowMapSize;
};
uniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];
#endif
uniform float uTime;
attribute vec2 offset;
attribute vec3 scale;
attribute float startseed;
attribute float orientationY;
varying vec2 vUv;
varying vec3 vPosition;
varying vec3 vDirectionalLightDirection;
varying vec3 vDirectionalLightColor;
varying vec3 uNormal;
void main() {
vec3 pos = position * scale;
pos.x += offset.x;
pos.z += offset.y;
pos.y += (scale.y - 1.0) * 0.5;
pos.y = orientationY;
vPosition = pos;
uNormal = normal;
vUv = uv;
uNormal = normal;
vDirectionalLightDirection = directionalLights[0].direction;
vDirectionalLightColor = directionalLights[0].color;
float variation = startseed + uTime * 0.002;
float pass = (0.5 + pos.y) * 0.05;
pos.x += sin(pass + variation) * pass;
pos.z += cos(pass + variation + 0.01) * pass;
pos.y += sin(pass + variation - 0.01) * pass;
gl_Position = projectionMatrix * modelViewMatrix * vec4(pos,1.0);
}
And the fragment shader (has some extra stuff for light, not added for now):
uniform sampler2D uMap;
varying vec2 vUv;
varying vec3 vPosition;
varying vec3 vDirectionalLightDirection;
varying vec3 vDirectionalLightColor;
varying vec3 uNormal;
void main() {
vec4 map = texture2D(uMap, vUv);
vec3 lightVector = normalize((vDirectionalLightDirection) - vPosition);
float dotNL = dot( uNormal, lightVector );
vec3 baseColor = map.rgb;
vec3 lightedColor = vDirectionalLightColor * 0.6 * dotNL;
if ( map.a < 0.5 ) discard; //!!! THIS WAS THE LINE NEEDED TO SOLVE THE ISSUE
gl_FragColor = vec4( map.rgb , 1 );
}
After applying the change, the final result looks right!
You can solve your problem with alpha testing. Use a pattern like the following in your fragment shader:
vec4 texelColor = texture2D( map, vUv );
if ( texelColor.a < 0.5 ) discard;
Your material will no longer need to have transparent = true, since you appear to be using a cut-out in which the texture alpha is either 0 or 1.
three.js r.88
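For comparison, the built-in materials expose the same cut-out behaviour through the alphaTest property, so a sketch of the non-custom-shader equivalent (assuming a MeshBasicMaterial with the same texture) would be:
var material = new THREE.MeshBasicMaterial( {
    map: texture,
    alphaTest: 0.5,        // fragments with alpha < 0.5 are discarded
    side: THREE.DoubleSide
} );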

Incorrect depthTexture with SSAO

I’ve been puzzled lately as I’ve been attempting to get a THREE.DepthTexture to work with the Ambient Occlusion shader. I’ve had it working before with RGBA unpacking, but after reading about Matt Deslauriers’s project, Audiograph, I decided to attempt the method he described for a potential performance boost:
Historically in ThreeJS, you would render your scene with MeshDepthMaterial to a WebGLRenderTarget, and then unpack to a linear depth value when sampling from the depth target. This is fairly expensive and often unnecessary, since many environments support the WEBGL_depth_texture extension.
Matt Deslauriers, Audiograph
After attempting this method, I somehow ended up with this weird unwanted effect in which lines are all over the terrain:
I have setup a small example below in which I have replicated the issue. I feel it’s something very obvious that I’m simply glossing over.
I hope someone here is able to point out what I’m missing so that I can get the ambient occlusion working in a way that is a little bit more performant!
Many thanks in advance.
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, window.innerWidth/window.innerHeight, 0.1, 2000);
const pivot = new THREE.Object3D();
pivot.add(camera);
scene.add(pivot);
camera.position.set(0, 250, 500);
camera.lookAt(pivot.position);
const renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.gammaInput = true;
renderer.gammaOutput = true;
renderer.gammaFactor = 2.2;
let supportsExtension = false;
if (renderer.extensions.get('WEBGL_depth_texture')) {
supportsExtension = true;
}
document.body.appendChild(renderer.domElement);
const createCube = () => {
const geo = new THREE.BoxGeometry(500, 500, 500);
const mat = new THREE.MeshBasicMaterial({ color: 0x00ff00 });
const obj = new THREE.Mesh(geo, mat);
obj.position.y = -(obj.geometry.parameters.height / 2);
scene.add(obj);
}
const createSphere = () => {
const geo = new THREE.SphereGeometry(100, 12, 8);
const mat = new THREE.MeshBasicMaterial({ color: 0xff00ff });
const obj = new THREE.Mesh(geo, mat);
obj.position.y = obj.geometry.parameters.radius;
scene.add(obj);
}
// Create objects
createCube();
createSphere();
const composer = new THREE.EffectComposer(renderer);
const target = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight );
target.texture.format = THREE.RGBFormat;
target.texture.minFilter = THREE.NearestFilter;
target.texture.magFilter = THREE.NearestFilter;
target.texture.generateMipmaps = false;
target.stencilBuffer = false;
target.depthBuffer = true;
target.depthTexture = new THREE.DepthTexture();
target.depthTexture.type = THREE.UnsignedShortType;
function initPostProcessing() {
composer.addPass(new THREE.RenderPass( scene, camera ));
const pass = new THREE.ShaderPass({
uniforms: {
"tDiffuse": { value: null },
"tDepth": { value: target.depthTexture },
"resolution": { value: new THREE.Vector2( 512, 512 ) },
"cameraNear": { value: 1 },
"cameraFar": { value: 100 },
"onlyAO": { value: 0 },
"aoClamp": { value: 0.5 },
"lumInfluence": { value: 0.5 }
},
vertexShader: document.getElementById('vertexShader').textContent,
fragmentShader: document.getElementById('fragmentShader').textContent,
});
pass.material.precision = 'highp';
composer.addPass(pass);
pass.uniforms.tDepth.value = target.depthTexture;
pass.uniforms.cameraNear.value = camera.near;
pass.uniforms.cameraFar.value = camera.far;
composer.passes[composer.passes.length - 1].renderToScreen = true;
}
initPostProcessing();
const animate = () => {
requestAnimationFrame( animate );
pivot.rotation.y += 0.01;
renderer.render( scene, camera, target );
composer.render();
}
animate();
html, body { margin: 0; }
canvas { display: block; width: 100%; height: 100%; }
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/86/three.js"></script>
<script src="https://cdn.rawgit.com/mrdoob/three.js/dev/examples/js/postprocessing/EffectComposer.js"></script>
<script src="https://cdn.rawgit.com/mrdoob/three.js/dev/examples/js/postprocessing/RenderPass.js"></script>
<script src="https://cdn.rawgit.com/mrdoob/three.js/dev/examples/js/postprocessing/ShaderPass.js"></script>
<script src="https://cdn.rawgit.com/mrdoob/three.js/dev/examples/js/shaders/CopyShader.js"></script>
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
uniform float cameraNear;
uniform float cameraFar;
uniform bool onlyAO; // use only ambient occlusion pass?
uniform vec2 resolution; // texture width, height
uniform float aoClamp; // depth clamp - reduces haloing at screen edges
uniform float lumInfluence; // how much luminance affects occlusion
uniform sampler2D tDiffuse;
uniform highp sampler2D tDepth;
varying vec2 vUv;
// #define PI 3.14159265
#define DL 2.399963229728653 // PI * ( 3.0 - sqrt( 5.0 ) )
#define EULER 2.718281828459045
// user variables
const int samples = 4; // ao sample count
const float radius = 5.0; // ao radius
const bool useNoise = false; // use noise instead of pattern for sample dithering
const float noiseAmount = 0.0003; // dithering amount
const float diffArea = 0.4; // self-shadowing reduction
const float gDisplace = 0.4; // gauss bell center
highp vec2 rand( const vec2 coord ) {
highp vec2 noise;
if ( useNoise ) {
float nx = dot ( coord, vec2( 12.9898, 78.233 ) );
float ny = dot ( coord, vec2( 12.9898, 78.233 ) * 2.0 );
noise = clamp( fract ( 43758.5453 * sin( vec2( nx, ny ) ) ), 0.0, 1.0 );
} else {
highp float ff = fract( 1.0 - coord.s * ( resolution.x / 2.0 ) );
highp float gg = fract( coord.t * ( resolution.y / 2.0 ) );
noise = vec2( 0.25, 0.75 ) * vec2( ff ) + vec2( 0.75, 0.25 ) * gg;
}
return ( noise * 2.0 - 1.0 ) * noiseAmount;
}
float readDepth( const in vec2 coord ) {
float cameraFarPlusNear = cameraFar + cameraNear;
float cameraFarMinusNear = cameraFar - cameraNear;
float cameraCoef = 2.0 * cameraNear;
return cameraCoef / ( cameraFarPlusNear - texture2D( tDepth, coord ).x * cameraFarMinusNear );
}
float compareDepths( const in float depth1, const in float depth2, inout int far ) {
float garea = 2.0; // gauss bell width
float diff = ( depth1 - depth2 ) * 100.0; // depth difference (0-100)
// reduce left bell width to avoid self-shadowing
if ( diff < gDisplace ) {
garea = diffArea;
} else {
far = 1;
}
float dd = diff - gDisplace;
float gauss = pow( EULER, -2.0 * dd * dd / ( garea * garea ) );
return gauss;
}
float calcAO( float depth, float dw, float dh ) {
float dd = radius - depth * radius;
vec2 vv = vec2( dw, dh );
vec2 coord1 = vUv + dd * vv;
vec2 coord2 = vUv - dd * vv;
float temp1 = 0.0;
float temp2 = 0.0;
int far = 0;
temp1 = compareDepths( depth, readDepth( coord1 ), far );
// DEPTH EXTRAPOLATION
if ( far > 0 ) {
temp2 = compareDepths( readDepth( coord2 ), depth, far );
temp1 += ( 1.0 - temp1 ) * temp2;
}
return temp1;
}
void main() {
highp vec2 noise = rand( vUv );
float depth = readDepth( vUv );
float tt = clamp( depth, aoClamp, 1.0 );
float w = ( 1.0 / resolution.x ) / tt + ( noise.x * ( 1.0 - noise.x ) );
float h = ( 1.0 / resolution.y ) / tt + ( noise.y * ( 1.0 - noise.y ) );
float ao = 0.0;
float dz = 1.0 / float( samples );
float z = 1.0 - dz / 2.0;
float l = 0.0;
for ( int i = 0; i <= samples; i ++ ) {
float r = sqrt( 1.0 - z );
float pw = cos( l ) * r;
float ph = sin( l ) * r;
ao += calcAO( depth, pw * w, ph * h );
z = z - dz;
l = l + DL;
}
ao /= float( samples );
ao = 1.0 - ao;
vec3 color = texture2D( tDiffuse, vUv ).rgb;
vec3 lumcoeff = vec3( 0.299, 0.587, 0.114 );
float lum = dot( color.rgb, lumcoeff );
vec3 luminance = vec3( lum );
vec3 final = vec3( color * mix( vec3( ao ), vec3( 1.0 ), luminance * lumInfluence ) ); // mix( color * ao, white, luminance )
float depth2 = readDepth(vUv);
if ( onlyAO ) {
final = vec3( mix( vec3( ao ), vec3( 1.0 ), luminance * lumInfluence ) ); // ambient occlusion only
}
// gl_FragColor = vec4( vec3( readDepth( vUv) ), 1.0 ); // Depth
gl_FragColor = vec4( final, 1.0 );
}
</script>
I'd love to hear what is causing my Ambient Occlusion to not render properly!
If you are using a perspective camera and relying on the depth map for any purpose -- that includes SSAO and shadows -- be careful of your choice of camera.near and camera.far, especially near. (That would be shadow.camera.near if you are dealing with shadows.)
Push the near plane out as far as is reasonable for your use case. You will achieve the best results if your scene is positioned near the front of the frustum.
three.js r.86
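As a rough sketch of that advice (the values here are illustrative only), pushing the near plane out and keeping the shader uniforms in sync with the camera spreads the depth buffer's precision over the part of the scene you actually see:
// near = 10 instead of 0.1; keep far as tight as the scene allows
const camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 10, 1000 );
// keep the SSAO pass reading depth with the same planes
pass.uniforms.cameraNear.value = camera.near;
pass.uniforms.cameraFar.value = camera.far;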
