I use the following ShaderMaterial for the objects in my scene. The code below works. However, if I set the WebGLRenderer option logarithmicDepthBuffer to true, the material defined below is no longer displayed correctly.
new THREE.ShaderMaterial({
uniforms: {
color1: {
value: new THREE.Color('#3a0000')
},
color2: {
value: new THREE.Color('#ffa9b0')
}
},
vertexShader: `
varying vec3 vNormal;
void main(void){
vNormal = normalMatrix * normalize(normal);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}`,
fragmentShader: `
uniform vec3 color1;
uniform vec3 color2;
varying vec3 vNormal;
void main(void){
vec3 view_nv = normalize(vNormal);
vec3 nv_color = view_nv * 0.5 + 0.5;
vec3 c = mix(color1, color2, nv_color.r);
gl_FragColor = vec4(c, 1.0);
}`,
side: THREE.DoubleSide,
});
After looking for a solution to this problem, I found the following SO answer. In short, the solution is to add four code snippets to the vertexShader and the fragmentShader.
Where exactly do I have to integrate the provided code snippets, i.e. Vertex shader body and Fragment shader body?
I tried various "positions", but I always got WebGL errors like the following:
THREE.WebGLProgram: shader error: 0 gl.VALIDATE_STATUS false gl.getProgramInfoLog Must have a compiled vertex shader attached. ERROR: 0:63: 'EPSILON' : undeclared identifier
UPDATE added playground: https://codepen.io/anon/pen/gQoaye
If you add the logarithmicDepthBuffer option to the constructor, you will see that the ShaderMaterial no longer works.
var renderer = new THREE.WebGLRenderer({logarithmicDepthBuffer: true});
Where exactly do I have to integrate the provided code snippets, i.e. Vertex shader body and Fragment shader body?
In the vertex shader you have to define EPSILON.
After adding the code snippets logdepthbuf_pars_vertex.glsl and logdepthbuf_vertex.glsl, the final vertex shader is:
#ifdef USE_LOGDEPTHBUF
#define EPSILON 1e-6
#ifdef USE_LOGDEPTHBUF_EXT
varying float vFragDepth;
#endif
uniform float logDepthBufFC;
#endif
varying vec3 vNormal;
void main(void){
vNormal = normalMatrix * normalize(normal);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
#ifdef USE_LOGDEPTHBUF
gl_Position.z = log2(max( EPSILON, gl_Position.w + 1.0 )) * logDepthBufFC;
#ifdef USE_LOGDEPTHBUF_EXT
vFragDepth = 1.0 + gl_Position.w;
#else
gl_Position.z = (gl_Position.z - 1.0) * gl_Position.w;
#endif
#endif
}
After adding the code snippets, the final fragment shader is:
#ifdef USE_LOGDEPTHBUF
uniform float logDepthBufFC;
#ifdef USE_LOGDEPTHBUF_EXT
#extension GL_EXT_frag_depth : enable
varying float vFragDepth;
#endif
#endif
uniform vec3 color1;
uniform vec3 color2;
varying vec3 vNormal;
void main(void){
vec3 view_nv = normalize(vNormal);
vec3 nv_color = view_nv * 0.5 + 0.5;
vec3 c = mix(color1, color2, nv_color.r);
gl_FragColor = vec4(c, 1.0);
#if defined(USE_LOGDEPTHBUF) && defined(USE_LOGDEPTHBUF_EXT)
gl_FragDepthEXT = log2(vFragDepth) * logDepthBufFC * 0.5;
#endif
}
See the example:
(function onLoad() {
var container, camera, scene, renderer, orbitControls;
function createModel() {
var material = new THREE.ShaderMaterial({
uniforms: {
color1: {
value: new THREE.Color('#3a0000')
},
color2: {
value: new THREE.Color('#ffa9b0')
}
},
vertexShader: `
#ifdef USE_LOGDEPTHBUF
#define EPSILON 1e-6
#ifdef USE_LOGDEPTHBUF_EXT
varying float vFragDepth;
#endif
uniform float logDepthBufFC;
#endif
varying vec3 vNormal;
void main(void){
vNormal = normalMatrix * normalize(normal);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
#ifdef USE_LOGDEPTHBUF
gl_Position.z = log2(max( EPSILON, gl_Position.w + 1.0 )) * logDepthBufFC;
#ifdef USE_LOGDEPTHBUF_EXT
vFragDepth = 1.0 + gl_Position.w;
#else
gl_Position.z = (gl_Position.z - 1.0) * gl_Position.w;
#endif
#endif
}`,
fragmentShader: `
#ifdef USE_LOGDEPTHBUF
#ifdef USE_LOGDEPTHBUF_EXT
#extension GL_EXT_frag_depth : enable
varying float vFragDepth;
#endif
uniform float logDepthBufFC;
#endif
uniform vec3 color1;
uniform vec3 color2;
varying vec3 vNormal;
void main(void){
vec3 view_nv = normalize(vNormal);
vec3 nv_color = view_nv * 0.5 + 0.5;
vec3 c = mix(color1, color2, nv_color.r);
gl_FragColor = vec4(c, 1.0);
#if defined(USE_LOGDEPTHBUF) && defined(USE_LOGDEPTHBUF_EXT)
gl_FragDepthEXT = log2(vFragDepth) * logDepthBufFC * 0.5;
#endif
}`,
side: THREE.DoubleSide,
});
//var material = new THREE.MeshPhongMaterial({color:'#b090b0'});
var geometry = new THREE.BoxGeometry( 1, 1, 1 );
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
}
function init() {
container = document.getElementById('container');
renderer = new THREE.WebGLRenderer({
antialias: true,
logarithmicDepthBuffer: true
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.shadowMap.enabled = true;
container.appendChild(renderer.domElement);
camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 1, 100);
camera.position.set(0, 1, -2);
scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
scene.add(camera);
window.onresize = resize;
orbitControls = new THREE.OrbitControls(camera, container);
var helper = new THREE.GridHelper(100, 100);
helper.material.opacity = 0.25;
helper.material.transparent = true;
scene.add(helper);
var axis = new THREE.AxesHelper(1000);
scene.add(axis);
createModel();
}
function resize() {
var aspect = window.innerWidth / window.innerHeight;
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = aspect;
camera.updateProjectionMatrix();
}
function animate() {
requestAnimationFrame(animate);
orbitControls.update();
render();
}
function render() {
renderer.render(scene, camera);
}
init();
animate();
})();
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/110/three.min.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<div id="container"></div>
I assume you have tried inserting that code in your two shaders? To my understanding, that should be correct.
The error appears to be complaining about the shader not compiling, due to a reference to EPSILON in the vertex shader body, although EPSILON was never declared.
Try defining EPSILON, e.g. using a macro in the shader itself:
#define EPSILON 1e-6
or pass it to the shader as a uniform. (Notice that this is just an example value; you may want to research what a suitable value for EPSILON might be in your particular case.)
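For illustration, a minimal sketch of the uniform variant could look like this (the uniform name epsilon and its value are only placeholders; note that the built-in log-depth chunks expect the EPSILON macro, so with a uniform you would reference your own name in the shader body instead):
var material = new THREE.ShaderMaterial({
  uniforms: {
    epsilon: { value: 1e-6 } // example value only; tune it for your depth range
  },
  vertexShader: `
    uniform float epsilon;
    void main(void){
      gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
      // ... use epsilon wherever the log-depth code expects EPSILON ...
    }`
});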
Related
I've applied a ShaderMaterial to a glb model that has an opacity map (the model is a human body, and the opacity map is used to create the hair and eyelashes). The reference for the model material was this -
As you can see, the material is some sort of glow effect, so I managed to find This Example, which is pretty much what I need. The problem is that I can't figure out how to apply the model's opacity map. If you look closely at the difference between my result (left picture) and the right picture, you'll see that the hair doesn't look as it should, since the opacity map is not applied... I wonder whether ShaderMaterial is the right choice for this look, or whether I should use another kind of shader.
Here is my material code -
let m = new THREE.MeshStandardMaterial({
roughness: 0.25,
metalness: 0.75,
opacity: 0.3,
map: new THREE.TextureLoader().load(
"/maps/opacity.jpg",
(tex) => {
tex.wrapS = THREE.RepeatWrapping;
tex.wrapT = THREE.RepeatWrapping;
tex.repeat.set(16, 1);
}
),
onBeforeCompile: (shader) => {
shader.uniforms.s = uniforms.s;
shader.uniforms.b = uniforms.b;
shader.uniforms.p = uniforms.p;
shader.uniforms.glowColor = uniforms.glowColor;
shader.vertexShader = document.getElementById("vertexShader").textContent;
shader.fragmentShader = document.getElementById(
"fragmentShader"
).textContent;
shader.side = THREE.FrontSide;
shader.transparent = true;
// shader.uniforms['alphaMap'].value.needsUpdate = true;
console.log(shader.vertexShader);
console.log(shader.fragmentShader);
},
});
Shader setting:
<script id="vertexShader" type="x-shader/x-vertex">
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
vNormal = normalize( normalMatrix * normal ); // transform to view space
vPositionNormal = normalize(( modelViewMatrix * vec4(position, 1.0) ).xyz);
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<!-- fragment shader a.k.a. pixel shader -->
<script id="fragmentShader" type="x-shader/x-vertex">
uniform vec3 glowColor;
uniform float b;
uniform float p;
uniform float s;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
gl_FragColor = vec4( mix(vec3(0), glowColor, a), 1. );
}
</script>
You're creating a MeshStandardMaterial, but then you're overriding all its shader code when you assign new vertex and fragment shaders, making the Standard material useless. You should stick to ShaderMaterial like the demo you linked. It would make your code cleaner:
// Get shader code
let vertShader = document.getElementById("vertexShader").textContent;
let fragShader = document.getElementById("fragmentShader").textContent;
// Build texture
let alphaTex = new THREE.TextureLoader().load("/maps/opacity.jpg");
alphaTex.wrapS = THREE.RepeatWrapping;
alphaTex.wrapT = THREE.RepeatWrapping;
// alphaTex.repeat.set(16, 1); <- repeat won't work in a custom shader
// Build material
let m = new THREE.ShaderMaterial({
transparent: true,
// side: THREE.FrontSide, <- this is already default. Not needed
uniforms: {
s: {value: 1},
b: {value: 2},
p: {value: 3},
alphaMap: {value: alphaTex},
glowColor: {value: new THREE.Color(0x0099ff)},
// we create a Vec2 to manually handle repeat
repeat: {value: new THREE.Vector2(16, 1)}
},
vertexShader: vertShader,
fragmentShader: fragShader
});
This helps build your material in a cleaner way, since you're using the material's native build method without having to override anything. Then you can sample the alphaMap texture in your fragment shader:
uniform float s;
uniform float b;
uniform float p;
uniform vec3 glowColor;
uniform vec2 repeat;
// Declare the alphaMap uniform if we're gonna use it
uniform sampler2D alphaMap;
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
// Sample map with UV coordinates. Multiply by uniform to get repeat
float a2 = texture2D(alphaMap, vUv * repeat).r;
// Combine both alphas
float opacity = a * a2;
gl_FragColor = vec4( mix(vec3(0), glowColor, opacity), 1. );
}
Also, don't forget to carry over the UVs from your vertex shader:
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
// convert uv attribute to vUv varying
vUv = uv;
vNormal = normalize( normalMatrix * normal ); // transform to view space
vPositionNormal = normalize(( modelViewMatrix * vec4(position, 1.0) ).xyz);
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
Update
The error
'=' : cannot convert from 'lowp 4-component vector of float' to 'highp float'
means I made a mistake when taking the texture2D() sample in the fragment shader. It should have been texture2D(...).r, so we only read the red channel and get a float instead of cramming all RGBA channels (yielding a vec4) into a float.
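In isolation, the corrected line looks like this (using the same alphaMap and repeat uniforms as above):
// Incorrect: texture2D() returns a vec4, which cannot be assigned to a float
// float a2 = texture2D(alphaMap, vUv * repeat);
// Corrected: read a single channel (here the red channel) to get a float
float a2 = texture2D(alphaMap, vUv * repeat).r;
See the following snippet for the final result: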
var container, scene, camera, renderer, controls, torusKnot;
init()
function init() {
initBase()
initObject()
render()
}
function initBase () {
container = document.getElementById( 'ThreeJS' )
// SCENE
scene = new THREE.Scene();
// CAMERA
var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight
var VIEW_ANGLE = 45, ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT, NEAR = 0.1, FAR = 20000
camera = new THREE.PerspectiveCamera( VIEW_ANGLE, ASPECT, NEAR, FAR)
camera.position.set(0,0,50)
camera.lookAt(scene.position)
// RENDERER
renderer = new THREE.WebGLRenderer( {antialias:true} )
renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT)
renderer.setClearColor(0x333333)
container.appendChild( renderer.domElement )
// CONTROLS
controls = new THREE.OrbitControls( camera, renderer.domElement )
// Resize
window.addEventListener("resize", onWindowResize);
}
function onWindowResize() {
var w = window.innerWidth;
var h = window.innerHeight;
renderer.setSize(w, h);
camera.aspect = w / h;
camera.updateProjectionMatrix();
}
function initObject () {
let vertShader = document.getElementById("vertexShader").textContent;
let fragShader = document.getElementById("fragmentShader").textContent;
// Build texture
let alphaTex = new THREE.TextureLoader().load("https://threejs.org/examples/textures/floors/FloorsCheckerboard_S_Diffuse.jpg");
alphaTex.wrapS = THREE.RepeatWrapping;
alphaTex.wrapT = THREE.RepeatWrapping;
var customMaterial = new THREE.ShaderMaterial({
uniforms: {
s: {value: -1},
b: {value: 1},
p: {value: 2},
alphaMap: {value: alphaTex},
glowColor: {value: new THREE.Color(0x00ffff)},
// we create a Vec2 to manually handle repeat
repeat: {value: new THREE.Vector2(16, 1)}
},
vertexShader: vertShader,
fragmentShader: fragShader
})
var geometry = new THREE.TorusKnotBufferGeometry( 10, 3, 100, 32 )
torusKnot = new THREE.Mesh( geometry, customMaterial )
scene.add( torusKnot )
}
function render() {
torusKnot.rotation.y += 0.01;
renderer.render( scene, camera );
requestAnimationFrame(render);
}
body{
overflow: hidden;
margin: 0;
}
<script src="https://threejs.org/build/three.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<!-- vertex shader -->
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
// convert uv attribute to vUv varying
vUv = uv;
vNormal = normalize( normalMatrix * normal ); // transform to view space
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
vPositionNormal = normalize(( mvPosition ).xyz);
gl_Position = projectionMatrix * mvPosition;
}
</script>
<!-- fragment shader a.k.a. pixel shader -->
<script id="fragmentShader" type="x-shader/x-vertex">
uniform float s;
uniform float b;
uniform float p;
uniform vec3 glowColor;
uniform vec2 repeat;
// Declare the alphaMap uniform if we're gonna use it
uniform sampler2D alphaMap;
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
// Sample map with UV coordinates. Multiply by uniform to get repeat
float a2 = texture2D(alphaMap, vUv * repeat).r;
// Combine both alphas
float opacity = a * a2;
gl_FragColor = vec4( mix(vec3(0), glowColor, opacity), 1. );
}
</script>
<div id="ThreeJS" style="position: absolute; left:0px; top:0px"></div>
I have this shader code
#ifdef GL_ES
precision highp float;
#endif
uniform float u_time;
uniform vec2 u_resolution;
uniform vec2 u_mouse;
vec3 vary(vec3 y)
{
y = y+sin(u_time*1.5)*y.r;
return y;
}
void main()
{
vec2 st = gl_FragCoord.xy/u_resolution;
vec3 color = vary(vec3(st.x*1.65, st.y*1.,1.));
gl_FragColor = vec4(color, 1.);
}
You can see the effect of it here.
http://editor.thebookofshaders.com/
(with the code above pasted - the export wasn't working for some reason)
As you can see, it's a sort of moving color shader that depends on the coordinates of the fragment. The value for u_resolution that I passed in is:
uniforms.u_resolution.value.x = renderer.domElement.width;
uniforms.u_resolution.value.y = renderer.domElement.height;
These change every time the scene window is changed.
So the problem is that I don't know how to apply the effect of this shader to a THREE.js geometry (a cube, for instance). I know how to display THREE.js objects; what stumps me is this shader. Could someone help?
You have to pass the texture coordinates of the geometry from the vertex shader to the fragment shader.
Create a vertex shader which passes through the texture coordinates:
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
Use the texture coordinates (vUv.xy) in the fragment shader instead of gl_FragCoord.xy:
precision highp float;
uniform float u_time;
varying vec2 vUv;
vec3 vary(vec3 y)
{
y = y+sin(u_time*1.5)*y.r;
return y;
}
void main(){
vec2 st = vUv;
vec3 color = vary(vec3(st.x*1.65, st.y*1.,1.));
gl_FragColor = vec4(color, 1.);
}
See the example:
(function onLoad() {
var loader, camera, scene, renderer, orbitControls, uniforms;
init();
animate();
function init() {
renderer = new THREE.WebGLRenderer({
antialias: true,
alpha: true
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.shadowMap.enabled = true;
document.body.appendChild(renderer.domElement);
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 1, 100);
camera.position.set(0, 1, -2);
loader = new THREE.TextureLoader();
loader.setCrossOrigin("");
scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
scene.add(camera);
window.onresize = resize;
var ambientLight = new THREE.AmbientLight(0x404040);
scene.add(ambientLight);
var directionalLight = new THREE.DirectionalLight( 0xffffff, 0.5 );
directionalLight.position.set(1,2,1.5);
scene.add( directionalLight );
orbitControls = new THREE.OrbitControls(camera, renderer.domElement);
addGridHelper();
createModel();
}
function createModel() {
uniforms = {
u_time : { type: 'f', value: 100 }
};
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent
});
var geometry = new THREE.BoxGeometry( 1, 1, 1 );
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
}
function addGridHelper() {
var helper = new THREE.GridHelper(100, 100);
helper.material.opacity = 0.25;
helper.material.transparent = true;
scene.add(helper);
var axis = new THREE.AxesHelper(1000);
scene.add(axis);
}
function resize() {
var aspect = window.innerWidth / window.innerHeight;
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = aspect;
camera.updateProjectionMatrix();
}
function animate(deltaT) {
requestAnimationFrame(animate);
orbitControls.update();
render(deltaT);
}
function render(deltaT) {
uniforms.u_time.value = deltaT / 1000.0;
renderer.render(scene, camera);
}
})();
<script type='x-shader/x-vertex' id='vertex-shader'>
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
precision highp float;
uniform float u_time;
varying vec2 vUv;
vec3 vary(vec3 y)
{
y = y+sin(u_time*1.5)*y.r;
return y;
}
void main(){
vec2 st = vUv;
vec3 color = vary(vec3(st.x*1.65, st.y*1.,1.));
gl_FragColor = vec4(color, 1.);
}
</script>
<script src="https://threejs.org/build/three.min.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
I am new to shaders. I wrote this extremely simple fragment shader, and it looks like this (with the orange color on top and the red color at the bottom):
What I want to achieve:
I want to animate these two colors.
I want the top orange color to translate to the bottom and then go back to the top, and the bottom red color to translate to the top and then go back to the bottom (to simulate a sunset and sunrise effect).
Thanks!
#ifdef GL_ES
precision mediump float;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
vec3 colorA = vec3(0.905,0.045,0.045);
vec3 colorB = vec3(0.995,0.705,0.051);
void main() {
vec2 st = gl_FragCoord.xy/u_resolution.xy;
vec3 pct = vec3(st.y);
vec3 color = vec3(0.0);
color = mix(colorA, colorB, pct);
gl_FragColor = vec4(color,1);
}
The easiest way to do so is to add an offset to st.y. The offset has to be in the range [-1.0, 1.0]. This can be achieved by sin. The argument to sin has to depend on u_time.
Note, sin calculates the sine function of an angle in radians, so the argument has to be scaled by 2.0 * PI (~ 2.0*3.1415).
The result has to be clamped (clamp) to the range [0.0, 1.0].
e.g.:
float interval = 2000.0; // 2000 milliseconds = 2 seconds
float p = clamp(st.y + sin(2.0*3.1415 * u_time / interval), 0.0, 1.0);
vec3 pct = vec3(p);
Note that the example works when u_time is a value in milliseconds. If u_time were a value in seconds, you would have to scale the interval accordingly (e.g. 2.0 instead of 2000.0).
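For example, a seconds-based variant of the same snippet would be (a sketch assuming u_time is now passed in seconds):
float interval = 2.0; // 2 seconds
float p = clamp(st.y + sin(2.0*3.1415 * u_time / interval), 0.0, 1.0);
vec3 pct = vec3(p);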
var container, camera, scene, renderer, uniforms;
init();
animate();
function init() {
container = document.getElementById( 'container' );
camera = new THREE.Camera();
camera.position.z = 1;
scene = new THREE.Scene();
var geometry = new THREE.PlaneBufferGeometry( 2, 2 );
uniforms = {
u_time: { type: "f", value: 1.0 },
u_resolution: { type: "v2", value: new THREE.Vector2() },
u_mouse: { type: "v2", value: new THREE.Vector2() }
};
var material = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
} );
var mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
container.appendChild( renderer.domElement );
onWindowResize();
window.addEventListener( 'resize', onWindowResize, false );
document.onmousemove = function(e){
uniforms.u_mouse.value.x = e.pageX
uniforms.u_mouse.value.y = e.pageY
}
}
function onWindowResize( event ) {
renderer.setSize( window.innerWidth, window.innerHeight );
uniforms.u_resolution.value.x = renderer.domElement.width;
uniforms.u_resolution.value.y = renderer.domElement.height;
}
function animate(delta_ms) {
requestAnimationFrame( animate );
render(delta_ms);
}
function render(delta_ms) {
uniforms.u_time.value = delta_ms;
renderer.render( scene, camera );
}
<script id="vertexShader" type="x-shader/x-vertex">
void main() {
gl_Position = vec4( position, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
#ifdef GL_ES
precision mediump float;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
vec3 colorA = vec3(0.905,0.045,0.045);
vec3 colorB = vec3(0.995,0.705,0.051);
void main() {
vec2 st = gl_FragCoord.xy/u_resolution.xy;
float interval = 2000.0; // 2000 milliseconds = 2 seconds
float p = clamp(st.y + sin(2.0*3.1415 * u_time / interval), 0.0, 1.0);
vec3 pct = vec3(p);
vec3 color = vec3(0.0);
color = mix(colorA, colorB, pct);
gl_FragColor = vec4(color,1);
}
</script>
<div id="container"></div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/104/three.min.js"></script>
Something like this, with additional mixing of the colors by a sine function of time:
#ifdef GL_ES
precision mediump float;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
vec3 colorA = vec3(0.905,0.045,0.045);
vec3 colorB = vec3(0.995,0.705,0.051);
void main() {
vec2 st = gl_FragCoord.xy/u_resolution.xy;
float sinF = sin(u_time) * 0.5 + 0.5;
vec3 colorTop = mix(colorA, colorB, sinF);
vec3 colorBottom = mix(colorB, colorA, sinF);
vec3 pct = vec3(st.y);
vec3 color = vec3(0.0);
color = mix(colorTop, colorBottom, pct);
gl_FragColor = vec4(color,1);
}
I am struggling to set up a day-night cycle with a directional light in an Earth model using custom shaders. The night and day maps as well as the light are fine as long as I do not touch the camera, i.e., the Earth rotates while the light source remains still, and nights and days are updated correctly. However, when I rotate the camera using the mouse, the light appears to follow the camera, so you always see an illuminated part of the Earth.
This is how I set the light source:
var light = new THREE.DirectionalLight(0xffffff, 1);
light.position.set(5,3,5);
scene.add(light);
This is how I pass the parameters to the shader:
uniforms_earth = {
sunPosition: { type: "v3", value: light.position },
dayTexture: { type: "t", value: THREE.ImageUtils.loadTexture( "daymap.jpg" ) },
nightTexture: { type: "t", value: THREE.ImageUtils.loadTexture( "images/nightmap.jpg" ) }
};
This is the vertex shader:
varying vec2 v_Uv;
varying vec3 v_Normal;
uniform vec3 sunPosition;
varying vec3 v_vertToLight;
void main() {
v_Uv = uv;
v_Normal = normalMatrix * normal;
vec4 worldPosition = modelViewMatrix * vec4(position, 1.0);
v_vertToLight = normalize(sunPosition - worldPosition.xyz);
gl_Position = projectionMatrix * worldPosition;
}
And this the fragment shader:
uniform sampler2D dayTexture;
uniform sampler2D nightTexture;
varying vec2 v_Uv;
varying vec3 v_Normal;
varying vec3 v_vertToLight;
void main( void ) {
vec3 dayColor = texture2D(dayTexture, v_Uv).rgb;
vec3 nightColor = texture2D(nightTexture, v_Uv).rgb;
vec3 fragToLight = normalize(v_vertToLight);
float cosineAngleSunToNormal = dot(normalize(v_Normal), fragToLight);
cosineAngleSunToNormal = clamp(cosineAngleSunToNormal * 10.0, -1.0, 1.0);
float mixAmount = cosineAngleSunToNormal * 0.5 + 0.5;
vec3 color = mix(nightColor, dayColor, mixAmount);
gl_FragColor = vec4( color, 1.0 );
}
Finally, I use the THREE library for the camera controls:
var controls = new THREE.TrackballControls(camera);
And I update the Earth rotation inside the render function as:
function render() {
controls.update();
earth.rotation.y += rotation_speed;
requestAnimationFrame(render);
renderer.render(scene, camera);
}
I have already tried to change how I compute v_vertToLight so that both the vertex and the light position are in the same world as:
v_vertToLight = normalize((modelViewMatrix*vec4(sunPosition, 1.0)).xyz - worldPosition.xyz);
This stops the light from moving when I change the camera, but then, the night-day shadow remains always in the exact same place as the light appears to start rotating with the Earth itself.
I feel that I am close to solving this, so any hint or help would be much appreciated. Thank you for your time.
What you call worldPosition is not a position in world space, it is a position in view space. Rename the misnamed variable:
vec4 worldPosition = modelViewMatrix * vec4(position, 1.0);
vec4 viewPosition = modelViewMatrix * vec4(position, 1.0);
sunPosition is a position in world space. It has to be transformed to view space before it can be used to calculate the view-space light vector. This has to be done with the viewMatrix rather than the modelViewMatrix. Note that the modelViewMatrix transforms from model space to view space, while the viewMatrix transforms from world space to view space (see three.js - WebGLProgram):
vec4 viewSunPos = viewMatrix * vec4(sunPosition, 1.0);
v_vertToLight = normalize(viewSunPos.xyz - viewPosition.xyz);
Note that v_vertToLight and v_Normal both have to be either view-space vectors or world-space vectors; they have to be in the same reference system. Otherwise it would not make sense to calculate the dot product of the two vectors.
Vertex shader:
varying vec2 v_Uv;
varying vec3 v_Normal;
uniform vec3 sunPosition;
varying vec3 v_vertToLight;
void main() {
vec4 viewPosition = modelViewMatrix * vec4(position, 1.0);
vec4 viewSunPos = viewMatrix * vec4(sunPosition, 1.0);
v_Uv = uv;
v_Normal = normalMatrix * normal;
v_vertToLight = normalize(viewSunPos.xyz - viewPosition.xyz);
gl_Position = projectionMatrix * viewPosition;
}
See the very simple example, which uses the vertex shader:
(function onLoad() {
var loader, camera, scene, renderer, orbitControls, mesh;
init();
animate();
function init() {
renderer = new THREE.WebGLRenderer({
antialias: true,
alpha: true
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.shadowMap.enabled = true;
document.body.appendChild(renderer.domElement);
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 1, 100);
camera.position.set(0, 1, -4);
//camera.lookAt( -1, 0, 0 );
loader = new THREE.TextureLoader();
loader.setCrossOrigin("");
scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
scene.add(camera);
window.onresize = resize;
var ambientLight = new THREE.AmbientLight(0x404040);
scene.add(ambientLight);
var directionalLight = new THREE.DirectionalLight( 0xffffff, 0.5 );
directionalLight.position.set(1,2,1.5);
scene.add( directionalLight );
orbitControls = new THREE.OrbitControls(camera, renderer.domElement);
addGridHelper();
createModel();
}
function createModel() {
var uniforms = {
u_time : {type:'f', value:0.0},
u_resolution: {type: 'v2', value: {x:2048.,y:1024.}},
u_color : {type: 'v3', value: {x:1.0, y:0.0, z:0.0} },
sunPosition : {type: 'v3', value: {x:5.0, y:5.0, z:5.0} }
};
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent,
});
var geometry = new THREE.BoxGeometry( 1, 1, 1 );
mesh = new THREE.Mesh(geometry, material);
mesh.position.set(0, 0, -1);
scene.add(mesh);
}
function addGridHelper() {
var helper = new THREE.GridHelper(100, 100);
helper.material.opacity = 0.25;
helper.material.transparent = true;
scene.add(helper);
var axis = new THREE.AxesHelper(1000);
scene.add(axis);
}
function resize() {
var aspect = window.innerWidth / window.innerHeight;
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = aspect;
camera.updateProjectionMatrix();
}
function animate() {
requestAnimationFrame(animate);
orbitControls.update();
render();
}
function render() {
mesh.rotation.y += 0.01;
renderer.render(scene, camera);
}
})();
<script src="https://cdn.jsdelivr.net/npm/three#0.131/build/three.js"></script>
<script src="https://cdn.jsdelivr.net/npm/three#0.131/examples/js/controls/OrbitControls.js"></script>
<script type='x-shader/x-vertex' id='vertex-shader'>
varying vec2 v_Uv;
varying vec3 v_Normal;
uniform vec3 sunPosition;
varying vec3 v_vertToLight;
void main() {
vec4 viewPosition = modelViewMatrix * vec4(position, 1.0);
vec4 viewSunPos = viewMatrix * vec4(sunPosition, 1.0);
v_Uv = uv;
v_Normal = normalMatrix * normal;
v_vertToLight = normalize(viewSunPos.xyz - viewPosition.xyz);
gl_Position = projectionMatrix * viewPosition;
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
precision highp float;
uniform float u_time;
uniform vec2 u_resolution;
varying vec2 v_Uv;
varying vec3 v_Normal;
varying vec3 v_vertToLight;
uniform vec3 u_color;
void main(){
float kd = max(0.0, dot(v_vertToLight, v_Normal));
gl_FragColor = vec4(u_color.rgb * kd + 0.1, 1.0);
}
</script>
I have a classic mesh composed of a THREE.PlaneGeometry and a material. If I add a THREE.MeshNormalMaterial(), here's the result I get:
So far, so good. But when I use my THREE.ShaderMaterial() with an external texture, the dimensions of my mesh completely change:
I always get that weird ratio even if, like in the screenshot, my texture is a square (512x512). I just want my ShaderMaterial to fit inside my geometry.
Here is the code of my ShaderMaterial:
var material = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
} );
var mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
I don't see what I'm missing. Does anyone have an idea? Thank you very much.
UPDATE:
Here's the full code of my ShaderMaterial:
material = new THREE.ShaderMaterial({
uniforms:{
u_time: { type: "f", value: 1.0 },
u_resolution: { type: "v2", value: new THREE.Vector2() },
u_mouse: { type: "v2", value: new THREE.Vector2() },
texture1: { type: "t", value: texture }
},
vertexShader:`
void main() {
gl_Position = vec4( position, 1.0 );
}
`,
fragmentShader:`
#ifdef GL_ES
precision highp float;
precision highp int;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
uniform sampler2D texture1;
void main(){
float pyt=3.1415926*2./3.;
float m=-1e10;
vec4 mv= vec4(0.);
vec2 xy = gl_FragCoord.xy/u_resolution.xy;
int ic=0;
for (int i=0;i<30;i++){
vec2 np=vec2(xy.x+float(i)/u_resolution.x*sin(3.14/2.) * 4.,xy.y+float(i)/u_resolution.y*cos(3.14/2.) * 4.);
float jTime = u_time*1.618;
vec4 tk=texture2D(texture1,np);
float t=tk.r*sin(jTime)+tk.g*sin(jTime+pyt)+tk.b*sin(jTime+2.*pyt)-.01*float(i);
if (t>m){m=t; mv=tk;ic=i;}
}
float sc=float(ic)/30.;
vec4 tk=texture2D(texture1,xy);
mv=sc*tk+(1.-sc)*mv;
gl_FragColor = vec4(mv.r,mv.g,mv.b,1.0);
}
`
});
UPDATE2 :
I changed my vertex shader, but nothing has changed.
I might have a lead: I think this is related to my camera settings. I changed them and got a better result. Now my texture fits into my square mesh.
Unfortunately, the scale isn't right. Since my texture is a square too, I want it to have exactly the same size as my mesh; for now it's zoomed in.
How can I manage the size of my texture? Should I do it inside my vertexShader?
Here are my texture settings for now:
texture = new THREE.TextureLoader().load( "test5.jpg");
texture.wrapS = THREE.RepeatWrapping;
texture.wrapT = THREE.RepeatWrapping;
UPDATE 3:
I found this code to apply a texture and make it fit my mesh:
https://bl.ocks.org/duhaime/c8375f1c313587ac629e04e0253481f9
It works, but as soon as I replace the example fragment shader with mine, I get no errors, yet the shader renders a single flat color. I don't understand what I'm missing...
Try this code for the vertex shader:
void main() {
gl_Position = projectionMatrix *
modelViewMatrix *
vec4(position,1.0);
}
Reference
Simply pass the uv coordinates from the vertex shader to the fragment shader and use them there.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.set(0, 0, 5);
camera.lookAt(scene.position);
var renderer = new THREE.WebGLRenderer();
renderer.setClearColor(0x404040);
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var iResolution = new THREE.Vector2();
var planeGeo = new THREE.PlaneBufferGeometry(5, 5);
var planeMat = new THREE.ShaderMaterial({
uniforms: {
texture: {
value: null
},
iResolution: {
value: iResolution
},
iTime: {
value: 0
}
},
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix *
modelViewMatrix *
vec4(position,1.0);
}
`,
fragmentShader: `
uniform sampler2D texture;
uniform float iTime;
uniform vec2 iResolution;
varying vec2 vUv;
void main() {
float pyt=3.1415926*2./3.;
float m=-1e10;//very negative start value for the maximisation algorithm
vec4 mv= vec4(0.);//blank starting value of the max so far
vec2 xy = vUv;
int ic=0;//stores smear distance
for (int i=0;i<30;i++){
//point offset on a circle
vec2 np=vec2(xy.x+float(i)/iResolution.x*sin(iTime),xy.y+float(i)/iResolution.y*cos(iTime));
//colour cycles faster than position
float jTime = iTime*1.618;
//get nearby point
vec4 tk=texture2D(texture,np);
// and if it's colourful enough, use that
float t=tk.r*sin(jTime)+tk.g*sin(jTime+pyt)+tk.b*sin(jTime+2.*pyt)-.01*float(i);
if (t>m){m=t; mv=tk;ic=i;}
}
//mix smeared with background depending on distance
float sc=float(ic)/30.;
vec4 tk=texture2D(texture,xy);
mv=sc*tk+(1.-sc)*mv;
gl_FragColor = vec4(mv.rgb,1.0);
}
`
});
var textureLoader = new THREE.TextureLoader();
textureLoader.load("https://threejs.org/examples/textures/UV_Grid_Sm.jpg", tex => {
planeMat.uniforms.texture.value = tex;
planeMat.uniforms.texture.value.needsUpdate = true;
iResolution.set(tex.image.width, tex.image.height);
planeMat.needsUpdate = true;
console.log(texture);
});
var plane = new THREE.Mesh(planeGeo, planeMat);
scene.add(plane);
var clock = new THREE.Clock();
var time = 0;
render();
function render() {
requestAnimationFrame(render);
time += clock.getDelta();
planeMat.uniforms.iTime.value = time;
renderer.render(scene, camera);
}
body {
overflow: hidden;
margin: 0;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/96/three.min.js"></script>