Three.js ShaderMaterial lighting not working

I'm experimenting with Three.js ShaderMaterial and trying to implement lighting.
I have working code for r70, but the same code (with minor changes - the MAX_POINT_LIGHTS constant has been renamed to NUM_POINT_LIGHTS) doesn't work in r76.
Looking at a trace in WebGL Inspector, it's clear that no light data is being sent to the shader. So, has lighting broken, or do I need to set up something else to get it to work?
Using r70 (working)
http://codepen.io/anon/pen/KzjXNr?editors=1010
Fragment Shader
uniform vec3 diffuse;
varying vec3 vPos;
varying vec3 vNormal;
uniform vec3 pointLightColor[MAX_POINT_LIGHTS];
uniform vec3 pointLightPosition[MAX_POINT_LIGHTS];
uniform float pointLightDistance[MAX_POINT_LIGHTS];

void main() {
    vec4 addedLights = vec4(0.1, 0.1, 0.1, 1.0);
    for (int l = 0; l < MAX_POINT_LIGHTS; l++) {
        vec3 lightDirection = normalize(vPos - pointLightPosition[l]);
        addedLights.rgb += clamp(dot(-lightDirection, vNormal), 0.0, 1.0) * pointLightColor[l];
    }
    gl_FragColor = addedLights;
}
JavaScript - setting up the ShaderMaterial with UniformsUtils and UniformsLib
var uniforms = THREE.UniformsUtils.merge([
    THREE.UniformsLib['lights'],
    { diffuse: { type: 'c', value: new THREE.Color(0xff00ff) } }
]);

var vertexShader = document.getElementById('vertexShader').text;
var fragmentShader = document.getElementById('fragmentShader').text;

material = new THREE.ShaderMaterial({
    uniforms: uniforms,
    vertexShader: vertexShader,
    fragmentShader: fragmentShader,
    lights: true
});

var geometry = new THREE.BoxGeometry(200, 200, 200);
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
Using r76 (futzed)
http://codepen.io/anon/pen/ZWdXLZ?editors=1010
Fragment Shader
uniform vec3 diffuse;
varying vec3 vPos;
varying vec3 vNormal;
uniform vec3 pointLightColor[NUM_POINT_LIGHTS];
uniform vec3 pointLightPosition[NUM_POINT_LIGHTS];
uniform float pointLightDistance[NUM_POINT_LIGHTS];

void main() {
    vec4 addedLights = vec4(0.1, 0.1, 0.1, 1.0);
    for (int l = 0; l < NUM_POINT_LIGHTS; l++) {
        vec3 lightDirection = normalize(vPos - pointLightPosition[l]);
        addedLights.rgb += clamp(dot(-lightDirection, vNormal), 0.0, 1.0) * pointLightColor[l];
    }
    gl_FragColor = addedLights;
}
JavaScript
Unchanged

The light uniforms were changed to structs in r74 (specifically in #7324). Note that though the change to structs happened in r74, the following works in r75 and later.
A single array of structs is given for each light type. Each struct has the position and color properties you need.
Fragment Shader
uniform vec3 diffuse;
varying vec3 vPos;
varying vec3 vNormal;

struct PointLight {
    vec3 position;
    vec3 color;
};
uniform PointLight pointLights[ NUM_POINT_LIGHTS ];

void main() {
    vec4 addedLights = vec4(0.1, 0.1, 0.1, 1.0);
    for (int l = 0; l < NUM_POINT_LIGHTS; l++) {
        vec3 adjustedLight = pointLights[l].position + cameraPosition;
        vec3 lightDirection = normalize(vPos - adjustedLight);
        addedLights.rgb += clamp(dot(-lightDirection, vNormal), 0.0, 1.0) * pointLights[l].color;
    }
    gl_FragColor = addedLights; //mix(vec4(diffuse.x, diffuse.y, diffuse.z, 1.0), addedLights, addedLights);
}
Note that the light position is now relative to the camera, so you offset the light position with the camera position.
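The JavaScript side really is unchanged - merging THREE.UniformsLib['lights'] and setting lights: true is still what populates these structs. A minimal sketch, carried over from the r70 code above:
var uniforms = THREE.UniformsUtils.merge([
    THREE.UniformsLib['lights'],
    { diffuse: { type: 'c', value: new THREE.Color(0xff00ff) } }
]);

material = new THREE.ShaderMaterial({
    uniforms: uniforms,
    vertexShader: vertexShader,       // unchanged
    fragmentShader: fragmentShader,   // the struct-based shader above
    lights: true                      // without this, no light data is uploaded
});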
Working Fiddle

Related

How to add opacity map to ShaderMaterial

I've applied a ShaderMaterial to a glb model that has an opacity map (the model is a human body, and the opacity map is used to create hair and eyelashes); the reference for the model material was this -
As you can see, the material is some sort of glow effect. I managed to find This Example, which is pretty much what I need. The problem is that I can't figure out how to apply the model's opacity map. If you look closely at the difference between my result (left picture) and the right picture, you'll see that the hair doesn't look as it should, since the opacity map is not applied... I wonder whether ShaderMaterial is right for this look, or should I use another kind of shader?
Here is my material code -
let m = new THREE.MeshStandardMaterial({
    roughness: 0.25,
    metalness: 0.75,
    opacity: 0.3,
    map: new THREE.TextureLoader().load(
        "/maps/opacity.jpg",
        (tex) => {
            tex.wrapS = THREE.RepeatWrapping;
            tex.wrapT = THREE.RepeatWrapping;
            tex.repeat.set(16, 1);
        }
    ),
    onBeforeCompile: (shader) => {
        shader.uniforms.s = uniforms.s;
        shader.uniforms.b = uniforms.b;
        shader.uniforms.p = uniforms.p;
        shader.uniforms.glowColor = uniforms.glowColor;
        shader.vertexShader = document.getElementById("vertexShader").textContent;
        shader.fragmentShader = document.getElementById("fragmentShader").textContent;
        shader.side = THREE.FrontSide;
        shader.transparent = true;
        // shader.uniforms['alphaMap'].value.needsUpdate = true;
        console.log(shader.vertexShader);
        console.log(shader.fragmentShader);
    },
});
Shader setting:
<script id="vertexShader" type="x-shader/x-vertex">
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
vNormal = normalize( normalMatrix * normal ); // 转换到视图空间
vPositionNormal = normalize(( modelViewMatrix * vec4(position, 1.0) ).xyz);
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<!-- fragment shader a.k.a. pixel shader -->
<script id="fragmentShader" type="x-shader/x-vertex">
uniform vec3 glowColor;
uniform float b;
uniform float p;
uniform float s;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
gl_FragColor = vec4( mix(vec3(0), glowColor, a), 1. );
}
</script>
You're creating a MeshStandardMaterial, but then you're overriding all its shader code when you assign new vertex and fragment shaders, making the Standard material useless. You should stick to ShaderMaterial like the demo you linked. It would make your code cleaner:
// Get shader code
let vertShader = document.getElementById("vertexShader").textContent;
let fragShader = document.getElementById("fragmentShader").textContent;

// Build texture
let alphaTex = new THREE.TextureLoader().load("/maps/opacity.jpg");
alphaTex.wrapS = THREE.RepeatWrapping;
alphaTex.wrapT = THREE.RepeatWrapping;
// alphaTex.repeat.set(16, 1); <- repeat won't work in a custom shader

// Build material
let m = new THREE.ShaderMaterial({
    transparent: true,
    // side: THREE.FrontSide, <- this is already the default. Not needed
    uniforms: {
        s: { value: 1 },
        b: { value: 2 },
        p: { value: 3 },
        alphaMap: { value: alphaTex },
        glowColor: { value: new THREE.Color(0x0099ff) },
        // we create a Vec2 to manually handle repeat
        repeat: { value: new THREE.Vector2(16, 1) }
    },
    vertexShader: vertShader,
    fragmentShader: fragShader
});
This builds your material in a cleaner way, since you're using ShaderMaterial's native setup without having to override anything. Then, you can sample the alphaMap texture in your fragment shader:
uniform float s;
uniform float b;
uniform float p;
uniform vec3 glowColor;
uniform vec2 repeat;
// Declare the alphaMap uniform if we're gonna use it
uniform sampler2D alphaMap;
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;

void main()
{
    float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
    // Sample map with UV coordinates. Multiply by uniform to get repeat
    float a2 = texture2D(alphaMap, vUv * repeat).r;
    // Combine both alphas
    float opacity = a * a2;
    gl_FragColor = vec4( mix(vec3(0), glowColor, opacity), 1. );
}
Also, don't forget to carry over the UVs from your vertex shader:
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;

void main()
{
    // convert uv attribute to vUv varying
    vUv = uv;
    vNormal = normalize( normalMatrix * normal ); // convert to view space
    vPositionNormal = normalize(( modelViewMatrix * vec4(position, 1.0) ).xyz);
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
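A side benefit of routing the repeat through a uniform: it can be changed at runtime without rebuilding the material. A small sketch (the uniform names match the material defined above):
// Uniform values are live; the next rendered frame picks these up.
m.uniforms.repeat.value.set(8, 1);        // retile the alpha map on the fly
m.uniforms.glowColor.value.set(0xff0000); // swap the glow colour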
Update
The error
'=' : cannot convert from 'lowp 4-component vector of float' to 'highp float'
means I made a mistake when taking the texture2D() sample in the fragment shader. It should have been texture2D().r so we only read the red channel to get a float instead of cramming all RGBA channels (yielding a vec4) into a float. See the following snippet for the final result:
var container, scene, camera, renderer, controls, torusKnot;

init();

function init() {
    initBase();
    initObject();
    render();
}

function initBase() {
    container = document.getElementById('ThreeJS');
    // SCENE
    scene = new THREE.Scene();
    // CAMERA
    var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight;
    var VIEW_ANGLE = 45, ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT, NEAR = 0.1, FAR = 20000;
    camera = new THREE.PerspectiveCamera(VIEW_ANGLE, ASPECT, NEAR, FAR);
    camera.position.set(0, 0, 50);
    camera.lookAt(scene.position);
    // RENDERER
    renderer = new THREE.WebGLRenderer({ antialias: true });
    renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    renderer.setClearColor(0x333333);
    container.appendChild(renderer.domElement);
    // CONTROLS
    controls = new THREE.OrbitControls(camera, renderer.domElement);
    // Resize
    window.addEventListener("resize", onWindowResize);
}

function onWindowResize() {
    var w = window.innerWidth;
    var h = window.innerHeight;
    renderer.setSize(w, h);
    camera.aspect = w / h;
    camera.updateProjectionMatrix();
}

function initObject() {
    let vertShader = document.getElementById("vertexShader").textContent;
    let fragShader = document.getElementById("fragmentShader").textContent;
    // Build texture
    let alphaTex = new THREE.TextureLoader().load("https://threejs.org/examples/textures/floors/FloorsCheckerboard_S_Diffuse.jpg");
    alphaTex.wrapS = THREE.RepeatWrapping;
    alphaTex.wrapT = THREE.RepeatWrapping;
    var customMaterial = new THREE.ShaderMaterial({
        uniforms: {
            s: { value: -1 },
            b: { value: 1 },
            p: { value: 2 },
            alphaMap: { value: alphaTex },
            glowColor: { value: new THREE.Color(0x00ffff) },
            // we create a Vec2 to manually handle repeat
            repeat: { value: new THREE.Vector2(16, 1) }
        },
        vertexShader: vertShader,
        fragmentShader: fragShader
    });
    var geometry = new THREE.TorusKnotBufferGeometry(10, 3, 100, 32);
    torusKnot = new THREE.Mesh(geometry, customMaterial);
    scene.add(torusKnot);
}

function render() {
    torusKnot.rotation.y += 0.01;
    renderer.render(scene, camera);
    requestAnimationFrame(render);
}
body {
    overflow: hidden;
    margin: 0;
}
<script src="https://threejs.org/build/three.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<!-- vertex shader -->
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
// convert uv attribute to vUv varying
vUv = uv;
vNormal = normalize( normalMatrix * normal ); // 转换到视图空间
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
vPositionNormal = normalize(( mvPosition ).xyz);
gl_Position = projectionMatrix * mvPosition;
}
</script>
<!-- fragment shader a.k.a. pixel shader -->
<script id="fragmentShader" type="x-shader/x-vertex">
uniform float s;
uniform float b;
uniform float p;
uniform vec3 glowColor;
uniform vec2 repeat;
// Declare the alphaMap uniform if we're gonna use it
uniform sampler2D alphaMap;
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
// Sample map with UV coordinates. Multiply by uniform to get repeat
float a2 = texture2D(alphaMap, vUv * repeat).r;
// Combine both alphas
float opacity = a * a2;
gl_FragColor = vec4( mix(vec3(0), glowColor, opacity), 1. );
}
</script>
<div id="ThreeJS" style="position: absolute; left:0px; top:0px"></div>

Storing data as a texture for use in Vertex Shader for Instanced Geometry (THREE JS / GLSL)

I'm using a THREE.InstancedBufferGeometry, and I wish to access data in the Vertex Shader, encoded into a Texture.
What I want to do is create a Data Texture with one pixel per instance, which will store position data for each instance (then, at a later stage, I can update the texture using a simulation with a flow field to animate the positions).
I'm struggling to access the data from the texture in the Vertex Shader.
const INSTANCES_COUNT = 5000;

// FOR EVERY INSTANCE, GIVE IT A RANDOM X, Y, Z OFFSET, AND SAVE IT IN A DATA TEXTURE
const data = new Uint8Array(4 * INSTANCES_COUNT);
for (let i = 0; i < INSTANCES_COUNT; i++) {
    const stride = i * 4;
    data[stride] = (Math.random() - 0.5);
    data[stride + 1] = (Math.random() - 0.5);
    data[stride + 2] = (Math.random() - 0.5);
    data[stride + 3] = 0.0;
}
const offsetTexture = new THREE.DataTexture( data, INSTANCES_COUNT, 1, THREE.RGBAFormat, THREE.FloatType );
offsetTexture.minFilter = THREE.NearestFilter;
offsetTexture.magFilter = THREE.NearestFilter;
offsetTexture.generateMipmaps = false;
offsetTexture.needsUpdate = true;

// CREATE MY INSTANCED GEOMETRY
const geometry = new THREE.InstancedBufferGeometry();
geometry.maxInstancedCount = INSTANCES_COUNT;
geometry.addAttribute( 'position', new THREE.Float32BufferAttribute([5, -5, 0, -5, 5, 0, 0, 0, 5], 3 )); // SIMPLE TRIANGLE
const vertexShader = `
    precision highp float;

    uniform vec3 color;
    uniform sampler2D offsetTexture;
    uniform mat4 modelViewMatrix;
    uniform mat4 projectionMatrix;

    attribute vec3 position;

    varying vec3 vPosition;
    varying vec3 vColor;

    void main(){
        vPosition = position;
        vec4 orientation = vec4(.0, .0, .0, .0);
        vec3 vcV = cross( orientation.xyz, vPosition );
        vPosition = vcV * ( 2.0 * orientation.w ) + ( cross( orientation.xyz, vcV ) * 2.0 + vPosition );
        vec2 uv = position.xy;
        vec4 data = texture2D( offsetTexture, uv );
        vec3 particlePosition = data.xyz * 1000.0;
        gl_Position = projectionMatrix * modelViewMatrix * vec4( vPosition + particlePosition, 1.0 );
    }
`;

const fragmentShader = `
    precision highp float;

    varying vec3 vColor;

    void main() {
        gl_FragColor = vec4(vColor, 1.0);
    }
`;
const uniforms = {
    size: { value: 1.0 },
    color: {
        type: 'c',
        value: new THREE.Color(0x3db230),
    },
    offsetTexture: {
        type: 't',
        value: offsetTexture,
    },
};

// CREATE MY MATERIAL
const material = new THREE.RawShaderMaterial({
    uniforms,
    vertexShader,
    fragmentShader,
    side: THREE.DoubleSide,
    transparent: false,
});

scene.add(new THREE.Mesh(geometry, material));
At the moment it seems that the data from the texture isn't accessible in the vertex shader (if I just set the uv to vec2(1.0, 0.0), for example, and change the offset positions, nothing changes), and I'm also not sure how to go about making sure that each instance references the correct texel in the texture.
So, my two issues are:
1) How to correctly set the Data Image Texture, and access that data in the Vertex Shader
2) How to correctly reference the texel storing the data for each particular instance (e.g., instance 1000 should use vec2(1000, 1), etc.)
Also, do I have to normalize the data (0.0-1.0, or 0-255, or -1 to +1)?
Thanks
You need to compute some kind of index into the texture per instance.
Meaning, you need an attribute whose value is shared by all the vertices of a given instance.
If your triangle is
[a,b,c]
your index should be
[0,0,0]
Let's say you have 1024 instances and a 1024x1 px texture:
attribute float aIndex;

vec2 myIndex = vec2((aIndex + 0.5) / 1024.0, 0.5);
vec4 myRes = texture2D( mySampler, myIndex );
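On the JavaScript side, that per-instance index can be supplied as an InstancedBufferAttribute holding one float per instance. A rough sketch under the same assumptions (1024 instances; addAttribute as in the question's version of three.js):
// One value per instance; an InstancedBufferAttribute advances once
// per instance rather than once per vertex.
const indices = new Float32Array(1024);
for (let i = 0; i < 1024; i++) {
    indices[i] = i;
}
geometry.addAttribute('aIndex', new THREE.InstancedBufferAttribute(indices, 1));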

Per instance UV texture mapping in Three.js InstancedBufferGeometry

I have an InstancedBufferGeometry made up of a single Plane:
const plane = new THREE.PlaneBufferGeometry(100, 100, 1, 1);
const geometry = new THREE.InstancedBufferGeometry();
geometry.maxInstancedCount = 100;
geometry.attributes.position = plane.attributes.position;
geometry.index = plane.index;
geometry.attributes.uv = plane.attributes.uv;
geometry.addAttribute( 'offset', new THREE.InstancedBufferAttribute( new Float32Array( offsets ), 3 ) ); // an offset position
I am applying a texture to each plane, which is working as expected, however I wish to apply a different region of the texture to each instance, and I'm not sure about the correct approach.
At the moment I have tried to build up uv's per instance, based on the structure of the uv's for a single plane:
let uvs = [];
for (let i = 0; i < 100; i++) {
    const tl = [0, 1];
    const tr = [1, 1];
    const bl = [0, 0];
    const br = [1, 0];
    uvs = uvs.concat(tl, tr, bl, br);
}
...
geometry.addAttribute( 'uv', new THREE.InstancedBufferAttribute( new Float32Array( uvs ), 2) );
When I do this, I don't have any errors, but every instance is just a single colour (all instances are the same colour). I have tried changing the instance size, and also the meshes per attribute (which I don't fully understand; I'm struggling to find a good explanation in the docs).
I feel like I'm close, but I'm missing something, so a point in the right direction would be fantastic!
(For reference, here are my shaders):
const vertexShader = `
    precision mediump float;

    uniform vec3 color;
    uniform sampler2D tPositions;
    uniform mat4 modelViewMatrix;
    uniform mat4 projectionMatrix;

    attribute vec2 uv;
    attribute vec2 dataUv;
    attribute vec3 position;
    attribute vec3 offset;
    attribute vec3 particlePosition;
    attribute vec4 orientationStart;
    attribute vec4 orientationEnd;

    varying vec3 vPosition;
    varying vec3 vColor;
    varying vec2 vUv;

    void main(){
        vPosition = position;
        vec4 orientation = normalize( orientationStart );
        vec3 vcV = cross( orientation.xyz, vPosition );
        vPosition = vcV * ( 2.0 * orientation.w ) + ( cross( orientation.xyz, vcV ) * 2.0 + vPosition );
        vec4 data = texture2D( tPositions, vec2(dataUv.x, 0.0));
        vec3 particlePosition = (data.xyz - 0.5) * 1000.0;
        vUv = uv;
        vColor = data.xyz;
        gl_Position = projectionMatrix * modelViewMatrix * vec4( position + particlePosition + offset, 1.0 );
    }
`;

const fragmentShader = `
    precision mediump float;

    uniform sampler2D map;

    varying vec3 vPosition;
    varying vec3 vColor;
    varying vec2 vUv;

    void main() {
        vec3 color = texture2D(map, vUv).xyz;
        gl_FragColor = vec4(color, 1.0);
    }
`;
As all my instances need to sample the same-size rectangular area, just offset (like a sprite sheet), I have added a UV offset and UV scale attribute to each instance, and use these to define which area of the map to use:
const uvOffsets = [];
for (let i = 0; i < INSTANCES; i++) {
    const u = i % textureWidthHeight;
    const v = ~~ (i / textureWidthHeight);
    uvOffsets.push(u, v);
}
...
geometry.attributes.uv = plane.attributes.uv;
geometry.addAttribute( 'uvOffsets', new THREE.InstancedBufferAttribute( new Float32Array( uvOffsets ), 2 ) );

uniforms: {
    ...
    uUvScale: { value: 1 / textureWidthHeight }
}
And in the fragment shader:
void main() {
    vec4 color = texture2D(map, (vUv * uUvScale) + (vUvOffsets * uUvScale));
    gl_FragColor = vec4(1.0, 1.0, 1.0, color.a);
}
\o/
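For completeness, the vUvOffsets varying sampled above has to be fed from the vertex shader. A minimal sketch of the relevant vertex shader (attribute names match the uvOffsets and offset attributes added to the geometry):
precision mediump float;

uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;

attribute vec3 position;
attribute vec3 offset;
attribute vec2 uv;
attribute vec2 uvOffsets;

varying vec2 vUv;
varying vec2 vUvOffsets;

void main() {
    vUv = uv;
    // Hand the per-instance offset through to the fragment shader
    vUvOffsets = uvOffsets;
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position + offset, 1.0 );
}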

Shader wireframe of an object

I want to see a wireframe of an object without the diagonals, like this:
Currently I add lines according to the vertices; the problem is that after I have several of those, I experience major performance degradation.
The examples here are either too new for my version of Three or don't work (I commented there about it).
So I want to try to implement a shader instead.
I tried to use this shader: https://stackoverflow.com/a/31610464/4279201 but it breaks the shape into parts and I'm getting WebGL errors.
That's how I use it:
const vertexShader = `
    varying vec2 vUv;
    void main() {
        vUv = uv;
        gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
    }
`
const fragmentShader = `
    #version 150 compatibility
    flat in float diffuse;
    flat in float specular;
    flat in vec3 edge_mask;
    in vec2 bary;
    uniform float mesh_width = 1.0;
    uniform vec3 mesh_color = vec3(0.0, 0.0, 0.0);
    uniform bool lighting = true;
    out vec4 frag_color;

    float edge_factor(){
        vec3 bary3 = vec3(bary.x, bary.y, 1.0 - bary.x - bary.y);
        vec3 d = fwidth(bary3);
        vec3 a3 = smoothstep(vec3(0.0, 0.0, 0.0), d * mesh_width, bary3);
        a3 = vec3(1.0, 1.0, 1.0) - edge_mask + edge_mask * a3;
        return min(min(a3.x, a3.y), a3.z);
    }

    void main() {
        float s = (lighting && gl_FrontFacing) ? 1.0 : -1.0;
        vec4 Kdiff = gl_FrontFacing ? gl_FrontMaterial.diffuse : gl_BackMaterial.diffuse;
        float sdiffuse = s * diffuse;
        vec4 result = vec4(0.1, 0.1, 0.1, 1.0);
        if (sdiffuse > 0.0) {
            result += sdiffuse * Kdiff + specular * gl_FrontMaterial.specular;
        }
        frag_color = (mesh_width != 0.0) ? mix(vec4(mesh_color, 1.0), result, edge_factor()) : result;
    }`
...
const uniforms = {
    color: {
        value: new THREE.Vector4(0, 0, 1, 1),
        type: 'v4'
    }
}

const material = new THREE.ShaderMaterial({
    fragmentShader: data.fragmentShader,
    vertexShader: data.vertexShader,
    uniforms
})

this._viewer.impl.matman().addMaterial(
    data.name, material, true)

const fragList = this._viewer.model.getFragmentList()

this.toArray(fragIds).forEach((fragId) => {
    fragList.setMaterial(fragId, material)
})
So to implement this shader, would the right approach be to basically check the angle between every two vertices, and draw a line if the angle is 90 degrees?
How can I access all the vertices of the shape from the vertex shader?
And how can I tell the fragment shader to draw a line between two vertices that match the above condition (while leaving the default shading for everything else as is)?
I'm using Autodesk viewer that uses Three.js rev 71.
// -- Vertex Shader --
precision mediump float;

// Input from buffers
attribute vec3 aPosition;
attribute vec3 aBaryCoord;

// Value interpolated across pixels and passed to the fragment shader
varying vec3 vBaryCoord;

// Uniforms
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;

void main() {
    vBaryCoord = aBaryCoord;
    gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(aPosition, 1.0);
}
// ---------------------

// -- Fragment Shader --
// This shader doesn't perform any lighting
// fwidth() needs the standard-derivatives extension in WebGL 1
#extension GL_OES_standard_derivatives : enable
precision mediump float;

varying vec3 vBaryCoord;

uniform vec3 uMeshColour;

float edgeFactor() {
    vec3 d = fwidth(vBaryCoord);
    vec3 a3 = smoothstep(vec3(0.0, 0.0, 0.0), d * 1.5, vBaryCoord);
    return min(min(a3.x, a3.y), a3.z);
}

void main() {
    gl_FragColor = vec4(uMeshColour, (1.0 - edgeFactor()) * 0.95);
}
// ---------------------

/*
    This code isn't tested so take it with a grain of salt
    Idea taken from
    http://codeflow.org/entries/2012/aug/02/easy-wireframe-display-with-barycentric-coordinates/
*/
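To feed aBaryCoord, each triangle's three vertices get the corners (1,0,0), (0,1,0) and (0,0,1), which requires non-indexed geometry so triangles don't share vertices. A rough sketch in the same untested spirit (assumes geometry is a non-indexed THREE.BufferGeometry; addAttribute matches the rev 71 API the asker is on):
// Tag every triangle's vertices with the three barycentric corners.
var vertexCount = geometry.attributes.position.array.length / 3; // non-indexed: 3 per triangle
var bary = new Float32Array(vertexCount * 3);
for (var i = 0; i < vertexCount / 3; i++) {
    bary.set([1, 0, 0,  0, 1, 0,  0, 0, 1], i * 9);
}
geometry.addAttribute('aBaryCoord', new THREE.BufferAttribute(bary, 3));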

Shadow artifacts in opengl

I am trying to render an object and two lights; one of the lights casts shadows. Everything works OK, but I noticed that there are some obvious artifacts, as shown in the image below: some shadows seem to overflow into bright areas.
Below are the shaders used to render the depth information into a framebuffer:
<script id="shadow-shader-vertex" type="x-shader/x-vertex">
attribute vec4 aVertexPosition;
uniform mat4 uObjMVP;
void main() {
gl_Position = uObjMVP * aVertexPosition;
}
</script>
<script id="shadow-shader-fragment" type="x-shader/x-vertex">
precision mediump float;
void main() {
//pack gl_FragCoord.z
const vec4 bitShift = vec4(1.0, 256.0, 256.0 * 256.0, 256.0 * 256.0 * 256.0);
const vec4 bitMask = vec4(1.0/256.0, 1.0/256.0, 1.0/256.0, 0.0);
vec4 rgbaDepth = fract(gl_FragCoord.z * bitShift);
rgbaDepth -= rgbaDepth.gbaa * bitMask;
gl_FragColor = rgbaDepth;
}
</script>
In the above shaders, uObjMVP is the MVP matrix used when looking from the position of the light that casts shadows (the warm light; the cold light does not cast shadows).
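For context, a light-space MVP like this uObjMVP is typically built by forming view and projection matrices from the light's point of view and multiplying in the model matrix. A hedged sketch with glMatrix (the question doesn't show its matrix library; lightPos, sceneCenter and modelMatrix are assumed names):
// View from the light; an orthographic projection suits a directional light.
var lightView = mat4.create();
mat4.lookAt(lightView, lightPos, sceneCenter, [0, 1, 0]);
var lightProj = mat4.create();
mat4.ortho(lightProj, -100, 100, -100, 100, 1, 500);
var lightVP = mat4.create();
mat4.multiply(lightVP, lightProj, lightView);
var shadowMVP = mat4.create();
mat4.multiply(shadowMVP, lightVP, modelMatrix); // upload as uObjMVP in the shadow pass and uShadowMVP in the main pass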
And here are the shaders to draw everything:
<script id="shader-vertex" type="x-shader/x-vertex">
//position of a vertex.
attribute vec4 aVertexPosition;
//vertex normal.
attribute vec3 aNormal;
//mvp matrix
uniform mat4 uObjMVP;
uniform mat3 uNormalMV;
//shadow mvp matrix
uniform mat4 uShadowMVP;
//interplate normals
varying vec3 vNormal;
//for shadow calculation
varying vec4 vShadowPositionFromLight;
void main() {
gl_Position = uObjMVP * aVertexPosition;
//convert normal direction from object space to view space
vNormal = uNormalMV * aNormal;
vShadowPositionFromLight = uShadowMVP * aVertexPosition;
}
</script>
<script id="shader-fragment" type="x-shader/x-fragment">
precision mediump float;
uniform sampler2D uShadowMap;
varying vec3 vNormal;
varying vec4 vShadowPositionFromLight;
struct baseColor {
vec3 ambient;
vec3 diffuse;
};
struct directLight {
vec3 direction;
vec3 color;
};
baseColor mysObjBaseColor = baseColor(
vec3(1.0, 1.0, 1.0),
vec3(1.0, 1.0, 1.0)
);
directLight warmLight = directLight(
normalize(vec3(-83.064, -1.99, -173.467)),
vec3(0.831, 0.976, 0.243)
);
directLight coldLight = directLight(
normalize(vec3(37.889, 47.864, -207.187)),
vec3(0.196, 0.361, 0.608)
);
vec3 ambientLightColor = vec3(0.3, 0.3, 0.3);
float unpackDepth(const in vec4 rgbaDepth) {
const vec4 bitShift = vec4(1.0, 1.0/256.0, 1.0/(256.0*256.0), 1.0/(256.0*256.0*256.0));
float depth = dot(rgbaDepth, bitShift);
return depth;
}
float calVisibility() {
vec3 shadowCoord = (vShadowPositionFromLight.xyz/vShadowPositionFromLight.w)/2.0 + 0.5;
float depth = unpackDepth(texture2D(uShadowMap, shadowCoord.xy));
return (shadowCoord.z > depth + 0.005) ? 0.4 : 1.0;
}
vec3 calAmbientLight(){
return ambientLightColor * mysObjBaseColor.ambient;
}
vec3 calDiffuseLight(const in directLight light, const in float visibility){
vec3 inverseLightDir = light.direction * -1.0;
float dot = max(dot(inverseLightDir, normalize(vNormal)), 0.0);
return light.color * mysObjBaseColor.diffuse * dot * visibility;
}
void main() {
vec3 ambientLight = calAmbientLight();
float visibility = calVisibility();
vec3 warmDiffuseLight = calDiffuseLight(warmLight, visibility);
// cold light does not cast shadow and hence visilibility is always 1.0
vec3 coldDiffuseLight = calDiffuseLight(coldLight, 1.0);
gl_FragColor = vec4(coldDiffuseLight + warmDiffuseLight + ambientLight, 1.0);
}
</script>
If I simply draw the depth information out onto the canvas,
void main() {
    // vec3 ambientLight = calAmbientLight();
    // float visibility = calVisibility();
    // vec3 warmDiffuseLight = calDiffuseLight(warmLight, visibility);
    // // cold light does not cast shadows and hence visibility is always 1.0
    // vec3 coldDiffuseLight = calDiffuseLight(coldLight, 1.0);
    // gl_FragColor = vec4(coldDiffuseLight + warmDiffuseLight + ambientLight, 1.0);
    vec3 shadowCoord = (vShadowPositionFromLight.xyz / vShadowPositionFromLight.w) / 2.0 + 0.5;
    gl_FragColor = vec4(unpackDepth(texture2D(uShadowMap, shadowCoord.xy)), 0.0, 0.0, 1.0);
}
I would get this image
Thanks in advance.
