I'm having a problem with a fragment shader, as you can see in the image below.
I don't want to render the parts that are shown in red in the image.
I have two passes, one culling clockwise and the other anticlockwise,
and I take the dot product of the normal with the camera position:
if it's less than 0 I output a transparent fragment,
otherwise I discard the fragment.
Here is the fragment shader:
#version 100
precision mediump float;

varying float lightDiffuse;

void main()
{
    float light = (1.0 - lightDiffuse) * 0.5;
    vec3 lightColor = vec3(0.0, 1.0, 1.0);
    vec3 diffuseColor = lightColor * light;

    vec4 c;
    if (lightDiffuse < 0.0)
    {
        // back faces, opaque
        // front faces, very transparent
        c = vec4(diffuseColor, 0.2);
    }
    else
    {
        discard;
    }
    gl_FragColor = c;
}
And here is the vertex shader:
#version 100
#define lowp
#define mediump
#define highp

attribute vec4 vertex;
attribute vec3 normal;

uniform mat4 normalMatrix;
uniform mat4 modelViewProjectionMatrix;
uniform mat4 modelView;
uniform vec3 camera_world_position;

varying highp float lightDiffuse;

void main()
{
    gl_Position = modelViewProjectionMatrix * vertex;
    vec3 norm = normal;
    norm *= -1.0;
    lightDiffuse = dot(normalize(vec3(norm.x, norm.y, norm.z)), normalize(camera_world_position));
}
Here is the material script:
material 44267_1508405690_0##carpaint
{
    technique
    {
        pass
        {
            cull_hardware anticlockwise
            scene_blend alpha_blend

            vertex_program_ref char_shader2_vs100
            {
                param_named_auto modelViewProjectionMatrix worldviewproj_matrix
                param_named_auto normalMatrix inverse_transpose_world_matrix
                param_named_auto modelView worldview_matrix
                param_named_auto camera_world_position camera_position
            }

            fragment_program_ref char_shader2_fs100
            {
            }
        }

        pass
        {
            cull_hardware clockwise

            vertex_program_ref char_shader2_vs100
            {
                param_named_auto modelViewProjectionMatrix worldviewproj_matrix
                param_named_auto normalMatrix inverse_transpose_world_matrix
                param_named_auto modelView worldview_matrix
                param_named_auto camera_world_position camera_position
            }

            fragment_program_ref char_shader2_fs100
            {
            }
        }
    }
}
If you want to cull occluded triangles, allocate a depth buffer and use a depth test; that's what it exists for.
Don't do facing tests in the fragment shader; that's unnecessary and horribly expensive. Just enable GL_CULL_FACE and set the front face correctly. Note that not all models downloaded off the internet get the winding right, so you might have an input model with broken winding for some triangles.
I'd like to add a second crate texture to the shader using the shader/technique provided with this example:
https://threejs.org/examples/#webgl_buffergeometry_instancing_dynamic
I figured I could pass in another uniform to add a second map.
crateMaterial = new THREE.RawShaderMaterial( {
    uniforms: {
        map: { value: new THREE.TextureLoader().load( './img/textures/crate/crate.gif' ) },
        map2: { value: new THREE.TextureLoader().load( './img/textures/crate2/crate2.gif' ) }
    },
    // vertexShader / fragmentShader omitted (see example shader code below)
} );
However, I'm struggling to figure out how to "tag" specific crates and then have the shader draw vertices with the correct texture, as my experience and skill with GLSL are quite limited.
Could I just pass in another uniform consisting of (vertex) indices to specify where the shader should apply the second texture? E.g.:
crateMaterial.uniforms.cratesTexturemap = [];
for (i = 0; i < cratesToRender; i++) {
    /* set position */
    ...
    this._instancePositions.push( position.x, position.y, position.z );
    if (drawCrate2) {
        crateMaterial.uniforms.cratesTexturemap.push(i);     /* correlates to (vertex) position index */
        crateMaterial.uniforms.cratesTexturemap.push(i + 1);
        crateMaterial.uniforms.cratesTexturemap.push(i + 2);
    }
    ...
}
Also, performance/memory-wise, is it better to pass a (dynamic) array of textures to the shader, or is passing them one by one (a uniform value for every texture, as above) more advantageous?
Example shader code for reference:
<script id="vertexShader" type="x-shader/x-vertex">
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
attribute vec3 position;
attribute vec3 offset;
attribute vec2 uv;
attribute vec4 orientation;
varying vec2 vUv;
// http://www.geeks3d.com/20141201/how-to-rotate-a-vertex-by-a-quaternion-in-glsl/
vec3 applyQuaternionToVector( vec4 q, vec3 v ){
return v + 2.0 * cross( q.xyz, cross( q.xyz, v ) + q.w * v );
}
void main() {
vec3 vPosition = applyQuaternionToVector( orientation, position );
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( offset + vPosition, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
precision highp float;
uniform sampler2D map;
varying vec2 vUv;
void main() {
gl_FragColor = texture2D( map, vUv );
}
</script>
Can't you just pass an array of textures and choose which one to apply using an index passed in an int uniform?
So for every crate you could do
crate.material.uniforms.index.value = 2
to choose the third texture in the array.
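On the GLSL side, a minimal sketch of that idea could look like the fragment shader below (assuming the two samplers keep the map and map2 uniform names from the question, plus a hypothetical mapIndex int uniform as the selector). Note that GLSL ES 1.00 / WebGL 1 only allows constant indices into arrays of samplers, so an explicit branch stands in for a real array lookup:
precision highp float;

uniform sampler2D map;   // first crate texture
uniform sampler2D map2;  // second crate texture
uniform int mapIndex;    // 0 selects map, 1 selects map2 (hypothetical uniform)

varying vec2 vUv;

void main() {
    // WebGL 1 cannot index an array of samplers with a dynamic value,
    // so the texture is chosen with a branch instead.
    if (mapIndex == 1) {
        gl_FragColor = texture2D( map2, vUv );
    } else {
        gl_FragColor = texture2D( map, vUv );
    }
}
Keep in mind that a uniform is constant for a whole draw call, so with instancing every crate drawn in that call gets the same texture; per-crate variation within a single instanced draw would need a per-instance attribute instead.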
I have the following shader with two passes and scene blend, but I see the faces marked in black in the picture.
How would I maintain transparency and not see these faces?
The material does two passes: one culls faces anticlockwise and the other culls clockwise.
The shader does a dot product between the normal and the camera position and then modulates the transparency based on that result.
#version 100
precision highp int;
precision highp float;

uniform float time;
uniform float touchX;
uniform float touchY;
uniform float touchZ;
uniform float line;

varying float lightDiffuse;

void main()
{
    float rampLight = lightDiffuse;
    float light = (1.0 - rampLight) * 1.0;
    vec4 lightColor = vec4(1.0, 1.0, 1.0, 1.0);
    vec4 diffuseColor = lightColor * light;
    vec4 c;

    if (rampLight < 0.0)
    {
        discard;
    }

    diffuseColor = smoothstep(vec4(0.0, 0.0, 0.0, 0.0), vec4(0.8, 0.8, 0.8, 0.8), vec4(diffuseColor));
    gl_FragColor = diffuseColor;
}
material Router
{
    technique
    {
        pass
        {
            scene_blend alpha_blend
            depth_write on
            depth_check on
            cull_hardware anticlockwise

            vertex_program_ref movingline_101_vs
            {
            }

            fragment_program_ref movingline_101_fs
            {
            }
        }

        pass
        {
            scene_blend alpha_blend
            cull_hardware clockwise
            depth_write on
            depth_check on

            vertex_program_ref movingline_101_vs
            {
            }

            fragment_program_ref movingline_101_fs
            {
            }
        }
    }
}
Update:
material Router
{
    technique
    {
        pass
        {
            depth_write on

            vertex_program_ref pass_101_vs
            {
            }

            fragment_program_ref pass_101_fs
            {
            }
        }

        pass
        {
            depth_write off
            depth_func equal
            scene_blend add

            vertex_program_ref movingline_101_vs
            {
            }

            fragment_program_ref movingline_101_fs
            {
            }
        }
    }
}
Pass shader:
void main()
{
    gl_FragColor = vec4(0.0, 0.0, 0.0, 0.0);
}
Main shader:
#version 120
precision highp int;
precision highp float;

uniform float time;
uniform float touchX;
uniform float touchY;
uniform float touchZ;
uniform float line;

varying float lightDiffuse;

void main()
{
    float rampLight = lightDiffuse;
    float light = (1.0 - rampLight) * 1.0;
    vec4 lightColor = vec4(1.0, 1.0, 1.0, 1.0);
    vec4 diffuseColor = lightColor * light;
    diffuseColor = smoothstep(vec4(0.0, 0.0, 0.0, 0.0), vec4(0.9, 0.9, 0.9, 0.9), vec4(diffuseColor));
    gl_FragColor = diffuseColor;
}
If you're just trying to render something with transparency, but without being able to see the faces which would be occluded if it was opaque, then a common simple technique is to prime the depth buffer first.
Pass 1: Write to just the depth buffer. The easiest way to achieve this is to render with blending on and a shader that just outputs vec4(0.0, 0.0, 0.0, 0.0) for every fragment. There may be more efficient ways to prime the depth buffer (e.g. using glColorMask instead of blending), but this approach is simple and probably good enough.
Pass 2: Render with a depth test of GL_EQUAL. Use whatever shader you like.
Performance note: on mobile hardware, discard is often slower than just outputting vec4(0.0, 0.0, 0.0, 0.0) for pixels you want to be transparent, so avoid discard unless you really need it; in this case you don't need it.
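As a small illustration of that note, the discard branch in the first fragment shader above could instead write a fully transparent colour and return early (a sketch only; unlike discard, this still updates the depth buffer when depth writes are enabled):
if (rampLight < 0.0)
{
    // Fully transparent output: alpha blending drops the colour
    // contribution without the cost of discard.
    gl_FragColor = vec4(0.0, 0.0, 0.0, 0.0);
    return;
}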
I want to see a wireframe of an object without the diagonals, like in the image below.
Currently I add lines based on the vertices; the problem is that after I have several of those, I see major performance degradation.
The examples here are either too new for my version of Three or don't work (I commented there about it).
So I want to try to implement a shader instead.
I tried to use this shader: https://stackoverflow.com/a/31610464/4279201 but it breaks the shape into parts and I'm getting WebGL errors.
This is how I use it:
const vertexShader = `
    varying vec2 vUv;

    void main() {
        vUv = uv;
        gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
    }
`
const fragmentShader = `
    #version 150 compatibility

    flat in float diffuse;
    flat in float specular;
    flat in vec3 edge_mask;
    in vec2 bary;

    uniform float mesh_width = 1.0;
    uniform vec3 mesh_color = vec3(0.0, 0.0, 0.0);
    uniform bool lighting = true;

    out vec4 frag_color;

    float edge_factor(){
        vec3 bary3 = vec3(bary.x, bary.y, 1.0 - bary.x - bary.y);
        vec3 d = fwidth(bary3);
        vec3 a3 = smoothstep(vec3(0.0, 0.0, 0.0), d * mesh_width, bary3);
        a3 = vec3(1.0, 1.0, 1.0) - edge_mask + edge_mask * a3;
        return min(min(a3.x, a3.y), a3.z);
    }

    void main() {
        float s = (lighting && gl_FrontFacing) ? 1.0 : -1.0;
        vec4 Kdiff = gl_FrontFacing ?
            gl_FrontMaterial.diffuse : gl_BackMaterial.diffuse;
        float sdiffuse = s * diffuse;
        vec4 result = vec4(0.1, 0.1, 0.1, 1.0);
        if (sdiffuse > 0.0) {
            result += sdiffuse * Kdiff +
                specular * gl_FrontMaterial.specular;
        }
        frag_color = (mesh_width != 0.0) ?
            mix(vec4(mesh_color, 1.0), result, edge_factor()) :
            result;
    }`
...
const uniforms = {
    color: {
        value: new THREE.Vector4(0, 0, 1, 1),
        type: 'v4'
    }
}

const material = new THREE.ShaderMaterial({
    fragmentShader: data.fragmentShader,
    vertexShader: data.vertexShader,
    uniforms
})

this._viewer.impl.matman().addMaterial(
    data.name, material, true)

const fragList = this._viewer.model.getFragmentList()

this.toArray(fragIds).forEach((fragId) => {
    fragList.setMaterial(fragId, material)
})
So to implement this shader, would the right approach basically be to check the angle between every two vertices and draw a line only if the angle is 90 degrees?
How can I get access to all the vertices of the shape from the vertex shader?
And how can I tell the fragment shader to draw a line between two vertices that match the above condition (while leaving the default shading for everything else as is)?
I'm using the Autodesk viewer, which uses Three.js rev 71.
// -- Vertex Shader --
precision mediump float;

// Input from buffers
attribute vec3 aPosition;
attribute vec2 aBaryCoord;

// Value interpolated across pixels and passed to the fragment shader
varying vec2 vBaryCoord;

// Uniforms
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;

void main() {
    vBaryCoord = aBaryCoord;
    gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(aPosition, 1.0);
}
// ---------------------

// -- Fragment Shader --
// This shader doesn't perform any lighting
// fwidth() needs the standard derivatives extension under WebGL 1
#extension GL_OES_standard_derivatives : enable
precision mediump float;

varying vec2 vBaryCoord;
uniform vec3 uMeshColour;

float edgeFactor() {
    // Reconstruct the third barycentric coordinate, then fade the edge
    // over roughly 1.5 pixels using screen-space derivatives.
    vec3 bary3 = vec3(vBaryCoord, 1.0 - vBaryCoord.x - vBaryCoord.y);
    vec3 d = fwidth(bary3);
    vec3 a3 = smoothstep(vec3(0.0, 0.0, 0.0), d * 1.5, bary3);
    return min(min(a3.x, a3.y), a3.z);
}

void main() {
    gl_FragColor = vec4(uMeshColour, (1.0 - edgeFactor()) * 0.95);
}
// ---------------------

/*
    This code isn't tested so take it with a grain of salt
    Idea taken from
    http://codeflow.org/entries/2012/aug/02/easy-wireframe-display-with-barycentric-coordinates/
*/
I am trying to render an object with two lights, one of which casts shadows. Everything works OK, but I noticed some obvious artifacts, as shown in the image below: some shadows seem to overflow into bright areas.
Below are the shaders that render depth information into a framebuffer:
<script id="shadow-shader-vertex" type="x-shader/x-vertex">
attribute vec4 aVertexPosition;
uniform mat4 uObjMVP;
void main() {
gl_Position = uObjMVP * aVertexPosition;
}
</script>
<script id="shadow-shader-fragment" type="x-shader/x-vertex">
precision mediump float;
void main() {
//pack gl_FragCoord.z
const vec4 bitShift = vec4(1.0, 256.0, 256.0 * 256.0, 256.0 * 256.0 * 256.0);
const vec4 bitMask = vec4(1.0/256.0, 1.0/256.0, 1.0/256.0, 0.0);
vec4 rgbaDepth = fract(gl_FragCoord.z * bitShift);
rgbaDepth -= rgbaDepth.gbaa * bitMask;
gl_FragColor = rgbaDepth;
}
</script>
In the above shaders, uObjMVP is the MVP matrix used when looking from the position of the light that casts shadows (the warm light; the cold light does not cast shadows).
And here are the shaders to draw everything:
<script id="shader-vertex" type="x-shader/x-vertex">
//position of a vertex.
attribute vec4 aVertexPosition;
//vertex normal.
attribute vec3 aNormal;
//mvp matrix
uniform mat4 uObjMVP;
uniform mat3 uNormalMV;
//shadow mvp matrix
uniform mat4 uShadowMVP;
//interplate normals
varying vec3 vNormal;
//for shadow calculation
varying vec4 vShadowPositionFromLight;
void main() {
gl_Position = uObjMVP * aVertexPosition;
//convert normal direction from object space to view space
vNormal = uNormalMV * aNormal;
vShadowPositionFromLight = uShadowMVP * aVertexPosition;
}
</script>
<script id="shader-fragment" type="x-shader/x-fragment">
precision mediump float;
uniform sampler2D uShadowMap;
varying vec3 vNormal;
varying vec4 vShadowPositionFromLight;
struct baseColor {
vec3 ambient;
vec3 diffuse;
};
struct directLight {
vec3 direction;
vec3 color;
};
baseColor mysObjBaseColor = baseColor(
vec3(1.0, 1.0, 1.0),
vec3(1.0, 1.0, 1.0)
);
directLight warmLight = directLight(
normalize(vec3(-83.064, -1.99, -173.467)),
vec3(0.831, 0.976, 0.243)
);
directLight coldLight = directLight(
normalize(vec3(37.889, 47.864, -207.187)),
vec3(0.196, 0.361, 0.608)
);
vec3 ambientLightColor = vec3(0.3, 0.3, 0.3);
float unpackDepth(const in vec4 rgbaDepth) {
const vec4 bitShift = vec4(1.0, 1.0/256.0, 1.0/(256.0*256.0), 1.0/(256.0*256.0*256.0));
float depth = dot(rgbaDepth, bitShift);
return depth;
}
float calVisibility() {
vec3 shadowCoord = (vShadowPositionFromLight.xyz/vShadowPositionFromLight.w)/2.0 + 0.5;
float depth = unpackDepth(texture2D(uShadowMap, shadowCoord.xy));
return (shadowCoord.z > depth + 0.005) ? 0.4 : 1.0;
}
vec3 calAmbientLight(){
return ambientLightColor * mysObjBaseColor.ambient;
}
vec3 calDiffuseLight(const in directLight light, const in float visibility){
vec3 inverseLightDir = light.direction * -1.0;
float dot = max(dot(inverseLightDir, normalize(vNormal)), 0.0);
return light.color * mysObjBaseColor.diffuse * dot * visibility;
}
void main() {
vec3 ambientLight = calAmbientLight();
float visibility = calVisibility();
vec3 warmDiffuseLight = calDiffuseLight(warmLight, visibility);
// cold light does not cast shadow and hence visilibility is always 1.0
vec3 coldDiffuseLight = calDiffuseLight(coldLight, 1.0);
gl_FragColor = vec4(coldDiffuseLight + warmDiffuseLight + ambientLight, 1.0);
}
</script>
If I simply draw the depth information onto the canvas,
void main() {
    // vec3 ambientLight = calAmbientLight();
    // float visibility = calVisibility();
    // vec3 warmDiffuseLight = calDiffuseLight(warmLight, visibility);
    // // cold light does not cast shadows, hence visibility is always 1.0
    // vec3 coldDiffuseLight = calDiffuseLight(coldLight, 1.0);
    // gl_FragColor = vec4(coldDiffuseLight + warmDiffuseLight + ambientLight, 1.0);
    vec3 shadowCoord = (vShadowPositionFromLight.xyz / vShadowPositionFromLight.w) / 2.0 + 0.5;
    gl_FragColor = vec4(unpackDepth(texture2D(uShadowMap, shadowCoord.xy)), 0.0, 0.0, 1.0);
}
I would get this image:
Thanks in advance.
Vertex shader:
varying vec4 diffuseColor;
uniform mat4 modelViewProj;
uniform mat4 modelViewTranspose;
attribute vec3 streamNormal;
attribute vec3 streamPosition;

void main() {
    gl_Position = modelViewProj * streamPosition;
    diffuseColor = max(0, dot(normalize(modelViewTranspose * streamNormal), vec3(0,0,1)) * vec4(1,0,0,1);
}
Pixel shader:
varying vec4 diffuseColor;

void main() {
    gl_FragColor = diffuseColor;
}
The matrices are left-handed (row major); modelViewProj is calculated in C++ as world * view * projection, and modelViewTranspose = transpose(inverse(world * view)).
The render result is not right: I cannot see anything, but if I always set gl_FragColor to vec4(1,1,1,1), the model renders with no problem. Does my code have errors, or is there some other problem?
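For reference, here is a sketch of a version of the vertex shader that should at least compile, assuming streamPosition and streamNormal are vec3 attributes, modelViewTranspose is intended as a normal matrix, and the red colour scale belongs outside the max(); the original multiplies a mat4 by a vec3, passes an integer literal to max(), and is missing a closing parenthesis:
varying vec4 diffuseColor;
uniform mat4 modelViewProj;
uniform mat4 modelViewTranspose;
attribute vec3 streamNormal;
attribute vec3 streamPosition;

void main() {
    // A mat4 cannot be multiplied by a vec3 directly; extend the position to a vec4.
    gl_Position = modelViewProj * vec4(streamPosition, 1.0);

    // Extend the normal with w = 0.0 so translation is ignored, use a float
    // literal in max(), and close the parenthesis around the dot product.
    vec3 n = normalize((modelViewTranspose * vec4(streamNormal, 0.0)).xyz);
    diffuseColor = max(dot(n, vec3(0.0, 0.0, 1.0)), 0.0) * vec4(1.0, 0.0, 0.0, 1.0);
}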