custom vertex color with threejs LineSegments() - opengl-es

I am struggling with threejs to use a shaderMaterial on a THREE.LineSegments
my goal is to have per vertex color, on a bunch of line segments:
I have a vertex and a fragment shader :
<script type="x-shader/x-vertex" id="vertexshader">
varying vec3 vColor;
attribute vec3 customColor;
void main() {
vColor = customColor;
}
</script>
<script type="x-shader/x-fragment" id="fragmentshader">
varying vec3 vColor;
void main() {
gl_FragColor = vec4(vColor, 1.0 );
}
</script>
then in the threejs scene , I create all the relevant stuff for the shader, and apply it to the object:
var customColors = new Float32Array(_count*2*3); // 2 vertex by segment *length(RGB)
for (var i = 0; i < customColors.length/3; i++) {
var ratio = i / (customColors.length/3.0);
customColors[(i*3)] = ratio;
customColors[(i*3)+1] = ratio;
customColors[(i*3)+2] = ratio;
};
geo.addAttribute('customColor' , new THREE.BufferAttribute(customColors,3));
var drawCount = _count * 2;
geo.setDrawRange( 0, drawCount );
var uniforms = {
opacity : 0.5
};
var customMaterial = new THREE.ShaderMaterial( {
uniforms : uniforms,
vertexShader : document.getElementById( 'vertexshader' ).textContent,
fragmentShader : document.getElementById( 'fragmentshader' ).textContent,
}
);
var lines = new THREE.LineSegments(geo, customMaterial);
I can't find where the problem is.
The line segments don't appear at all, not even in black.
All I have managed to do is to 'hard code' the color in the fragment shader ( which defeats the purpose evidently).
No error message whatsoever in the console.
Please help, what am I doing wrong here?

After struggling again , I finally can see some line segments with a custom vertex color.
It looks like I needed to give a value to the gl_Position variable (I thought it would be automatic, since I don't want to change the vertex position).
I added this line into main() of the vertexShader:
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);

Related

Three.js InstancedMesh: Shader is NOT changed after updating instance matrix

I am using InstancedMesh with a custom outline shader:
<script type="x-shader/x-vertex" id="vertexShader">
uniform float offset;
void main() {
vec4 pos = modelViewMatrix * vec4( position + normal * offset, 1.0 );
gl_Position = projectionMatrix * pos;
}
</script>
<script type="x-shader/x-fragment" id="fragmentShader">
uniform vec3 color;
uniform float intensity;
void main() {
gl_FragColor = vec4( color, intensity );
}
</script>
Then, I created shader material:
var uniforms = {
offset: { type: 'f', value: 0.001 },
color: { type: 'c', value: new THREE.Color(0x6B6B6B) },
intensity: { type: 'f', value: 1.0 },
};
var outlineMaterial = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
});
Then, I created an InstancedMesh with a box geometry and this material. However, when I update the instance matrix, the shader seems NOT to be affected.
var dummy = new THREE.Object3D();
var outlineGometry = new THREE.BoxGeometry(1, 1, 1);
var outline = new THREE.InstancedMesh(outlineGometry, outlineMaterial, 1);
dummy.position.set(0, 0, 0);
dummy.rotation.set(0, Math.PI/4, 0);
dummy.scale.set(0.1, 0.1, 0.1);
dummy.updateMatrix();
outline.setMatrixAt( 0, dummy.matrix );
outline.instanceMatrix.needsUpdate = true;
Please check the fiddle link below: I updated the instance rotation and scale, but shader is NOT changed.
Fiddle Link
Do you know how to solve this problem?
Thanks.
According to WestLangley's suggestion, I can use instanceMatrix in the vertex shader to solve the problem, based on the documentation: https://threejs.org/docs/#api/en/renderers/webgl/WebGLProgram

How to add opacity map to ShaderMaterial

I've applied ShaderMaterial to a glb model that has opacity map (the model is human body and the opacity map is used to create hair and eyelashes), the reference for the model material was this -
So as you can see - the material is some sort of glow effect, so I managed to find This Example which is pretty much what I need - the problem is that I can't figure out how to apply the model's opacity map - if you look closely at the difference between my result (left picture) and the right picture - you'll see that the hair doesn't look as it should - since the opacity map is not applied... I wonder whether ShaderMaterial is suitable for this look or whether I should use another kind of shader.
Here is my material code -
let m = new THREE.MeshStandardMaterial({
roughness: 0.25,
metalness: 0.75,
opacity: 0.3,
map: new THREE.TextureLoader().load(
"/maps/opacity.jpg",
(tex) => {
tex.wrapS = THREE.RepeatWrapping;
tex.wrapT = THREE.RepeatWrapping;
tex.repeat.set(16, 1);
}
),
onBeforeCompile: (shader) => {
shader.uniforms.s = uniforms.s;
shader.uniforms.b = uniforms.b;
shader.uniforms.p = uniforms.p;
shader.uniforms.glowColor = uniforms.glowColor;
shader.vertexShader = document.getElementById("vertexShader").textContent;
shader.fragmentShader = document.getElementById(
"fragmentShader"
).textContent;
shader.side = THREE.FrontSide;
shader.transparent = true;
// shader.uniforms['alphaMap'].value.needsUpdate = true;
console.log(shader.vertexShader);
console.log(shader.fragmentShader);
},
});
Shader setting:
<script id="vertexShader" type="x-shader/x-vertex">
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
vNormal = normalize( normalMatrix * normal ); // 转换到视图空间
vPositionNormal = normalize(( modelViewMatrix * vec4(position, 1.0) ).xyz);
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<!-- fragment shader a.k.a. pixel shader -->
<script id="fragmentShader" type="x-shader/x-vertex">
uniform vec3 glowColor;
uniform float b;
uniform float p;
uniform float s;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
gl_FragColor = vec4( mix(vec3(0), glowColor, a), 1. );
}
</script>
You're creating a MeshStandardMaterial, but then you're overriding all its shader code when you assign new vertex and fragment shaders, making the Standard material useless. You should stick to ShaderMaterial like the demo you linked. It would make your code cleaner:
// Get shader code
let vertShader = document.getElementById("vertexShader").textContent;
let fragShader = document.getElementById("fragmentShader").textContent;
// Build texture
let alphaTex = new THREE.TextureLoader().load("/maps/opacity.jpg");
alphaTex.wrapS = THREE.RepeatWrapping;
alphaTex.wrapT = THREE.RepeatWrapping;
// alphaTex.repeat.set(16, 1); <- repeat won't work in a custom shader
// Build material
let m = new THREE.ShaderMaterial({
transparent: true,
// side: THREE.FrontSide, <- this is already default. Not needed
uniforms: {
s: {value: 1},
b: {value: 2},
p: {value: 3},
alphaMap: {value: alphaTex},
glowColor: {value: new THREE.Color(0x0099ff)},
// we create a Vec2 to manually handle repeat
repeat: {value: new THREE.Vector2(16, 1)}
},
vertexShader: vertShader,
fragmentShader: fragShader
});
This helps you build your material in a cleaner way, since you're using its native build method without having to override anything. Then, you can sample the alphaMap texture in your fragment shader:
uniform float s;
uniform float b;
uniform float p;
uniform vec3 glowColor;
uniform vec2 repeat;
// Declare the alphaMap uniform if we're gonna use it
uniform sampler2D alphaMap;
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
// Sample map with UV coordinates. Multiply by uniform to get repeat
float a2 = texture2D(alphaMap, vUv * repeat).r;
// Combine both alphas
float opacity = a * a2;
gl_FragColor = vec4( mix(vec3(0), glowColor, opacity), 1. );
}
Also, don't forget to carry over the UVs from your vertex shader:
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
// convert uv attribute to vUv varying
vUv = uv;
vNormal = normalize( normalMatrix * normal ); // 转换到视图空间
vPositionNormal = normalize(( modelViewMatrix * vec4(position, 1.0) ).xyz);
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
Update
The error
'=' : cannot convert from 'lowp 4-component vector of float' to 'highp float'
means I made a mistake when taking the texture2D() sample in the fragment shader. It should have been texture2D().r so we only read the red channel to get a float instead of cramming all RGBA channels (yielding a vec4) into a float. See the following snippet for the final result:
var container, scene, camera, renderer, controls, torusKnot;
init()
function init() {
initBase()
initObject()
render()
}
function initBase () {
container = document.getElementById( 'ThreeJS' )
// SCENE
scene = new THREE.Scene();
// CAMERA
var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight
var VIEW_ANGLE = 45, ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT, NEAR = 0.1, FAR = 20000
camera = new THREE.PerspectiveCamera( VIEW_ANGLE, ASPECT, NEAR, FAR)
camera.position.set(0,0,50)
camera.lookAt(scene.position)
// RENDERER
renderer = new THREE.WebGLRenderer( {antialias:true} )
renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT)
renderer.setClearColor(0x333333)
container.appendChild( renderer.domElement )
// CONTROLS
controls = new THREE.OrbitControls( camera, renderer.domElement )
// Resize
window.addEventListener("resize", onWindowResize);
}
function onWindowResize() {
var w = window.innerWidth;
var h = window.innerHeight;
renderer.setSize(w, h);
camera.aspect = w / h;
camera.updateProjectionMatrix();
}
function initObject () {
let vertShader = document.getElementById("vertexShader").textContent;
let fragShader = document.getElementById("fragmentShader").textContent;
// Build texture
let alphaTex = new THREE.TextureLoader().load("https://threejs.org/examples/textures/floors/FloorsCheckerboard_S_Diffuse.jpg");
alphaTex.wrapS = THREE.RepeatWrapping;
alphaTex.wrapT = THREE.RepeatWrapping;
var customMaterial = new THREE.ShaderMaterial({
uniforms: {
s: {value: -1},
b: {value: 1},
p: {value: 2},
alphaMap: {value: alphaTex},
glowColor: {value: new THREE.Color(0x00ffff)},
// we create a Vec2 to manually handle repeat
repeat: {value: new THREE.Vector2(16, 1)}
},
vertexShader: vertShader,
fragmentShader: fragShader
})
var geometry = new THREE.TorusKnotBufferGeometry( 10, 3, 100, 32 )
torusKnot = new THREE.Mesh( geometry, customMaterial )
scene.add( torusKnot )
}
function render() {
torusKnot.rotation.y += 0.01;
renderer.render( scene, camera );
requestAnimationFrame(render);
}
body{
overflow: hidden;
margin: 0;
}
<script src="https://threejs.org/build/three.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<!-- vertex shader -->
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
// convert uv attribute to vUv varying
vUv = uv;
vNormal = normalize( normalMatrix * normal ); // 转换到视图空间
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
vPositionNormal = normalize(( mvPosition ).xyz);
gl_Position = projectionMatrix * mvPosition;
}
</script>
<!-- fragment shader a.k.a. pixel shader -->
<script id="fragmentShader" type="x-shader/x-vertex">
uniform float s;
uniform float b;
uniform float p;
uniform vec3 glowColor;
uniform vec2 repeat;
// Declare the alphaMap uniform if we're gonna use it
uniform sampler2D alphaMap;
// Don't forget to declare UV coordinates
varying vec2 vUv;
varying vec3 vNormal;
varying vec3 vPositionNormal;
void main()
{
float a = pow( b + s * abs(dot(vNormal, vPositionNormal)), p );
// Sample map with UV coordinates. Multiply by uniform to get repeat
float a2 = texture2D(alphaMap, vUv * repeat).r;
// Combine both alphas
float opacity = a * a2;
gl_FragColor = vec4( mix(vec3(0), glowColor, opacity), 1. );
}
</script>
<div id="ThreeJS" style="position: absolute; left:0px; top:0px"></div>

THREE.JS ShaderMaterial exceeds of my mesh

I have a classic mesh composed by a THREE.PlaneGeometry and a material. If I add a THREE.MeshNormalMaterial() here's the result I get :
So far, so good. But when I call my THREE.ShaderMaterial(), using an external texture, the dimension of my mesh completely changes :
I always get that weird ratio even if - like in the screenshot - my texture is a square (512x512). I just want my MaterialShader to fit inside my geometry.
Here is the code of my MaterialShader :
var material = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
} );
var mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
I don't see what I'm missing. Does anyone have an idea? Thank you very much.
UPDATE :
Here's the fully code of my ShaderMaterial :
material = new THREE.ShaderMaterial({
uniforms:{
u_time: { type: "f", value: 1.0 },
u_resolution: { type: "v2", value: new THREE.Vector2() },
u_mouse: { type: "v2", value: new THREE.Vector2() },
texture1: { type: "t", value: texture }
},
vertexShader:`
void main() {
gl_Position = vec4( position, 1.0 );
}
`,
fragmentShader:`
#ifdef GL_ES
precision highp float;
precision highp int;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
uniform sampler2D texture1;
void main(){
float pyt=3.1415926*2./3.;
float m=-1e10;
vec4 mv= vec4(0.);
vec2 xy = gl_FragCoord.xy/u_resolution.xy;
int ic=0;
for (int i=0;i<30;i++){
vec2 np=vec2(xy.x+float(i)/u_resolution.x*sin(3.14/2.) * 4.,xy.y+float(i)/u_resolution.y*cos(3.14/2.) * 4.);
float jTime = u_time*1.618;
vec4 tk=texture2D(texture1,np);
float t=tk.r*sin(jTime)+tk.g*sin(jTime+pyt)+tk.b*sin(jTime+2.*pyt)-.01*float(i);
if (t>m){m=t; mv=tk;ic=i;}
}
float sc=float(ic)/30.;
vec4 tk=texture2D(texture1,xy);
mv=sc*tk+(1.-sc)*mv;
gl_FragColor = vec4(mv.r,mv.g,mv.b,1.0);
}
`
});
UPDATE2 :
I changed my vertex shader but nothing has changed.
I might have a lead : I think this is related to my camera settings. I changed them and I've a better result. Now my texture fits into my square mesh.
Unfortunately, the scale isn't good. Since my texture is a square too, I want it to have exactly the same size as my mesh; for now it's zoomed.
How can I manage the size of my texture ? Should I do it inside my vertexShader ?
Here's my texture settings for now :
texture = new THREE.TextureLoader().load( "test5.jpg");
texture.wrapS = THREE.RepeatWrapping;
texture.wrapT = THREE.RepeatWrapping;
UPDATE 3 :
I found that code to apply a texture and make it fit to my mesh :
https://bl.ocks.org/duhaime/c8375f1c313587ac629e04e0253481f9
It's working, but as soon as I replace the example fragment shader with mine, I get no errors but the shader becomes one uniform color. I don't understand what I'm missing...
Try this code of the vertex shader:
void main() {
gl_Position = projectionMatrix *
modelViewMatrix *
vec4(position,1.0);
}
Reference
Simply pass the uv coordinates from the vertex shader to the fragment shader and use them there.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.set(0, 0, 5);
camera.lookAt(scene.position);
var renderer = new THREE.WebGLRenderer();
renderer.setClearColor(0x404040);
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var iResolution = new THREE.Vector2();
var planeGeo = new THREE.PlaneBufferGeometry(5, 5);
var planeMat = new THREE.ShaderMaterial({
uniforms: {
texture: {
value: null
},
iResolution: {
value: iResolution
},
iTime: {
value: 0
}
},
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix *
modelViewMatrix *
vec4(position,1.0);
}
`,
fragmentShader: `
uniform sampler2D texture;
uniform float iTime;
uniform vec2 iResolution;
varying vec2 vUv;
void main() {
float pyt=3.1415926*2./3.;
float m=-1e10;//very negative start value for maximisation algorithm.
vec4 mv= vec4(0.);//blank starting value of max so far
vec2 xy = vUv;
int ic=0;//stores smear distance
for (int i=0;i<30;i++){
//point offset on a circle
vec2 np=vec2(xy.x+float(i)/iResolution.x*sin(iTime),xy.y+float(i)/iResolution.y*cos(iTime));
//colour cycles faster than position
float jTime = iTime*1.618;
//get nearby point
vec4 tk=texture2D(texture,np);
// and if its colourfull enough, use that
float t=tk.r*sin(jTime)+tk.g*sin(jTime+pyt)+tk.b*sin(jTime+2.*pyt)-.01*float(i);
if (t>m){m=t; mv=tk;ic=i;}
}
//mix smeared with background depending on distance
float sc=float(ic)/30.;
vec4 tk=texture2D(texture,xy);
mv=sc*tk+(1.-sc)*mv;
gl_FragColor = vec4(mv.rgb,1.0);
}
`
});
var textureLoader = new THREE.TextureLoader();
textureLoader.load("https://threejs.org/examples/textures/UV_Grid_Sm.jpg", tex => {
planeMat.uniforms.texture.value = tex;
planeMat.uniforms.texture.value.needsUpdate = true;
iResolution.set(tex.image.width, tex.image.height);
planeMat.needsUpdate = true;
console.log(texture);
});
var plane = new THREE.Mesh(planeGeo, planeMat);
scene.add(plane);
var clock = new THREE.Clock();
var time = 0;
render();
function render() {
requestAnimationFrame(render);
time += clock.getDelta();
planeMat.uniforms.iTime.value = time;
renderer.render(scene, camera);
}
body {
overflow: hidden;
margin: 0;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/96/three.min.js"></script>

ThreeJS draggable CSS3DObject

Been using THREE JS CSS3DRenderer to render a panorama view based in this example.
I have added another CSS3DObject image which represents a GPS marker with the following code below:
var marker = document.createElement( 'img' );
marker.classList.add('gps_marker');
marker.src = 'gps_marker.jpg';
marker.width = 5;
marker.style.border = 'solid 1px red';
var object = new THREE.CSS3DObject( marker );
object.position.set( 0, 0, 30 );
scene.add( object );
Now I'd like to make the GPS markers draggable around the scene. DragControls doesn't seem to work with CSS3DObject. Any help would be very appreciated.
You need to use a "textured" point cloud (THREE.Points) object with a shader material. Use a uniform to pass the texture to the fragment shader. Not sure that DragControls will work with it (it will move all points at once if so). But this is IMHO the best way to draw icons. Example:
let uniforms = {
iconSize: {
value: self.iconSize
},
color: {
value: new THREE.Color(0xffffff)
},
texture: {
value: self.textureLoader.load(url, texture => {
texture.flipY = false;
texture.needsUpdate = true;
})
}
};
let material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: document.getElementById('point-cloud-vertex-shader').textContent,
fragmentShader: document.getElementById('point-cloud-fragment-shader').textContent,
side: THREE.DoubleSide,
transparent: true,
opacity: 0.7,
depthTest: false,
clipping: true,
clippingPlanes: self.clippingPlanes,
});
//... create THREE.Points object with this material
shaders:
<script type="x-shader/x-vertex" id="point-cloud-vertex-shader">
uniform vec3 color;
uniform float iconSize;
uniform sampler2D texture;
void main() {
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = iconSize;
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script type="x-shader/x-fragment" id="point-cloud-fragment-shader">
uniform vec3 color;
uniform float iconSize;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D( texture, gl_PointCoord );
}
</script>

How to maintain the glow effect of a json model rotating in three.js scene?

I add a json model with glow effect into the scene.
As follows:
I try to rotate the json model automatically.
However, it looks weird when it is rotating.
The glow effect of the model does not work.
I assume that the position of the json model is not changed when this model is rotating. As a result, the viewVector.value of the ShaderMaterial is constant while this model is rotating (I do not change the position of the camera).
if(jsonMesh){
jsonMesh.rotation.y += 0.1;
jsonMesh.material.uniforms.viewVector.value =
new THREE.Vector3().subVectors( camera.position, jsonMesh.position );
}
This is the Three.ShaderMaterial.
VertexShader and FragmentShader
<script id="vertexShader" type="x-shader/x-vertex">
uniform vec3 viewVector;
uniform float c;
uniform float p;
varying float intensity;
void main()
{
vec3 vNormal = normalize( normalMatrix * normal );
vec3 vNormel = normalize( normalMatrix * viewVector );
intensity = pow( c - dot(vNormal, vNormel), p );
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
uniform vec3 glowColor;
varying float intensity;
void main()
{
vec3 glow = glowColor * intensity;
gl_FragColor = vec4( glow, 1.0 );
}
</script>
Three.ShaderMaterial.
var customMaterial = new THREE.ShaderMaterial(
{
uniforms:
{
"c": { type: "f", value: 1.0 },
"p": { type: "f", value: 1.4 },
glowColor: { type: "c", value: new THREE.Color(0xffff00) },
viewVector: { type: "v3", value: camera.position }
},
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
side: THREE.FrontSide,
blending: THREE.AdditiveBlending,
transparent: true
}
);
How should I modify the code in this case?
Here is the Demo and source code.
You can use built in three.js functions for this. Instead of using the camera position, I chose to show you how to set a light source position in the world. That way you can match the light source on your custom shader to any light sources you plan to add later to your 3d world. Feel free to change the worldLightPoint value to camera.position instead of new THREE.Vector3(100,100,100). and in that case the effect will remain constant with the camera position.
var v = new THREE.Vector3();
//var worldLightPoint = camera.position;
var worldLightPoint = new THREE.Vector3(100,100,100);
function update()
{
controls.update();
stats.update();
if(jsonMesh){
jsonMesh.rotation.y += 0.1;
jsonMesh.material.uniforms.viewVector.value = jsonMesh.worldToLocal(v.copy(worldLightPoint));
}
}

Resources