I'm pretty new to Three.js, and 3D in general, so please bear with me!
I'm trying to make a THREE.Mirror and change its opacity. I would think this would work, but unfortunately it does not, nor does any other solution I've tried thus far.
// inside init, render has not been called yet
// create a THREE.Mirror and add it to the provided THREE.Scene
function createMirror( props ) {
    let { name, width, height, options, scene } = props;
    this[ name ] = new THREE.Mirror( width, height, options );
    scene.add( this[ name ] );
}
// create 3js mirror and add to scene
createMirror.call( this, {
    name: "Mirror_1",
    width: 1.75,
    height: 1.75,
    options: {
        clipBias: 0.01,
        textureWidth: this.width,
        textureHeight: this.height,
        color: 0xefefef
    },
    scene: this.Main_Scene
} );
this.Mirror_1.rotation.x = -Math.PI / 2;
this.Mirror_1.material.opacity = 0.1;
this.Mirror_1.material.transparent = true;
// render is called inside an ObjectLoader after objects are loaded, here is
// the render function
renderScene() {
    requestAnimationFrame( this.renderScene );
    this.P_Camera_1.lookAt( this.Main_Scene.position );
    this.controls.update();
    this.WebGL_Renderer_1.setViewport( 0, 0, this.width, this.height );
    this.WebGL_Renderer_1.render( this.Main_Scene, this.P_Camera_1 );
}
Everything else works fine. Objects are loaded, the scene renders beautifully, the mirror looks great... I just can't change its opacity. Any help would be greatly appreciated. If this is not enough context, please let me know and I can provide more.
P.S. This is inside a React application. render() { ... } is a method on the component controlling the scene.
EDIT: I edited the fragmentShader portion of the Mirror.js provided in the examples/js folder of the three npm package, written by @author Slayvin / http://slayvin.net.
var mirrorShader = {
    uniforms: {
        ...
    },
    vertexShader: [
        ...
    ].join( '\n' ),
    fragmentShader: [
        'uniform vec3 mirrorColor;',
        'uniform sampler2D mirrorSampler;',
        'varying vec4 mirrorCoord;',
        'float blendOverlay(float base, float blend) {',
        '    return( base < 0.5 ? ( 2.0 * base * blend ) : ( 1.0 - 2.0 * ( 1.0 - base ) * ( 1.0 - blend ) ) );',
        '}',
        'void main() {',
        '    vec4 color = texture2DProj(mirrorSampler, mirrorCoord);',
        // Changed the vec4(r, g, b, alpha) alpha from 1.0 to 0.25
        // No change in rendered THREE.Mirror
        '    color = vec4(blendOverlay(mirrorColor.r, color.r), blendOverlay(mirrorColor.g, color.g), blendOverlay(mirrorColor.b, color.b), 0.25);',
        '    gl_FragColor = color;',
        '}'
    ].join( '\n' )
}
I'm obviously missing something though!
For anyone wondering how to affect the opacity of a THREE.Mirror(), you need to do two things:
Your_Mirror.material.transparent = true; Make sure this is done before the scene is rendered, or before the mirror is added to the scene and the fragmentShader has applied its magic.
Change the alpha in the fragmentShader used by the mirror's material (Your_Mirror.material.fragmentShader).
Here is a solution I am using to change the opacity of a mirror dynamically.
const fragmentShaderBeforeAlpha = `
    uniform vec3 mirrorColor;
    uniform sampler2D mirrorSampler;
    varying vec4 mirrorCoord;
    float blendOverlay(float base, float blend) {
        return( base < 0.5 ? ( 2.0 * base * blend ) : ( 1.0 - 2.0 * ( 1.0 - base ) * ( 1.0 - blend ) ) );
    }
    void main() {
        vec4 color = texture2DProj(mirrorSampler, mirrorCoord);
        color = vec4(blendOverlay(mirrorColor.r, color.r), blendOverlay(mirrorColor.g, color.g), blendOverlay(mirrorColor.b, color.b),
`;
const fragmentShaderAfterAlpha = `
        );
        gl_FragColor = color;
    }
`;

export default function fragmentShader( alpha ) {
    return fragmentShaderBeforeAlpha + alpha + fragmentShaderAfterAlpha;
}
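For reference, a minimal sketch of applying that helper to the mirror from the question (the alpha value here is just an example, and the needsUpdate call is an assumption about how the material gets recompiled, not part of the original code):

// hypothetical usage of the fragmentShader( alpha ) helper above
this.Mirror_1.material.transparent = true;                       // must be set before the first render
this.Mirror_1.material.fragmentShader = fragmentShader( 0.25 );  // rebuild the shader source with the desired alpha
this.Mirror_1.material.needsUpdate = true;                       // ask three.js to recompile the material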
Add another geometry over the mirror and set transparency for it:
var mesh = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2000, 2000 ), new THREE.MeshLambertMaterial ( { opacity : 0.98 , transparent: true } ) );
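A minimal sketch of how that overlay could be placed, assuming a horizontal mirror like the one in the question (the scene variable and the small y offset are assumptions for illustration):

// reuse the transparent plane from the line above and lay it flat, just above the mirror
mesh.rotation.x = -Math.PI / 2; // same orientation as the mirror in the question
mesh.position.y = 0.01;         // nudge it up slightly so it does not z-fight with the mirror
scene.add( mesh );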
EDIT: The Forge Viewer I'm using includes a customized version of Three.js release r71 (source), which is why I'm using outdated code. The current release of Three.js is r121.
I've created a THREE.Group() that contains various THREE.PointCloud(geometry, material) objects. One of the point clouds is composed of a THREE.BufferGeometry() and a THREE.ShaderMaterial().
When I add a colour attribute to a BufferGeometry, only red (1,0,0), white (1,1,1), or yellow (1,1,0) seem to work. This image is when I set the colour to (1,0,0). This image is when I set the colour to blue (0,0,1).
My question is, how do I resolve this? Is the issue in the shaders? Is the issue with how I build the BufferGeometry? Is it a bug? Thanks.
My shaders:
var vShader = `uniform float size;
varying vec3 vColor;
void main() {
    vColor = color;
    vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
    gl_PointSize = size * ( size / ( length( mvPosition.xyz ) + 0.00001 ) );
    gl_Position = projectionMatrix * mvPosition;
}`
var fShader = `varying vec3 vColor;
uniform sampler2D sprite;
void main() {
    gl_FragColor = vec4( vColor, 1.0 ) * texture2D( sprite, gl_PointCoord );
    if ( gl_FragColor.x < 0.2 ) discard;
}`
My material:
var materialForBuffers = new THREE.ShaderMaterial( {
    uniforms: {
        size: { type: 'f', value: this.pointSize },
        sprite: { type: 't', value: THREE.ImageUtils.loadTexture( "../data/white.png" ) },
    },
    vertexShader: vShader,
    fragmentShader: fShader,
    transparent: true,
    vertexColors: true,
} );
How the color is added:
const colors = new Float32Array( [ 1.0, 0.0, 0.0 ] );
geometryForBuffers.addAttribute('color', new THREE.BufferAttribute( colors, 3 ));
Link to code
It looks like you may already be using parts of that sample code, but if not, please refer to https://github.com/petrbroz/forge-point-clouds/blob/develop/public/scripts/extensions/pointcloud.js (live demo: https://forge-point-clouds.autodesk.io). That sample already uses the color geometry attribute to specify the colors of individual points.
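For illustration, a minimal sketch of filling a per-point color attribute that way (the point count and RGB values are arbitrary examples; geometryForBuffers is the geometry from your snippet):

// one RGB triple per point (three floats per point)
var numPoints = 3;
var colors = new Float32Array( numPoints * 3 );
colors.set( [ 1, 0, 0 ], 0 ); // point 0: red
colors.set( [ 0, 1, 0 ], 3 ); // point 1: green
colors.set( [ 0, 0, 1 ], 6 ); // point 2: blue
geometryForBuffers.addAttribute( 'color', new THREE.BufferAttribute( colors, 3 ) );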
What I want to do is load a texture with only the alpha values from a PNG image, while using the color of the material to render the RGB. For context, I use this for GPU picking to find sprites that are clicked on. This way I can tell whether a sprite was clicked or whether the user clicked on the transparent part of the sprite.
I tried using THREE.AlphaFormat as the format and I tried all the types, but
what I get is a sprite with correct alpha, where the color from the texture is combined with the color of the material.
Here is the code I have tried so far:
var type = THREE.UnsignedByteType;
var spriteMap = new THREE.TextureLoader().load( url );
spriteMap.format = THREE.AlphaFormat;
spriteMap.type = type;
var spriteMaterial = new THREE.SpriteMaterial( { map: spriteMap, color: idcolor.getHex() } );
var sprite = new THREE.Sprite( spriteMaterial );
sprite.position.set( this.position.x , this.position.y , this.position.z );
sprite.scale.set( this.scale.x , this.scale.y , this.scale.z );
Selection.GpuPicking.pickingScene.add( sprite );
Any ideas on how to achieve this?
three.js r.91
I didn't manage to do what I wanted by combining texture and material. My solution was to create a plane and add my own custom shaders handling the Sprite functionality. I copied the shaders for sprites from the three.js library and removed the code I didn't need, since I only needed the correct alpha and one color to be visible.
My code for creating a sprite with the color from the material and the alpha values from the texture:
// Create the position and scale you want for your sprite, and add the URL of your texture
var spriteMap = new THREE.TextureLoader().load( url, function( texture ) {
    var geometry = new THREE.PlaneGeometry( 1.0, 1.0 );
    var uniforms = {
        color: { type: "v3", value: color },
        map: { value: texture },
        opacity: { type: "f", value: 1.0 },
        alphaTest: { type: "f", value: 0.0 },
        scale: { type: "v3", value: scale }
    };
    var material = new THREE.ShaderMaterial( {
        uniforms: uniforms,
        vertexShader: vertexShader,     // input the custom shader here
        fragmentShader: fragmentShader, // input the custom shader here
        transparent: true,
    } );
    var mesh = new THREE.Mesh( geometry, material );
    mesh.position.set( position.x, position.y, position.z );
    mesh.scale.set( scale.x, scale.y, scale.z );
    scene.add( mesh );
} );
This is my vertex shader:
uniform vec3 scale;
varying vec2 vUV;
void main() {
    float rotation = 0.0;
    vUV = uv;
    vec3 alignedPosition = position * scale;
    vec2 rotatedPosition;
    rotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;
    rotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;
    vec4 mvPosition;
    mvPosition = modelViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 );
    mvPosition.xy += rotatedPosition;
    gl_Position = projectionMatrix * mvPosition;
}
My fragment shader:
varying vec2 vUV;
uniform vec3 color;
uniform sampler2D map;
uniform float opacity;
uniform float alphaTest;
void main() {
    vec4 texture = texture2D( map, vUV );
    // show the color from the material, but use the alpha from the texture
    gl_FragColor = vec4( color, texture.a * opacity );
    if ( gl_FragColor.a < alphaTest ) discard;
}
I'm trying to replicate the effect shown in this Three.js example, but instead of showing the wireframe on an opaque box, I'd like to show just the edges without any faces (like what is shown when using THREE.EdgesGeometry). I know that setting the linewidth property doesn't work and that using shaders is necessary, but I'm not really sure where to begin. For reference, these are the shaders used in the Three.js example above:
Vertex Shader:
attribute vec3 center;
varying vec3 vCenter;
void main() {
    vCenter = center;
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
Fragment Shader:
varying vec3 vCenter;
float edgeFactorTri() {
    vec3 d = fwidth( vCenter.xyz );
    vec3 a3 = smoothstep( vec3( 0.0 ), d * 1.5, vCenter.xyz );
    return min( min( a3.x, a3.y ), a3.z );
}
void main() {
    gl_FragColor.rgb = mix( vec3( 1.0 ), vec3( 0.2 ), edgeFactorTri() );
    gl_FragColor.a = 1.0;
}
I've gotten as far as figuring out that the value d gets multiplied by (1.5 in the example) determines the thickness of the line, but I'm completely lost as to how the vCenter variable is actually used (it's a vec3 that is either [1, 0, 0], [0, 1, 0], or [0, 0, 1]) and what I could use to make THREE.EdgesGeometry render with thicker lines like in the example.
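For context, the example's setupAttributes builds that attribute by giving each vertex of every triangle one of those three vectors, roughly like this (a paraphrased sketch, not the exact example code):

function setupAttributes( geometry ) {
    // the three vertices of every triangle get (1,0,0), (0,1,0) and (0,0,1);
    // the interpolated vCenter then tells the fragment shader how close it is to each edge
    var vectors = [
        new THREE.Vector3( 1, 0, 0 ),
        new THREE.Vector3( 0, 1, 0 ),
        new THREE.Vector3( 0, 0, 1 )
    ];
    var position = geometry.attributes.position;
    var centers = new Float32Array( position.count * 3 );
    for ( var i = 0, l = position.count; i < l; i++ ) {
        vectors[ i % 3 ].toArray( centers, i * 3 );
    }
    geometry.addAttribute( 'center', new THREE.BufferAttribute( centers, 3 ) );
}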
Here is what happens when I try rendering the edges geometry with these shaders:
<script type="x-shader/x-vertex" id="vertexShader">
attribute vec3 center;
varying vec3 vCenter;
void main() {
vCenter = center;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script type="x-shader/x-fragment" id="fragmentShader">
varying vec3 vCenter;
uniform float lineWidth;
float edgeFactorTri() {
float newWidth = lineWidth + 0.5;
vec3 d = fwidth( vCenter.xyz );
vec3 a3 = smoothstep( vec3( 0.0 ), d * newWidth, vCenter.xyz );
return min( min( a3.x, a3.y ), a3.z );
}
void main() {
gl_FragColor.rgb = mix( vec3( 1.0 ), vec3( 0.2 ), edgeFactorTri() );
gl_FragColor.a = 1.0;
}
</script>
JavaScript:
size = 150
geometry = new THREE.BoxGeometry(size, size, size);
material = new THREE.MeshBasicMaterial({ wireframe: true });
mesh = new THREE.Mesh(geometry, material);
mesh.position.x = -150;
scene.add(mesh);
//
// geometry = new THREE.BufferGeometry().fromGeometry(new THREE.BoxGeometry(size, size, size));
geometry = new THREE.EdgesGeometry(new THREE.BoxGeometry(size, size, size));
setupAttributes(geometry);
material = new THREE.ShaderMaterial({
uniforms: { lineWidth: { value: 10 } },
vertexShader: document.getElementById("vertexShader").textContent,
fragmentShader: document.getElementById("fragmentShader").textContent
});
material.extensions.derivatives = true;
mesh = new THREE.Mesh(geometry, material);
mesh.position.x = 150;
scene.add(mesh);
//
geometry = new THREE.BufferGeometry().fromGeometry(new THREE.SphereGeometry(size / 2, 32, 16));
setupAttributes(geometry);
material = new THREE.ShaderMaterial({
uniforms: { lineWidth: { value: 1 } },
vertexShader: document.getElementById("vertexShader").textContent,
fragmentShader: document.getElementById("fragmentShader").textContent
});
material.extensions.derivatives = true;
mesh = new THREE.Mesh(geometry, material);
mesh.position.x = -150;
scene.add(mesh);
jsFiddle
As you can see in the fiddle, this is not what I'm looking for, but I don't have a good enough grasp on how the shaders work to know where I'm going wrong or if this approach would work for what I want.
I've looked into this answer, but I'm not sure how to use it as a ShaderMaterial, and I can't use it as a shader pass (here are the shaders he uses for his answer).
I've also looked into THREE.MeshLine, but this issue doesn't seem to have been resolved.
Any guidance would be greatly appreciated!
You want to modify this three.js example so the mesh is rendered as a thick wireframe.
The solution is to modify the shader and discard fragments in the center portion of each face; that is, discard fragments that are not close to an edge.
You can do that like so:
void main() {
    float factor = edgeFactorTri();
    if ( factor > 0.8 ) discard; // cutoff value is somewhat arbitrary
    gl_FragColor.rgb = mix( vec3( 1.0 ), vec3( 0.2 ), factor );
    gl_FragColor.a = 1.0;
}
You can also set material.side = THREE.DoubleSide if you want.
updated fiddle: https://jsfiddle.net/vy0we5wb/4.
three.js r.89
I have an object which, when a normal texture is applied, allows me to change the offset.y value; however, when using RGBELoader and an HDR file, I can no longer change offset.y.
My code is as follows:
var loader3 = new THREE.ObjectLoader();
loader3.load( "model/dome/dome2.json", function ( obj ) {
    obj.scale.x = 7;
    obj.scale.y = 7;
    obj.scale.z = 7;
    obj.position.x = 0;
    obj.position.z = 0;
    obj.position.y = 0;
    var loader = new THREE.RGBELoader();
    var texture = loader.load( "maps/scene2.hdr", function( texture, textureData ) {
        materialHDR = new THREE.ShaderMaterial( {
            uniforms: {
                tDiffuse: { type: "t", value: texture },
                exposure: { type: "f", value: textureData.exposure },
                brightMax: { type: "f", value: textureData.gamma }
            },
            vertexShader: getText( 'vs-hdr' ),
            fragmentShader: getText( 'fs-hdr' )
        } );
        texture.offset.y = 0.5; // HERE LIES THE PROBLEM
        texture.flipY = true;
        obj.traverse( function ( child ) {
            if ( child instanceof THREE.Mesh ) {
                child.material = materialHDR;
                child.receiveShadow = true;
                //child.material.side = THREE.BackSide;
                child.material.side = THREE.DoubleSide;
            }
        } );
        scene.add( obj );
    } );
} );
The HDR image loads just fine and is applied to the object just as it is when I use a normal texture; however, I just cannot move the texture around the model at all.
I have tried wrapping with repeat and all sorts of combinations, but the stubborn offset will not work!
I would also like to add that I am currently learning three.js (awesome!), so excuse the code above if it has any additional errors.
Thanks in advance for any help; it is driving me nuts!
Shader code below
<script id="fs-hdr" type="x-shader/x-fragment">
uniform sampler2D tDiffuse;
uniform float exposure;
uniform float brightMax;
varying vec2 vUv;
vec3 decode_pnghdr( const in vec4 color ) {
vec4 rgbcolor = vec4( 0.0, 0.0, 0.0, 0.0 );
if ( color.w > 0.0 ) {
float f = pow(2.0, 127.0*(color.w-0.5));
rgbcolor.xyz = color.xyz * f;
}
return rgbcolor.xyz;
/*
// remove gamma correction
vec4 res = color * color;
// decoded RI
float ri = pow( 2.0, res.w * 32.0 - 16.0 );
// decoded HDR pixel
res.xyz = res.xyz * ri;
return res.xyz;
*/
}
void main() {
vec4 color = texture2D( tDiffuse, vUv );
color.xyz = decode_pnghdr( color );
// apply gamma correction and exposure
//gl_FragColor = vec4( pow( exposure * color.xyz, vec3( 0.474 ) ), 1.0 );
// Perform tone-mapping
float Y = dot(vec4(0.30, 0.59, 0.11, 0.0), color);
float YD = exposure * (exposure/brightMax + 1.0) / (exposure + 1.0);
color *= YD;
gl_FragColor = vec4( color.xyz, 1.0 );
}
</script>
<!-- HDR vertex shader -->
<script id="vs-hdr" type="x-shader/x-vertex">
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 3 );
}
</script>
Your custom shader must handle the offset/repeat.
Add offsetRepeat to your uniforms:
offsetRepeat: { type: "v4", value: new THREE.Vector4( 0, 0.5, 1, 1 ) }
And modify your vertex shader
uniform vec4 offsetRepeat;
varying vec2 vUv;
void main() {
    vUv = uv * offsetRepeat.zw + offsetRepeat.xy;
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
Your texture must be power-of-two, and set
texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
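With that in place, you can move the texture at runtime by updating the uniform instead of texture.offset (a minimal sketch, assuming the offsetRepeat uniform was added to materialHDR as shown above):

// x, y = offset; z, w = repeat
materialHDR.uniforms.offsetRepeat.value.set( 0.0, 0.5, 1.0, 1.0 );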
three.js r.75
I am using a texture atlas to hold a sequence of images. When mapping to a mesh with MeshLambertMaterial, using Texture.offset and Texture.repeat works beautifully to cut the subtexture out of the entire image.
However, using the exact same texture instance for a PointCloudMaterial renders the particles with the entire atlas, not just the selected subimage.
I tried to follow the three.js source code, but the documentation is scarce.
Is there a workaround for this better than using canvases to chop up the image?
Edit: As requested, a work-in-progress is available at http://jnm2.com/minesweeper/.
THREE.PointCloudMaterial has been renamed THREE.PointsMaterial.
THREE.PointCloud has been renamed THREE.Points.
You want to use a sprite sheet with your point cloud.
Instead of using PointsMaterial with your Points, you can create a custom ShaderMaterial.
The custom ShaderMaterial can access your sprite sheet and use a different sub-image for each particle.
To do so, use a shader like this one:
<script type="x-shader/x-vertex" id="vertexshader">
    attribute vec2 offset;
    varying vec2 vOffset;
    void main() {
        vOffset = offset;
        gl_PointSize = 25.0;
        gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
    }
</script>
<script type="x-shader/x-fragment" id="fragmentshader">
    uniform sampler2D spriteSheet;
    uniform vec2 repeat;
    varying vec2 vOffset;
    void main() {
        vec2 uv = vec2( gl_PointCoord.x, 1.0 - gl_PointCoord.y );
        vec4 tex = texture2D( spriteSheet, uv * repeat + vOffset );
        if ( tex.a < 0.5 ) discard;
        gl_FragColor = tex;
    }
</script>
Then
// geometry
geometry = new THREE.BufferGeometry();
// attributes
var numVertices = 20;
var positions = new Float32Array( numVertices * 3 ); // 3 coordinates per point
var offsets = new Float32Array( numVertices * 2 ); // 2 coordinates per point
geometry.setAttribute( 'position', new THREE.BufferAttribute( positions, 3 ) );
geometry.setAttribute( 'offset', new THREE.BufferAttribute( offsets, 2 ) );
// populate positions and offsets
var offset = new THREE.Vector2();
for ( var i = 0, index = 0, l = numVertices; i < l; i ++, index += 3 ) {
    positions[ index ] = 100 * Math.random() - 50;
    positions[ index + 1 ] = 100 * Math.random() - 50;
    positions[ index + 2 ] = 100 * Math.random() - 50;
}
for ( var i = 0, index = 0, l = numVertices; i < l; i ++, index += 2 ) {
    offset.set( THREE.Math.randInt( 1, 3 ), THREE.Math.randInt( 2, 3 ) ).multiplyScalar( 0.25 ); // sprite sheet: 4 rows x 4 cols
    offsets[ index ] = offset.x;
    offsets[ index + 1 ] = offset.y;
}
// image (assign the URL of your sprite sheet to image.src)
image = document.createElement( 'img' );
image.addEventListener( 'load', function ( event ) { texture.needsUpdate = true; } );
// texture
var texture = new THREE.Texture( image );
// uniforms
uniforms = {
    spriteSheet: { value: texture },
    repeat: { value: new THREE.Vector2( 0.25, 0.25 ) }
};
// material
var material = new THREE.ShaderMaterial( {
    uniforms: uniforms,
    vertexShader: document.getElementById( 'vertexshader' ).textContent,
    fragmentShader: document.getElementById( 'fragmentshader' ).textContent,
    transparent: true
} );
// point cloud
pointCloud = new THREE.Points( geometry, material );
scene.add( pointCloud );
fiddle: https://jsfiddle.net/nL0b6hco/
three.js r.137.4