WebGL2: How to render via shaders onto a TEXTURE_3D - three.js

I've read that WebGL2 gives us access to 3D textures. I'm trying to use this to perform some GPU-side computations and then store the output in a 64x64x64 3D texture. The render flow is
compute shader -> render to 3dTexture -> read shader -> render to screen
This is my simple compute shader; the texture's RGB channels should correspond to the XYZ fragment coordinates.
#version 300 es
precision mediump sampler3D;
precision highp float;
layout(location = 0) out highp vec4 pc_fragColor;
void main() {
vec3 color = vec3(gl_FragCoord.x / 64.0, gl_FragCoord.y / 64.0, gl_FragDepth);
pc_fragColor.rgb = color;
pc_fragColor.a = 1.0;
}
However, this only seems to render to a single "slice" of the 3D texture, at depth 0.0. All subsequent depth slices from 1 to 63 remain black.
I've created a working demo below to demonstrate this issue.
var renderer, target3d, camera;
const SIDE = 64;
var computeMaterial, computeMesh;
var readDataMaterial, readDataMesh,
read3dTargetMaterial, read3dTargetMesh;
var textField = document.querySelector("#textField");
function init() {
// Three.js boilerplate
renderer = new THREE.WebGLRenderer({antialias: true});
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setClearColor(new THREE.Color(0x000000), 1.0);
document.body.appendChild(renderer.domElement);
camera = new THREE.Camera();
// Create volume material to render to 3dTexture
computeMaterial = new THREE.RawShaderMaterial({
vertexShader: SIMPLE_VERTEX,
fragmentShader: COMPUTE_FRAGMENT,
uniforms: {
uZCoord: { value: 0.0 },
},
depthTest: false,
});
computeMaterial.type = "VolumeShader";
computeMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), computeMaterial);
// Left material, reads Data3DTexture
readDataMaterial = new THREE.RawShaderMaterial({
vertexShader: SIMPLE_VERTEX,
fragmentShader: READ_FRAGMENT,
uniforms: {
uZCoord: { value: 0.0 },
tDiffuse: { value: create3dDataTexture() }
},
depthTest: false
});
readDataMaterial.type = "DebugShader";
readDataMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), readDataMaterial);
// Right material, reads 3DRenderTarget texture
target3d = new THREE.WebGL3DRenderTarget(SIDE, SIDE, SIDE);
target3d.depthBuffer = false;
read3dTargetMaterial = readDataMaterial.clone();
read3dTargetMaterial.uniforms.tDiffuse.value = target3d.texture;
read3dTargetMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), read3dTargetMaterial);
}
// Creates 3D texture with RGB gradient along the XYZ axes
function create3dDataTexture() {
const d = new Uint8Array( SIDE * SIDE * SIDE * 4 );
window.dat = d;
let i4 = 0;
for ( let z = 0; z < SIDE; z ++ ) {
for ( let y = 0; y < SIDE; y ++ ) {
for ( let x = 0; x < SIDE; x ++ ) {
d[i4 + 0] = (x / SIDE) * 255;
d[i4 + 1] = (y / SIDE) * 255;
d[i4 + 2] = (z / SIDE) * 255;
d[i4 + 3] = 255; // alpha is 0-255 in a Uint8Array, not 0.0-1.0
i4 += 4;
}
}
}
const texture = new THREE.Data3DTexture( d, SIDE, SIDE, SIDE );
texture.format = THREE.RGBAFormat;
texture.minFilter = THREE.NearestFilter;
texture.magFilter = THREE.NearestFilter;
texture.unpackAlignment = 1;
texture.needsUpdate = true;
return texture;
}
function onResize() {
renderer.setSize(window.innerWidth, window.innerHeight);
}
function animate(t) {
// Render volume shader to target3d buffer
renderer.setRenderTarget(target3d);
renderer.render(computeMesh, camera);
// Update z texture coordinate along sine wave
renderer.autoClear = false;
const sinZCoord = Math.sin(t / 1000);
readDataMaterial.uniforms.uZCoord.value = sinZCoord;
read3dTargetMaterial.uniforms.uZCoord.value = sinZCoord;
textField.innerText = sinZCoord.toFixed(4);
// Render data3D texture to screen
renderer.setViewport(0, window.innerHeight - SIDE*4, SIDE * 4, SIDE * 4);
renderer.setRenderTarget(null);
renderer.render(readDataMesh, camera);
// Render 3dRenderTarget texture to screen
renderer.setViewport(SIDE * 4, window.innerHeight - SIDE*4, SIDE * 4, SIDE * 4);
renderer.setRenderTarget(null);
renderer.render(read3dTargetMesh, camera);
renderer.autoClear = true;
requestAnimationFrame(animate);
}
init();
window.addEventListener("resize", onResize);
requestAnimationFrame(animate);
html, body {
width: 100%;
height: 100%;
margin: 0;
overflow: hidden;
}
#title {
position: absolute;
top: 0;
left: 0;
color: white;
font-family: sans-serif;
}
h3 {
margin: 2px;
}
<div id="title">
<h3>texDepth</h3><h3 id="textField"></h3>
</div>
<script src="https://threejs.org/build/three.js"></script>
<script>
/////////////////////////////////////////////////////////////////////////////////////
// Compute frag shader
// It should output an RGB gradient in the XYZ axes to the 3DRenderTarget
// But gl_FragCoord.z is always 0.5 and gl_FragDepth is always 0.0
const COMPUTE_FRAGMENT = `#version 300 es
precision mediump sampler3D;
precision highp float;
precision highp int;
layout(location = 0) out highp vec4 pc_fragColor;
void main() {
vec3 color = vec3(gl_FragCoord.x / 64.0, gl_FragCoord.y / 64.0, gl_FragDepth);
pc_fragColor.rgb = color;
pc_fragColor.a = 1.0;
}`;
/////////////////////////////////////////////////////////////////////////////////////
// Reader frag shader
// Samples the 3D texture along uv.x, uv.y, and uniform Z coordinate
const READ_FRAGMENT = `#version 300 es
precision mediump sampler3D;
precision highp float;
precision highp int;
layout(location = 0) out highp vec4 pc_fragColor;
in vec2 vUv;
uniform sampler3D tDiffuse;
uniform float uZCoord;
void main() {
vec3 UV3 = vec3(vUv.x, vUv.y, uZCoord);
vec3 diffuse = texture(tDiffuse, UV3).rgb;
pc_fragColor.rgb = diffuse;
pc_fragColor.a = 1.0;
}
`;
/////////////////////////////////////////////////////////////////////////////////////
// Simple vertex shader,
// renders a full-screen quad with UVs without any transformations
const SIMPLE_VERTEX = `#version 300 es
precision highp float;
precision highp int;
in vec2 uv;
in vec3 position;
out vec2 vUv;
void main() {
vUv = uv;
gl_Position = vec4(position, 1.0);
}`;
/////////////////////////////////////////////////////////////////////////////////////
</script>
On the left side, I'm sampling a Data3DTexture that I created via JavaScript. The blue channel smoothly transitions as I move up and down the depth axis, as expected.
On the right side, I'm sampling the WebGL3DRenderTarget's texture, rendered by the fragment shader shown above. As you can see, it only renders to the texture where the depth coordinate is 0.0. All the other "slices" are black.
How can I render my computations to all 64 depth slices? I'm using Three.js for this demo, but I could use any other library like TWGL or vanilla WebGL to achieve the same results.

It doesn't appear to be documented, but you can pass a second argument to setRenderTarget to select the "layer" of the 3D render target to render into. Here are the changes to make:
When rendering to the render target, perform a separate render pass for every layer:
for ( let i = 0; i < SIDE; i ++ ) {
// set the uZCoord color value for the shader
computeMesh.material.uniforms.uZCoord.value = i / (SIDE - 1);
// Set the 3d target "layer" to render into before rendering
renderer.setRenderTarget(target3d, i);
renderer.render(computeMesh, camera);
}
Use the "uZCoord" uniform in the compute fragment shader:
uniform float uZCoord;
void main() {
vec3 color = vec3(gl_FragCoord.x / 64.0, gl_FragCoord.y / 64.0, uZCoord);
pc_fragColor.rgb = color;
pc_fragColor.a = 1.0;
}
Other than that, I don't believe there's a way to render to the full 3D volume of the target in a single draw call. This three.js example shows the same per-layer approach, but with a render target array instead:
https://threejs.org/examples/?q=array#webgl2_rendertarget_texture2darray
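Note that this is one full-screen pass per layer (64 per frame here); if the volume is static, you can run the loop once at startup instead of inside animate(). For reference, the same per-layer trick in vanilla WebGL2 hinges on gl.framebufferTextureLayer, which attaches a single z-slice of a TEXTURE_3D as the framebuffer's color attachment. A minimal sketch, assuming canvas, a compiled compute program prog, its uniform location uZCoordLoc, and a full-screen quad already bound:
const gl = canvas.getContext('webgl2');
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_3D, tex);
gl.texStorage3D(gl.TEXTURE_3D, 1, gl.RGBA8, SIDE, SIDE, SIDE); // immutable 64^3 RGBA storage
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, SIDE, SIDE);
gl.useProgram(prog); // assumed: your compute shader program
for (let layer = 0; layer < SIDE; layer++) {
  // attach z-slice `layer` of the 3D texture as the color output
  gl.framebufferTextureLayer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, tex, 0, layer);
  gl.uniform1f(uZCoordLoc, layer / (SIDE - 1));
  gl.drawArrays(gl.TRIANGLES, 0, 6); // full-screen quad pass
}
The full updated demo: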
var renderer, target3d, camera;
const SIDE = 64;
var computeMaterial, computeMesh;
var readDataMaterial, readDataMesh,
read3dTargetMaterial, read3dTargetMesh;
var textField = document.querySelector("#textField");
function init() {
// Three.js boilerplate
renderer = new THREE.WebGLRenderer({antialias: true});
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setClearColor(new THREE.Color(0x000000), 1.0);
document.body.appendChild(renderer.domElement);
camera = new THREE.Camera();
// Create volume material to render to 3dTexture
computeMaterial = new THREE.RawShaderMaterial({
vertexShader: SIMPLE_VERTEX,
fragmentShader: COMPUTE_FRAGMENT,
uniforms: {
uZCoord: { value: 0.0 },
},
depthTest: false,
});
computeMaterial.type = "VolumeShader";
computeMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), computeMaterial);
// Left material, reads Data3DTexture
readDataMaterial = new THREE.RawShaderMaterial({
vertexShader: SIMPLE_VERTEX,
fragmentShader: READ_FRAGMENT,
uniforms: {
uZCoord: { value: 0.0 },
tDiffuse: { value: create3dDataTexture() }
},
depthTest: false
});
readDataMaterial.type = "DebugShader";
readDataMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), readDataMaterial);
// Right material, reads 3DRenderTarget texture
target3d = new THREE.WebGL3DRenderTarget(SIDE, SIDE, SIDE);
target3d.depthBuffer = false;
read3dTargetMaterial = readDataMaterial.clone();
read3dTargetMaterial.uniforms.tDiffuse.value = target3d.texture;
read3dTargetMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), read3dTargetMaterial);
}
// Creates 3D texture with RGB gradient along the XYZ axes
function create3dDataTexture() {
const d = new Uint8Array( SIDE * SIDE * SIDE * 4 );
window.dat = d;
let i4 = 0;
for ( let z = 0; z < SIDE; z ++ ) {
for ( let y = 0; y < SIDE; y ++ ) {
for ( let x = 0; x < SIDE; x ++ ) {
d[i4 + 0] = (x / SIDE) * 255;
d[i4 + 1] = (y / SIDE) * 255;
d[i4 + 2] = (z / SIDE) * 255;
d[i4 + 3] = 255; // alpha is 0-255 in a Uint8Array, not 0.0-1.0
i4 += 4;
}
}
}
const texture = new THREE.Data3DTexture( d, SIDE, SIDE, SIDE );
texture.format = THREE.RGBAFormat;
texture.minFilter = THREE.NearestFilter;
texture.magFilter = THREE.NearestFilter;
texture.unpackAlignment = 1;
texture.needsUpdate = true;
return texture;
}
function onResize() {
renderer.setSize(window.innerWidth, window.innerHeight);
}
function animate(t) {
for ( let i = 0; i < SIDE; i ++ ) {
// Render volume shader to target3d buffer
computeMesh.material.uniforms.uZCoord.value = i / ( SIDE - 1 );
renderer.setRenderTarget(target3d, i);
renderer.render(computeMesh, camera);
}
// Update z texture coordinate along sine wave
renderer.autoClear = false;
const sinZCoord = Math.sin(t / 1000);
readDataMaterial.uniforms.uZCoord.value = sinZCoord;
read3dTargetMaterial.uniforms.uZCoord.value = sinZCoord;
textField.innerText = sinZCoord.toFixed(4);
// Render data3D texture to screen
renderer.setViewport(0, window.innerHeight - SIDE*4, SIDE * 4, SIDE * 4);
renderer.setRenderTarget(null);
renderer.render(readDataMesh, camera);
// Render 3dRenderTarget texture to screen
renderer.setViewport(SIDE * 4, window.innerHeight - SIDE*4, SIDE * 4, SIDE * 4);
renderer.setRenderTarget(null);
renderer.render(read3dTargetMesh, camera);
renderer.autoClear = true;
requestAnimationFrame(animate);
}
init();
window.addEventListener("resize", onResize);
requestAnimationFrame(animate);
html, body {
width: 100%;
height: 100%;
margin: 0;
overflow: hidden;
}
#title {
position: absolute;
top: 0;
left: 0;
color: white;
font-family: sans-serif;
}
h3 {
margin: 2px;
}
<div id="title">
<h3>texDepth</h3><h3 id="textField"></h3>
</div>
<script src="https://threejs.org/build/three.js"></script>
<script>
/////////////////////////////////////////////////////////////////////////////////////
// Compute frag shader
// It should output an RGB gradient in the XYZ axes to the 3DRenderTarget
// The uZCoord uniform supplies the z component, one layer per pass
const COMPUTE_FRAGMENT = `#version 300 es
precision mediump sampler3D;
precision highp float;
precision highp int;
layout(location = 0) out highp vec4 pc_fragColor;
uniform float uZCoord;
void main() {
vec3 color = vec3(gl_FragCoord.x / 64.0, gl_FragCoord.y / 64.0, uZCoord);
pc_fragColor.rgb = color;
pc_fragColor.a = 1.0;
}`;
/////////////////////////////////////////////////////////////////////////////////////
// Reader frag shader
// Samples the 3D texture along uv.x, uv.y, and uniform Z coordinate
const READ_FRAGMENT = `#version 300 es
precision mediump sampler3D;
precision highp float;
precision highp int;
layout(location = 0) out highp vec4 pc_fragColor;
in vec2 vUv;
uniform sampler3D tDiffuse;
uniform float uZCoord;
void main() {
vec3 UV3 = vec3(vUv.x, vUv.y, uZCoord);
vec3 diffuse = texture(tDiffuse, UV3).rgb;
pc_fragColor.rgb = diffuse;
pc_fragColor.a = 1.0;
}
`;
/////////////////////////////////////////////////////////////////////////////////////
// Simple vertex shader,
// renders a full-screen quad with UVs without any transformations
const SIMPLE_VERTEX = `#version 300 es
precision highp float;
precision highp int;
in vec2 uv;
in vec3 position;
out vec2 vUv;
void main() {
vUv = uv;
gl_Position = vec4(position, 1.0);
}`;
/////////////////////////////////////////////////////////////////////////////////////
</script>

Related

Decompose a GLSL mat4 to original RTS values within vertex shader to calculate a View UV Offset

I need to get the rotation differences between the model and the camera, convert the values to radians/degrees, and pass them to the fragment shader.
For that I need to decompose the model rotation matrix, and maybe the camera view matrix as well. I cannot seem to find a decomposition mechanism suitable for use within a shader.
The rotation details go into the fragment shader to calculate a UV offset.
original_rotation + viewing_angles are used to calculate a final sprite-like offset into the following texture, shown as billboards.
Ultimately, the UV should offset downwards (e.g. H3 to A3) when looking from below, upwards (e.g. A3 to H3) when looking from above, and left to right and vice versa when looking from the sides (e.g. D1 to D8 and back).
const vertex_shader = `
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
attribute vec3 position;
attribute vec2 uv;
attribute mat4 instanceMatrix;
attribute float index;
attribute float texture_index;
uniform vec2 rows_cols;
uniform vec3 camera_location;
varying float vTexIndex;
varying vec2 vUv;
varying vec4 transformed_normal;
float normal_to_orbit(vec3 rotation_vector, vec3 view_vector){
rotation_vector = normalize(rotation_vector);
view_vector = normalize(view_vector);
vec3 x_direction = vec3(1.0,0,0);
vec3 y_direction = vec3(0,1.0,0);
vec3 z_direction = vec3(0,0,1.0);
float rotation_x_length = dot(rotation_vector, x_direction);
float rotation_y_length = dot(rotation_vector, y_direction);
float rotation_z_length = dot(rotation_vector, z_direction);
float view_x_length = dot(view_vector, x_direction);
float view_y_length = dot(view_vector, y_direction);
float view_z_length = dot(view_vector, z_direction);
//TOP
float top_rotation = degrees(atan(rotation_x_length, rotation_z_length));
float top_view = degrees(atan(view_x_length, view_z_length));
float top_final = top_view-top_rotation;
float top_idx = floor(top_final/(360.0/rows_cols.x));
//FRONT
float front_rotation = degrees(atan(rotation_x_length, rotation_z_length));
float front_view = degrees(atan(view_x_length, view_z_length));
float front_final = front_view-front_rotation;
float front_idx = floor(front_final/(360.0/rows_cols.y));
return abs((front_idx*rows_cols.x)+top_idx);
}
vec3 extractEulerAngleXYZ(mat4 mat) {
vec3 rotangles = vec3(0,0,0);
rotangles.x = atan(mat[2].z, -mat[1].z);
float cosYangle = sqrt(pow(mat[0].x, 2.0) + pow(mat[0].y, 2.0));
rotangles.y = atan(cosYangle, mat[0].z);
float sinXangle = sin(rotangles.x);
float cosXangle = cos(rotangles.x);
rotangles.z = atan(cosXangle * mat[1].y + sinXangle * mat[2].y, cosXangle * mat[1].x + sinXangle * mat[2].x);
return rotangles;
}
float view_index(vec3 position, mat4 mv_matrix, mat4 rot_matrix){
vec4 posInView = mv_matrix * vec4(0.0, 0.0, 0.0, 1.0);
// posInView /= posInView[3];
vec3 VinView = normalize(-posInView.xyz); // (0, 0, 0) - posInView
// vec4 NinView = normalize(rot_matrix * vec4(0.0, 0.0, 1.0, 1.0));
// float NdotV = dot(NinView, VinView);
vec4 view_normal = rot_matrix * vec4(VinView.xyz, 1.0);
float view_x_length = dot(view_normal.xyz, vec3(1.0,0,0));
float view_y_length = dot(view_normal.xyz, vec3(0,1.0,0));
float view_z_length = dot(view_normal.xyz, vec3(0,0,1.0));
// float radians = atan(-view_x_length, -view_z_length);
float radians = atan(view_x_length, view_z_length);
// float angle = radians/PI*180.0 + 180.0;
float angle = degrees(radians);
if (radians < 0.0) { angle += 360.0; }
if (0.0<=angle && angle<=360.0){
return floor(angle/(360.0/rows_cols.x));
}
return 0.0;
}
void main(){
vec4 original_normal = vec4(0.0, 0.0, 1.0, 1.0);
// transformed_normal = modelViewMatrix * instanceMatrix * original_normal;
vec3 rotangles = extractEulerAngleXYZ(modelViewMatrix * instanceMatrix);
// transformed_normal = vec4(rotangles.xyz, 1.0);
transformed_normal = vec4(camera_location.xyz, 1.0);
vec4 v = (modelViewMatrix* instanceMatrix* vec4(0.0, 0.0, 0.0, 1.0)) + vec4(position.x, position.y, 0.0, 0.0) * vec4(1.0, 1.0, 1.0, 1.0);
vec4 model_center = (modelViewMatrix* instanceMatrix* vec4(0.0, 0.0, 0.0, 1.0));
vec4 model_normal = (modelViewMatrix* instanceMatrix* vec4(0.0, 0.0, 1.0, 1.0));
vec4 cam_loc = vec4(camera_location.xyz, 1.0);
vec4 view_vector = normalize((cam_loc-model_center));
//float findex = normal_to_orbit(model_normal.xyz, view_vector.xyz);
float findex = view_index(position, base_matrix, combined_rot);
vTexIndex = texture_index;
vUv = vec2(mod(findex,rows_cols.x)/rows_cols.x, floor(findex/rows_cols.x)/rows_cols.y) + (uv / rows_cols);
//vUv = vec2(mod(index,rows_cols.x)/rows_cols.x, floor(index/rows_cols.x)/rows_cols.y) + (uv / rows_cols);
gl_Position = projectionMatrix * v;
// gl_Position = projectionMatrix * modelViewMatrix * instanceMatrix * vec4(position, 1.0);
}
`
const fragment_shader = (texture_count) => {
var fragShader = `
precision highp float;
uniform sampler2D textures[${texture_count}];
varying float vTexIndex;
varying vec2 vUv;
varying vec4 transformed_normal;
void main() {
vec4 finalColor;
`;
for (var i = 0; i < texture_count; i++) {
if (i == 0) {
fragShader += `if (vTexIndex < ${i}.5) {
finalColor = texture2D(textures[${i}], vUv);
}
`
} else {
fragShader += `else if (vTexIndex < ${i}.5) {
finalColor = texture2D(textures[${i}], vUv);
}
`
}
}
//fragShader += `gl_FragColor = finalColor * transformed_normal; }`;
fragShader += `gl_FragColor = finalColor; }`;
// fragShader += `gl_FragColor = startColor * finalColor; }`;
// int index = int(v_TexIndex+0.5); //https://stackoverflow.com/questions/60896915/texture-slot-not-getting-picked-properly-in-shader-issue
//console.log('frag shader: ', fragShader)
return fragShader;
}
function reset_instance_positions() {
const dummy = new THREE.Object3D();
const offset = 500*4
for (var i = 0; i < max_instances; i++) {
dummy.position.set(offset-(Math.floor(i % 8)*500), offset-(Math.floor(i / 8)*500), 0);
dummy.updateMatrix();
mesh.setMatrixAt(i, dummy.matrix);
}
mesh.instanceMatrix.needsUpdate = true;
}
function setup_geometry() {
const geometry = new THREE.InstancedBufferGeometry().copy(new THREE.PlaneBufferGeometry(400, 400));
const index = new Float32Array(max_instances * 1); // index
for (let i = 0; i < max_instances; i++) {
index[i] = (i % max_instances) * 1.0 /* index[i] = 0.0 */
}
geometry.setAttribute("index", new THREE.InstancedBufferAttribute(index, 1));
const texture_index = new Float32Array(max_instances * 1); // texture_index
const max_maps = 1
for (let i = 0; i < max_instances; i++) {
texture_index[i] = (Math.floor(i / max_instances) % max_maps) * 1.0 /* index[i] = 0.0 */
}
geometry.setAttribute("texture_index", new THREE.InstancedBufferAttribute(texture_index, 1));
const textures = [texture]
const grid_xy = new THREE.Vector2(8, 8)
mesh = new THREE.InstancedMesh(geometry,
new THREE.RawShaderMaterial({
uniforms: {
textures: {
type: 'tv',
value: textures
},
rows_cols: {
value: new THREE.Vector2(grid_xy.x * 1.0, grid_xy.y * 1.0)
},
camera_location: {
value: camera.position
}
},
vertexShader: vertex_shader,
fragmentShader: fragment_shader(textures.length),
side: THREE.DoubleSide,
// transparent: true,
}), max_instances);
scene.add(mesh);
reset_instance_positions()
}
var camera, scene, mesh, renderer;
const max_instances = 64
function init() {
camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight,1, 10000 );
camera.position.z = 1024;
scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
setup_geometry()
var canvas = document.createElement('canvas');
var context = canvas.getContext('webgl2');
renderer = new THREE.WebGLRenderer({
canvas: canvas,
context: context
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
window.addEventListener('resize', onWindowResize, false);
var controls = new THREE.OrbitControls(camera, renderer.domElement);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function animate() {
requestAnimationFrame(animate);
renderer.render(scene, camera);
}
var dataurl = "https://i.stack.imgur.com/accaU.png"
var texture;
var imageElement = document.createElement('img');
imageElement.onload = function(e) {
texture = new THREE.Texture(this);
texture.needsUpdate = true;
init();
animate();
};
imageElement.src = dataurl;
JSFiddle of work so far
So you've got a 4x4 transform matrix M applied to an xy-plane QUAD, and you want to map its 4 corners (p0,p1,p2,p3) to your texture in a "repeat"-like manner (crossing the border on the left/right/up/down wraps around to the right/left/down/up) based on the direction of the matrix's Z axis.
You face 2 problems...
The rotation in M has 3 DOF, but you want just 2 DOF (yaw, pitch), so if roll is present the result may be questionable.
If the texture crosses borders, you need to handle this in GLSL to avoid seams,
so either do this in a geometry shader and subdivide the quad further if needed, or use an enlarged texture that contains the needed overlaps...
Now, if I did not miss something, the conversion is like this:
const float pi = 3.1415926535897932384626433832795;
vec3 d = normalize(M[2].xyz); // Z axis of M (column-major notation)
vec2 dd = normalize(d.xy);
float u = atan(dd.y, dd.x);   // GLSL's two-argument atan is the atan2 equivalent
float v = acos(d.z);
u = (u + pi) / (2.0 * pi);
v = v / pi;
The z-axis extraction is just a simple copy of the 3rd column/row (depending on your notation) from your matrix M, or transforming (0,0,1,0) by it. For more info see:
Understanding 4x4 homogenous transform matrices
In the case of an overlapped texture, you also need to add this:
const float ov = 1.0/8.0; // overlap size
u = ov + (u/(ov+ov+1.0));
v = ov + (v/(ov+ov+1.0));
And the texture would look like this:
If your quads cover more than 1/8 of your original texture, you need to enlarge the overlap...
Now, to handle the corners of the QUAD instead of just its axis, you could translate the quad by a distance l in the Z+ direction in mesh-local coordinates, apply M to the result, and use those 4 points as directions to compute u,v in the vertex shader. The value of l affects how much of the texture area is used for the quad... This approach might even handle roll, but I have not tested any of this yet...
After implementing it, my fears were well founded: any 2 Euler angles affect each other, so the result is OK in most directions, but in edge cases the texture gets mirrored and/or jumps along one or both axes, probably due to the difference in area coverage between 3 DOF and 2 DOF (unless I made a bug in my code, or the math was not computed correctly in the vertex shader, which has happened to me before due to a driver bug).
If you're going for azimuth/elevation, that should be fine, as it's 2 DOF too; the equations above should work for them as well, plus or minus some range conversion if needed.
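For debugging this sort of mapping, it can help to sanity-check the same math on the CPU. Below is a minimal sketch using three.js math types; dirToUV is a hypothetical helper mirroring the GLSL above, not part of the original answer:
// Extract the Z axis of a Matrix4 and map it to (u, v) in [0, 1],
// exactly as the GLSL snippet does (atan2 + acos, then range-shift).
function dirToUV(matrix) {
  const d = new THREE.Vector3().setFromMatrixColumn(matrix, 2).normalize();
  const u = (Math.atan2(d.y, d.x) + Math.PI) / (2.0 * Math.PI);
  const v = Math.acos(Math.min(1.0, Math.max(-1.0, d.z))) / Math.PI;
  return new THREE.Vector2(u, v);
}
Comparing its output against the shader's u,v for a few known rotations makes it easy to tell whether an artifact comes from the math or from the vertex-shader plumbing.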

Shader for solid color for each triangle of plane geometry

I have been learning WebGL from webglfundamentals, where I came across a simple shader example in which you can paint triangles with solid colors. Here is a link to the tutorial and the original demo.
I tried to create the same effect in three.js using plane geometry, but I can't manage to achieve a solid-color shader. When I use almost the same setup in three.js, I get more of a gradient effect. What am I doing wrong here? (I notice that my shader isn't consistent either, as it renders differently on refresh.) Also, is there a place to learn shaders specifically for three.js?
var vShader = `
precision mediump float;
precision mediump int;
attribute vec4 a_color;
varying vec4 vColor;
void main() {
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
vColor = a_color;
}`;
var fShader = ` precision mediump float;
precision mediump int;
varying vec4 vColor;
void main() {
vec4 color = vec4( vColor );
gl_FragColor = vColor;
}`;
var row = 1;
var col = 1;
var w = 600;
var h = 400;
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, w / h, 0.1, 100);
renderer = new THREE.WebGLRenderer();
camera.position.z = 5;
var viewSize = getViewSize(camera);
document.body.appendChild(renderer.domElement);
renderer.setSize(w, h);
var geometry = new THREE.PlaneBufferGeometry(viewSize.width, viewSize.height, col, row);
var color = new THREE.Color();
const blossomPalette = [0xff0000, 0xff0000, 0xff0000, 0x0000ff, 0x0000ff, 0x0000ff];
var colors = new Float32Array(4 * 2 * 3 * col * row);
for (let i = 0; i < 4; i++) {
color.setHex(blossomPalette[Math.floor(Math.random() * blossomPalette.length)]);
color.toArray(colors, i * 3);
}
geometry.setAttribute('a_color', new THREE.BufferAttribute(colors, 4, false));
var material = new THREE.ShaderMaterial({
vertexShader: vShader,
fragmentShader: fShader,
transparent: true,
blending: THREE.AdditiveBlending,
depthTest: false,
vertexColors: true,
flatShading: true
});
var plane = new THREE.Mesh(geometry, material);
scene.add(plane);
function animate() {
renderer.render(scene, camera);
requestAnimationFrame(animate)
}
animate();
function getViewSize(camera) {
var fovInRadians = (camera.fov * Math.PI) / 180;
var height = Math.abs(camera.position.z * Math.tan(fovInRadians / 2) * 2);
return {
width: height * camera.aspect,
height: height
}
}
<script src="https://threejsfundamentals.org/threejs/resources/threejs/r122/build/three.min.js"></script>
PlaneBufferGeometry in three.js uses indexed vertices to share vertices, so there are only 4 vertices, and then 6 indices that reference those 4 vertices to make 2 triangles. That means you can't give each triangle a different solid color, because the triangles share 2 vertices and a vertex can only have 1 color.
Further, the code chooses a random color for each vertex, so even if you used 6 vertices so that the 2 triangles didn't share any, you still wouldn't get the result you linked to; instead you'd get the result that appears further down the page in the same tutorial.
Finally, the code only generates 3 floats per color, so you need to set the number of components for the color attribute to 3 instead of 4.
If you want to reproduce the WebGL sample, you'll need to provide your own 6 vertices.
var vShader = `
precision mediump float;
precision mediump int;
attribute vec4 a_color;
varying vec4 vColor;
void main() {
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
vColor = a_color;
}`;
var fShader = ` precision mediump float;
precision mediump int;
varying vec4 vColor;
void main() {
vec4 color = vec4( vColor );
gl_FragColor = vColor;
}`;
var row = 1;
var col = 1;
var w = 600;
var h = 400;
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, w / h, 0.1, 100);
renderer = new THREE.WebGLRenderer();
camera.position.z = 5;
var viewSize = getViewSize(camera);
document.body.appendChild(renderer.domElement);
renderer.setSize(w, h);
var geometry = new THREE.BufferGeometry();
var x = viewSize.width / 2;
var y = viewSize.height / 2;
var positions = new Float32Array([
-x, -y, 0,
x, -y, 0,
-x, y, 0,
-x, y, 0,
x, -y, 0,
x, y, 0,
]);
geometry.setAttribute('position', new THREE.BufferAttribute(positions, 3, false));
var color = new THREE.Color();
const blossomPalette = [
0xff0000, 0xff0000, 0xff0000,
0x0000ff, 0x0000ff, 0x0000ff,
];
var colors = new Float32Array(2 * 3 * 3 * col * row);
for (let i = 0; i < 6; i++) {
color.setHex(blossomPalette[i]);
color.toArray(colors, i * 3);
}
geometry.setAttribute('a_color', new THREE.BufferAttribute(colors, 3, false));
var material = new THREE.ShaderMaterial({
vertexShader: vShader,
fragmentShader: fShader,
transparent: true,
blending: THREE.AdditiveBlending,
depthTest: false,
vertexColors: true,
flatShading: true
});
var plane = new THREE.Mesh(geometry, material);
scene.add(plane);
function animate() {
renderer.render(scene, camera);
requestAnimationFrame(animate)
}
animate();
function getViewSize(camera) {
var fovInRadians = (camera.fov * Math.PI) / 180;
var height = Math.abs(camera.position.z * Math.tan(fovInRadians / 2) * 2);
return {
width: height * camera.aspect,
height: height
}
}
<script src="https://threejsfundamentals.org/threejs/resources/threejs/r122/build/three.min.js"></script>
See this article
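As an aside, if hand-writing the 6 vertices feels brittle, recent three.js versions can un-share the indexed plane for you with BufferGeometry.toNonIndexed(). A sketch of that variant, reusing the names from the snippet above:
// Expand the indexed plane into 6 unshared vertices, then give each
// vertex a color; with 3 reds followed by 3 blues, each triangle is solid.
var geometry = new THREE.PlaneBufferGeometry(viewSize.width, viewSize.height, col, row).toNonIndexed();
var count = geometry.attributes.position.count; // 6 for a 1x1 plane
var colors = new Float32Array(count * 3);
for (let i = 0; i < count; i++) {
  color.setHex(blossomPalette[i % blossomPalette.length]);
  color.toArray(colors, i * 3);
}
geometry.setAttribute('a_color', new THREE.BufferAttribute(colors, 3, false));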

three.js How to have 2 textures on one object, the second being transparent according to a grayscale png?

I need to have an object with a texture I can change the color of, and another one on top of the first one for colored details.
Here are some pictures to describe this:
The result I need: Fig. 1 and 2.
Fig. 3 is the texture of the background.
Fig. 4 is the alpha texture of the details.
I know how to do this with LightWave, for example; it's called texture layers. But I can't figure it out in three.js.
Thank you.
You can use THREE.ShaderMaterial() to mix those textures, using the .r channel of the pattern texture as the mix factor:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, innerWidth / innerHeight, 1, 100);
camera.position.set(0, 0, 10);
var renderer = new THREE.WebGLRenderer();
renderer.setSize(innerWidth, innerHeight);
document.body.appendChild(renderer.domElement);
var c1 = document.createElement("canvas");
c1.width = 128;
c1.height = 128;
var ctx1 = c1.getContext("2d");
ctx1.fillStyle = "gray";
ctx1.fillRect(0, 0, 128, 128);
var tex1 = new THREE.CanvasTexture(c1); // texture of a solid color
var c2 = document.createElement("canvas");
c2.width = 128;
c2.height = 128;
var ctx2 = c2.getContext("2d");
ctx2.fillStyle = "black";
ctx2.fillRect(0, 0, 128, 128);
ctx2.strokeStyle = "white";
ctx2.moveTo(50, -20);
ctx2.lineTo(100, 148);
ctx2.lineWidth = 20;
ctx2.stroke();
var tex2 = new THREE.CanvasTexture(c2); // texture with a pattern
var planeGeom = new THREE.PlaneBufferGeometry(10, 10);
var planeMat = new THREE.ShaderMaterial({
uniforms: {
tex1: {
value: tex1
},
tex2: {
value: tex2
},
color: {
value: new THREE.Color() //color of the pattern
}
},
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`,
fragmentShader: `
uniform sampler2D tex1;
uniform sampler2D tex2;
uniform vec3 color;
varying vec2 vUv;
void main() {
vec3 c1 = texture2D(tex1, vUv).rgb;
float m = texture2D(tex2, vUv).r;
vec3 col = mix(c1, color, m);
gl_FragColor = vec4(col, 1);
}
`
});
var plane = new THREE.Mesh(planeGeom, planeMat);
scene.add(plane);
var clock = new THREE.Clock();
renderer.setAnimationLoop(() => {
let t = (clock.getElapsedTime() * 0.125) % 1;
planeMat.uniforms.color.value.setHSL(t, 1, 0.5);
renderer.render(scene, camera);
});
body {
overflow: hidden;
margin: 0;
}
<script src="https://threejs.org/build/three.min.js"></script>
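For reference, the mix() call in the fragment shader above is a plain linear interpolation: mix(c1, color, m) expands to c1 * (1.0 - m) + color * m, so black (m = 0.0) in the mask keeps the base texture, white (m = 1.0) shows the tint color, and gray values blend between the two.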

Three js postProcessing solution with transparency

I discovered this solution for creating postprocessing effects with three.js:
https://medium.com/@luruke/simple-postprocessing-in-three-js-91936ecadfb7
(made by Luigi De Rosa)
It's a great way to do it. Unfortunately, I can't manage to add transparency to my final render. Should I add a transparency component inside my postprocessing fragment shader?
const fragmentShader = `precision highp float;
uniform sampler2D uScene;
uniform vec2 uResolution;
uniform float uTime;
void main() {
vec2 uv = gl_FragCoord.xy / uResolution.xy;
vec3 color = vec3(uv, 1.0);
//simple distortion effect
uv.y += sin(uv.x*30.0+uTime*10.0)/40.0;
uv.x -= sin(uv.y*10.0-uTime)/40.0;
color = texture2D(uScene, uv).rgb;
gl_FragColor = vec4(color, 1.0);
}
`;
Thank you
EDIT 1:
I added the attribute transparent: true to the RawShaderMaterial.
I changed the format of the new THREE.WebGLRenderTarget to THREE.RGBAFormat instead of THREE.RGBFormat.
I also added these lines at the end of my fragment shader:
gl_FragColor = vec4(color, 1.0);
vec4 tex = texture2D( uScene, uv );
if(tex.a < 0.0) {
gl_FragColor.a = 1.0;
}
But I still can't see through my canvas.
EDIT 2:
Here's a snippet with the postProcessing class
let renderer, camera, scene, W = window.innerWidth, H = window.innerHeight, geometry, material, mesh;
initWebgl();
function initWebgl(){
renderer = new THREE.WebGLRenderer( { alpha: true, antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( W, H );
document.querySelector('.innerCanvas').appendChild( renderer.domElement );
camera = new THREE.OrthographicCamera(-W/H/2, W/H/2, 1/2, -1/2, -0.1, 0.1);
scene = new THREE.Scene();
geometry = new THREE.PlaneBufferGeometry(0.5, 0.5);
material = new THREE.MeshNormalMaterial();
mesh = new THREE.Mesh( geometry , material );
scene.add(mesh);
}
function rafP(){
requestAnimationFrame(rafP);
// renderer.render(scene, camera);
post.render(scene, camera);
}
const vertexShader = `precision highp float;
attribute vec2 position;
void main() {
// Look ma! no projection matrix multiplication,
// because we pass the values directly in clip space coordinates.
gl_Position = vec4(position, 1.0, 1.0);
}`;
const fragmentShader = `precision highp float;
uniform sampler2D uScene;
uniform vec2 uResolution;
uniform float uTime;
void main() {
vec2 uv = gl_FragCoord.xy / uResolution.xy;
vec3 color = vec3(uv, 1.0);
uv.y += sin(uv.x*20.0)/10.0;
color = texture2D(uScene, uv).rgb;
gl_FragColor = vec4(color, 1.0);
vec4 tex = texture2D( uScene, uv );
// if(tex.a - percent < 0.0) {
if(tex.a < 0.0) {
gl_FragColor.a = 1.0;
//or without transparent = true use
// discard;
}
}`;
//PostProcessing
class PostFX {
constructor(renderer) {
this.renderer = renderer;
this.scene = new THREE.Scene();
// three.js for .render() wants a camera, even if we're not using it :(
this.dummyCamera = new THREE.OrthographicCamera();
this.geometry = new THREE.BufferGeometry();
// Triangle expressed in clip space coordinates
const vertices = new Float32Array([
-1.0, -1.0,
3.0, -1.0,
-1.0, 3.0
]);
this.geometry.addAttribute('position', new THREE.BufferAttribute(vertices, 2));
this.resolution = new THREE.Vector2();
this.renderer.getDrawingBufferSize(this.resolution);
this.target = new THREE.WebGLRenderTarget(this.resolution.x, this.resolution.y, {
format: THREE.RGBAFormat, //THREE.RGBFormat
stencilBuffer: false,
depthBuffer: true
});
this.material = new THREE.RawShaderMaterial({
fragmentShader,
vertexShader,
uniforms: {
uScene: { value: this.target.texture },
uResolution: { value: this.resolution }
},
transparent:true
});
// TODO: handle the resize -> update uResolution uniform and this.target.setSize()
this.triangle = new THREE.Mesh(this.geometry, this.material);
// Our triangle will be always on screen, so avoid frustum culling checking
this.triangle.frustumCulled = false;
this.scene.add(this.triangle);
}
render(scene, camera) {
this.renderer.setRenderTarget(this.target);
this.renderer.render(scene, camera);
this.renderer.setRenderTarget(null);
this.renderer.render(this.scene, this.dummyCamera);
console.log(this.renderer);
}
}
post = new PostFX(renderer);
rafP();
body{
margin:0;
padding:0;
background:#00F;
}
.innerCanvas{
position:fixed;
top:0;
left:0;
width:100%;
height:100%;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/105/three.js"></script>
<div class="innerCanvas"></div>
In the alpha channel, 0 means fully transparent and 1 means fully opaque.
The only thing you need, in this case, is to pass gl_FragColor the result from your texture sample. You don't even need to worry about its value.
gl_FragColor = texture2D(uScene, uv);
JSFiddle
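Summing up the pieces that have to cooperate for transparency to survive the post pass, a sketch (names follow the snippet above):
// 1. transparent canvas:
renderer = new THREE.WebGLRenderer({ alpha: true, antialias: true });
// 2. clear to alpha 0 so uncovered pixels stay see-through:
renderer.setClearColor(0x000000, 0);
// 3. a render target that keeps alpha: format: THREE.RGBAFormat
// 4. a post material that blends: transparent: true
// 5. a fragment shader that passes alpha through:
//    gl_FragColor = texture2D(uScene, uv);
If any link in that chain drops the alpha channel, the page background behind the canvas will not show through.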

FBO Particles with Cumulative Movement

Link to thread threejs discourse: https://discourse.threejs.org/t/fbo-particles-with-cumulative-movement/7221
This is difficult for me to explain because of my limited knowledge on the subject, but I'm gonna do my best...
At this point, I have a basic FBO particle system in place that works. The following is how it's set up:
var FBO = function( exports ){
var scene, orthoCamera, rtt;
exports.init = function( width, height, renderer, simulationMaterial, renderMaterial ){
var gl = renderer.getContext();
//1 we need FLOAT Textures to store positions
//https://github.com/KhronosGroup/WebGL/blob/master/sdk/tests/conformance/extensions/oes-texture-float.html
if (!gl.getExtension("OES_texture_float")){
throw new Error( "float textures not supported" );
}
//2 we need to access textures from within the vertex shader
//https://github.com/KhronosGroup/WebGL/blob/90ceaac0c4546b1aad634a6a5c4d2dfae9f4d124/conformance-suites/1.0.0/extra/webgl-info.html
if( gl.getParameter(gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS) == 0 ) {
throw new Error( "vertex shader cannot read textures" );
}
//3 rtt setup
scene = new THREE.Scene();
orthoCamera = new THREE.OrthographicCamera(-1,1,1,-1,1/Math.pow( 2, 53 ),1 );
//4 create a target texture
var options = {
minFilter: THREE.NearestFilter,//important as we want to sample square pixels
magFilter: THREE.NearestFilter,//
format: THREE.RGBAFormat,//180407 changed to RGBAFormat
type:THREE.FloatType//important as we need precise coordinates (not ints)
};
rtt = new THREE.WebGLRenderTarget( width,height, options);
//5 the simulation:
//create a bi-unit quadrilateral and uses the simulation material to update the Float Texture
var geom = new THREE.BufferGeometry();
geom.addAttribute( 'position', new THREE.BufferAttribute( new Float32Array([ -1,-1,0, 1,-1,0, 1,1,0, -1,-1, 0, 1, 1, 0, -1,1,0 ]), 3 ) );
geom.addAttribute( 'uv', new THREE.BufferAttribute( new Float32Array([ 0,1, 1,1, 1,0, 0,1, 1,0, 0,0 ]), 2 ) );
scene.add( new THREE.Mesh( geom, simulationMaterial ) );
//6 the particles:
//create a vertex buffer of size width * height with normalized coordinates
var l = (width * height );
var vertices = new Float32Array( l * 3 );
for ( var i = 0; i < l; i++ ) {
var i3 = i * 3;
vertices[ i3 ] = ( i % width ) / width ;
vertices[ i3 + 1 ] = ( i / width ) / height;
}
//create the particles geometry
var geometry = new THREE.BufferGeometry();
geometry.addAttribute( 'position', new THREE.BufferAttribute( vertices, 3 ) );
//the rendermaterial is used to render the particles
exports.particles = new THREE.Points( geometry, renderMaterial );
exports.particles.frustumCulled = false;
exports.renderer = renderer;
};
//7 update loop
exports.update = function(){
//1 update the simulation and render the result in a target texture
// exports.renderer.render( scene, orthoCamera, rtt, true );
exports.renderer.setRenderTarget( rtt );
exports.renderer.render( scene, orthoCamera );
exports.renderer.setRenderTarget( null );
//2 use the result of the swap as the new position for the particles' renderer
// had to add .texture on the end of rtt for r103
exports.particles.material.uniforms.positions.value = rtt.texture;
};
return exports;
}({});
The following are the shaders it uses:
<script type="x-shader/x-vertex" id="simulation_vs">
//vertex shader
varying vec2 vUv;
void main() {
vUv = vec2(uv.x, uv.y);
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script type="x-shader/x-fragment" id="simulation_fs">
//fragment Shader
uniform sampler2D positions;//DATA Texture containing original positions
varying vec2 vUv;
void main() {
//basic simulation: displays the particles in place.
vec3 pos = texture2D( positions, vUv ).rgb;
// we can move the particle here
gl_FragColor = vec4( pos,1.0 );
}
</script>
<script type="x-shader/x-vertex" id="render_vs">
//vertex shader
uniform sampler2D positions;//RenderTarget containing the transformed positions
uniform float pointSize;//size
void main() {
//the mesh is a normalized square so the uvs = the xy positions of the vertices
vec3 pos = texture2D( positions, position.xy ).xyz;
//pos now contains a 3D position in space, we can use it as a regular vertex
//regular projection of our position
gl_Position = projectionMatrix * modelViewMatrix * vec4( pos, 1.0 );
//sets the point size
gl_PointSize = pointSize;
}
</script>
<script type="x-shader/x-fragment" id="render_fs">
//fragment shader
void main()
{
gl_FragColor = vec4( vec3( 1. ), .25 );
}
</script>
I understand that I would move the particles in the "simulation_fs", but if I try to move a particle in that shader with something like this,
pos.x += 1.0;
it will still only shift it one unit from the original texture position. I want the movement to be cumulative.
Would using a 2nd set of simulation shaders allow me to move the particles in a cumulative way? Is that a practical solution?
For cumulative movement, you need to use uniforms:
Look into passing a uniform named time to your vertex shader. Then you can update the time once per frame, and you can use that to animate your vertex positions. For example:
position.x = 2.0 * time; // Increment linearly
position.x = sin(time); // Sin wave back-forth animation
Without a changing variable, your vertex animations will be static from one frame to the next.
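On the JavaScript side, driving that uniform might look like this sketch (the uniform name time is an assumption; FBO, simulationMaterial, renderer, scene and camera follow the question's code):
// Feed elapsed time into the simulation material once per frame.
simulationMaterial.uniforms.time = { value: 0.0 };
var clock = new THREE.Clock();
function tick() {
  simulationMaterial.uniforms.time.value = clock.getElapsedTime();
  FBO.update(); // runs the simulation pass into the render target
  renderer.render(scene, camera); // draws the points at the new positions
  requestAnimationFrame(tick);
}
tick();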
I needed to accomplish something like this, and set up an absolutely minimal example that I can tweak in the future. You'll see the positional changes are cumulative.
The following was simplified from a wonderful discussion of FBOs by Nicolas Barradeau (a WebGL wizard):
// specify the container where we'll render the scene
var elem = document.querySelector('body'),
elemW = elem.clientWidth,
elemH = elem.clientHeight
// generate a scene object
var scene = new THREE.Scene();
// generate a camera
var camera = new THREE.PerspectiveCamera(75, elemW/elemH, 0.001, 100);
// generate a renderer
var renderer = new THREE.WebGLRenderer({antialias: true, alpha: true});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(elemW, elemH);
elem.appendChild(renderer.domElement);
// generate controls
var controls = new THREE.TrackballControls(camera, renderer.domElement);
// position camera and controls
camera.position.set(0.5, 0.5, -5);
controls.target = new THREE.Vector3(0.5, 0.5, 0);
/**
* FBO
**/
// verify browser agent supports "frame buffer object" features
gl = renderer.getContext();
if (!gl.getExtension('OES_texture_float') ||
gl.getParameter(gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS) == 0) {
alert(' * Cannot create FBO :(');
}
// set initial positions of `w*h` particles
var w = 256, h = 256,
i = 0,
data = new Float32Array(w*h*3);
for (var x=0; x<w; x++) {
for (var y=0; y<h; y++) {
data[i++] = x/w;
data[i++] = y/h;
data[i++] = 0;
}
}
// feed those positions into a data texture
var dataTex = new THREE.DataTexture(data, w, h, THREE.RGBFormat, THREE.FloatType);
dataTex.minFilter = THREE.NearestFilter;
dataTex.magFilter = THREE.NearestFilter;
dataTex.needsUpdate = true;
// add the data texture with positions to a material for the simulation
var simMaterial = new THREE.RawShaderMaterial({
uniforms: { posTex: { type: 't', value: dataTex }, },
vertexShader: document.querySelector('#sim-vs').textContent,
fragmentShader: document.querySelector('#sim-fs').textContent,
});
// dataTex isn't needed after this; drop the local reference
// (the uniform above still keeps the texture alive)
dataTex = null;
THREE.FBO = function(w, simMat) {
this.scene = new THREE.Scene();
this.camera = new THREE.OrthographicCamera(-w/2, w/2, w/2, -w/2, -1, 1);
this.scene.add(new THREE.Mesh(new THREE.PlaneGeometry(w, w), simMat));
};
// create a scene where we'll render the positional attributes
var fbo = new THREE.FBO(w, simMaterial);
// create render targets a + b to which the simulation will be rendered
var renderTargetA = new THREE.WebGLRenderTarget(w, h, {
wrapS: THREE.RepeatWrapping,
wrapT: THREE.RepeatWrapping,
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter,
format: THREE.RGBFormat,
type: THREE.FloatType,
stencilBuffer: false,
});
// a second render target lets us store input + output positional states
renderTargetB = renderTargetA.clone();
// render the positions to the render targets
renderer.render(fbo.scene, fbo.camera, renderTargetA, false);
renderer.render(fbo.scene, fbo.camera, renderTargetB, false);
// store the uv attrs; each is x,y and identifies a given point's
// position data within the positional texture; must be scaled 0:1!
var geo = new THREE.BufferGeometry(),
arr = new Float32Array(w*h*3);
for (var i=0; i<w*h; i++) {
arr[i*3] = (i % w) / w;
arr[i*3 + 1] = Math.floor(i / w) / h;
arr[i*3 + 2] = 0;
}
geo.addAttribute('position', new THREE.BufferAttribute(arr, 3, true))
// create material the user sees
var material = new THREE.RawShaderMaterial({
uniforms: {
posMap: { type: 't', value: null }, // `posMap` is set each render
},
vertexShader: document.querySelector('#ui-vert').textContent,
fragmentShader: document.querySelector('#ui-frag').textContent,
transparent: true,
});
// add the points the user sees to the scene
var mesh = new THREE.Points(geo, material);
scene.add(mesh);
function render() {
// at the start of the render block, A is one frame behind B
var oldA = renderTargetA; // store A, the penultimate state
renderTargetA = renderTargetB; // advance A to the updated state
renderTargetB = oldA; // set B to the penultimate state
// pass the updated positional values to the simulation
simMaterial.uniforms.posTex.value = renderTargetA.texture;
// run a frame and store the new positional values in renderTargetB
renderer.render(fbo.scene, fbo.camera, renderTargetB, false);
// pass the new positional values to the scene users see
material.uniforms.posMap.value = renderTargetB.texture;
// render the scene users see as normal
renderer.render(scene, camera);
controls.update();
requestAnimationFrame(render);
};
render();
html, body { width: 100%; height: 100%; background: #000; }
body { margin: 0; overflow: hidden; }
canvas { width: 100%; height: 100%; }
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/101/three.min.js"></script>
<script src="https://threejs.org/examples/js/controls/TrackballControls.js"></script>
<!-- The simulation shaders update positional attributes -->
<script id='sim-vs' type='x-shader/x-vert'>
precision mediump float;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
attribute vec2 uv; // x,y offsets of each point in texture
attribute vec3 position;
varying vec2 vUv;
void main() {
vUv = vec2(uv.x, 1.0 - uv.y);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id='sim-fs' type='x-shader/x-frag'>
precision mediump float;
uniform sampler2D posTex;
varying vec2 vUv;
void main() {
// read the supplied x,y,z vert positions
vec3 pos = texture2D(posTex, vUv).xyz;
// update the positional attributes here!
pos.x += cos(pos.y) / 100.0;
pos.y += tan(pos.x) / 100.0;
// render the new positional attributes
gl_FragColor = vec4(pos, 1.0);
}
</script>
<!-- The ui shaders render what the user sees -->
<script id='ui-vert' type='x-shader/x-vert'>
precision mediump float;
uniform sampler2D posMap; // contains positional data read from sim-fs
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
attribute vec2 position;
void main() {
// read this particle's position, which is stored as a pixel color
vec3 pos = texture2D(posMap, position.xy).xyz;
// project this particle
vec4 mvPosition = modelViewMatrix * vec4(pos, 1.0);
gl_Position = projectionMatrix * mvPosition;
// set the size of each particle
gl_PointSize = 0.3 / -mvPosition.z;
}
</script>
<script id='ui-frag' type='x-shader/x-frag'>
precision mediump float;
void main() {
gl_FragColor = vec4(0.0, 0.5, 1.5, 1.0);
}
</script>
