Apologies for the vague question but I wasn't sure how to phrase this. I'm trying to write some THREE.js/GLSL code that produces a circular gradient (for some SDF stuff). With the code below I would expect to see the gradient on the plane, but the plane remains white and nothing else renders on it. Can anyone tell me where I'm going wrong?
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>My first three.js app</title>
<style>
body { margin: 0; }
canvas { width: 100%; height: 100% }
</style>
</head>
<body>
<script type="x-shader/x-vertex" id="sdfVS">
// Pass-through vertex shader: forwards the geometry's UVs to the fragment
// shader and applies the standard three.js transform chain.
varying vec2 vUv; // pass the uv coordinates of each pixel to the frag shader
void main() {
vUv = uv; // `uv` is an attribute three.js supplies for built-in geometries
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script type="x-shader/x-fragment" id="sdfFS">
// Fragment shader intended to draw a circular gradient for SDF experiments.
precision mediump float;
uniform vec2 u_resolution; // NOTE(review): never supplied by the ShaderMaterial below, so it stays (0,0)
varying vec2 vUv;
// Distance from `pos` to the point (radius, radius).
// NOTE(review): a circle SDF centred at the origin would normally be
// length(pos) - radius; comparing against vec2(radius) looks like part of the bug.
float circle(vec2 pos, float radius)
{
return distance(pos, vec2(radius));
}
void main()
{
// BUG (the subject of this question): gl_FragCoord.xy is in pixels while vUv
// is in [0,1]; dividing one by the other is meaningless and explodes near the
// uv = 0 edges. `vUv * 2.0 - 1.0` alone gives the intended [-1,1] coordinates.
vec2 pos = (gl_FragCoord.xy / vUv) * 2.0 - 1.0;
float circle1 = circle(pos, 0.5);
vec3 color = vec3(circle1); // distances >= 1.0 clamp to white — hence the all-white plane
gl_FragColor = vec4(color, 1.0);
}
</script>
<script src="../../js/three.min.js"></script>
<script>
// Scene setup for the first question: an orthographic camera on the -x axis
// looking at a unit plane that carries the SDF shader material.
var scene, camera, renderer, aspect, geometry, material, plane;
var container;
var frustumSize = 2;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
scene = new THREE.Scene();
scene.background = new THREE.Color(0x0000ff);
aspect = window.innerWidth / window.innerHeight;
// NOTE(review): near=0.1, far=1 and the camera sits at x=-1 looking at the
// origin, so the plane lies exactly on the far plane — it may be clipped.
camera = new THREE.OrthographicCamera( 0.5 * frustumSize * aspect / - 2, 0.5 * frustumSize * aspect / 2, frustumSize / 2, frustumSize / - 2, 0.1, 1 );
// NOTE(review): cameraOrthoHelper is assigned without `var` (implicit global).
cameraOrthoHelper = new THREE.CameraHelper( camera );
scene.add( cameraOrthoHelper );
var width = 1;
var height = 1;
geometry = new THREE.PlaneGeometry(width, height);
// NOTE(review): no `uniforms` are passed, yet the fragment shader declares
// u_resolution — it will read as (0,0).
material = new THREE.ShaderMaterial( {
vertexShader: document.getElementById('sdfVS').textContent,
fragmentShader: document.getElementById('sdfFS').textContent,
side: THREE.DoubleSide
} );
plane = new THREE.Mesh( geometry, material );
plane.rotation.x = 0;
plane.rotation.y = THREE.Math.degToRad( -90 ); // face the camera on the x axis
plane.rotation.z = 0;
scene.add( plane )
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
// NOTE(review): OrthographicCamera's projection is defined by
// left/right/top/bottom; setting `.aspect` here has no effect on it.
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
// Re-place the camera every frame (position is constant, so this is idempotent).
camera.position.x = -1;
camera.position.y = 0;
camera.position.z = 0;
camera.lookAt( scene.position );
camera.updateMatrixWorld();
renderer.render( scene, camera );
}
</script>
</body>
</html>
I can't understand this line:
vec2 pos = (gl_FragCoord.xy / vUv) * 2.0 - 1.0;
Seems like you want to calculate uv coordinates, but if so, then you already have uvs (passed in vUv).
You can do the thing this way, as an option:
// Answer script: same scene as the question, plus a `time` uniform that is
// fed to the fragment shader every frame.
var scene, camera, renderer, aspect, geometry, material, plane;
var container;
var cameraOrthoHelper; // FIX: was assigned without declaration (implicit global)
var clock = new THREE.Clock();
var frustumSize = 2;
init();
animate();

// Builds the renderer, the orthographic camera and the shader-material plane.
function init() {
  container = document.createElement('div');
  document.body.appendChild(container);
  scene = new THREE.Scene();
  scene.background = new THREE.Color(0x0000ff);
  aspect = window.innerWidth / window.innerHeight;
  camera = new THREE.OrthographicCamera(0.5 * frustumSize * aspect / -2, 0.5 * frustumSize * aspect / 2, frustumSize / 2, frustumSize / -2, 0.1, 1);
  cameraOrthoHelper = new THREE.CameraHelper(camera);
  scene.add(cameraOrthoHelper);
  var width = 1;
  var height = 1;
  geometry = new THREE.PlaneGeometry(width, height);
  material = new THREE.ShaderMaterial({
    uniforms: {time: {value: 0}}, // animated in render()
    vertexShader: document.getElementById('sdfVS').textContent,
    fragmentShader: document.getElementById('sdfFS').textContent,
    side: THREE.DoubleSide
  });
  plane = new THREE.Mesh(geometry, material);
  plane.rotation.x = 0;
  plane.rotation.y = THREE.Math.degToRad(-90); // face the camera on the x axis
  plane.rotation.z = 0;
  scene.add(plane);
  renderer = new THREE.WebGLRenderer();
  renderer.setPixelRatio(window.devicePixelRatio);
  renderer.setSize(window.innerWidth, window.innerHeight);
  container.appendChild(renderer.domElement);
  window.addEventListener('resize', onWindowResize, false);
}

// FIX: OrthographicCamera ignores `.aspect`; its projection is defined by
// left/right/top/bottom, so recompute the horizontal extents from the new
// window aspect ratio instead of assigning a property that does nothing.
function onWindowResize() {
  aspect = window.innerWidth / window.innerHeight;
  camera.left = 0.5 * frustumSize * aspect / -2;
  camera.right = 0.5 * frustumSize * aspect / 2;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}

// Standard requestAnimationFrame loop.
function animate() {
  requestAnimationFrame(animate);
  render();
}

// Positions the camera on the -x axis, pushes the elapsed time into the
// shader and draws the frame.
function render() {
  camera.position.x = -1;
  camera.position.y = 0;
  camera.position.z = 0;
  camera.lookAt(scene.position);
  camera.updateMatrixWorld();
  var t = clock.getElapsedTime();
  material.uniforms.time.value = t;
  renderer.render(scene, camera);
}
/* Full-window canvas: remove the default page margin and scrollbars. */
body {
overflow: hidden;
margin: 0;
}
<script src="https://threejs.org/build/three.min.js"></script>
<script type="x-shader/x-vertex" id="sdfVS">
// Pass-through vertex shader: forwards UVs and applies the built-in
// three.js model-view and projection matrices.
varying vec2 vUv; // pass the uv coordinates of each pixel to the frag shader
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script type="x-shader/x-fragment" id="sdfFS">
// Fragment shader: a soft white disc whose centre orbits the origin over
// time, drawn over a black background.
precision mediump float;
uniform float time;
uniform vec2 u_resolution;
varying vec2 vUv;
// Returns 1.0 at `pos`, falling smoothly to 0.0 at `radius` away from it.
float circle(vec2 uv, vec2 pos, float radius) {
    float d = distance(uv, pos);
    return smoothstep(radius, 0., d);
}
void main() {
    // Remap UVs from [0,1] to [-1,1] so the plane centre is the origin.
    vec2 p = vUv * 2. - 1.;
    // The disc centre travels around the unit circle as time advances.
    vec2 centre = vec2(cos(time), sin(time));
    float intensity = circle(p, centre, 1.0);
    gl_FragColor = vec4(vec3(intensity), 1.0);
}
</script>
Related
The title makes it look easy, but I'm struggling to get this pie chart shader onto my 3D model. For this I'm using three.js. Here is my code so far:
index.html:
<!DOCTYPE html>
<html>
<head>
<!-- Entry page: loads the stylesheet and the ES-module entry point. -->
<meta charset='utf-8'>
<meta http-equiv='X-UA-Compatible' content='IE=edge'>
<title>Site Prof Matheus</title>
<meta name='viewport' content='width=device-width, initial-scale=1'>
<link rel='stylesheet' type='text/css' media='screen' href='./styles/main.css'>
<script type='module' src='./src/main.js'></script>
</head>
<body>
</body>
</html>
main.js:
import * as THREE from 'https://cdn.skypack.dev/three'
import { OrbitControls } from 'https://cdn.skypack.dev/three-stdlib/controls/OrbitControls'
import { GLTFLoader } from 'https://cdn.skypack.dev/three-stdlib/loaders/GLTFLoader'
import { vertexShader } from '../shaders/vertex.glsl.js'
import { fragmentShader } from '../shaders/fragment.glsl.js'
// Basic scene / camera / renderer setup.
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
const loader = new GLTFLoader();
const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setClearColor(0x002653, 0.5)
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
renderer.setPixelRatio(window.devicePixelRatio);
// Custom material applied to every mesh of the loaded roulette model.
// NOTE(review): the fragment shader in fragment.glsl.js contains a bare
// `gl_Position` statement, which is invalid in a fragment shader — this
// material will fail to compile as written.
const r_material = new THREE.ShaderMaterial({ //Roulette material
uniforms: {
iTime: { value: 1.0 },
resolution: { value: new THREE.Vector2 }
},
vertexShader,
fragmentShader
})
// Load the model and swap every mesh's material for the shader material.
loader.load(
'../roulette.glb',
function (gltf) {
gltf.scene.traverse((o) => {
if (o.isMesh) o.material = r_material;
});
scene.add(gltf.scene);
},
function () { },
function (err) {
console.log('error: ' + err);
}
)
camera.position.z = 5;
const controls = new OrbitControls(camera, renderer.domElement);
controls.enableZoom = true;
controls.enableDamping = true;
// Render loop; damping requires controls.update() every frame.
function animate() {
requestAnimationFrame(animate);
controls.update();
renderer.render(scene, camera);
};
animate();
vertex.glsl.js:
/**
 * Pass-through vertex shader source for a THREE.ShaderMaterial.
 * Transforms each vertex by the built-in modelViewMatrix and
 * projectionMatrix uniforms that three.js injects automatically.
 */
export const vertexShader = /* glsl */`
void main()
{
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`;
fragment.glsl.js:
/**
 * Fragment shader source: flat green for every fragment.
 * FIX: removed the stray bare `gl_Position` statement — gl_Position only
 * exists in vertex shaders, so the original source failed to compile and
 * the material rendered nothing.
 */
const fragmentShader = /* glsl */`
void main() {
gl_FragColor = vec4(0, 1, 0, 1);
}
`;
export { fragmentShader };
and here is my roulette.glb model
The intended result is to have a shader with colors that I choose, all the parts equal; the colors can repeat, and the shader should cover the whole mesh except the bottom. intended result
P.S. I see my object looks like a flat plane in the image; I guess I just need to add some proper lighting. Here is my object's geometry, just for the sake of curiosity: my mesh geometry
P.P.S. Some antialiasing on the pie chart shader would be very welcome.
This is how you can do it, modifying a material with .onBeforeCompile():
/* Full-window demo: remove default margin and scrollbars. */
body{
overflow: hidden;
margin: 0;
}
<script type="module">
// Roulette-wheel demo: a LatheGeometry wheel whose Lambert material is
// patched via onBeforeCompile to paint alternating pie segments.
import * as THREE from "https://cdn.skypack.dev/three#0.136.0";
import {
OrbitControls
} from "https://cdn.skypack.dev/three#0.136.0/examples/jsm/controls/OrbitControls";
let scene = new THREE.Scene();
let camera = new THREE.PerspectiveCamera(60, innerWidth / innerHeight, 1, 2000);
camera.position.set(0, 1, 1).setLength(12);
let renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setSize(innerWidth, innerHeight);
document.body.appendChild(renderer.domElement);
window.addEventListener("resize", onWindowResize);
let controls = new OrbitControls(camera, renderer.domElement);
scene.add(new THREE.GridHelper());
let light = new THREE.DirectionalLight(0xffffff, 0.5);
light.position.setScalar(1);
scene.add(light, new THREE.AmbientLight(0xffffff, 0.5));
// 2D profile of the wheel rim, revolved by LatheGeometry below.
let path = new THREE.Path();
path.moveTo(0, -1);
path.lineTo(4, -1);
path.absarc(4, -0.5, 0.5, Math.PI * 1.5, 0);
path.absarc(4, 0.5, 0.5, 0, Math.PI * 0.5);
path.lineTo(0, -0.5);
let g = new THREE.LatheGeometry(path.getPoints(50), 72);
// Lambert material patched at compile time: the vertex shader exports the
// local position (vPos) and the fragment shader colours N pie segments
// between radius 3 and 4, with fwidth-based antialiased edges.
let m = new THREE.MeshLambertMaterial({
color: 0x00ff7f,
//wireframe: true,
onBeforeCompile: shader => {
shader.vertexShader = `
varying vec3 vPos;
${shader.vertexShader}
`.replace(
`#include <begin_vertex>`,
`#include <begin_vertex>
vPos = position;
`
);
//console.log(shader.vertexShader);
shader.fragmentShader = `
#define ss(a, b, c) smoothstep(a, b, c)
varying vec3 vPos;
${shader.fragmentShader}
`.replace(
`vec4 diffuseColor = vec4( diffuse, opacity );`,
`
vec3 col = diffuse;
int N = 37;
float a = atan(vPos.x,-vPos.z)+PI;
float r = PI2/float(N);
float cId = floor(a/r);
vec3 br = mod(cId, 2.) == 0. ? vec3(0) : vec3(1, 0, 0); // black / red
br = cId == 0. ? vec3(0, 0.75, 0) : br; // green
float d = length(vPos.xz);
float fw = length(fwidth(vPos.xz));
col = mix(col, br, ss(3. - fw, 3., d) - ss(4., 4. + fw, d));
col = mix(diffuse, col, clamp(sign(vPos.y), 0., 1.));
vec4 diffuseColor = vec4( col, opacity );
`
);
//console.log(shader.fragmentShader);
}
})
let o = new THREE.Mesh(g, m);
o.position.y = 0.5;
o.rotation.y = Math.PI;
scene.add(o);
// Render loop.
renderer.setAnimationLoop(() => {
renderer.render(scene, camera);
});
// Keep camera projection and canvas in sync with the window.
function onWindowResize() {
camera.aspect = innerWidth / innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(innerWidth, innerHeight);
}
</script>
I am trying to rotate a box using vertex shader but its getting sheared for reason I can't figure out, following is the code, please help me. Here is the fiddle and my code of vertex shader
// Vertex shader from the question: rotation is applied AFTER projection.
uniform float delta;
void main()
{
vec4 modelViewPosition = modelViewMatrix * vec4(position, 1.0);
gl_Position = (projectionMatrix * modelViewPosition);
// BUG: gl_Position is a clip-space coordinate here; its x and y are scaled
// non-uniformly by the projection (aspect ratio), so rotating them shears
// the box. The rotation must be applied to `position` before projecting.
float new_x = gl_Position.x*cos(delta) - gl_Position.y*sin(delta);
float new_y = gl_Position.y*cos(delta) + gl_Position.x*sin(delta);
gl_Position.x = new_x;
gl_Position.y = new_y;
}
https://jsfiddle.net/co4vbhye/
If the viewport is rectangular, then this is compensated for by the projection matrix.
This means that last transformation which has to be applied to the vertex coordinates is the projection matrix:
clip_position = projection * view * model * position
You've to apply the rotation to the vertex coordinate position. After that transform the result by the view matrix and projection matrix:
// Corrected vertex shader: rotate the model-space position around the z
// axis first, then apply the model-view and projection transforms.
uniform float delta;
void main()
{
vec3 p = position.xyz;
float new_x = p.x*cos(delta) - p.y*sin(delta);
float new_y = p.y*cos(delta) + p.x*sin(delta);
gl_Position = projectionMatrix * modelViewMatrix * vec4(new_x, new_y, p.z, 1.0);
}
Furthermore, the aspect ratio which is set on the projection matrix (PerspectiveCamera) has to match the aspect ratio of the viewport (canvas):
either
//RENDERER
renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
//CAMERA
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.01, 10000);
or
//RENDERER
renderer = new THREE.WebGLRenderer();
renderer.setSize(CANVAS_WIDTH, CANVAS_HEIGHT);
//CAMERA
camera = new THREE.PerspectiveCamera(75, CANVAS_WIDTH / CANVAS_HEIGHT, 0.01, 10000);
See the example:
// Working example: a box rotated in the vertex shader via the `delta` uniform.
var renderer,
scene,
camera,
container = document.getElementById('Canvas_3');
//RENDERER
renderer = new THREE.WebGLRenderer();
//renderer.setClearColor(0xffffff);
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement);
//CAMERA
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.01, 10000);
//SCENE
scene = new THREE.Scene();
// Uniform object shared with the ShaderMaterial; mutated each frame below.
var customUniforms = {
delta: {
value: 0
}
};
var material = new THREE.ShaderMaterial({
uniforms: customUniforms,
vertexShader: document.getElementById('vertexShader').textContent,
fragmentShader: document.getElementById('fragmentShader').textContent
});
var geometry = new THREE.BoxBufferGeometry(1, 1, 1, 0, 0, 0);
var mesh = new THREE.Mesh(geometry, material);
mesh.position.z = -5;
mesh.position.x = 0;
scene.add(mesh);
// Keep renderer size and camera projection in sync with the window.
window.onresize = function() {
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
}
//RENDER LOOP
render();
var delta = 0;
function render() {
// Advance the rotation angle, wrapping at ~pi/2 radians.
delta += 0.006;
if (delta > 1.57) delta = 0;
mesh.material.uniforms.delta.value = delta;
renderer.render(scene, camera);
requestAnimationFrame(render);
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/109/three.min.js"></script>
<div id="Canvas_3"></div>
<script type="x-shader/x-vertex" id="vertexShader">
// Rotates the model-space position by `delta` around z, then projects.
uniform float delta;
void main()
{
vec3 p = position.xyz;
float new_x = p.x*cos(delta) - p.y*sin(delta);
float new_y = p.y*cos(delta) + p.x*sin(delta);
gl_Position = projectionMatrix * modelViewMatrix * vec4(new_x, new_y, p.z, 1.0);
}
</script>
<script type="x-shader/x-fragment" id="fragmentShader">
// Colour fades from blue to magenta as `delta` grows.
uniform float delta;
void main() {
gl_FragColor = vec4(delta, 0.0, 1.0-delta, 1.0);
}
</script>
I have two objects that I need to render in two different textures using WebGLRenderTarget. After mapping both textures to a plane using shaders and then adding that plane to the main scene, I am having issues drawing the closer object.
I have created a jsfiddle to show my issue: https://jsfiddle.net/11qb7ay7/82/
The real magic occurs here:
// Question's compositing shader: multiplies the two render-target colours,
// which cannot express "sphere in front of cube" occlusion.
void main() {
vec4 color = texture2D(tex_sphere, vUv);
vec4 color_cube = texture2D(tex_cube, vUv);
gl_FragColor = vec4(color.rgb * color_cube.rgb, 1.0);
}
The sphere is placed in front of the cube relative to the camera. How can I draw the sphere pixels instead of the cubes when they overlap?
To be clear, I am trying to find a way to compute the distance from camera of each pixel and render the closer one first
In general, if tex_sphere has an alpha channel, then you can mix the colors by the alpha channel:
// Composite: where the sphere texture is opaque (alpha = 1) its colour wins;
// where it is transparent (alpha = 0) the cube texture shows through.
void main()
{
vec4 color = texture2D(tex_sphere, vUv);
vec4 color_cube = texture2D(tex_cube, vUv);
vec3 mixCol = mix(color_cube.rgb, color.rgb, color.a);
gl_FragColor = vec4(mixCol.rgb, 1.0);
}
If tex_sphere has a black background which should be omitted, then you have to check that the color of tex_sphere is not black:
// Composite keyed on black: any channel above 1/512 counts the sphere texel
// as "present" and selects it over the cube texel.
void main()
{
vec4 color = texture2D(tex_sphere, vUv);
vec4 color_cube = texture2D(tex_cube, vUv);
vec3 test = step(1.0/512.0, color.rgb); // 1.0 per channel that is non-black
float a = max(max(test.r, test.g), test.b);
vec3 mixCol = mix(color_cube.rgb, color.rgb, a);
gl_FragColor = vec4(mixCol.rgb, 1.0);
}
Note, mix interpolates between 2 values according to a floating point interpolation value a in the range [0.0, 1.0]. If the a is equal 0.0 then the 1st value is returned and if the a is equal 1.0 then the 2nd value is returned.
step tests whether a value is less than an edge value. If it is less then 0.0 is returned, else 1.0 is returned.
To get a black background you have to set a black "clear" color when you render the sphere to the render target:
// Renders the sphere over a black clear colour (so black can be keyed out),
// then the cube, then composites both targets onto the main scene's quad.
function render() {
controls.update();
renderer.setClearColor(0x00);
renderer.render(sphereScene, camera, renderTargets.sphere, true);
renderer.setClearColor(0xccccff);
renderer.render(cubeScene, camera, renderTargets.cube, true);
renderer.setClearColor(0xccccff);
renderer.render(scene, camera);
}
If you want to use an alpha channel, then you have to call setClearAlpha before you render to the render target:
// Alpha variant: clear the sphere target with alpha 0 so the compositing
// shader can blend on color.a instead of keying on black.
function render() {
controls.update();
renderer.setClearAlpha(0);
renderer.render(sphereScene, camera, renderTargets.sphere, true);
renderer.setClearAlpha(1);
renderer.render(cubeScene, camera, renderTargets.cube, true);
renderer.render(scene, camera);
}
// Render-target compositing demo: a sphere scene and a cube scene are each
// rendered into their own WebGLRenderTarget, then blended on a full-screen
// quad by a custom shader.
var scene, renderer, camera, controls;
var cubeScene, sphereScene;
var renderTargets;
var container; // FIX: was assigned in init() without declaration (implicit global)
init();
animate();

// Builds the renderer, camera/controls, the main scene and both offscreen scenes.
function init() {
  scene = new THREE.Scene();
  cubeScene = new THREE.Scene();
  sphereScene = new THREE.Scene();
  renderer = new THREE.WebGLRenderer( { antialias: false, alpha: true } );
  renderer.setClearColor(0xccccff);
  camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 4000 );
  camera.position.set(0, 0, 200);
  renderer.setSize( window.innerWidth, window.innerHeight );
  controls = new THREE.OrbitControls(camera, renderer.domElement);
  camera.lookAt( scene.position );
  var light = new THREE.HemisphereLight( 0xffffff, 0x444444 );
  scene.add( light );
  container = document.createElement('div');
  document.body.appendChild(container);
  container.appendChild(renderer.domElement);
  initObjects();
  initRenderTargets(window.innerWidth, window.innerHeight);
  // FIX: onResize existed but was never registered, so resizing the window
  // left the camera projection and both render targets stale.
  window.addEventListener('resize', onResize, false);
}

// Keeps renderer, camera and both render targets in sync with the window size.
function onResize() {
  renderer.setSize( window.innerWidth, window.innerHeight );
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderTargets.sphere.setSize( window.innerWidth, window.innerHeight );
  renderTargets.cube.setSize( window.innerWidth, window.innerHeight );
}

// Adds a green cube to cubeScene and a red sphere (closer to the camera)
// to sphereScene.
function initObjects() {
  var cGeometry = new THREE.BoxGeometry( 100, 100, 100 );
  var cMaterial = new THREE.MeshBasicMaterial( {color: 0x00ff00} );
  var cube = new THREE.Mesh( cGeometry, cMaterial );
  cube.position.z = -210;
  cube.position.y = 100;
  var sGeometry = new THREE.SphereGeometry( 75, 32, 32 );
  var sMaterial = new THREE.MeshBasicMaterial({
    color: 0xff0000
  });
  var sphere = new THREE.Mesh( sGeometry, sMaterial );
  sphere.position.z = -100;
  sphereScene.add( sphere );
  cubeScene.add( cube );
}

// Creates the two render targets and the full-screen quad whose shader
// composites them.
function initRenderTargets(width, height){
  renderTargets = createRenderTargets(width, height);
  var uniforms = {
    "tex_cube": { type: "t", value: renderTargets.cube.texture },
    "tex_sphere": { type: "t", value: renderTargets.sphere.texture }
  }
  // FIX: material and quad were implicit globals; they are only used here.
  var material = new THREE.ShaderMaterial({
    uniforms: uniforms,
    vertexShader: document.getElementById('vs_rt').textContent,
    fragmentShader: document.getElementById('fs_rt').textContent
  });
  var plane = new THREE.PlaneGeometry(width, height);
  var quad = new THREE.Mesh(plane, material);
  scene.add(quad);
}

// One target per scene; NearestFilter avoids any texture smoothing.
function createRenderTargets(width, height) {
  var parameters = {
    minFilter: THREE.NearestFilter,
    magFilter: THREE.NearestFilter,
  };
  return {
    cube: new THREE.WebGLRenderTarget( width, height, parameters ),
    sphere: new THREE.WebGLRenderTarget( width, height, parameters )
  };
}

// Standard requestAnimationFrame loop.
function animate() {
  requestAnimationFrame(animate);
  render();
}

//------------------------------------------
// Main rendering
//------------------------------------------
// Renders the sphere target with a transparent clear, the cube target with
// an opaque clear, then the compositing quad to the screen.
function render() {
  controls.update();
  renderer.setClearAlpha(0);
  renderer.render(sphereScene, camera, renderTargets.sphere, true);
  renderer.setClearAlpha(1);
  renderer.render(cubeScene, camera, renderTargets.cube, true);
  renderer.render(scene, camera);
}
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/three.js/89/three.min.js"></script>
<script type="text/javascript" src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<script id="vs_rt" type="x-shader/x-vertex">
// Pass-through vertex shader for the compositing quad.
// NOTE(review): the sampler uniforms are declared but unused here.
uniform sampler2D tex_cube;
uniform sampler2D tex_sphere;
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id="fs_rt" type="x-shader/x-fragment">
// Composite: sphere texel wins where its alpha is 1, cube texel elsewhere.
uniform sampler2D tex_cube;
uniform sampler2D tex_sphere;
varying vec2 vUv;
void main() {
vec4 color = texture2D(tex_sphere, vUv);
vec4 color_cube = texture2D(tex_cube, vUv);
vec3 mixCol = mix(color_cube.rgb, color.rgb, color.a);
gl_FragColor = vec4(mixCol.rgb, 1.0);
}
</script>
I have set up this minimal test case, in which you can easily see the moiré patterns produced by undersampling the oscillating red colour with a custom fragment shader (jsfiddle).
What is the general technique for removing such patterns using GLSL? I assume it involves the derivatives extension, but I've never quite understood how to implement it. I basically have to do anti-aliasing, I think?
// Moiré test case: a sphere whose fragment shader draws 500 red stripes.
var canvas = document.getElementById('canvas');
var scene = new THREE.Scene();
var renderer = new THREE.WebGLRenderer({canvas: canvas, antialias: true});
// FIX: aspect was computed as clientWidth / clientWidth (always 1), which
// distorted the first frame until the resize branch below corrected it.
var camera = new THREE.PerspectiveCamera(75, canvas.clientWidth / canvas.clientHeight, 1, 1000);
var geometry = new THREE.SphereGeometry(50, 50, 50);
var material = new THREE.ShaderMaterial({
  vertexShader: document.getElementById('vertex-shader').textContent,
  fragmentShader: document.getElementById('fragment-shader').textContent
});
var sphere = new THREE.Mesh(geometry, material);
scene.add(sphere);
camera.position.z = 100;
var period = 30; // seconds per full revolution
var clock = new THREE.Clock();
render();
// Render loop; resizes the drawing buffer when the CSS size changes.
function render() {
  requestAnimationFrame(render);
  if (canvas.width !== canvas.clientWidth || canvas.height !== canvas.clientHeight) {
    renderer.setSize(canvas.clientWidth, canvas.clientHeight, false);
    camera.aspect = canvas.clientWidth / canvas.clientHeight;
    camera.updateProjectionMatrix();
  }
  sphere.rotation.y -= clock.getDelta() * 2 * Math.PI / period;
  renderer.render(scene, camera);
}
/* Make the canvas fill the viewport with no margins or scrollbars. */
html, body, #canvas {
margin: 0;
padding: 0;
width: 100%;
height: 100%;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r73/three.min.js"></script>
<canvas id="canvas"></canvas>
<script id="vertex-shader" type="x-shader/x-vertex">
// Pass-through vertex shader: forwards UVs to the fragment shader.
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id="fragment-shader" type="x-shader/x-fragment">
// 500 red sine stripes across u — deliberately undersampled to show moiré.
#define M_TAU 6.2831853071795864769252867665590
varying vec2 vUv;
void main() {
float w = sin(500.0 * M_TAU * vUv.x) / 2.0 + 0.5;
vec3 color = vec3(w, 0.0, 0.0);
gl_FragColor = vec4(color, 1.0);
}
</script>
Update: I've tried to implement super-sampling, not sure if I have implemented it correctly but it doesn't seem to help too much.
Unfortunately, the moire pattern here is a result of the high-contrast lines approaching the Nyquist Frequency. In other words, there's no good way to have a 1- or 2-pixel-wide high-contrast line smoothly shift to the next pixel over, without either introducing such artifacts, or blurring the lines to be indistinguishable.
You mentioned the derivatives extension, and indeed that extension can be used to figure out how quickly your UVs are changing in screen space, and thus, figure out how much blurring is needed to sort of sweep this problem under the rug. In the modified version of your own example below, I attempt to use fwidth to turn the sphere red where the noise gets bad. Try playing with some of the floats that are defined to constants here, see what you can find.
// Modified moiré example using fwidth-based blending in the fragment shader.
var canvas = document.getElementById('canvas');
var scene = new THREE.Scene();
var renderer = new THREE.WebGLRenderer({canvas: canvas, antialias: true});
// FIX: aspect was computed as clientWidth / clientWidth (always 1), which
// distorted the first frame until the resize branch below corrected it.
var camera = new THREE.PerspectiveCamera(75, canvas.clientWidth / canvas.clientHeight, 1, 1000);
var geometry = new THREE.SphereGeometry(50, 50, 50);
var material = new THREE.ShaderMaterial({
  vertexShader: document.getElementById('vertex-shader').textContent,
  fragmentShader: document.getElementById('fragment-shader').textContent
});
var sphere = new THREE.Mesh(geometry, material);
scene.add(sphere);
camera.position.z = 100;
var period = 30; // seconds per full revolution
var clock = new THREE.Clock();
render();
// Render loop; resizes the drawing buffer when the CSS size changes.
function render() {
  requestAnimationFrame(render);
  if (canvas.width !== canvas.clientWidth || canvas.height !== canvas.clientHeight) {
    renderer.setSize(canvas.clientWidth, canvas.clientHeight, false);
    camera.aspect = canvas.clientWidth / canvas.clientHeight;
    camera.updateProjectionMatrix();
  }
  sphere.rotation.y -= clock.getDelta() * 2 * Math.PI / period;
  renderer.render(scene, camera);
}
/* Make the canvas fill the viewport with no margins or scrollbars. */
html, body, #canvas {
margin: 0;
padding: 0;
width: 100%;
height: 100%;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r73/three.min.js"></script>
<canvas id="canvas"></canvas>
<script id="vertex-shader" type="x-shader/x-vertex">
// Pass-through vertex shader: forwards UVs to the fragment shader.
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id="fragment-shader" type="x-shader/x-fragment">
// Anti-aliased stripes: fwidth(vUv) measures how fast the UVs change per
// screen pixel, and smoothstep widens the stripe edges accordingly.
#extension GL_OES_standard_derivatives : enable
#define M_TAU 6.2831853071795864769252867665590
varying vec2 vUv;
void main() {
float linecount = 200.0;
float thickness = 0.0;
float blendregion = 2.8;
// Loosely based on https://github.com/AnalyticalGraphicsInc/cesium/blob/1.16/Source/Shaders/Materials/GridMaterial.glsl#L17-L34
float scaledWidth = fract(linecount * vUv.s);
scaledWidth = abs(scaledWidth - floor(scaledWidth + 0.5)); // distance to the nearest stripe centre
vec2 dF = fwidth(vUv) * linecount; // screen-space rate of change of the stripe coordinate
float value = 1.0 - smoothstep(dF.s * thickness, dF.s * (thickness + blendregion), scaledWidth);
gl_FragColor = vec4(value, 0.0, 0.0, 1.0);
}
</script>
I'm currently playing with this great library, but I have some difficulties with the EffectComposer.
I can't send a texture to a postprocess shader introduced by a ShaderPass.
I think this is a bug... or I'm doing something stupid (needsUpdate, wrap, ...?)
(r54, W7, Nv9700mGT, FF 17.0.1 and Chrome 24.0.1312.52)
I used the "webgl_postprocessing.html" example to reproduce the phenomenon just by adding these shaders :
<script id="vertexShaderBasic" type="x-shader/x-vertex">
// Pass-through vertex shader: forwards UVs under the name glTexCoord.
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
// Additively blends the previous composer pass with a sprite texture.
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
this at the begining of the main script to be sure the sprite is loaded :
var composer2;
// Deferred startup: init/animate run only once the sprite has loaded,
// so the texture is guaranteed to be ready before the composer is built.
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
this in the composer field, after :
composer.addPass( new THREE.RenderPass( scene, camera ) );
// Second composer: blends the first composer's output with the sprite.
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 }, // <- something wrong here
previousRender: { type: "t", value: null }
};
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
// link with the previous render
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
// the custom shader
// NOTE(review): ShaderPass clones its uniforms in the constructor (r54),
// which is reported to break the sprite texture binding; restoring the
// original texture object into the cloned uniforms works around it.
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
I also coment this, beacause it's not relevent any more :
//effect.renderToScreen = true;
and I add this at the end :
composer2.render();
The link between the two passes works well, but the sprite never appears on the EffectComposer quad that covers the screen...
Thanks and sorry for my english.
EDIT to recap :
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - postprocessing</title>
<meta charset="utf-8">
<style>
body {
margin: 0px;
background-color: #000000;
overflow: hidden;
}
</style>
</head>
<body>
<script src="../build/three.min.js"></script>
<script src="js/shaders/CopyShader.js"></script>
<script src="js/shaders/DotScreenShader.js"></script>
<script src="js/shaders/RGBShiftShader.js"></script>
<script src="js/postprocessing/EffectComposer.js"></script>
<script src="js/postprocessing/RenderPass.js"></script>
<script src="js/postprocessing/MaskPass.js"></script>
<script src="js/postprocessing/ShaderPass.js"></script>
<script id="vertexShaderBasic" type="x-shader/x-vertex">
// Pass-through vertex shader: forwards UVs under the name glTexCoord.
varying vec2 glTexCoord;
void main() {
glTexCoord = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script id="fragmentShaderBlender" type="x-shader/x-fragment">
// Additively blends the previous composer pass with a sprite texture.
varying vec2 glTexCoord;
uniform sampler2D sprite1;
uniform sampler2D previousRender;
void main() {
vec3 color = texture2D( previousRender, glTexCoord ).rgb;
color += texture2D( sprite1, glTexCoord ).rgb;
gl_FragColor.rgb = color;
gl_FragColor.a = 1.0;
}
</script>
<script>
// Full recap of the post-processing setup: composer renders the sphere
// field with DotScreen + RGBShift, composer2 blends that result with a
// sprite texture and draws to the screen.
var camera, scene, renderer, composer;
var composer2;
var object, light;
// Startup is deferred until the sprite texture has loaded.
function start() {
init();
animate();
}
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", null, start );
//var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start ); // change anything
function init() {
renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
//
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.z = 400;
scene = new THREE.Scene();
scene.fog = new THREE.Fog( 0x000000, 1, 1000 );
object = new THREE.Object3D();
scene.add( object );
// A cloud of 100 randomly placed, rotated and scaled low-poly spheres.
var geometry = new THREE.SphereGeometry( 1, 4, 4 );
var material = new THREE.MeshPhongMaterial( { color: 0xffffff, shading: THREE.FlatShading } );
for ( var i = 0; i < 100; i ++ ) {
var mesh = new THREE.Mesh( geometry, material );
mesh.position.set( Math.random() - 0.5, Math.random() - 0.5, Math.random() - 0.5 ).normalize();
mesh.position.multiplyScalar( Math.random() * 400 );
mesh.rotation.set( Math.random() * 2, Math.random() * 2, Math.random() * 2 );
mesh.scale.x = mesh.scale.y = mesh.scale.z = Math.random() * 50;
object.add( mesh );
}
scene.add( new THREE.AmbientLight( 0x222222 ) );
light = new THREE.DirectionalLight( 0xffffff );
light.position.set( 1, 1, 1 );
scene.add( light );
// postprocessing
composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
/////////////////////////////////////
/////////////////////////////////////
// Second composer: a single ShaderPass that blends composer's output
// (read from composer.renderTarget2) with the sprite texture.
composer2 = new THREE.EffectComposer( renderer );
var uniforms1 = {
sprite1: { type: "t", value: sprite1 },
previousRender: { type: "t", value: null }
};
//uniforms1.sprite1.value.wrapS = uniforms1.sprite1.value.wrapT = THREE.RepeatWrapping;
var blenderShader = {
uniforms: uniforms1,
vertexShader: $( 'vertexShaderBasic' ).textContent,
fragmentShader: $( 'fragmentShaderBlender' ).textContent
};
blenderShader.uniforms.previousRender.value = composer.renderTarget2;
// NOTE(review): ShaderPass clones these uniforms (r54), which is reported
// to lose the sprite texture binding — the cause of the missing sprite.
var blenderPass = new THREE.ShaderPass( blenderShader );
blenderPass.renderToScreen = true;
composer2.addPass( blenderPass );
/////////////////////////////////////
/////////////////////////////////////
var effect = new THREE.ShaderPass( THREE.DotScreenShader );
effect.uniforms[ 'scale' ].value = 4;
composer.addPass( effect );
var effect = new THREE.ShaderPass( THREE.RGBShiftShader );
effect.uniforms[ 'amount' ].value = 0.0015;
//effect.renderToScreen = true;
composer.addPass( effect );
//
window.addEventListener( 'resize', onWindowResize, false );
}
// Tiny getElementById helper.
function $( id ) {
return document.getElementById( id );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
// Render loop: run both composers every frame (composer feeds composer2).
function animate() {
requestAnimationFrame( animate );
var time = Date.now();
object.rotation.x += 0.005;
object.rotation.y += 0.01;
composer.render();
composer2.render();
}
</script>
</body>
</html>
What I want :
Good
What I get :
NotGood
I had this issue too, and found a workaround.
I debugged it to determine that the texture ID for the extra texture is different in the shader pass than expected, which is bad. If you look in the ShaderPass constructor, you'll see it clones the uniforms. That seems to be the cause. If I edit that code to restore the original texture object in the cloned uniforms, it works as expected. So that should work for you too.
I'm trying to get some kind of (proper) bug fix integrated into the actual release.
Try this
var sprite1 = THREE.ImageUtils.loadTexture( "textures/sprite1.jpg", new THREE.UVMapping(), start );
three.js r.54