Here are comparisons, to show that I do indeed have SSAO set-up correctly.
WebGLRenderer with EffectsComposer + SSAO pass
WebGLDeferredRenderer with SSAO pass
I'm trying to mix WebGLDeferredRenderer with the SSAO composer effect like so:
// Create the deferred renderer (same post-processing options as the working
// forward-rendered comparison above).
renderer = new THREE.WebGLDeferredRenderer({
antialias: true,
tonemapping: THREE.FilmicOperator,
brightness: 2.5,
scale: 1.0,
width: renderWidth,
height: renderHeight
});
// Build a depth-pass override material from the stock "depthRGBA" shader;
// the scene is rendered with it once per frame so SSAO can sample depth.
var depthShader = THREE.ShaderLib[ "depthRGBA" ];
var depthUniforms = THREE.UniformsUtils.clone( depthShader.uniforms );
depthMaterial = new THREE.ShaderMaterial( { fragmentShader: depthShader.fragmentShader, vertexShader: depthShader.vertexShader, uniforms: depthUniforms } );
depthMaterial.blending = THREE.NoBlending; // encoded depth values must not blend
composer = new THREE.EffectComposer( renderer );
composer.setSize( window.innerWidth, window.innerHeight );
// Render target that receives the depth pass; nearest filtering keeps the
// RGBA-encoded depth values exact.
// NOTE(review): 'antialias' is not a documented WebGLRenderTarget option —
// presumably ignored here; verify against the three.js docs.
depthTarget = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight, {
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter,
format: THREE.RGBAFormat,
antialias: true
});
// SSAO pass wired to the depth target, viewport size and camera range.
var ssao = new THREE.ShaderPass( THREE.SSAOShader );
ssao.uniforms[ 'tDepth' ].value = depthTarget;
ssao.uniforms[ 'size' ].value.set( window.innerWidth, window.innerHeight );
ssao.uniforms[ 'cameraNear' ].value = camera.near;
ssao.uniforms[ 'cameraFar' ].value = camera.far * 0.8; // deliberately shortened far range
ssao.uniforms[ 'aoClamp' ].value = 0.3;
ssao.uniforms[ 'lumInfluence' ].value = 0.1;
renderer.addEffect( ssao );
// Per-frame: depth pre-pass with the override material, then the beauty pass.
scene.overrideMaterial = depthMaterial;
renderer.render( scene, camera, depthTarget );
scene.overrideMaterial = null;
renderer.render( scene, camera );
The results are.. well.. nothing.
What am I doing wrong? How would I go about enabling SSAO within a deferred renderer?
Not sure why this works but try replacing:
renderer.render( scene, camera, depthTarget );
With:
renderer.renderer.render( scene, camera, depthTarget );
Related
Based on this example I try to create a scene where several objects get the bloom effect, and other objects don't.
The white cube in the middle is supposed to be just white (without the bloom)
I'm confused about how to get the result that I want. I tried, for example, adding a second scene with the white cube, but it seems I can't get the order right. Maybe my approach with different scenes is wrong?
What's the "best" way to achieve this behaviour? I always end up just seeing one scene — just the white cube, or the four colored ones. (The example below shows everything at the moment.)
myFiddle: https://jsfiddle.net/qwertasyx/8qw3ys4z/16/
// Globals shared between setup and the render loop.
var scene,scene2,camera, controls, pointLight, stats;
var composer, renderer, mixer;
// dat.GUI-tweakable bloom parameters.
var params = {
exposure: 1,
bloomStrength: 1.5,
bloomThreshold: 0,
bloomRadius: 0
};
var objects = []; // every cube, so animate() can spin them all
var clock = new THREE.Clock();
var container = document.getElementById( 'container' );
stats = new Stats();
//container.appendChild( stats.dom );
// Renderer with tone mapping, sized to the window.
renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.toneMapping = THREE.ReinhardToneMapping;
container.appendChild( renderer.domElement );
scene = new THREE.Scene();
//scene2 = new THREE.Scene();
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 100 );
camera.position.set( 2.5,2.5, 10 );
scene.add( camera );
// scene2.add( camera );
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.maxPolarAngle = Math.PI * 0.5;
controls.minDistance = 1;
controls.maxDistance = 10;
controls.target.set(2.5,2.5,0)
controls.update()
// scene.add( new THREE.AmbientLight( 0x404040 ) );
pointLight = new THREE.PointLight( 0xffffff, 1 );
// camera.add( pointLight );
// Post-processing chain: render the scene, then bloom the result.
// NOTE(review): the single composer blooms the whole frame, so every
// object — including the white cube — receives the bloom; this is the
// behaviour the question is asking how to avoid.
var renderScene = new THREE.RenderPass( scene, camera );
//var renderScene2 = new THREE.RenderPass( scene2, camera );
var bloomPass = new THREE.UnrealBloomPass( new THREE.Vector2( window.innerWidth, window.innerHeight ), 1.5, 0.4, 0.85 );
bloomPass.renderToScreen = true;
bloomPass.threshold = params.bloomThreshold;
bloomPass.strength = params.bloomStrength;
bloomPass.radius = params.bloomRadius;
composer = new THREE.EffectComposer( renderer );
composer.setSize( window.innerWidth, window.innerHeight );
composer.addPass( renderScene );
composer.addPass( bloomPass );
//composer.addPass( renderScene2 );
//objects
// Four colored cubes (meant to bloom), all sharing one box geometry.
var geometry = new THREE.BoxGeometry( 1, 1, 1 );
var material = new THREE.MeshBasicMaterial( { color: 0xffff00 } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01; // per-object z-rotation speed consumed by animate()
cube.position.x += 5
scene.add( cube );
objects.push(cube)
var material = new THREE.MeshBasicMaterial( { color: 0xff0000 } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
cube.position.x += 5
cube.position.y += 5
scene.add( cube );
objects.push(cube)
var material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
cube.position.y += 5
scene.add( cube );
objects.push(cube)
var material = new THREE.MeshBasicMaterial( { color: 0x0000ff } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
scene.add( cube );
objects.push(cube)
// cube thats supposed to be not bloomy
var material = new THREE.MeshBasicMaterial( { color: 0xffffff } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
cube.position.y += 2.5
cube.position.x += 2.5
scene.add( cube );
objects.push(cube)
// GUI bindings mirror params onto the renderer / bloom pass.
var gui = new dat.GUI();
gui.add( params, 'exposure', 0.1, 2 ).onChange( function ( value ) {
renderer.toneMappingExposure = Math.pow( value, 4.0 );
} );
gui.add( params, 'bloomThreshold', 0.0, 1.0 ).onChange( function ( value ) {
bloomPass.threshold = Number( value );
} );
gui.add( params, 'bloomStrength', 0.0, 3.0 ).onChange( function ( value ) {
bloomPass.strength = Number( value );
} );
gui.add( params, 'bloomRadius', 0.0, 1.0 ).step( 0.01 ).onChange( function ( value ) {
bloomPass.radius = Number( value );
} );
// Keep the camera and both render chains sized to the window.
window.onresize = function () {
var width = window.innerWidth;
var height = window.innerHeight;
camera.aspect = width / height;
camera.updateProjectionMatrix();
renderer.setSize( width, height );
composer.setSize( width, height );
};
// Render loop: schedule the next frame, advance each cube's z-rotation by
// its own speed, refresh the FPS widget, then draw through the composer.
function animate() {
  requestAnimationFrame( animate );
  for ( var i = 0; i < objects.length; i++ ) {
    var obj = objects[ i ];
    obj.rotation.z += obj.vrz;
  }
  stats.update();
  composer.render();
}
animate();
I had a similar problem once. An example from this comment helped me.
Note that in that example there are 2 scenes and 2 composers (the final composer gets output of the previous composer as its input)
ppoFinal.blendPass.uniforms.tAdd.value = ppoRGB.composer.renderTarget2.texture;
and render() is called on both composers.
ppoRGB.composer.render();
ppoFinal.composer.render();
This pattern allows you to apply postprocessing effects selectively and it works well. The problem is the scalability of the method and probably performance. Because when you want to apply another object with yet different effect, you need to introduce 3rd scene and 3rd composer. For my little project in the past I ended up with 4 scenes and 4 composers...
In Release 53 of Three.js I could fake a hole in a geometry with another geometry using a shader material with
vertex:void main() {
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
and
fragment:void main() {
gl_FragColor = vec4(1.0, 0.0, 1.0, 0.0); //alpha is zero
}
I could peep through the 'hole' and saw objects behind. Since release 54 I just see the inner object white, I can't peep through anymore.
How can I get it work again?
my complete sample:
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - geometry - cube</title>
<meta charset="utf-8">
<style>
body {
margin: 0px;
background-color: #DDFFDD;
overflow: hidden;
}
</style>
</head>
<body>
<script src="../build/old/three_53.js"></script>
<script src="../build/old/controls/TrackballControls_53.js"></script>
<script>
var camera, scene, renderer, controls, pointLight;
init();
animate();
function init() {
// Transparent-capable renderer; object sorting is disabled so draw order
// follows scene-graph order (the question's effect depends on draw order).
renderer = new THREE.WebGLRenderer({antialias: true, alpha: true});
//renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.sortObjects = false;
document.body.appendChild( renderer.domElement );
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.z = 400;
// Trackball navigation.
controls = new THREE.TrackballControls( camera );
controls.rotateSpeed = 1.0;
controls.zoomSpeed = 1.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
scene = new THREE.Scene();
scene.add( new THREE.AmbientLight( 0x505050 ) );
pointLight = new THREE.PointLight(0xFFFFFF, 0.9);
scene.add(pointLight);
var mainGroup = new THREE.Object3D();
// Grey slab that the "hole" is punched through.
var geometry = new THREE.CubeGeometry( 100, 100, 10 );
var mesh = new THREE.Mesh( geometry, new THREE.MeshLambertMaterial( { color: 0xaaaaaa } ) );
//mesh.renderOrder = 2;
// "Hole" cube: its fragment shader outputs alpha 0, so the pixels are
// invisible — but (per the answer below) it still writes depth, masking
// whatever is drawn behind it afterwards.
var geometry2 = new THREE.CubeGeometry( 50, 50, 11 );
var material2 = new THREE.ShaderMaterial({vertexShader:'void main() { gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);}', fragmentShader:'void main() { gl_FragColor = vec4(1.0, 0.0, 1.0, 0.0);}'});
var innerGroup = new THREE.Object3D();
var mesh2 = new THREE.Mesh( geometry2, material2 );
//mesh2.renderOrder = 1;
mainGroup.add( mesh );
innerGroup.add(mesh2);
mainGroup.add( innerGroup );
//
// Sphere behind the slab — the object one should see through the hole.
var geometry3 = new THREE.SphereGeometry( 50);
var mesh3 = new THREE.Mesh( geometry3, new THREE.MeshLambertMaterial( { color: 0x00ff00 } ) );
mesh3.position.z = -200;
//mesh2.renderOrder = 3;
mainGroup.add( mesh3 );
scene.add(mainGroup);
window.addEventListener( 'resize', onWindowResize, false );
}
// Keep the camera's aspect ratio and the drawing buffer in sync with the
// current browser window size.
function onWindowResize() {
  var width = window.innerWidth;
  var height = window.innerHeight;
  camera.aspect = width / height;
  camera.updateProjectionMatrix();
  renderer.setSize( width, height );
}
// Per-frame loop: update the trackball controls, keep the point light glued
// to the camera's position, and render the scene.
function animate() {
  requestAnimationFrame( animate );
  controls.update();
  var p = camera.position;
  pointLight.position.set( p.x, p.y, p.z );
  renderer.render( scene, camera );
}
</script>
</body>
It only works if the 'hole' object with the shader material is in a group.
If this worked before it's merely by accident. But you can, in 71, get this to work if you use renderOrder:
holeObj.renderOrder = 1;
bgObj.renderOrder = 2;
Now, if holeObj is in front of bgObj then in normal cases you will see through the bgObj. This is because holeObj will still write to the Z-buffer, when it draws its transparent pixels. The bgObj will be masked from that location. But this will only work for a particular view direction without some careful management of the sorting.
I'm running following code (jsfiddle: http://jsfiddle.net/sx9p7/ ) :
var scene, camera, renderer, composer;
var l1, l2, hemiLight;
var effectSSAO;
init();
animate();
function init() {
// Renderer with shadow mapping enabled, fixed 800x600 canvas.
renderer = new THREE.WebGLRenderer( { antialias: false, alpha: false } );
renderer.shadowMapEnabled = true;
renderer.setClearColor( 0xd8e7ff );
renderer.setSize( 800,600);
document.body.appendChild( renderer.domElement );
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera( 40, 800 / 600, 1, 10000 );
camera.position.set(-200,100,-60);
camera.lookAt(new THREE.Vector3(0,0,0));
var container = document.createElement( 'div' );
document.body.appendChild( container );
// Post-processing: scene render followed by the SSAO shader pass.
// NOTE(review): the SSAO pass is added with its uniforms left at their
// defaults — no 'tDepth' texture, size or camera range is supplied; the
// answer below identifies this as the cause of the artifact.
composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
effectSSAO = new THREE.ShaderPass( THREE.SSAOShader );
effectSSAO.renderToScreen = true;
composer.addPass( effectSSAO );
// Lights: hemisphere fill plus two shadow-casting directionals.
hemiLight = new THREE.HemisphereLight( 0xffffff, 0xffffff, 0.4 );
hemiLight.position.set( 0, 300, 0 );
scene.add( hemiLight );
l1 = new THREE.DirectionalLight( 0xffffff, 0.3);
l1.position.set( 100, 100, 0 );
l1.castShadow = true;
scene.add(l1);
l2 = new THREE.DirectionalLight( 0xffffff, 0.3);
l2.position.set( -100, 100, 0 );
l2.castShadow = true;
scene.add(l2);
// Ground plane rotated to lie horizontally.
var plane = new THREE.Mesh( new THREE.PlaneGeometry( 200, 200), new THREE.MeshLambertMaterial({ }) );
plane.receiveShadow = true;
plane.castShadow = true;
plane.rotation.x = - 90 * Math.PI / 180;
plane.position.set(0,0,0);
scene.add( plane );
var cube = new THREE.Mesh(new THREE.CubeGeometry(50, 50, 50), new THREE.MeshPhongMaterial( { color: 0xaaaaaa, ambient: 0xbbbbbb, specular: 0xcccccc, perPixel: true, vertexColors: THREE.FaceColors } ));
cube.receiveShadow = true;
cube.castShadow = true;
scene.add(cube);
}
// Render loop: draw through the composer so the SSAO pass runs each frame.
function animate() {
requestAnimationFrame( animate );
composer.render();
}
and I have a problem with the SSAO shader. I tried many parameters, code variants and examples, but I still can't remove that square in the middle of the scene (which sometimes looks like a correct SSAO effect, but in the wrong position).
If I remove one directional light the square artifact is gone — but there is still no SSAO effect, and the shadows are still very low resolution.
Well, 2 directional lights are strange anyway, just leave it at 1 for the moment.
Concerning the SSAO, see the shader and its numerous parameters. you would need to adjust those according to your scene. example:
effectSSAO .uniforms[ 'tDepth' ].value = depthTarget;
effectSSAO .uniforms[ 'size' ].value.set( width, height );
effectSSAO .uniforms[ 'cameraNear' ].value = 0.1;
effectSSAO .uniforms[ 'cameraFar' ].value = 1000;
As you see above, you also need more than just a few parameter tweaks.
The Three.SSAO shader expects a previously rendered depth pass to work properly.
See here for more help:
How to use the three.js SSAO shader?
I have found this example with diffuse, specular and normal map:
// Build a lit material from the built-in "normalmap" shader, wiring up
// diffuse, specular and normal textures plus color/shininess uniforms.
var ambient = 0x111111, diffuse = 0xbbbbbb, specular = 0x060606, shininess = 35;
var shader = THREE.ShaderLib[ "normalmap" ];
var uniforms = THREE.UniformsUtils.clone( shader.uniforms );
uniforms[ "tNormal" ].value = THREE.ImageUtils.loadTexture( "textures/test_NRM.jpg" );
uniforms[ "uNormalScale" ].value.set( 2, 2 );
uniforms[ "tDiffuse" ].value = THREE.ImageUtils.loadTexture( "textures/test_COL.jpg" );
uniforms[ "tSpecular" ].value = THREE.ImageUtils.loadTexture( "textures/test_SPEC.jpg" );
uniforms[ "enableAO" ].value = false;
uniforms[ "enableDiffuse" ].value = true;
uniforms[ "enableSpecular" ].value = true;
uniforms[ "uDiffuseColor" ].value.setHex( diffuse );
uniforms[ "uSpecularColor" ].value.setHex( specular );
uniforms[ "uAmbientColor" ].value.setHex( ambient );
uniforms[ "uShininess" ].value = shininess;
uniforms[ "wrapRGB" ].value.set(1, 1,1 );
// lights: true so the shader receives the scene's light uniforms.
var parameters = { fragmentShader: shader.fragmentShader, vertexShader: shader.vertexShader, uniforms: uniforms, lights: true };
var material = new THREE.ShaderMaterial( parameters );
material.wrapAround = true;
// Load the sphere model, then hand the geometry and material to createScene.
loader = new THREE.JSONLoader( true );
document.body.appendChild( loader.statusDomElement );
loader.load( "geo/sphere.js", function( geometry ) { createScene( geometry, 100, material ) } );
renderer = new THREE.WebGLRenderer( { antialias: false, clearColor: 0x111111, clearAlpha: 1 } );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
Now I see my textures displayed on the sphere, but only one time. I need more tiles of the texture displayed and I inserted:
// Tile the normal map 100x100 across each face via repeat wrapping.
// NOTE(review): this assumes 'tNormal' refers to the actual texture object
// assigned to uniforms["tNormal"].value above — verify; repeat wrapping also
// typically requires power-of-two texture dimensions.
tNormal.wrapS = THREE.RepeatWrapping;
tNormal.wrapT = THREE.RepeatWrapping;
tNormal.repeat.x=100;
tNormal.repeat.y=100;
But it doesn't work — does anyone have a clue?
Best Regards
The repeat.x and repeat.y values determine how many times the texture is repeated on the face — in your case 100×100 times. Try setting both values to 2 and see if that works.
I can't manage to use ssao with three.js.
I tried to follow the webgl_postprocessing_dof.html example :
here is the function initPostprocessing
function initPostprocessing() {
// Screen-aligned setup: an orthographic camera covering the window, used to
// draw full-screen post-processing passes.
postprocessing.scene = new THREE.Scene();
postprocessing.camera = new THREE.OrthographicCamera( window.innerWidth / - 2, window.innerWidth / 2, window.innerHeight / 2, window.innerHeight / - 2, -10000, 10000 );
postprocessing.camera.position.z = 100;
postprocessing.scene.add( postprocessing.camera );
// Depth and color targets the main scene is rendered into each frame.
// NOTE(review): 'height' here is a free variable while the width uses
// window.innerWidth — presumably both should come from the window; verify.
var pars = { minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBFormat };
postprocessing.rtTextureDepth = new THREE.WebGLRenderTarget( window.innerWidth, height, pars ); //modifier 500
postprocessing.rtTextureColor = new THREE.WebGLRenderTarget( window.innerWidth, height, pars );
// SSAO material built from the raw shader definition; the texture uniforms
// are filled with the placeholder value 1 here and only replaced with the
// real render targets later, in render().
var ssao_shader = new THREE.ShaderMaterial(THREE.ShaderExtras[ "ssao" ]); //modification
postprocessing.ssao_uniforms = THREE.UniformsUtils.clone( ssao_shader.uniforms );
postprocessing.ssao_uniforms[ "tDepth" ].value=1;
postprocessing.ssao_uniforms[ "tDiffuse" ].value=1;
postprocessing.ssao_uniforms[ "fogEnabled" ].value=1;
postprocessing.ssao_uniforms[ "fogFar" ].value=100;
postprocessing.ssao_uniforms[ "fogNear" ].value=0;
postprocessing.ssao_uniforms[ "onlyAO" ].value=0;
postprocessing.ssao_uniforms[ "aoClamp" ].value=0.1;
postprocessing.ssao_uniforms[ "lumInfluence" ].value=0.1;
postprocessing.materialSSAO = new THREE.ShaderMaterial( {
uniforms: postprocessing.ssao_uniforms,
vertexShader: ssao_shader.vertexShader,
fragmentShader: ssao_shader.fragmentShader
});
}
and the render function :
function render() {
renderer.clear();
// Render depth into texture
scene.overrideMaterial=material_depth;
renderer.render( scene, camera, postprocessing.rtTextureDepth, true );
// Render color into texture
scene.overrideMaterial = null;
renderer.render( scene, camera, postprocessing.rtTextureColor);
//
// Feed both targets to the SSAO material, then draw the postprocessing
// scene with it as an override.
// NOTE(review): assigning to '.texture' on a uniform looks wrong — these
// uniforms are set via '.value' elsewhere in this file; verify. Also,
// postprocessing.scene contains only the camera, so this final render
// presumably draws nothing — the answer below switches to EffectComposer
// instead of using the SSAO shader as an override material.
postprocessing.materialSSAO.uniforms[ "tDepth" ].texture=postprocessing.rtTextureDepth;
postprocessing.materialSSAO.uniforms[ "tDiffuse" ].texture=postprocessing.rtTextureColor;
postprocessing.scene.overrideMaterial = postprocessing.materialSSAO;
renderer.render( postprocessing.scene, postprocessing.camera );
}
Maybe I misunderstood something.
I don't believe you can use the SSAO shader as a material in the way that you are. Materials are combined with geometry to draw meshes, whereas the SSAO shader is meant to draw its output not on top of multiple geometries but to a screen-aligned quad.
Typically you'd use the EffectComposer class to accomplish this.
// Composer whose first pass renders the scene normally into its buffer.
composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( postprocessing.scene, postprocessing.camera ) );
then instead of creating a material the SSAO is added as a shader pass to the composer and rendered to the screen
// SSAO as a ShaderPass: it samples the depth target rendered earlier plus
// the composer's own color buffer, and writes the final result to screen.
var effect = new THREE.ShaderPass( THREE.SSAOShader );
effect.uniforms[ 'tDepth' ].value = postprocessing.rtTextureDepth;
effect.uniforms[ 'size' ].value.set( window.innerWidth, window.innerHeight );
effect.uniforms[ 'cameraNear' ].value = postprocessing.camera.near;
effect.uniforms[ 'cameraFar' ].value = postprocessing.camera.far;
effect.renderToScreen = true;
composer.addPass( effect );
and finally in the render function you use the composer to render as opposed to the renderer
function render(){
// Depth pre-pass into the target the SSAO pass samples.
// NOTE(review): the override material is set on 'scene' but the depth
// render draws 'postprocessing.scene' — presumably both should reference
// the same scene; verify against the original example.
scene.overrideMaterial = material_depth;
renderer.render( postprocessing.scene, postprocessing.camera, postprocessing.rtTextureDepth );
scene.overrideMaterial = null;
// Color pass and the SSAO shader pass both run inside the composer.
composer.render();
}
This also removes the necessity for a separate diffuse render target, since the composer takes care of that for you with the render pass.
for a complete example of SSAO without the plugin see this one by alteredqualia: http://bit.ly/ZIPj2J