I can't manage to use SSAO with three.js. I tried to follow the webgl_postprocessing_dof.html example. Here is my initPostprocessing function:
function initPostprocessing() {
    postprocessing.scene = new THREE.Scene();

    postprocessing.camera = new THREE.OrthographicCamera( window.innerWidth / - 2, window.innerWidth / 2, window.innerHeight / 2, window.innerHeight / - 2, -10000, 10000 );
    postprocessing.camera.position.z = 100;
    postprocessing.scene.add( postprocessing.camera );

    var pars = { minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBFormat };
    postprocessing.rtTextureDepth = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight, pars );
    postprocessing.rtTextureColor = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight, pars );

    var ssao_shader = new THREE.ShaderMaterial( THREE.ShaderExtras[ "ssao" ] );
    postprocessing.ssao_uniforms = THREE.UniformsUtils.clone( ssao_shader.uniforms );
    postprocessing.ssao_uniforms[ "tDepth" ].value = 1;
    postprocessing.ssao_uniforms[ "tDiffuse" ].value = 1;
    postprocessing.ssao_uniforms[ "fogEnabled" ].value = 1;
    postprocessing.ssao_uniforms[ "fogFar" ].value = 100;
    postprocessing.ssao_uniforms[ "fogNear" ].value = 0;
    postprocessing.ssao_uniforms[ "onlyAO" ].value = 0;
    postprocessing.ssao_uniforms[ "aoClamp" ].value = 0.1;
    postprocessing.ssao_uniforms[ "lumInfluence" ].value = 0.1;

    postprocessing.materialSSAO = new THREE.ShaderMaterial( {
        uniforms: postprocessing.ssao_uniforms,
        vertexShader: ssao_shader.vertexShader,
        fragmentShader: ssao_shader.fragmentShader
    } );
}
and the render function :
function render() {
    renderer.clear();

    // Render depth into texture
    scene.overrideMaterial = material_depth;
    renderer.render( scene, camera, postprocessing.rtTextureDepth, true );

    // Render color into texture
    scene.overrideMaterial = null;
    renderer.render( scene, camera, postprocessing.rtTextureColor );

    // Render the SSAO material over the postprocessing scene
    postprocessing.materialSSAO.uniforms[ "tDepth" ].texture = postprocessing.rtTextureDepth;
    postprocessing.materialSSAO.uniforms[ "tDiffuse" ].texture = postprocessing.rtTextureColor;
    postprocessing.scene.overrideMaterial = postprocessing.materialSSAO;
    renderer.render( postprocessing.scene, postprocessing.camera );
}
Maybe I misunderstood something.
I don't believe you can use the SSAO shader as a material the way you are. Materials are combined with geometry to draw meshes, whereas the SSAO shader is meant to draw its output not over individual geometries but onto a single screen-aligned quad.
Typically you'd use the EffectComposer class to accomplish this:
composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
Then, instead of creating a material, the SSAO is added as a shader pass to the composer and rendered to the screen:
var effect = new THREE.ShaderPass( THREE.SSAOShader );
effect.uniforms[ 'tDepth' ].value = postprocessing.rtTextureDepth;
effect.uniforms[ 'size' ].value.set( window.innerWidth, window.innerHeight );
effect.uniforms[ 'cameraNear' ].value = postprocessing.camera.near;
effect.uniforms[ 'cameraFar' ].value = postprocessing.camera.far;
effect.renderToScreen = true;
composer.addPass( effect );
And finally, in the render function, you use the composer to render instead of the renderer:
function render() {
    // depth pass into the render target, using the main scene and camera
    scene.overrideMaterial = material_depth;
    renderer.render( scene, camera, postprocessing.rtTextureDepth );
    scene.overrideMaterial = null;

    // the composer renders the diffuse pass and the SSAO pass
    composer.render();
}
This also removes the need for a separate diffuse render target, since the composer takes care of that for you with the render pass.
For a complete example of SSAO without the plugin, see this one by alteredqualia: http://bit.ly/ZIPj2J
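To tie the pieces together, here's a minimal sketch of the full setup, assuming the EffectComposer scripts and THREE.SSAOShader are loaded and that material_depth is the depth-writing material from the question (depthTarget is an illustrative name):

var depthTarget = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight, {
    minFilter: THREE.LinearFilter,
    magFilter: THREE.LinearFilter,
    format: THREE.RGBAFormat
} );

var composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) ); // diffuse pass

var ssao = new THREE.ShaderPass( THREE.SSAOShader );
ssao.uniforms[ 'tDepth' ].value = depthTarget;
ssao.uniforms[ 'size' ].value.set( window.innerWidth, window.innerHeight );
ssao.uniforms[ 'cameraNear' ].value = camera.near;
ssao.uniforms[ 'cameraFar' ].value = camera.far;
ssao.renderToScreen = true;
composer.addPass( ssao );

function render() {
    // depth pass into the target, then the composer does diffuse + SSAO
    scene.overrideMaterial = material_depth;
    renderer.render( scene, camera, depthTarget );
    scene.overrideMaterial = null;
    composer.render();
}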
I'm trying to learn something new in three.js. My goal is to be able to use what a second camera sees in a separate scene as a texture for the main scene, or alternatively to use what a second camera sees in the main scene as a texture. But I only see a black screen. I posted my code for it here. I hope someone recognizes where my mistake is, because I just can't figure it out.
In three steps:
texture = the second camera's view
a material uses that texture
the material is applied to an ordinary mesh
See below:
var camera, controls, scene, renderer, container, aspect;

function main() {
    init();
    animate();
}

function init() {
    renderer = new THREE.WebGLRenderer( { antialias: true } );
    renderer.setPixelRatio( window.devicePixelRatio );
    renderer.shadowMap.enabled = true;
    renderer.shadowMap.type = THREE.PCFSoftShadowMap;
    container = document.getElementById('container');
    renderer.setSize(container.clientWidth, container.clientHeight);
    container.appendChild( renderer.domElement );
    aspect = container.clientWidth / container.clientHeight;

    scene = new THREE.Scene();
    scene.background = new THREE.Color( 0x000000 );

    camera = new THREE.PerspectiveCamera( 60, container.clientWidth / container.clientHeight, 1, 1000000 );
    camera.position.set(0, 0, 200);

    controls = new THREE.OrbitControls( camera, renderer.domElement );
    controls.enableZoom = true;
    controls.enabled = true;
    controls.target.set(0, 0, 0);
    //-----End three basic setups-----

    var tex = generateTexture(renderer);

    var plane = new THREE.Mesh(
        new THREE.PlaneBufferGeometry(100.0, 100.0),
        new THREE.MeshBasicMaterial({
            color: 0x00caff,
            map: tex,
            side: THREE.DoubleSide,
        })
    );
    scene.add(plane);
}//-------End init----------

function animate() {
    requestAnimationFrame( animate );
    render();
}//-------End animate----------

function render() {
    camera.updateMatrixWorld();
    camera.updateProjectionMatrix();
    renderer.render(scene, camera);
}//-------End render----------

function generateTexture(renderer) {
    var resolution = 2000;
    var textureScene = new THREE.Scene();
    textureScene.background = new THREE.Color(0x404040);

    var renderTarget = new THREE.WebGLRenderTarget(resolution, resolution, {minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBFormat});

    var textureCamera = new THREE.PerspectiveCamera(60, aspect, 0.1, 100000.0);
    textureCamera.position.set(0, 0, 200);
    textureCamera.lookAt(0, 0, 0);

    var geometry = new THREE.SphereGeometry( 60, 32, 16 );
    var material = new THREE.MeshBasicMaterial( { color: 0xffff00 } );
    var sphere = new THREE.Mesh( geometry, material);
    textureScene.add( sphere );

    //---changes---
    renderer.setRenderTarget( renderTarget );
    renderer.clear();
    renderer.render( textureScene, textureCamera );
    renderer.setRenderTarget(null);
    return renderTarget.texture;
    //-------------
    //---now it works fine---:)
}//----- End generateTexture ------
Are you copying this approach from a tutorial? What version of three.js are you using? I'm asking because you're using renderer.render(scene, camera, target, true); but the docs state that .render() only accepts two arguments, so passing a renderTarget doesn't do anything.
I recommend you copy the approach in this demo; you can see the source code by clicking on the <> icon. The essential part is as follows:
// Render first scene into texture
renderer.setRenderTarget( renderTarget );
renderer.clear();
renderer.render( textureScene, textureCamera );
// Render full scene to canvas
renderer.setRenderTarget( null );
renderer.clear();
renderer.render( scene, camera );
I've created a panoramic image with a PerspectiveCamera and all went well. I've also managed to add controls.
Now I want to place a clickable element (link/button/whatever) in the scene, but I have no clue how to add and position it. Every tip/suggestion is highly appreciated! Here is my code so far:
function init() {
    const container = document.getElementById( 'three-image' );

    camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 1100 );

    scene = new THREE.Scene();

    const geometry = new THREE.SphereGeometry( 500, 60, 40 );
    // invert the geometry on the x-axis so that all of the faces point inward
    geometry.scale( - 1, 1, 1 );

    //let imageLocation = <?php echo $block->getPanoramaImages();?>;
    let imageLocation = '/three/luifel.jpg';
    //alert(imageLocation)
    const texture = new THREE.TextureLoader().load(imageLocation);
    const material = new THREE.MeshBasicMaterial( { map: texture } );

    const mesh = new THREE.Mesh( geometry, material );
    scene.add( mesh );

    renderer = new THREE.WebGLRenderer();
    renderer.setPixelRatio( window.devicePixelRatio );
    renderer.setSize( window.innerWidth, window.innerHeight );
    container.appendChild( renderer.domElement );

    container.style.touchAction = 'none';
    container.addEventListener( 'pointerdown', onPointerDown );
    document.addEventListener( 'wheel', onDocumentMouseWheel );
    window.addEventListener( 'resize', onWindowResize );
}
If you're looking to attach an HTML element to a 3D object, you can project its 3D position to 2D screen space and use that:
var vector = new THREE.Vector3(100,0,0); // some point
vector.project(camera);
var widthHalf = window.innerWidth / 2;
var heightHalf = window.innerHeight / 2;
vector.x = vector.x * widthHalf + widthHalf;
vector.y = -(vector.y * heightHalf) + heightHalf;
vector.z = 0;
If you want to attach it to an object, use the object's position in world space:
vector.setFromMatrixPosition(obj.matrixWorld);
vector.project(camera);
...etc
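For instance, a minimal sketch of keeping a DOM element pinned to an object every frame (the element id and the updateLabel name are illustrative; it assumes an absolutely positioned element exists in the page):

var labelEl = document.getElementById( 'label' ); // e.g. <div id="label" style="position:absolute">
var labelPos = new THREE.Vector3();

function updateLabel( object, camera ) {
    labelPos.setFromMatrixPosition( object.matrixWorld ); // world position
    labelPos.project( camera ); // to normalized device coordinates, x/y in [-1, 1]

    var widthHalf = window.innerWidth / 2;
    var heightHalf = window.innerHeight / 2;

    labelEl.style.left = ( labelPos.x * widthHalf + widthHalf ) + 'px';
    labelEl.style.top = ( -labelPos.y * heightHalf + heightHalf ) + 'px';
}

// call once per frame from your render loop:
// updateLabel( mesh, camera );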
EDIT:
I've created an example here:
https://codepen.io/flatworldstudio/pen/LYbgvgY
Based on this example, I'm trying to create a scene where several objects get the bloom and other objects don't.
The white cube in the middle is supposed to be just white (without the bloom).
I'm confused about how to get the result I want. I tried, for example, adding a second scene with the white cube, but it seems I can't get the order right. Maybe my approach with different scenes is wrong?
What's the "best" way to achieve this behaviour? I always end up seeing just one scene, either only the white cube or only the four colored ones (the example below currently shows everything).
My fiddle: https://jsfiddle.net/qwertasyx/8qw3ys4z/16/
var scene,scene2,camera, controls, pointLight, stats;
var composer, renderer, mixer;
var params = {
    exposure: 1,
    bloomStrength: 1.5,
    bloomThreshold: 0,
    bloomRadius: 0
};
var objects = [];
var clock = new THREE.Clock();
var container = document.getElementById( 'container' );
stats = new Stats();
//container.appendChild( stats.dom );
renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.toneMapping = THREE.ReinhardToneMapping;
container.appendChild( renderer.domElement );
scene = new THREE.Scene();
//scene2 = new THREE.Scene();
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 100 );
camera.position.set( 2.5,2.5, 10 );
scene.add( camera );
// scene2.add( camera );
controls = new THREE.OrbitControls( camera, renderer.domElement );
controls.maxPolarAngle = Math.PI * 0.5;
controls.minDistance = 1;
controls.maxDistance = 10;
controls.target.set( 2.5, 2.5, 0 );
controls.update();
// scene.add( new THREE.AmbientLight( 0x404040 ) );
pointLight = new THREE.PointLight( 0xffffff, 1 );
// camera.add( pointLight );
var renderScene = new THREE.RenderPass( scene, camera );
//var renderScene2 = new THREE.RenderPass( scene2, camera );
var bloomPass = new THREE.UnrealBloomPass( new THREE.Vector2( window.innerWidth, window.innerHeight ), 1.5, 0.4, 0.85 );
bloomPass.renderToScreen = true;
bloomPass.threshold = params.bloomThreshold;
bloomPass.strength = params.bloomStrength;
bloomPass.radius = params.bloomRadius;
composer = new THREE.EffectComposer( renderer );
composer.setSize( window.innerWidth, window.innerHeight );
composer.addPass( renderScene );
composer.addPass( bloomPass );
//composer.addPass( renderScene2 );
//objects
var geometry = new THREE.BoxGeometry( 1, 1, 1 );

var material = new THREE.MeshBasicMaterial( { color: 0xffff00 } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
cube.position.x += 5;
scene.add( cube );
objects.push( cube );

var material = new THREE.MeshBasicMaterial( { color: 0xff0000 } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
cube.position.x += 5;
cube.position.y += 5;
scene.add( cube );
objects.push( cube );

var material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
cube.position.y += 5;
scene.add( cube );
objects.push( cube );

var material = new THREE.MeshBasicMaterial( { color: 0x0000ff } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
scene.add( cube );
objects.push( cube );

// cube that's supposed to be not bloomy
var material = new THREE.MeshBasicMaterial( { color: 0xffffff } );
var cube = new THREE.Mesh( geometry, material );
cube.vrz = 0.01;
cube.position.y += 2.5;
cube.position.x += 2.5;
scene.add( cube );
objects.push( cube );
var gui = new dat.GUI();

gui.add( params, 'exposure', 0.1, 2 ).onChange( function ( value ) {
    renderer.toneMappingExposure = Math.pow( value, 4.0 );
} );

gui.add( params, 'bloomThreshold', 0.0, 1.0 ).onChange( function ( value ) {
    bloomPass.threshold = Number( value );
} );

gui.add( params, 'bloomStrength', 0.0, 3.0 ).onChange( function ( value ) {
    bloomPass.strength = Number( value );
} );

gui.add( params, 'bloomRadius', 0.0, 1.0 ).step( 0.01 ).onChange( function ( value ) {
    bloomPass.radius = Number( value );
} );
window.onresize = function () {
    var width = window.innerWidth;
    var height = window.innerHeight;

    camera.aspect = width / height;
    camera.updateProjectionMatrix();

    renderer.setSize( width, height );
    composer.setSize( width, height );
};
function animate() {
    requestAnimationFrame( animate );

    objects.forEach( function ( obj ) {
        obj.rotation.z += obj.vrz;
    } );

    stats.update();
    composer.render();
}
animate();
I had a similar problem once. An example from this comment helped me.
Note that in that example there are two scenes and two composers (the final composer gets the output of the previous composer as its input):
ppoFinal.blendPass.uniforms.tAdd.value = ppoRGB.composer.renderTarget2.texture;
and render() is called on both composers:
ppoRGB.composer.render();
ppoFinal.composer.render();
This pattern lets you apply postprocessing effects selectively, and it works well. The problem is the scalability of the method, and probably performance: every object that needs yet another effect requires introducing another scene and another composer. For my little project in the past I ended up with four scenes and four composers...
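For reference, here's a rough sketch of that two-scene / two-composer pattern (sceneBloom, scenePlain and the inline blend shader are illustrative names, mirroring the tAdd uniform above):

// scene A holds the bloomed cubes, scene B the plain white cube
var composerBloom = new THREE.EffectComposer( renderer );
composerBloom.addPass( new THREE.RenderPass( sceneBloom, camera ) );
bloomPass.renderToScreen = false; // keep the result in composerBloom.renderTarget2
composerBloom.addPass( bloomPass );

// minimal additive blend shader: previous pass output + the bloomed texture
var AdditiveBlendShader = {
    uniforms: {
        tDiffuse: { value: null }, // filled in by ShaderPass with the previous pass
        tAdd: { value: null }
    },
    vertexShader:
        'varying vec2 vUv;' +
        'void main() {' +
        '    vUv = uv;' +
        '    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );' +
        '}',
    fragmentShader:
        'uniform sampler2D tDiffuse;' +
        'uniform sampler2D tAdd;' +
        'varying vec2 vUv;' +
        'void main() {' +
        '    gl_FragColor = texture2D( tDiffuse, vUv ) + texture2D( tAdd, vUv );' +
        '}'
};

var composerFinal = new THREE.EffectComposer( renderer );
composerFinal.addPass( new THREE.RenderPass( scenePlain, camera ) );

var blendPass = new THREE.ShaderPass( AdditiveBlendShader );
blendPass.uniforms[ 'tAdd' ].value = composerBloom.renderTarget2.texture;
blendPass.renderToScreen = true;
composerFinal.addPass( blendPass );

// per frame, render the bloom composer first, then the final composer:
composerBloom.render();
composerFinal.render();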
I am trying to make a three.js scene with GLSL shaders applied, but I'm having trouble figuring out how to make it load my shaders. The scene appears to work, but the shaders don't do what they're supposed to. I'm using a shader loader function I found that uses pure three.js instead of AJAX or jQuery.
Here is the main javascript for my scene.
var container,
    renderer,
    scene,
    camera,
    light,
    mesh,
    controls,
    start = Date.now(),
    fov = 30;

window.addEventListener( 'load', function () {

    container = document.getElementById( "container" );

    if ( !Detector.webgl ) {
        Detector.addGetWebGLMessage( container );
        return;
    }

    //get the width and height
    var WIDTH = window.innerWidth,
        HEIGHT = window.innerHeight;

    //sphere params
    var radius = 20,
        segments = 4,
        rotation = 6;

    scene = new THREE.Scene();

    // ASPECT RATIO
    camera = new THREE.PerspectiveCamera( fov, WIDTH / HEIGHT, 1, 10000 );
    camera.position.z = 100;

    scene.add( new THREE.AmbientLight( 0x333333 ) );

    light = new THREE.DirectionalLight( 0xffffff, 1 );
    light.position.set( 100, 3, 5 );
    scene.add( light );

    ShaderLoader( './shaders/vertex.vert', './shaders/fragment.frag' );

    material = new THREE.ShaderMaterial( {
        uniforms: {
            tExplosion: {
                type: "t",
                value: THREE.ImageUtils.loadTexture( 'images/explosion.png' )
            },
            time: { //float initialized to 0
                type: "f",
                value: 0.0
            }
        },
        vertexShader: ShaderLoader.vertex_text,
        fragmentShader: ShaderLoader.fragment_text
    } );

    mesh = new THREE.Mesh(
        new THREE.IcosahedronGeometry( radius, segments ),
        material
    );
    scene.add( mesh );

    renderer = new THREE.WebGLRenderer();
    renderer.setSize( WIDTH, HEIGHT );
    renderer.setPixelRatio( window.devicePixelRatio );

    //update renderer size, aspect ratio, and projection matrix on resize
    window.addEventListener( 'resize', function () {
        var WIDTH = window.innerWidth,
            HEIGHT = window.innerHeight;
        renderer.setSize( WIDTH, HEIGHT );
        camera.aspect = WIDTH / HEIGHT;
        camera.updateProjectionMatrix();
    } );

    controls = new THREE.TrackballControls( camera );

    container.appendChild( renderer.domElement );

    render();
} );
function render() {
    material.uniforms[ 'time' ].value = .00025 * ( Date.now() - start );
    controls.update();
    requestAnimationFrame( render );
    renderer.render( scene, camera );
}
// This is a basic asynchronous shader loader for THREE.js.
function ShaderLoader( vertex_url, fragment_url, onLoad, onProgress, onError ) {
    var vertex_loader = new THREE.XHRLoader( THREE.DefaultLoadingManager );
    vertex_loader.setResponseType( 'text' );
    vertex_loader.load( vertex_url, function ( vertex_text ) {
        var fragment_loader = new THREE.XHRLoader( THREE.DefaultLoadingManager );
        fragment_loader.setResponseType( 'text' );
        fragment_loader.load( fragment_url, function ( fragment_text ) {
            onLoad( vertex_text, fragment_text );
        } );
    }, onProgress, onError );
}
But when my scene loads, it is just a red sphere with no lighting or applied shaders... What am I doing wrong? I'm new to all of this, so it's probably something easily noticeable to someone more experienced, but I have searched and searched and experimented and can't figure it out.
Thanks!
Try adding material.needsUpdate = true after you load your shader.
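The deeper issue is likely that ShaderLoader is asynchronous while the material is created synchronously, so ShaderLoader.vertex_text and ShaderLoader.fragment_text are never set. A sketch of moving the material setup into the loader's callback (using the same ShaderLoader as in the question):

ShaderLoader( './shaders/vertex.vert', './shaders/fragment.frag',
    function ( vertex_text, fragment_text ) {
        material = new THREE.ShaderMaterial( {
            uniforms: {
                tExplosion: { type: "t", value: THREE.ImageUtils.loadTexture( 'images/explosion.png' ) },
                time: { type: "f", value: 0.0 }
            },
            vertexShader: vertex_text,     // the loaded GLSL source...
            fragmentShader: fragment_text  // ...not properties of the loader function
        } );
        material.needsUpdate = true;

        mesh = new THREE.Mesh( new THREE.IcosahedronGeometry( radius, segments ), material );
        scene.add( mesh );

        render(); // start the loop only once the material exists
    }
);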
The snippet shows the WebGL detector's error. Are you loading that library?
https://github.com/mrdoob/three.js/blob/master/examples/js/Detector.js
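If the library isn't included, a quick guard makes the failure obvious (a sketch; assumes container is already defined, as in the question):

if ( typeof Detector === 'undefined' ) {
    console.error( 'Detector.js is not loaded; include it before your main script.' );
} else if ( !Detector.webgl ) {
    Detector.addGetWebGLMessage( container );
}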
Here are comparisons to show that I do indeed have SSAO set up correctly:
WebGLRenderer with EffectsComposer + SSAO pass
WebGLDeferredRenderer with SSAO pass
I'm trying to mix WebGLDeferredRenderer with the SSAO composer effect like so:
renderer = new THREE.WebGLDeferredRenderer({
    antialias: true,
    tonemapping: THREE.FilmicOperator,
    brightness: 2.5,
    scale: 1.0,
    width: renderWidth,
    height: renderHeight
});

var depthShader = THREE.ShaderLib[ "depthRGBA" ];
var depthUniforms = THREE.UniformsUtils.clone( depthShader.uniforms );

depthMaterial = new THREE.ShaderMaterial( {
    fragmentShader: depthShader.fragmentShader,
    vertexShader: depthShader.vertexShader,
    uniforms: depthUniforms
} );
depthMaterial.blending = THREE.NoBlending;

composer = new THREE.EffectComposer( renderer );
composer.setSize( window.innerWidth, window.innerHeight );

depthTarget = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight, {
    minFilter: THREE.NearestFilter,
    magFilter: THREE.NearestFilter,
    format: THREE.RGBAFormat,
    antialias: true
});

var ssao = new THREE.ShaderPass( THREE.SSAOShader );
ssao.uniforms[ 'tDepth' ].value = depthTarget;
ssao.uniforms[ 'size' ].value.set( window.innerWidth, window.innerHeight );
ssao.uniforms[ 'cameraNear' ].value = camera.near;
ssao.uniforms[ 'cameraFar' ].value = camera.far * 0.8;
ssao.uniforms[ 'aoClamp' ].value = 0.3;
ssao.uniforms[ 'lumInfluence' ].value = 0.1;
renderer.addEffect( ssao );

scene.overrideMaterial = depthMaterial;
renderer.render( scene, camera, depthTarget );
scene.overrideMaterial = null;

renderer.render( scene, camera );
The results are... well... nothing.
What am I doing wrong? How would I go about enabling SSAO within a deferred renderer?
Not sure why this works, but try replacing:
renderer.render( scene, camera, depthTarget );
With:
renderer.renderer.render( scene, camera, depthTarget );
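If that works, it's presumably because WebGLDeferredRenderer wraps a plain WebGLRenderer and exposes it as .renderer, and only the inner renderer's render() accepts a render target (the .renderer property is an assumption here). In context:

// depth pass through the inner WebGLRenderer, which can write into a render target
scene.overrideMaterial = depthMaterial;
renderer.renderer.render( scene, camera, depthTarget );
scene.overrideMaterial = null;

// the deferred wrapper then draws the final frame to the screen
renderer.render( scene, camera );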