I have an application that includes meshes rendered with MeshPhongMaterials. They work well with the full suite of lights available in three.js.
However, I want to combine them with imported GLB/GLTF models. In order to have the models lit, I believe I must use an environment map such as the following:
function _Environment() {
  const env_scene = new THREE.Scene();

  const roomMaterial = new THREE.MeshStandardMaterial( { side: THREE.BackSide } );
  const room = new THREE.Mesh( new THREE.BoxGeometry(), roomMaterial );
  room.position.set( 0, 0, 0 );
  room.scale.set( 40, 40, 40 );
  env_scene.add( room );

  const env_alight = new THREE.AmbientLight(0xFFFFFF, .1);
  env_scene.add(env_alight);

  return env_scene;
}
function main() {
  canvas = document.getElementById('c');
  renderer = new THREE.WebGLRenderer({canvas: canvas, antialias: true});
  renderer.shadowMap.type = THREE.PCFSoftShadowMap;
  renderer.outputEncoding = THREE.sRGBEncoding;

  const aspect = 4/3; // the canvas default
  camera = new THREE.PerspectiveCamera(fov, aspect, near, far);
  camera.position.set(5, 0, 2);
  camera.up.set(0, 1, 0);
  camera.lookAt(new THREE.Vector3());
  camera.updateProjectionMatrix();

  const environment = new _Environment();
  const pmremGenerator = new THREE.PMREMGenerator(renderer);

  scene = new THREE.Scene();
  scene.background = new THREE.Color(DefaultBackgroundColor);
  scene.environment = pmremGenerator.fromScene(environment).texture;

  dlight = new THREE.DirectionalLight(0xFFFFFF, .7);
  dlight.position.set(5, 5, 10);
  dlight.target.position.set(0, 0, 0);
  scene.add(dlight);
  scene.add(dlight.target);

  alight = new THREE.AmbientLight(0xFFFFFF, .3);
  scene.add(alight);

  requestAnimationFrame(render);
}
However, it seems that the environment map causes the Phong materials to appear saturated, and I cannot find a combination of lights that works well.
One can always convert everything to PBR, but am I missing something? Can Phong and PBR materials co-exist in a well-lit, natural-looking scene?
Related
I have a custom mesh geometry (three.js) in Mapbox. I am trying to create a light for casting directional shadows, but I always end up with the light source in the base plane (which results in no cast shadows on my objects above the plane). Does anyone know how I can move the light source so it is above the plane? I added a helper to see the scope box, and I would like to move it upwards along the z-vector in the image below.
//Create a WebGLRenderer and turn on shadows in the renderer
const renderer = new THREE.WebGLRenderer();
renderer.shadowMap.enabled = true;
renderer.shadowMap.type = THREE.PCFSoftShadowMap; // default THREE.PCFShadowMap
//Add Ambient light
const amblight = new THREE.AmbientLight(0xffffff, 0.8);
amblight.position.set(8, 10, 5); //default; light shining from top
scene.add(amblight);
//Create a DirectionalLight and turn on shadows for the light
const light = new THREE.DirectionalLight(0xffffff, 0.5);
//light.position.set(8, 10, 5); //default; light shining from top
light.position.y = 2000;
light.position.x = 10;
light.position.z = 5;
light.castShadow = true; // default false
scene.add(light);
//scene.add(light.target);
//Set up shadow properties for the light
light.shadow.mapSize.width = 512;
light.shadow.mapSize.height = 512;
light.shadow.camera.left = -100;
light.shadow.camera.right = 100;
light.shadow.camera.top = 100;
light.shadow.camera.bottom = -100;
light.shadow.camera.near = 0.5;
light.shadow.camera.far = 100; //Scope box depth
//Create a plane that receives shadows (but does not cast them)
const planeGeometry = new THREE.PlaneGeometry(1000, 1000, 10, 10);
const planeMaterial = new THREE.MeshPhongMaterial({
color: 0x808080,
opacity: 0.8,
transparent: true,
});
const plane = new THREE.Mesh(planeGeometry, planeMaterial);
plane.receiveShadow = true;
scene.add(plane);
const meshString = result.mesh.meshString;
const mesh = meshToThreejs(rhino, meshString, THREE);
//scene.add(mesh);
//Add shadows
mesh.castShadow = true; //default is false
mesh.receiveShadow = true; //default
scene.add(mesh);
//End shadows
//Create a helper for the shadow camera (optional)
const helper = new THREE.CameraHelper(light.shadow.camera);
scene.add(helper);
"move the light source so it is above the plane" - It looks like you already know how to do this, just change the z number.
light.position.z = 20;
// or
light.position.set(0, 0, 20);
// Check note below - If y is up
light.position.y = 20;
// or
light.position.set(0, 20, 0);
Just a note: by default, Y is up in three.js, unless you have already handled that in code not shown here. If you need to check this, add an AxesHelper to your scene. The X axis is red, the Y axis is green, and the Z axis is blue. Make sure the light is moved in the correct direction.
const axesHelper = new THREE.AxesHelper( 100 );
scene.add( axesHelper );
If you are still not getting shadows, you could try adding a sphere, as in the three.js docs (https://threejs.org/docs/#api/en/lights/shadows/DirectionalLightShadow):
//Create a sphere that cast shadows (but does not receive them)
const sphereGeometry = new THREE.SphereGeometry( 5, 32, 32 );
const sphereMaterial = new THREE.MeshStandardMaterial( { color: 0xff0000 } );
const sphere = new THREE.Mesh( sphereGeometry, sphereMaterial );
sphere.castShadow = true; //default is false
sphere.receiveShadow = false; //default
scene.add( sphere );
If that is casting a shadow correctly, then perhaps there is an issue with your mesh, or the height of those buildings is so small that the shadows are barely visible.
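One more thing worth double-checking in the snippet above (a hedged guess, since I can't see the full scene): the light sits at y = 2000 while light.shadow.camera.far is only 100, so the shadow camera may end long before it reaches the plane and the buildings, and nothing would fall inside the shadow frustum. A minimal sketch of two ways to keep them consistent (the 200 and 2500 values are arbitrary examples):
// Option 1: bring the light down closer to the scene...
light.position.set(10, 200, 5);
// Option 2: ...or keep it at y = 2000 and push the shadow camera's far plane
// past the ground plane so both casters and receivers fit inside the shadow frustum.
light.shadow.camera.far = 2500;
light.shadow.camera.updateProjectionMatrix(); // apply the new frustum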
I have a GLTF version 1.0 model that I am importing into Three.js using LegacyGLTFLoader.js. When I do so, everything looks good, except that the model does not receive shadows. I am guessing that this is because the imported model's material is THREE.RawShaderMaterial, which does not support receiving shadows (I think). How can I fix this so that my imported model can receive shadows?
Here is sample code:
// Construct scene.
var scene = new THREE.Scene();
// Get window dimensions.
var width = window.innerWidth;
var height = window.innerHeight;
// Construct camera.
var camera = new THREE.PerspectiveCamera(75, width/height);
camera.position.set(20, 20, 20);
camera.lookAt(scene.position);
// Construct renderer.
var renderer = new THREE.WebGLRenderer();
renderer.setSize(width, height);
renderer.shadowMapEnabled = true;
document.body.appendChild(renderer.domElement);
// Construct cube.
var cubeGeometry = new THREE.BoxGeometry(10, 1, 10);
var cubeMaterial = new THREE.MeshPhongMaterial({color: 0x00ff00});
var cube = new THREE.Mesh(cubeGeometry, cubeMaterial);
cube.castShadow = true;
cube.translateY(15);
scene.add(cube);
// Construct floor.
var floorGeometry = new THREE.BoxGeometry(20, 1, 20);
var floorMaterial = new THREE.MeshPhongMaterial({color: 0x00ffff});
var floor = new THREE.Mesh(floorGeometry, floorMaterial);
floor.receiveShadow = true;
scene.add(floor);
// Construct light.
var light = new THREE.DirectionalLight(0xffffff);
light.position.set(0, 20, 0);
light.castShadow = true;
scene.add(light);
// Construct light helper.
var lightHelper = new THREE.DirectionalLightHelper(light);
scene.add(lightHelper);
// Construct orbit controls.
new THREE.OrbitControls(camera, renderer.domElement);
// Construct GLTF loader.
var loader = new THREE.LegacyGLTFLoader();
// Load GLTF model.
loader.load(
"https://dl.dropboxusercontent.com/s/5piiujui3sdiaj3/1.glb",
function(event) {
var model = event.scene.children[0];
var mesh = model.children[0];
mesh.receiveShadow = true;
scene.add(model);
},
null,
function(event) {
alert("Loading model failed.");
}
);
// Animates the scene.
var animate = function () {
requestAnimationFrame(animate);
renderer.render(scene, camera);
};
// Animate the scene.
animate();
Here are my resources:
https://dl.dropboxusercontent.com/s/y2r8bsrppv0oqp4/three.js
https://dl.dropboxusercontent.com/s/5wh92lnsxz2ge1e/LegacyGLTFLoader.js
https://dl.dropboxusercontent.com/s/1jygy1eavetnp0d/OrbitControls.js
https://dl.dropboxusercontent.com/s/5piiujui3sdiaj3/1.glb
Here is a JSFiddle:
https://jsfiddle.net/rmilbert/8tqc3yx4/26/
One way to fix the problem is to replace the instance of RawShaderMaterial with MeshStandardMaterial. To get the intended effect, you have to apply the existing texture to the new material like so:
var newMaterial = new THREE.MeshStandardMaterial( { roughness: 1, metalness: 0 } );
newMaterial.map = child.material.uniforms.u_tex.value;
You also have to compute normal data for the respective geometry so lighting can be computed correctly. If you need no shadows, the unlit MeshBasicMaterial is actually the better choice.
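Inside the load callback, that could look roughly like the following. This is a sketch only; it assumes, as in the fiddle, that each legacy material exposes its texture through a u_tex uniform:
model.traverse(function (child) {
    if (child.isMesh && child.material.uniforms && child.material.uniforms.u_tex) {
        // Swap the RawShaderMaterial for a lit material and reuse its texture.
        var newMaterial = new THREE.MeshStandardMaterial({ roughness: 1, metalness: 0 });
        newMaterial.map = child.material.uniforms.u_tex.value;
        child.material = newMaterial;
        // Lighting (and therefore shadow shading) needs normals.
        child.geometry.computeVertexNormals();
        child.receiveShadow = true;
    }
});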
Updated fiddle: https://jsfiddle.net/e67hbj1q/2/
I was just experimenting with some lighting in three.js and came across a problem which I seem to be the only one having.
The setup is simple: two PointLights, one PlaneGeometry, and one BoxGeometry.
"use strict";
var scale = 0.8;
var w = parseInt('' + Math.floor(innerWidth * scale));
var h = parseInt('' + Math.floor(innerHeight * scale));
var camera = new THREE.PerspectiveCamera(75, w / h, 0.1, 1000);
var renderer = new THREE.WebGLRenderer({
antialias: true
});
var scene = new THREE.Scene();
// init
{
scene.background = new THREE.Color(0x404040);
renderer.shadowMap.enabled = true;
renderer.shadowMap.type = THREE.BasicShadowMap;
renderer.setSize(w, h);
document.body.appendChild(renderer.domElement);
}
// plane
{
let geometry = new THREE.PlaneGeometry(40, 40, 10, 10);
let material = new THREE.MeshLambertMaterial({
color: 0x70B009,
side: THREE.DoubleSide
});
var plane = new THREE.Mesh(geometry, material);
plane.lookAt(new THREE.Vector3());
plane.rotateX(90 / 180 * Math.PI);
plane.receiveShadow = true;
scene.add(plane);
}
// box
{
let geometry = new THREE.BoxGeometry(1, 1, 1);
let material = new THREE.MeshLambertMaterial({
color: 0xFF6C00
});
var orangeCube = new THREE.Mesh(geometry, material);
orangeCube.castShadow = true;
scene.add(orangeCube);
}
// pointlights
{
var mapSize = 2 << 10;
var pointLight1 = new THREE.PointLight(0xFFFFFF, 0.6, 100);
pointLight1.castShadow = true;
pointLight1.shadow.mapSize.set(mapSize, mapSize);
scene.add(pointLight1);
var pointLight2 = new THREE.PointLight(0xFFFFFF, 0.6, 100);
pointLight2.castShadow = true;
pointLight2.shadow.mapSize.set(mapSize, mapSize);
scene.add(pointLight2);
}
// position camera, lights and box
{
pointLight1.position.set(0, 15, -15);
pointLight2.position.set(0, 15, 15);
orangeCube.position.set(0, 5, 0);
camera.position.set(10, 10, 0);
camera.lookAt(new THREE.Vector3());
}
// render once
{
renderer.render(scene, camera);
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/91/three.min.js"></script>
This works quite well, except for one problem: each light does not eliminate the shadow projected by the other PointLight.
Does someone know how to fix this?
Thank you for your help.
As explained in this SO answer, shadows in MeshLambertMaterial are an approximation. Try MeshPhongMaterial, for example.
In MeshPhongMaterial and MeshStandardMaterial, shadows are the absence of light. If there is light from two light sources, shadow intensity can vary where the shadows overlap. See this three.js example.
three.js r.91
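For example, keeping the renderer and the two PointLights from the snippet above unchanged, the plane and box blocks could be swapped to MeshPhongMaterial like this (a quick sketch; same geometry and colors, only the material type changes):
// plane
{
  let geometry = new THREE.PlaneGeometry(40, 40, 10, 10);
  let material = new THREE.MeshPhongMaterial({ // was MeshLambertMaterial
    color: 0x70B009,
    side: THREE.DoubleSide
  });
  var plane = new THREE.Mesh(geometry, material);
  plane.rotateX(90 / 180 * Math.PI);
  plane.receiveShadow = true;
  scene.add(plane);
}
// box
{
  let geometry = new THREE.BoxGeometry(1, 1, 1);
  let material = new THREE.MeshPhongMaterial({ // was MeshLambertMaterial
    color: 0xFF6C00
  });
  var orangeCube = new THREE.Mesh(geometry, material);
  orangeCube.castShadow = true;
  orangeCube.position.set(0, 5, 0);
  scene.add(orangeCube);
}
With per-fragment lighting, each light's shadow is simply the absence of that light, so the other PointLight can fill it in where the shadows overlap.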
I am struggling to do something which should be quite easy to do, but I have been unable to find any example which addresses this scenario. Essentially all I want to do is extrude a simple profile along a rectangular path:
(As I am new here I cannot post images, but they can be viewed on the forum to see what I should be getting and what I am actually generating: Original Question on ThreeJS Forum.)
I would appreciate it if someone could look at the code below and tell me what I am doing wrong:
<script>
var container;
var camera, scene, renderer, controls;
init();
animate();
function init() {
//SCENE SETUP
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
scene = new THREE.Scene();
scene.background = new THREE.Color( 0x222222 );
camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.set( -250, 150, 200 );
camera.lookAt(new THREE.Vector3(0, -50, 0))
controls = new THREE.TrackballControls( camera, renderer.domElement );
controls.minDistance = 200;
controls.maxDistance = 500;
scene.add( new THREE.AmbientLight( 0x222222 ) );
var light = new THREE.PointLight( 0xffffff );
light.position.copy( camera.position );
scene.add( light );
//PROFILE SHAPE
var spts = [];
spts.push(new THREE.Vector2(0, 0));
spts.push(new THREE.Vector2(10, 0));
spts.push(new THREE.Vector2(10, 25));
spts.push(new THREE.Vector2(-5, 25));
spts.push(new THREE.Vector2(-5, 20));
spts.push(new THREE.Vector2(0, 20));
//PATH POINTS
var ppth = []
ppth.push(new THREE.Vector3(0,0,10));
ppth.push(new THREE.Vector3(100, 0,10));
ppth.push(new THREE.Vector3(100, 200,10));
ppth.push(new THREE.Vector3(0, 200,10));
//-----------------------------------------------EXTRUSION PATH AS A CURVEPATH
var cpth = new THREE.CurvePath()
//THE FOLLOWING STATEMENT APPEARS TO CREATE NO NEW CURVES
//cpth.createGeometry(ppth)
//ADD CURVES EXPLICITLY
var v1 = new THREE.LineCurve(new THREE.Vector3(0,0,0), new THREE.Vector3(100,0,0));
var v2 = new THREE.LineCurve(new THREE.Vector3(100,0,0), new THREE.Vector3(100,200,0));
var v3 = new THREE.LineCurve(new THREE.Vector3(100, 200, 0), new THREE.Vector3(0, 200, 0));
var v4 = new THREE.LineCurve(new THREE.Vector3(0, 200, 0), new THREE.Vector3(0, 0, 0));
cpth.add(v1);
cpth.add(v2);
cpth.add(v3);
cpth.add(v4);
cpth.autoClose = true;
//cpth.update;
//SET EXTRUSION PATH TO CURVEPATH
var expth = cpth;
//EXTRUSION SETTINGS
var extrudeSettings = {
steps: 200,
bevelEnabled: false,
extrudePath: expth
};
// GENERATE SCENE GEOMETRY
var shape = new THREE.Shape( spts );
var geometry = new THREE.ExtrudeGeometry(shape, extrudeSettings);
var material2 = new THREE.MeshLambertMaterial( { color: 0xff8000, wireframe: false } );
var mesh = new THREE.Mesh( geometry, material2 );
mesh.position.x = -50;
mesh.position.y = -100;
scene.add( mesh );
}
</script>
Although the concept may look simple at a high level, code-wise it may not be.
I did peek at your link to the original question posted on the three.js forum, and it seems like someone did provide something of a solution there, or at least something close.
But at a high level, I believe the solution comes down to these steps (a rough sketch follows the list):
1 - Find the new points, offset outward while staying on the same plane. This can be done with the solution to a recent post of mine, "offset 2d polygon points".
2 - Once you have those new points, you should be able to create the 2D shape with ShapeGeometry in conjunction with the original profile points, using ShapeUtils.triangulateShape() to define a hole in the shape when building the triangles.
3 - Once you have your newly defined 2D shape, use ExtrudeGeometry to give it depth.
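Here is a minimal sketch of steps 2 and 3. It uses THREE.Shape with a hole plus ExtrudeGeometry, which triangulates the shape (holes included) internally via ShapeUtils.triangulateShape. The outer 100 x 200 rectangle, the inset inner rectangle, and the depth of 25 are hard-coded placeholders standing in for the offset points from step 1 and your real profile:
// Outer boundary of the 2D ring shape (placeholder for the offset points).
var outer = new THREE.Shape();
outer.moveTo(0, 0);
outer.lineTo(100, 0);
outer.lineTo(100, 200);
outer.lineTo(0, 200);
outer.closePath();
// Inner boundary, added as a hole (placeholder for the original path points).
var inner = new THREE.Path();
inner.moveTo(10, 10);
inner.lineTo(90, 10);
inner.lineTo(90, 190);
inner.lineTo(10, 190);
inner.closePath();
outer.holes.push(inner);
// Step 3: extrude the 2D ring to give it depth.
var ringGeometry = new THREE.ExtrudeGeometry(outer, {
    depth: 25, // called "amount" in older three.js releases
    bevelEnabled: false
});
var ringMesh = new THREE.Mesh(ringGeometry, new THREE.MeshLambertMaterial({ color: 0xff8000 }));
scene.add(ringMesh);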
I am attempting to have my skybox not be affected by the camera.far parameter. I would like to cull all other scene objects with it, just not the skybox.
When I set skyBox.frustumCulled = false; it makes no difference (skyBox being the mesh, of course).
Is this done by adding another render pass? If so, I would need two different cameras, one with a really high far value to allow viewing of the skybox, right? How can this be done efficiently?
For clarity here is the snippet from my terrain object code used for drawing the skybox:
shader = THREE.ShaderLib["cube"];
shader.uniforms["tCube"].value = this.cubetexture;
mat = new THREE.ShaderMaterial({
uniforms: shader.uniforms,
fragmentShader: shader.fragmentShader,
vertexShader: shader.vertexShader,
depthWrite: false,
side: THREE.BackSide
});
geo = new THREE.BufferGeometry().fromGeometry(new THREE.BoxGeometry(1024, 1024, 1024));
mesh = new THREE.Mesh(geo, mat);
mesh.rotation.y += 90;
mesh.scale.x = mesh.scale.y = mesh.scale.z = 50;
mesh.frustumCulled = false;
mesh.matrixAutoUpdate = false;
mesh.rotationAutoUpdate = false;
mesh.updateMatrix();
this.skybox = mesh;
scene.add(this.skybox);
You're adding the skybox to the 'main' scene. A better way to accomplish a skydome is to create a new scene; this will be the 'background' to your 'main' scene. There's also a discussion to be had about skydomes vs. skyboxes: simply put, a box saves polys, a dome looks better. In this example I'll be using a dome/sphere.
var renderer = new THREE.WebGLRenderer( {alpha: true, antialias: true} );
var mainScene = new THREE.Scene();
var mainCamera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 20000);
var skydome = {
  scene: new THREE.Scene(),
  camera: new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 20000)
};
skydome.material = new THREE.MeshBasicMaterial({color: 0x0F0F0F, side: THREE.BackSide}); // the material for the skydome, seen from the inside; for the sake of laziness I took a MeshBasicMaterial
skydome.mesh = new THREE.Mesh(new THREE.SphereGeometry(100, 20, 20), skydome.material);
skydome.scene.add(skydome.mesh);
Now, during the render function, you adjust only the rotation of the skydome camera, not its position.
var render = function(){
  requestAnimationFrame( render );
  skydome.camera.quaternion.copy( mainCamera.quaternion ); // copy only the orientation, never the position
  renderer.clear(); // manual clear, since autoClear is disabled below
  renderer.render(skydome.scene, skydome.camera); // first render the skydome
  renderer.clearDepth(); // drop the dome's depth values so the main scene draws on top of it
  renderer.render(mainScene, mainCamera); // then render the rest over the skydome
};
renderer.autoClear = false; // otherwise the second render clears the skydome and only the main scene will be visible