I have a fairly complicated architecture where I am doing most of my stuff in Three.JS but I also have a special renderer that renders directly to a raw WebGL texture. Is it possible to use this WebGL texture in a three.js "Texture"? It looks like the Three.JS texture class is just a container for an image or video or canvas, and somewhere deep in the guts of three.js it will upload that to a real webgl texture. How can I just have Three.js render my WebGL texture onto a mesh?
Brendan's answer no longer works.
No idea when it changed and I'm too lazy to go look it up, but as of r102 this worked:
const texture = new THREE.Texture();
renderer.setTexture2D(texture, 0); // force three.js to init the texture
const texProps = renderer.properties.get(texture);
texProps.__webglTexture = glTex;
As of r103, setTexture2D no longer exists. You can use this instead:
const forceTextureInitialization = function() {
const material = new THREE.MeshBasicMaterial();
const geometry = new THREE.PlaneBufferGeometry();
const scene = new THREE.Scene();
scene.add(new THREE.Mesh(geometry, material));
const camera = new THREE.Camera();
return function forceTextureInitialization(texture) {
material.map = texture;
renderer.render(scene, camera);
};
}();
const texture = new THREE.Texture();
forceTextureInitialization(texture); // force three.js to init the texture
const texProps = renderer.properties.get(texture);
texProps.__webglTexture = glTex;
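Below is a complete working example that creates a raw WebGL texture by hand, forces three.js to initialize a THREE.Texture, and then swaps the raw texture in: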
'use strict';
/* global THREE */
function main() {
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({
canvas: canvas
});
const fov = 75;
const aspect = 2; // the canvas default
const near = 0.1;
const far = 5;
const camera = new THREE.PerspectiveCamera(fov, aspect, near, far);
camera.position.z = 2;
const scene = new THREE.Scene();
const boxWidth = 1;
const boxHeight = 1;
const boxDepth = 1;
const geometry = new THREE.BoxGeometry(boxWidth, boxHeight, boxDepth);
const forceTextureInitialization = function() {
const material = new THREE.MeshBasicMaterial();
const geometry = new THREE.PlaneBufferGeometry();
const scene = new THREE.Scene();
scene.add(new THREE.Mesh(geometry, material));
const camera = new THREE.Camera();
return function forceTextureInitialization(texture) {
material.map = texture;
renderer.render(scene, camera);
};
}();
const cubes = []; // just an array we can use to rotate the cubes
{
const gl = renderer.getContext();
const glTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, glTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 2, 2, 0,
gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
255, 255, 0, 255,
]));
gl.generateMipmap(gl.TEXTURE_2D);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
const texture = new THREE.Texture();
forceTextureInitialization(texture);
const texProps = renderer.properties.get(texture);
texProps.__webglTexture = glTex;
const material = new THREE.MeshBasicMaterial({
map: texture,
});
const cube = new THREE.Mesh(geometry, material);
scene.add(cube);
cubes.push(cube); // add to our list of cubes to rotate
}
function render(time) {
time *= 0.001;
cubes.forEach((cube, ndx) => {
const speed = .2 + ndx * .1;
const rot = time * speed;
cube.rotation.x = rot;
cube.rotation.y = rot;
});
renderer.render(scene, camera);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
<canvas id="c"></canvas>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/103/three.min.js"></script>
Note: There is no such thing as "unsupported behavior" in three.js. Three.js makes no guarantee that anything you are doing today will work tomorrow. Three.js breaks whatever it wants to whenever it wants to.
This is completely unsupported behaviour, but you could imitate the WebGLRenderer and set the __webglTexture property on a Texture directly. e.g.
var texture = new THREE.Texture();
var rawTexture = gl.createTexture();
texture.__webglTexture = rawTexture;
texture.__webglInit = true;
// ... use texture as a normal three.js texture ...
Again, this is completely unsupported and undefined behaviour and likely to break in future versions of three.js, but will likely work for the time being if you're after speed.
I'd instead suggest looking into WebGLRenderTarget if you can, or submitting a feature request to enable the feature correctly.
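For reference, the supported path is to let three.js render into a texture itself via WebGLRenderTarget. A minimal sketch (assuming sourceScene and sourceCamera stand in for whatever produces your image):
// Render into a texture the supported way, then use rt.texture like any other map.
const rt = new THREE.WebGLRenderTarget(256, 256);
renderer.setRenderTarget(rt);
renderer.render(sourceScene, sourceCamera);
renderer.setRenderTarget(null); // back to the canvas
const material = new THREE.MeshBasicMaterial({ map: rt.texture });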
I found suggestions that setting the material's blending attribute should solve this, but after trying it the problem remains.
(Screenshot: incorrect occlusion.)
import * as THREE from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls";
import {GLTFLoader} from "three/examples/jsm/loaders/GLTFLoader.js"
import {
CSS3DRenderer,
CSS3DObject
} from "three/examples/jsm/renderers/CSS3DRenderer.js"
import dat from "dat.gui"
function initThree() {
const scene = new THREE.Scene();
const scene2 = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(
50,
window.innerWidth / window.innerHeight,
0.1,
10000
);
camera.position.set(0, 0, 2500);
scene.add(camera);
const renderer = new THREE.WebGLRenderer({
antialias: true,
alpha: true,
});
renderer.shadowMap.enabled = true;
renderer.setSize(window.innerWidth, window.innerHeight);
document.querySelector('#webgl').appendChild(renderer.domElement);
const labelRenderer = new CSS3DRenderer()
labelRenderer.setSize(window.innerWidth, window.innerHeight);
labelRenderer.domElement.style.position = 'absolute';
labelRenderer.domElement.style.top = 0;
document.body.appendChild(labelRenderer.domElement);
scene.add(new THREE.AxesHelper(1000))
const controls = new OrbitControls(camera, labelRenderer.domElement);
controls.enableDamping = true;
const clock = new THREE.Clock()
window.addEventListener("resize", () => {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setPixelRatio(window.devicePixelRatio);
labelRenderer.setSize(window.innerWidth, window.innerHeight);
});
return {
scene,
scene2,
camera,
renderer,
labelRenderer,
controls,
clock,
}
}
const gltfLoader = new GLTFLoader()
const textureLoader = new THREE.TextureLoader()
const gui = new dat.GUI()
const {
scene,
scene2,
camera,
renderer,
labelRenderer,
controls,
clock
} = initThree();
const ambientLight = new THREE.AmbientLight("#ffffff", 1)
scene.add(ambientLight)
const position = new THREE.Vector3(0, 900, 300);
const rotation = new THREE.Euler(0, 0, 0);
const container = document.createElement('div');
container.style.width = '1000px';
container.style.height = '1000px';
container.style.opacity = '1';
container.style.background = '#1d2e2f';
const iframe = document.createElement('iframe');
iframe.src = "http://csyedu.top"
iframe.style.width = "1000px"
iframe.style.height = "1000px"
iframe.style.padding = 10 + 'px';
iframe.style.boxSizing = 'border-box';
iframe.style.opacity = '1';
container.appendChild(iframe);
const object = new CSS3DObject(container);
// copy monitor position and rotation
object.position.copy(position);
object.rotation.copy(rotation);
// Add to CSS scene
scene2.add(object);
// Create GL plane
const material = new THREE.MeshLambertMaterial();
material.side = THREE.DoubleSide;
material.opacity = 0;
material.transparent = true;
// NoBlending allows the GL plane to occlude the CSS plane
material.blending = THREE.NoBlending;
// Create plane geometry
const geometry = new THREE.PlaneGeometry(1000, 1000);
// Create the GL plane mesh
const mesh = new THREE.Mesh(geometry, material);
// Copy the position, rotation and scale of the CSS plane to the GL plane
mesh.position.copy(object.position);
mesh.rotation.copy(object.rotation);
mesh.scale.copy(object.scale);
// Add to gl scene
scene.add(mesh);
gltfLoader.load("./models/computer_setup.glb", model => {
const texture = textureLoader.load("./models/baked_computer.jpg");
texture.flipY = false;
texture.encoding = THREE.sRGBEncoding;
const material = new THREE.MeshBasicMaterial({
map: texture,
});
model.scene.traverse((child) => {
if (child instanceof THREE.Mesh) {
child.scale.set(900, 900, 900);
child.material.map = texture;
child.material = material;
}
});
scene.add(model.scene)
})
function render() {
const elapsedTime = clock.getElapsedTime();
controls.update();
renderer.render(scene, camera);
labelRenderer.render(scene2, camera)
requestAnimationFrame(render);
}
render();
The effect I want is that the 3D mesh can correctly occlude the CSS3DObject.
I ran into the same problem. I tried your code and it works fine in my project, so I think you have a styling problem.
I separated the WebGL and CSS3D renderers in the HTML:
<body>
<div id="css"></div>
<div id="webgl"></div>
</body>
#css,
#webgl
{
width: 100%;
height: 100%;
position: absolute;
top: 0;
left: 0;
}
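Stacking both containers absolutely at the same position makes the WebGL canvas and the CSS3D layer overlap exactly, which is what lets the NoBlending GL plane visually occlude the iframe.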
If I only set #css to position: absolute, the occlusion is still wrong.
If I set both to absolute, it works fine.
I hope that helps.
I've tried my best to find a solution to my problem, but without any luck; I'm hoping for your help with this.
In my three.js file I have a moon that is surrounded by stars.
The moon is a simple sphere geometry, and the stars are a particle system that uses a texture, so they just look like flat circles flying around.
So far so good, the particles were flying all around the moon, some in front of it and some behind it. Exactly what I want.
Then I added a bloom effect and worked with layers. So the moon (layer 0) has no bloom and the particles (layer 1) has bloom added to it.
However that way all the star particles are flying behind the moon and not around it anymore.
Does anyone know how to fix this?
Thanks a ton in advance!
(Preview GIF of the issue.)
Here is the js code:
const canvas = document.querySelector("canvas.webgl");
const scene = new THREE.Scene();
const sizes = {
width: window.innerWidth,
height: window.innerHeight,
};
const camera = new THREE.PerspectiveCamera(
45,
sizes.width / sizes.height,
0.1,
1000
);
camera.position.x = 0;
camera.position.y = 0;
camera.position.z = 5;
camera.layers.enable(1);
scene.add(camera);
const params = {
exposure: 1,
bloomStrength: 2,
bloomThreshold: 0,
bloomRadius: 0,
};
let composer, mixer;
const light = new THREE.AmbientLight(0xffffff, 1);
scene.add(light);
const sgeometry = new THREE.SphereGeometry(15, 32, 16);
const smaterial = new THREE.MeshBasicMaterial({ color: 0xffffff });
const ssphere = new THREE.Mesh(sgeometry, smaterial);
const loader1 = new THREE.TextureLoader();
const circle = loader1.load("a.png");
const particlesGeometry = new THREE.BufferGeometry();
const particlesCnt = 3000;
const posArray = new Float32Array(particlesCnt * 3);
for (let i = 0; i < particlesCnt * 3; i++) {
posArray[i] = (Math.random() - 0.5) * (Math.random() * 20);
}
particlesGeometry.setAttribute(
"position",
new THREE.BufferAttribute(posArray, 3)
);
const mat = new THREE.PointsMaterial({
size: 0.05,
map: circle,
transparent: true,
opacity: 1,
alpha: 0.8,
alphaTest: 0.9,
alphaToCoverage: 0.91,
blending: THREE.AdditiveBlending, // note: must be lowercase "blending"
});
const particlesMesh = new THREE.Points(particlesGeometry, mat);
particlesMesh.position.set(0, 0, -1);
particlesMesh.layers.set(1);
scene.add(particlesMesh);
const geometry = new THREE.SphereBufferGeometry(1, 64, 64);
const material = new THREE.MeshPhongMaterial({
map: THREE.ImageUtils.loadTexture("moon.jpg"),
side: THREE.DoubleSide,
shininess: 0,
opacity: 1,
transparent: true,
});
const sphere = new THREE.Mesh(geometry, material);
sphere.rotation.set(0, -2.7, 0);
sphere.layers.set(0);
scene.add(sphere);
const renderer = new THREE.WebGLRenderer({
canvas: canvas,
alpha: true,
antialias: true,
});
renderer.autoClear = false;
renderer.setSize(sizes.width, sizes.height);
renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
renderer.toneMapping = THREE.LinearToneMapping;
const renderScene = new THREE.RenderPass(scene, camera);
const effectFXAA = new THREE.ShaderPass(THREE.FXAAShader);
effectFXAA.uniforms.resolution.value.set(
1 / window.innerWidth,
1 / window.innerHeight
);
const bloomPass = new THREE.UnrealBloomPass(
new THREE.Vector2(window.innerWidth, window.innerHeight),
1.5,
0.4,
0.85
);
bloomPass.threshold = params.bloomThreshold;
bloomPass.strength = params.bloomStrength;
bloomPass.radius = params.bloomRadius;
composer = new THREE.EffectComposer(renderer);
composer.addPass(renderScene);
composer.addPass(effectFXAA);
composer.addPass(bloomPass);
renderer.gammaInput = true;
renderer.gammaOutput = true;
renderer.toneMappingExposure = Math.pow(0.9, 4.0);
window.addEventListener("resize", () => {
sizes.width = window.innerWidth;
sizes.height = window.innerHeight;
camera.aspect = sizes.width / sizes.height;
camera.updateProjectionMatrix();
renderer.setSize(sizes.width, sizes.height);
renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
});
const clock = new THREE.Clock();
const tick = () => {
window.requestAnimationFrame(tick);
const deltaTime = clock.getDelta();
const elapsedTime = clock.getElapsedTime();
sphere.rotation.y = 0.08 * elapsedTime;
particlesMesh.rotation.y = 0.08 * elapsedTime;
renderer.clear();
camera.layers.set(1);
composer.render();
renderer.clearDepth();
camera.layers.set(0);
renderer.render(scene, camera);
};
tick();
The problem is probably that layer 0 (with the moon) is rendered AFTER layer 1 (with the stars). This makes the renderer paint the moon over the stars.
Try to change the order of the rendering so that layer 0 is first. Keep in mind that layer 1 must render in the composer, and layer 0 with the normal renderer. I think the code will be something like this:
renderer.clear();
camera.layers.set(0);
renderer.render(scene, camera);
renderer.clearDepth();
camera.layers.set(1);
composer.render();
I have an application that includes meshes rendered with MeshPhongMaterials. They work well with the full suite of lights available in three.js.
However, I want to combine them with imported GLB/GLTF models. In order to have the models lit, I believe I must use an environment map such as the following:
function _Environment() {
const env_scene = new THREE.Scene();
const roomMaterial = new THREE.MeshStandardMaterial( { side: THREE.BackSide } );
const room = new THREE.Mesh( new THREE.BoxGeometry(), roomMaterial );
room.position.set( 0, 0, 0 );
room.scale.set( 40, 40, 40 );
env_scene.add( room );
const env_alight = new THREE.AmbientLight(0xFFFFFF, .1);
env_scene.add(env_alight);
return env_scene;
}
function main() {
canvas = document.getElementById('c');
renderer = new THREE.WebGLRenderer({canvas: canvas, antialias: true});
renderer.shadowMap.type = THREE.PCFSoftShadowMap;
renderer.outputEncoding = THREE.sRGBEncoding;
const aspect = 4/3; // the canvas default
camera = new THREE.PerspectiveCamera(fov, aspect, near, far); // fov, near, far are defined elsewhere in the app
camera.position.set(5, 0, 2);
camera.up.set(0,1,0);
camera.lookAt(new THREE.Vector3());
camera.updateProjectionMatrix()
const environment = new _Environment();
const pmremGenerator = new THREE.PMREMGenerator(renderer);
scene = new THREE.Scene();
scene.background = new THREE.Color(DefaultBackgroundColor); // app-level constant
scene.environment = pmremGenerator.fromScene(environment).texture;
dlight = new THREE.DirectionalLight(0xFFFFFF, .7);
dlight.position.set(5, 5, 10);
dlight.target.position.set(0, 0, 0);
scene.add(dlight);
scene.add(dlight.target);
alight = new THREE.AmbientLight(0xFFFFFF, .3);
scene.add(alight);
requestAnimationFrame(render);
}
However, it seems that the environment map causes the Phong materials to show up saturated, and I cannot find a good combination of lights that works.
One can always convert everything to PBR, but am I missing something? Can Phong and PBR materials co-exist in a well-lit, natural-looking scene?
I'm trying to read depth values from a framebuffer/WebGLRenderTarget into an array. I can find information on rendering depth to a depth texture, but not on reading the depth back into a buffer. With readPixels I could only get RGBA values. Ideally I am trying to get the world position from a depth value and a ray.
I have tried reading pixels, but I don't know how to read depth in three.js.
Off the top of my head it's not possible to read depth values directly.
You can assign a DepthTexture to your WebGLRenderTarget's depthTexture property. You can then render all or part of that depth texture to another render target and read the result.
'use strict';
/* global THREE */
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas});
const fov = 75;
const aspect = 2; // the canvas default
const near = 0.01;
const far = 5;
const camera = new THREE.PerspectiveCamera(fov, aspect, near, far);
camera.position.z = 0.7;
const scene = new THREE.Scene();
const geometry = new THREE.BoxBufferGeometry();
const material = new THREE.MeshBasicMaterial({color: 'red'});
const cube = new THREE.Mesh(geometry, material);
scene.add(cube);
cube.position.x = 0.25;
cube.rotation.y = Math.PI * 0.25;
const depthTexture = new THREE.DepthTexture(canvas.width, canvas.height);
const rt = new THREE.WebGLRenderTarget(canvas.width, canvas.height, {
depthTexture,
});
renderer.setRenderTarget(rt);
renderer.render(scene, camera);
const planeScene = new THREE.Scene();
const planeGeo = new THREE.PlaneBufferGeometry(2, 2);
const planeMat = new THREE.MeshBasicMaterial({map: depthTexture});
const plane = new THREE.Mesh(planeGeo, planeMat);
planeScene.add(plane);
const ortho = new THREE.OrthographicCamera(-1, 1, 1, -1, -1, 1);
const planeRT = new THREE.WebGLRenderTarget(canvas.width, canvas.height, {type: THREE.FloatType});
renderer.setRenderTarget(planeRT);
renderer.render(planeScene, ortho);
const depths = new Float32Array(canvas.width * canvas.height * 4);
renderer.readRenderTargetPixels(planeRT, 0, 0, canvas.width, canvas.height, depths);
console.log(depths);
<canvas id="c" width="4" height="4"></canvas>
<script src="https://threejsfundamentals.org/threejs/resources/threejs/r105/three.min.js"></script>
What does the distance setting mean in three.js in relation to physically based lighting?
For non physically based lighting the distance setting is a setting where the light's influence fades out linearly. Effectively
lightAffect = 1 - min(1, distanceFromLight / distance)
I don't know physically based lighting well, but it seems to me real lights don't have a distance setting; they just have a power output (lumens) and decay based on the density of the atmosphere. Three.js has both a power setting and a decay setting, although it's not at all clear what decay should be set to, as the docs effectively just say to set it to 2.
What should I be setting distance for a physically based PointLight for example if I want physically based lighting?
'use strict';
/* global dat */
function main() {
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas: canvas});
renderer.physicallyCorrectLights = true;
const fov = 45;
const aspect = 2; // the canvas default
const zNear = 0.1;
const zFar = 100;
const camera = new THREE.PerspectiveCamera(fov, aspect, zNear, zFar);
camera.position.set(0, 10, 20);
camera.lookAt(0, 5, 0);
const scene = new THREE.Scene();
scene.background = new THREE.Color('black');
{
const planeSize = 40;
const planeGeo = new THREE.PlaneBufferGeometry(planeSize, planeSize);
const planeMat = new THREE.MeshPhongMaterial({
color: '#A86',
side: THREE.DoubleSide,
});
const mesh = new THREE.Mesh(planeGeo, planeMat);
mesh.rotation.x = Math.PI * -.5;
scene.add(mesh);
} {
const cubeSize = 4;
const cubeGeo = new THREE.BoxBufferGeometry(cubeSize, cubeSize, cubeSize);
const cubeMat = new THREE.MeshPhongMaterial({color: '#8AC'});
const mesh = new THREE.Mesh(cubeGeo, cubeMat);
mesh.position.set(cubeSize + 1, cubeSize / 2, 0);
scene.add(mesh);
}
{
const sphereRadius = 3;
const sphereWidthDivisions = 32;
const sphereHeightDivisions = 16;
const sphereGeo = new THREE.SphereBufferGeometry(sphereRadius, sphereWidthDivisions, sphereHeightDivisions);
const sphereMat = new THREE.MeshPhongMaterial({color: '#CA8'});
const mesh = new THREE.Mesh(sphereGeo, sphereMat);
mesh.position.set(-sphereRadius - 1, sphereRadius + 2, 0);
scene.add(mesh);
}
{
const color = 0xFFFFFF;
const intensity = 1;
const light = new THREE.PointLight(color, intensity);
light.power = 800;
light.distance = 20;
light.position.set(0, 10, 5);
scene.add(light);
light.decay = 2;
const helper = new THREE.PointLightHelper(light);
scene.add(helper);
const onChange = () => {
helper.update();
render();
};
setTimeout(onChange);
window.onresize = onChange;
const gui = new dat.GUI();
gui.add(light, 'distance', 0, 100).onChange(onChange);
gui.add(light, 'decay', 0, 4).onChange(onChange);
gui.add(light, 'power', 0, 3000).onChange(onChange);
}
function resizeRendererToDisplaySize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
function render() {
if (resizeRendererToDisplaySize(renderer)) {
const canvas = renderer.domElement;
camera.aspect = canvas.clientWidth / canvas.clientHeight;
camera.updateProjectionMatrix();
}
renderer.render(scene, camera);
}
}
main();
html, body {
margin: 0;
height: 100%;
}
#c {
width: 100%;
height: 100%;
display: block;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/96/three.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dat-gui/0.7.2/dat.gui.min.js"></script>
<canvas id="c"></canvas>
Reading through the three.js source and the paper it links to, at least as of r95 the distance setting should basically be Infinity for physically based lights.
The paper points out that physically based lights shine to infinity, but of course in a 3D engine that's no good. Most 3D engines need to compute the minimum number of lights per object drawn, so a lightDistance setting was added: if a light is further away than lightDistance, it can be ignored. The problem is that there would be a sharp edge if lights just stopped being applied past lightDistance, so a falloff was hacked in.
three.js copied that lightDistance-and-falloff setting from the paper, but three.js does not cull far-away lights from its calculations, so AFAICT there seems to be no reason not to set distance to infinity, at least as of r95.
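In code, that works out to something like this (a small sketch; note that in three.js a distance of 0 means "no limit", which is the practical way to express infinity):
// Physically based point light with no artificial distance cutoff.
const light = new THREE.PointLight(0xFFFFFF);
light.power = 800;   // luminous power in lumens (with renderer.physicallyCorrectLights = true)
light.decay = 2;     // physically correct inverse-square falloff
light.distance = 0;  // 0 = no cutoff, i.e. effectively infinite distance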