Manually specifying camera matrices in ThreeJS - three.js

I'm working on a project where I will draw 3D graphics on a video that is filmed with a real camera. I get provided a static projection and view matrix and my task is to draw the actual graphics on top. I've got it working in pure WebGL and know I'm trying to do it in ThreeJS. The problem is that I can't find a way to manually set the projection and view matrix for the camera in ThreeJS.
I've tried to set camera.matrixAutoUpdate = false and calling camera.projectionMatrix.set(matrix) and camera.matrixWorldInverse.set(matrix), but it doesn't seam to work. It is like the actual variable isn't passed to the shader.
Does anyone know how this can be done?
This is what I've got so far:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(
75,
window.innerWidth / window.innerHeight,
0.1,
1000
);
camera.matrixAutoUpdate = false;
camera.projectionMatrix.set(...)
camera.matrixWorldInverse.set(...)
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var geometry = new THREE.BoxGeometry();
var material = new THREE.MeshBasicMaterial({ color: 0x00ff00 });
var cube = new THREE.Mesh(geometry, material);
scene.add(cube);
var animate = function() {
requestAnimationFrame(animate);
renderer.render(scene, camera);
};
animate();

Related

Rendering to texture within a-frame

I'm trying to implement something very similar to this...
https://stemkoski.github.io/Three.js/Camera-Texture.html
... as an a-frame component. However, I'm getting merely a blank white on the quad which is supposed to be "monitoring" this extra camera. It changes to black when entering VR mode.
Here is my code:
AFRAME.registerComponent('viewfinder', {
schema:{
//
},
init:function(){
this.renderer = new THREE.WebGLRenderer({antialias:true});
this.renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild(this.renderer.domElement);
this.dronecamera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
this.dronecamera.position.z=5;
this.monitorbuffercam = new THREE.OrthographicCamera(window.innerWidth/-2, window.innerWidth/2, window.innerHeight/2, window.innerHeight/-2, -10000, 10000);
this.monitorbuffercam.position.z=1;
this.buffertexture = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight, { format: THREE.RGBFormat });
this.monitortexture = new THREE.WebGLRenderTarget( window.innerWidth, window.innerHeight, { format: THREE.RGBFormat });
this.bufferscene = new THREE.Scene();
var ambientlight = new THREE.AmbientLight(0xffffff);
this.bufferscene.add(ambientlight);
var bufferplaneG = new THREE.PlaneGeometry( window.innerWidth, window.innerHeight);
var bufferplaneM = new THREE.MeshBasicMaterial({map:this.buffertexture});
this.bufferplane = new THREE.Mesh(bufferplaneG, bufferplaneM);
this.bufferscene.add(this.bufferplane);
this.bufferscene.add(this.monitorbuffercam);
this.el.sceneEl.object3D.add(this.dronecamera);
this.monitorplaneG = new THREE.PlaneGeometry(10, 10);
this.monitorplaneM = new THREE.MeshBasicMaterial({map:this.monitortexture});
this.monitor = new THREE.Mesh(this.monitorplaneG, this.monitorplaneM);
this.el.setObject3D('monitor', this.monitor);
},
tick:function(){
var r = this.renderer;
var s = this.el.sceneEl.object3D;
var bs = this.bufferscene;
var dc = this.dronecamera;
var bt = this.buffertexturee;
var mbc = this.monitorbuffercam;
var mt = this.monitortexture;
requestAnimationFrame(draw);
function draw(){
r.render(s, dc, bt, true);
r.render(bs, mbc, mt, true);
}
}
});
Any help is greatly appreciated. I'm attempting to follow the model from the three.js example, rendering from a camera in the main scene to a quad texture map in a separate off-screen scene, then pointing an orthographic camera at it, and rendering that camera's view to a texture map back in the main scene. I have a hunch that there is merely some boilerplate code I'm forgetting or I'm doing wrong.
Thanks in advance!
I'm not sure if this what you're looking for, but maybe this answer to AFrame: How to render a camera to a texture could be of some help. In short, it shows how to use a component that creates a renderer for a secondary camera, that can be referenced as material for an object.

Three.js not rendering cube with custom texture

I'm trying to render a three.js box that has custom textures which I'm loading using THREE.TextureLoader().
While I see THREE.WebGLRenderer 87 in my console, there doesn't seem to be any cube being rendered.
I've created a fiddle to show how I'm attempting to do it: http://jsfiddle.net/hfj7gm6t/4786/
Note that I'm just using one url for all 6 side's textures for simplicity's sake.
Can anybody please help me understand why my beloved cube isn't showing?
First you need a render loop, you cannot just call renderer.render once. Then you need to position your camera correctly and make sure it is actually looking at your cube:
let textureUrls = [
'https://i.imgur.com/wLNDvZV.png', 'https://i.imgur.com/wLNDvZV.png', 'https://i.imgur.com/wLNDvZV.png',
'https://i.imgur.com/wLNDvZV.png', 'https://i.imgur.com/wLNDvZV.png', 'https://i.imgur.com/wLNDvZV.png'
];
let renderer = null
let camera = null
let scene = null
function renderMap(urls) {
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(
60, window.innerWidth / window.innerHeight, 1, 100000 );
camera.position.x = 500;
camera.position.y = 500;
camera.position.z = -500;
camera.lookAt(new THREE.Vector3(0, 0, 0))
let textureLoader = new THREE.TextureLoader();
let materials =
urls.map((url) => {
return textureLoader.load(url);
}).map((texture) => {
return new THREE.MeshBasicMaterial({map: texture})
});
let mesh = new THREE.Mesh(new THREE.BoxGeometry(200, 200, 200), materials);
scene.add(mesh);
}
function init() {
renderMap(textureUrls);
renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
render();
}
function render() {
requestAnimationFrame(render)
renderer.render(scene, camera)
}
init()

Three.js mesh doesn't appear with the build version of the library but appears when using old ones library?

I want to use the build version of three.js (https://threejs.org/build/three.js) for a project.
I was using an old one version of the library and everything was fine and my mesh (loaded form a .json file) appeared normally.
Here's my code :
var renderer, scene, camera, mesh, material;
initMesh();
function initMesh() {
var controls;
camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 1, 10000 );
camera.position.set(0,500,700);
camera.up = new THREE.Vector3(0,0,0);
camera.lookAt(new THREE.Vector3(0,0,0));
scene = new THREE.Scene();
scene.add(camera);
renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
var loader = new THREE.JSONLoader();
loader.load('test.json', function(geometry) {
material = new THREE.MeshBasicMaterial({ color: 0x00ff84, wireframe: true, wireframeLinewidth: 3 });
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
render();
});
}
var render = function () {
requestAnimationFrame( render );
renderer.render(scene, camera);
};
Here is what I got when using old one library.
I don't see what I miss ? Any idea ? Thank you.
I tried with your example
Removing this line fixed the issue: camera.up = new THREE.Vector3(0,0,0);

Why doesn't my directional light affect the whole scene?

Please see this fiddle - http://jsfiddle.net/vnr2v6mL/
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var geometry = new THREE.BoxGeometry(100, 100, 1);
var material = new THREE.MeshPhongMaterial({ color: 0x00ff00 });
var plane = new THREE.Mesh(geometry, material);
scene.add(plane);
camera.position.z = 5;
//scene.add(new THREE.AmbientLight(0xdddddd));
var dl = new THREE.DirectionalLight(0x0000ff, 1.0);
dl.position.set(0, 0, 10);
scene.add(dl);
scene.add( new THREE.DirectionalLightHelper(dl, 2.5) );
function render() {
requestAnimationFrame( render );
renderer.render( scene, camera );
}
render();
$(document).ready(function() {
$(document).bind("mousewheel DOMMouseScroll", function(e) {
var delta = e.type === 'DOMMouseScroll' ? -e.originalEvent.detail : e.originalEvent.wheelDelta;
camera.position.z += delta / 120;
});
});
It is my understanding that the lighting should be infinite and parallel, so why does it only light up a circle in the middle of my scene?
Thanks in advance.
You are using MeshPhongMaterial. What you are seeing is the specular highlight only.
Your light is blue. However, your material is green, so it reflects only green light. Therefore, there is no diffuse light reflected from the material.
The material specular reflectance is, by default, 0x111111. So all colors are reflected specularly. Since your light is blue, you get a blue light reflected specularly. In other words, a blue "hot spot".
Consider using a white light, 0xffffff, and adjust the light intensity.
Fiddle: http://jsfiddle.net/vnr2v6mL/1/
three.js r.70

Identifying overlap region of two geometries in a canvas using THREE.js

I need to identify the overlapping region of two geometries in a canvas and show the overlapping region with different color/texture.
sample code: http://jsfiddle.net/v4B3d/1/
var camera, scene, renderer, geometry, material, mesh,mesh2;
init();
animate();
function init() {
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(50, window.innerWidth / window.innerHeight, 1, 10000);
camera.position.z = 500;
scene.add(camera);
geometry = new THREE.CubeGeometry(100, 100, 100);
material = new THREE.MeshNormalMaterial();
mesh = new THREE.Mesh(geometry, material);
mesh2 = new THREE.Mesh(geometry, material);
scene.add(mesh);
scene.add(mesh2);
mesh.position.y = -30;
mesh2.position.y = 40;
renderer = new THREE.CanvasRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
}
function animate() {
requestAnimationFrame(animate);
render();
}
function render() {
mesh.rotation.x += 0.01;
mesh.rotation.y += 0.02;
mesh2.rotation.x += 0.01;
mesh2.rotation.y += 0.02;
renderer.render(scene, camera);
}
Please let me know how to achieve this.
Thanks in advance.
You probably want to make a third geometry from the overlapping region. You can then render that however you want.
This can be achieved using Constructive Solid Geometry (CSG) boolean operations (namely, intersection). If you only want to render the overlapping part, you can render only the intersection result.
There is a Three.js wrapper for CSG.js library, that should make it easy. See http://learningthreejs.com/blog/2011/12/10/constructive-solid-geometry-with-csg-js/ , http://www.chandlerprall.com/2011/12/constructive-solid-geometry-with-three-js/ and http://en.wikipedia.org/wiki/Constructive_solid_geometry for more information.

Resources