How do you connect a geometry to two moving vertices - three.js

I have created some box geometries in my threejs app and I have successfully drawn a cylinder from the center of one to the center of another using the code below:
// Build a cylinder mesh whose axis runs from pointX to pointY (both THREE.Vector3).
// Radius is fixed at 2; returns the positioned/oriented THREE.Mesh.
function cylinderMesh(pointX, pointY, material) {
var direction = new THREE.Vector3().subVectors(pointY, pointX);
var orientation = new THREE.Matrix4();
// Orient the local Z axis along the segment, using the default up vector.
orientation.lookAt(pointX, pointY, new THREE.Object3D().up);
// CylinderGeometry extends along local Y, so rotate Y onto Z.
// Use .set(): in most three.js releases the Matrix4 constructor ignores
// element arguments and silently leaves the identity matrix behind.
var yToZ = new THREE.Matrix4();
yToZ.set(1, 0, 0, 0,
         0, 0, 1, 0,
         0, -1, 0, 0,
         0, 0, 0, 1);
orientation.multiply(yToZ);
var edgeGeometry = new THREE.CylinderGeometry(2, 2, direction.length(), 8, 1);
var edge = new THREE.Mesh(edgeGeometry, material);
edge.applyMatrix(orientation);
// Center the cylinder on the midpoint of the segment.
edge.position.x = (pointY.x + pointX.x) / 2;
edge.position.y = (pointY.y + pointX.y) / 2;
edge.position.z = (pointY.z + pointX.z) / 2;
return edge;
}
scene.add(cylinderMesh(vertex1, vertex2, globalMaterial));
My question is: How do I keep the cylinder "connected" to the two vertices I provide if they move?
I don't want to use a THREE.Line because I can't control the width of the line and I have noticed weird issues with clipping if the camera gets too close.
Any ideas?

Related

ThreeJS: Getting world coordinates from camera view

I want to animate a Plane vertices to fill the screen. (Vertices as this is the effect I want, I'm hoping to animate each vertex with a short delay to then fill the screen)
As a proof of concept, I've got a vertex to animate off to a random point, using the function below -
// Tween the first vertex of the selected tile's geometry to (-5, 5) with an
// elastic ease, flagging the geometry dirty on every tween tick so three.js
// re-uploads the mutated vertex data to the GPU.
tileClick() {
var geo = this.SELECTED.geometry;
var mat = this.SELECTED.material as THREE.MeshBasicMaterial;
TweenMax.TweenLite.to(geo.vertices[0], 0.3, {x: -5, y:5, onUpdate: () =>{
// verticesNeedUpdate is the flag that actually re-uploads vertex positions;
// without it the tweened vertex never appears on screen.
geo.verticesNeedUpdate = true;
mat.needsUpdate = true;
geo.colorsNeedUpdate = true;
geo.elementsNeedUpdate = true;
}, ease: TweenMax.Elastic.easeOut.config(1, 0.5)});
}
However, now I need to work out the points of the current view of the camera. pseudo code: camera.view.getBoundingClientRect();
Plnkr of WIP - https://next.plnkr.co/edit/Jm4D2zgLtiKBGghC
I believe what you need is THREE.Vector3.unproject. With this method, you can set the vector to x, y, z in screen coordinates, and it'll return x, y, z in world coordinates:
// Unproject each viewport corner from normalized device coordinates (every
// axis in [-1, 1]) into world space on the near plane.
var vector = new THREE.Vector3();
var zNearPlane = -1;
var zFarPlane = 1;
var corners = [
    [-1,  1], // top left
    [ 1,  1], // top right
    [-1, -1], // bottom left
    [ 1, -1]  // bottom right
];
for (var i = 0; i < corners.length; i++) {
    vector.set(corners[i][0], corners[i][1], zNearPlane).unproject(camera);
}
Notice that all inputs are in the [-1, 1] range:
x:-1 = left side of screen
x: 1 = right side of screen
y: 1 = top
y:-1 = bottom
z: 1 = far plane
z: -1 = near plane

How to properly rotate the raycaster according to the object position and rotation in three.js

I have 8 raycasters from the Object3D to different direction for the collision detection. I want to rotate the direction they were pointing according to the object rotation. I have followed the solution from here
The raycaster start rotating but in a weird way. It started detecting collision from all rays at different direction.
In HTML I have
// Cube under test plus the eight candidate ray directions for collision checks.
cube = new THREE.BoxGeometry(1,1,1);
cube_material = new THREE.MeshBasicMaterial({color: 0x00a000});
cube_mesh = new THREE.Mesh(cube, cube_material);
// NOTE(review): these directions lie in the XY plane ("forward" = +Y).
// three.js is right-handed with Y up, so this labeling only holds for a
// top-down XY world — see the corrected Z-based table in the answer below.
var rays = [
new THREE.Vector3(0, 1, 0), // forward
new THREE.Vector3(0, -1, 0), // backward
new THREE.Vector3(-1, 0, 0), // left
new THREE.Vector3(1, 0, 0), // right
new THREE.Vector3(1, 1, 0), // forward right
new THREE.Vector3(-1, 1, 0), // forward left
new THREE.Vector3(1, -1, 0), // backward right
new THREE.Vector3(-1, -1, 0) // backward left
];
Here is my detectCollision function which is called from animate() function:
// Cast the eight shared `rays` directions (rotated into the cube's current
// orientation) from the cube's position and nudge the cube away from any
// wall hit. Called once per animate() tick; reads the globals cube_mesh,
// rays and collidable_walls.
detectCollision = function( ){
    var dist = 0.8;
    var origin = new THREE.Vector3();
    // NOTE(review): use cube_mesh.getWorldPosition(origin) and matrixWorld
    // instead if the cube is ever parented to a transformed object.
    origin.copy(cube_mesh.position);
    // The rotation is the same for every ray, so extract it once per call
    // rather than once per ray.
    var rotation = new THREE.Matrix4();
    rotation.extractRotation(cube_mesh.matrix);
    for (var i = 0; i < rays.length; i++){
        // Clone before transforming: applyMatrix4 mutates the vector in
        // place, and the shared rays[i] must stay pristine — otherwise each
        // frame compounds another rotation onto it and the directions drift.
        var dir = rays[i].clone().applyMatrix4(rotation);
        raycaster = new THREE.Raycaster(origin, dir, 0.6, dist);
        var intersections = raycaster.intersectObjects(collidable_walls);
        if (intersections.length > 0){
            // Ray index -> direction: 0 forward, 1 backward, 2 left, 3 right,
            // 4 forward right, 5 forward left, 6 backward right, 7 backward left.
            switch (i) {
                case 0:
                    console.log("forward: " + i);
                    cube_mesh.translateY(-0.12);
                    break;
                case 1:
                    console.log("backward: " + i);
                    cube_mesh.translateY(0.12);
                    break;
                case 2:
                    cube_mesh.translateX(0.12);
                    console.log("left: " + i);
                    break;
                case 3:
                    cube_mesh.translateX(-0.12);
                    console.log("right: " + i);
                    break;
                case 4:
                    console.log("forward right: " + i);
                    break;
                case 5:
                    console.log("forward left: " + i);
                    break;
                case 6:
                    console.log("backward right:" + i);
                    break;
                case 7:
                    console.log("backward left: " + i);
                    break;
            }
        }
    }
}
Can anyone give me some idea on how to rotate the raycaster properly as ArrowHelper(not working in the image but works pretty well with rotation.z)
Three.js uses a right-handed coordinate system, so the rays should be:
// Directions expressed in three.js's right-handed, Y-up frame: -Z is
// "forward", +Z is "backward"; Y stays 0 so all rays lie in the ground plane.
var rays = [
new THREE.Vector3(0, 0, -1), // forward
new THREE.Vector3(0, 0, 1), // backward
new THREE.Vector3(-1, 0, 0), // left
new THREE.Vector3(1, 0, 0), // right
new THREE.Vector3(1, 0, -1), // forward right
new THREE.Vector3(-1, 0, -1), // forward left
new THREE.Vector3(1, 0, 1), // backward right
new THREE.Vector3(-1, 0, 1) // backward left
];
In your function detectCollision, you should change var dir = rays[i]; to var dir = new THREE.Vector3().copy(rays[i]);, because otherwise applying a matrix to dir will also mutate rays[i].
You'd better make sure the cube_mesh.position is the cube's world position. I recommend you to use cube_mesh.getWorldPosition() and cube_mesh.matrixWorld.

detect collision for child mesh for three.js

I have an Object3D (position 10, 0, 30) with a child mesh (local position 0, 0, 0) constructed with BoxGeometry (w: 20, h: 20, d: 20).
Now if a ray is casted with the origin (-10, 0, 0) and direction (1, 0, 0) and checked for intersection, it detected intersection (incorrectly as the object is not in the path).
Consider this code:
// Reproduction: a parent Object3D at (10, 0, 30) holding a 20x20x20 box,
// raycast from (-10, 0, 0) along +X — the ray should miss.
const THREE = require('three');
let obj = new THREE.Object3D();
let boxGeo = new THREE.BoxGeometry(20, 20, 20);
let mat = new THREE.MeshPhongMaterial();
let mesh = new THREE.Mesh(boxGeo, mat);
obj.add(mesh);
obj.position.set(10, 0, 30);
// Propagate the new position into obj.matrixWorld and the child's matrixWorld.
// Raycaster tests against matrixWorld, which is otherwise stale here because
// no renderer render() call has updated the scene graph.
obj.updateMatrixWorld(true);
let raycaster = new THREE.Raycaster(new THREE.Vector3(-10, 0, 0), new THREE.Vector3(1, 0, 0));
let intersects = raycaster.intersectObject(obj, true);
The intersects array is of length 2, whereas it should be of length 0.
In order for Raycaster to correctly test child objects recursively, I had to call updateMatrixWorld() on the parent object before calling intersectObject.

WebGL LookAt works on -z but broke on +z?

I'm trying to build my own camera class in WebGL for learning purposes. It's almost complete but I have 1 slight issue with the lookAt function. If I translate my object in the -z direction then it will work fine but once I move it in the +z direction it acts as if it's doing the opposite and the object will begin to stretch across the screen if I move the camera up or down.
This is what I currently have:
// Build an orthonormal camera basis for a right-handed lookAt:
// zAxis points from the target back toward the camera.
var zAxis = new Vector(cameraPosition.x, cameraPosition.y, cameraPosition.z)
.subtract(target)
.normalize();
// xAxis = up x zAxis, yAxis = zAxis x xAxis (re-orthogonalized).
// NOTE(review): if the camera ever sits directly on the up axis these cross
// products degenerate to zero vectors — verify that case is excluded.
var xCross = math.cross(up.flatten(3), zAxis.flatten(3));
var xAxis = new Vector(xCross[0], xCross[1], xCross[2]).normalize();
var yCross = math.cross(zAxis.flatten(3), xAxis.flatten(3));
var yAxis = new Vector(yCross[0], yCross[1], yCross[2]).normalize();
// Both matrices are flattened column-major (the WebGL convention): each run
// of four numbers is one column, so the basis vectors and the camera
// position occupy the matrix columns.
var orientation = [
xAxis.x, xAxis.y, xAxis.z, 0,
yAxis.x, yAxis.y, yAxis.z, 0,
zAxis.x, zAxis.y, zAxis.z, 0,
0, 0, 0, 1
];
var translation = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
cameraPosition.x, cameraPosition.y, cameraPosition.z, 1
];
// NOTE(review): this composes translation then orientation onto `this`
// (semantics of this.multiply are defined outside this fragment); the
// reported +z misbehavior likely originates in this composition order or in
// the inversion step that turns the camera matrix into a view matrix —
// confirm against the enclosing class.
this.multiply(translation);
this.multiply(orientation);
Does anyone know what I've done wrong?

Three.js - Set rotation of object from normalized directions of the X, Y and Z-Axis

From a model file object definition the placement a new object is given by a location (a point in space) and the normalized directions of the X-Axis, the Y-Axis and the Z-Axis.
How can i translate this to a THREE.Euler so i can rotate my object correctly in space.
So the axes are of type THREE.Vector3.
If the new object is aligned with the world the values would be:
// Identity orientation: the object's local axes coincide with the world axes.
xAxis = new THREE.Vector3( 1, 0, 0 );
yAxis = new THREE.Vector3( 0, 1, 0 );
zAxis = new THREE.Vector3( 0, 0, 1 );
But if for example the whole local UCS of the object is rotated 180 degrees ( or Math.PI ) around the zAxis they would look like this:
// Local frame after a 180-degree rotation about Z: X and Y flip sign, Z is unchanged.
xAxis = new THREE.Vector3( -1, 0, 0 );
yAxis = new THREE.Vector3( 0, -1, 0 );
zAxis = new THREE.Vector3( 0, 0, 1 );
So I need to do something like this:
var object3D = new THREE.Object3D();
// NOTE(review): recent three.js releases make .position non-reassignable —
// object3D.position.copy( location ) may be required instead.
object3D.position = location;
var euler = new THREE.Euler();
// NOTE(review): THREE.Euler has no setFromNormalizedAxes — this is the
// questioner's desired (hypothetical) API, not a real method.
euler.setFromNormalizedAxes( xAxis, yAxis, zAxis );
object3D.rotation = euler;
Or create a rotation matrix from those axes:
var rotationMatrix = new THREE.Matrix4();
// NOTE(review): THREE.Matrix4 has no setFromNormalizedAxes — hypothetical API;
// the real equivalent is .makeBasis( xAxis, yAxis, zAxis ).
rotationMatrix.setFromNormalizedAxes( xAxis, yAxis, zAxis );
object3D.rotation.setFromRotationMatrix( rotationMatrix, "XYZ" );
I am not so good yet with these rotation matrices and euler rotations...
In the example you gave, the answer would be
// Derive the orientation quaternion from the rotation matrix whose columns
// are the desired local x-, y- and z-axes.
object.quaternion.setFromRotationMatrix(
// Populate via .set(): in most three.js releases after the r60s the Matrix4
// constructor ignores element arguments, silently producing the identity.
new THREE.Matrix4().set( -1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1 )
);
The columns of the upper 3x3 of the Matrix4 are the new x-, y-, and z-axes.
You could set object.rotation instead. It does not matter.
three.js r.66

Resources