I have the following structure in my scene (imported from glb):
I am trying to move the mesh svgGroupsvg_test to the center of the mesh named M_Top (the red cross marks the expected location).
Here is my code:
function engraveSVG(object, value) {
  // object is the name of the target mesh
  var svgMeshName = 'svgGroup' + value;
  loadSVGandFit(svgMeshName, object, value).then(res => {
    var svgMesh = scene.getObjectByName(svgMeshName);
    svgMesh.scale.set(0.1, 0.1, 1);
    const axesHelper = new THREE.AxesHelper(20);
    svgMesh.parent.add(axesHelper);
    moveCenterMeshToOtherMeshCenter(svgMesh, scene.getObjectByName(object));
  });
}
I tried the following functions:
function moveCenterMeshToOtherMeshCenter(centerMesh, otherMesh) {
  // get the center positions of both meshes in world space
  const centerMeshPosition = new THREE.Vector3();
  const otherMeshPosition = new THREE.Vector3();
  centerMesh.getWorldPosition(centerMeshPosition);
  otherMesh.getWorldPosition(otherMeshPosition);
  // calculate the difference between the two world positions
  const difference = otherMeshPosition.sub(centerMeshPosition);
  // translate the center mesh by the difference (note: translateX/Y/Z work in local space)
  centerMesh.translateX(difference.x);
  centerMesh.translateY(difference.y);
  centerMesh.translateZ(difference.z);
}
function moveCenterToOther(centerMesh, otherMesh) {
  const centerBox = new THREE.Box3().setFromObject(centerMesh);
  const otherBox = new THREE.Box3().setFromObject(otherMesh);
  const centerPosition = centerBox.getCenter(new THREE.Vector3());
  const otherPosition = otherBox.getCenter(new THREE.Vector3());
  const offset = new THREE.Vector3().subVectors(otherPosition, centerPosition);
  centerMesh.position.add(offset);
}
Is there something wrong? getCenter returns a value around 0.
I chose to add the SVG mesh to the same group as my target mesh, but it changes nothing.
The axes helper is also in local coordinates.
Any help would be very appreciated.
I found a solution: I add my SVG at the root of the scene. I am now able to get the correct bounding box size and center in world coordinates.
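For completeness, here is a minimal sketch of the same idea that should also work when the mesh stays nested inside a transformed group, by converting the desired world position back into the parent's local space (the function name is illustrative, not from my actual code):
// Sketch: center movingMesh's bounding box on targetMesh's bounding box,
// expressing the result in the parent's local space.
function centerOnMesh(movingMesh, targetMesh) {
  movingMesh.updateWorldMatrix(true, false);
  targetMesh.updateWorldMatrix(true, false);
  const movingCenter = new THREE.Box3().setFromObject(movingMesh).getCenter(new THREE.Vector3());
  const targetCenter = new THREE.Box3().setFromObject(targetMesh).getCenter(new THREE.Vector3());
  // world-space shift that puts the two centers on top of each other
  const shift = new THREE.Vector3().subVectors(targetCenter, movingCenter);
  const desiredWorldPos = movingMesh.getWorldPosition(new THREE.Vector3()).add(shift);
  // convert that world position into the parent's local space before assigning it
  movingMesh.position.copy(movingMesh.parent.worldToLocal(desiredWorldPos));
}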
I am trying to get the position of a hole in an extruded geometry. I created a plane and made a hole in its geometry. I want to get the x, y, z coordinates of the center of the hole. Are there any methods to get this?
Here is the demo: https://codepen.io/DYDOI-NSK/pen/XWqJzXG?editors=0011
Here is the code:
I created the shape of the plane:
let shape = new THREE.Shape();
let width = 30;
let height = 30;
shape.moveTo(-width, height);
shape.lineTo(-width, -height);
shape.lineTo(width, -height);
shape.lineTo(width, height);
shape.lineTo(-width, height);
I created a hole path and added it to the shape:
let hole = new THREE.Path();
hole.absarc(20, 10, 10, 0, Math.PI * 2, false); // first two arguments are the x, y coords of the hole
shape.holes.push(hole);
I created the plane and swapped in the extruded geometry:
let geometry = new THREE.PlaneGeometry(30, 30);
let material = new THREE.MeshBasicMaterial({ color: new THREE.Color('#cea6a6'), side: THREE.DoubleSide });
let mesh = new THREE.Mesh(geometry, material);
let newGeometry = new THREE.ExtrudeGeometry(shape, settings); // `settings` holds the extrude options (defined in the demo)
mesh.geometry.dispose();
mesh.geometry = newGeometry;
After 4 days I understood how to do it. I simply created a line from the mesh center to the hole's configured position, applied the mesh's quaternion to the line, and read the x, y, z coords of the hole from it.
Maybe there are more optimized solutions, but this is the only one I could come up with. I will be glad if someone shares a more optimized solution :D
Here is the codepen: https://codepen.io/DYDOI-NSK/pen/XWqJzXG?editors=0011
Here is the code:
/*
 * findCoords - function to find the hole coords in 3D space
 * data - parameter that holds the x, y of the hole
 */
let findCoords = function (data) {
  let vertexes = [];
  // Set the coords where the hole was created
  let hole = new THREE.Vector3(
    data.x,
    data.y,
    0
  );
  vertexes.push(new THREE.Vector3());
  vertexes.push(hole);
  // Create the line
  const material = new THREE.LineBasicMaterial({
    color: 0x0000ff
  });
  const geometry = new THREE.BufferGeometry().setFromPoints(vertexes);
  const line = new THREE.Line(geometry, material);
  scene.add(line);
  // Move the line to the center of the mesh
  line.position.copy(mesh.position);
  // Rotate the line the same way the mesh is rotated
  line.applyQuaternion(mesh.quaternion);
  // Extract the hole coords from the second vertex of the line
  let holeCoord = new THREE.Vector3();
  const positionAttribute = line.geometry.getAttribute('position');
  holeCoord.fromBufferAttribute(positionAttribute, 1);
  return holeCoord;
}
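A possibly shorter variant of the same idea (a sketch, not tested against the demo): since the hole's x, y are known in the mesh's local space, the mesh's own world matrix can apply the rotation and translation directly, without building a helper line.
// Sketch: convert the hole's local position straight into world space.
let findCoordsViaMatrix = function (mesh, data) {
  mesh.updateMatrixWorld(true); // make sure the world matrix is current
  // hole position in the mesh's local space (same x, y used to create the hole)
  let localHole = new THREE.Vector3(data.x, data.y, 0);
  // localToWorld applies the mesh's position, rotation and scale in one step
  return mesh.localToWorld(localHole);
}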
I want my child meshes to be draggable only in the x & y directions with respect to the camera. I am using Babylon's free camera. I tried using the "Left Right Dragger" and "dragPlaneNormal" approaches but could not achieve the desired result; the mesh is still draggable in the z direction.
const axis = new BABYLON.Vector3(0,1,0);
const leftRightDragger = new BABYLON.PointerDragBehavior({dragAxis: axis});
leftRightDragger.moveAttached = true;
clone_child[child].addBehavior(leftRightDragger);
or
var pointerDragBehavior = new BABYLON.PointerDragBehavior({dragPlaneNormal: new BABYLON.Vector3(1,1,0)});
clone_child[child].addBehavior(pointerDragBehavior);
pointerDragBehavior.useObjectOrientationForDragging = false;
Use a Babylon.js lifecycle method: reset the z position before every render.
const mesh = clone_child[child]; // the mesh that is being dragged
const zPos = mesh.position.z;
scene.registerBeforeRender(() => {
  mesh.position.z = zPos; // lock z so the drag only moves the mesh in x and y
});
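Putting the two snippets together, a minimal sketch (assuming clone_child[child] is the draggable mesh, as in the question):
// Drag behaviour from the question plus the per-frame z reset from the answer.
const draggedMesh = clone_child[child];
// with no dragAxis/dragPlaneNormal option, the drag plane faces the camera (per the Babylon docs)
const dragBehavior = new BABYLON.PointerDragBehavior();
dragBehavior.useObjectOrientationForDragging = false;
draggedMesh.addBehavior(dragBehavior);
const zPos = draggedMesh.position.z;
scene.registerBeforeRender(() => {
  draggedMesh.position.z = zPos; // undo any z drift before each frame renders
});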
I want to make a 3D building using Three.js. For example, I made 6 walls and a floor with a checkerboard texture. I used clippingPlanes for wall1 and wall4:
floor1.material.clippingPlanes = [plane1,plane4];
I made my planes (plane1 and plane4) from my walls (wall1 and wall4). For example, here is my wall4 PlaneGeometry and plane4 code:
var wallGeometry4 = new THREE.PlaneGeometry(40, Floor_Height, 1, 1);
var wall4 = createMesh(wallGeometry4, "brick_diffuse.jpg", THREE.DoubleSide, 1024, 1024);
unit1.add(wall4);
wall4.position.x = -10;
wall4.position.y = 0;
wall4.position.z = -20;
wall4.rotation.y = 1.5 * Math.PI;
wall4.add(new THREE.EdgesHelper(wall4, 0x000000));
var plane4 = new THREE.Plane();
var normal4 = new THREE.Vector3();
var point4 = new THREE.Vector3();
normal4.set(0, 0, -1).applyQuaternion(wall4.quaternion);
point4.copy(wall4.position);
plane4.setFromNormalAndCoplanarPoint(normal4, point4);
But I see an empty area between wall5 and wall6, because plane4 (used for clipping the floor) isn't the same size as wall4. I think plane4 covers the whole scene, since a THREE.Plane is infinite. How can I change the size of my plane so it clips correctly? Or is there any way to make the floor bounded by the walls?
One way to achieve this, as suggested, is to use ShapeGeometry.
When you are creating your walls, you can save the x and z coordinates of their start and end points in an array to form a closed loop of 2D points. Then you can create a custom shape from these points and build a ShapeGeometry floor from it:
points = [{x: 0, y: 0}, {x: 0, y: 10}, {x: 10, y: 10}, {x: 10, y: 0}, {x: 0, y: 0}]

function getShapeFromPoints(points) {
  const shape = new THREE.Shape();
  shape.moveTo(points[0].x, points[0].y);
  for (let i = 1; i < points.length; i++) {
    shape.lineTo(points[i].x, points[i].y);
  }
  return shape;
}

function createPlaneFromPoints(points) {
  const planeMaterial = getPlaneMaterial();
  const shape = getShapeFromPoints(points);
  const geometry = new THREE.ShapeBufferGeometry(shape);
  geometry.rotateX(degreeToRadians(-90));
  const mesh = new THREE.Mesh(geometry, planeMaterial);
  return mesh;
}
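As a usage sketch: getPlaneMaterial and degreeToRadians are helpers assumed from my project, so here are minimal illustrative stand-ins for them.
// Illustrative stand-ins for the assumed helpers
function getPlaneMaterial() {
  return new THREE.MeshBasicMaterial({ color: 0xcccccc, side: THREE.DoubleSide });
}
function degreeToRadians(degrees) {
  return degrees * Math.PI / 180;
}

// build the floor from the wall outline and add it to the scene
const floor = createPlaneFromPoints(points);
scene.add(floor);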
Hope that helps you!
I've been learning how to integrate ThreeJS with Mapbox, using this example. It struck me as weird that the approach is to leave the loaded model in its own coordinate system and transform the camera location on render. So I attempted to rewrite the code so that the GLTF model is transformed when loaded, and the ThreeJS camera is then simply synchronised with the Mapbox camera, with no further modifications.
The code now looks like this:
function newScene() {
  const scene = new THREE.Scene();
  // create two three.js lights to illuminate the model
  const directionalLight = new THREE.DirectionalLight(0xffffff);
  directionalLight.position.set(0, -70, 100).normalize();
  scene.add(directionalLight);
  const directionalLight2 = new THREE.DirectionalLight(0xffffff);
  directionalLight2.position.set(0, 70, 100).normalize();
  scene.add(directionalLight2);
  return scene;
}
function newRenderer(map, gl) {
  // use the Mapbox GL JS map canvas for three.js
  const renderer = new THREE.WebGLRenderer({
    canvas: map.getCanvas(),
    context: gl,
    antialias: true
  });
  renderer.autoClear = false;
  return renderer;
}
// create a custom layer for a 3D model per the CustomLayerInterface
export function addModel(modelPath, origin, altitude = 0, orientation = [Math.PI / 2, 0, 0]) {
  const coords = mapboxgl.MercatorCoordinate.fromLngLat(origin, altitude);
  // transformation parameters to position, rotate and scale the 3D model onto the map
  const modelTransform = {
    translateX: coords.x,
    translateY: coords.y,
    translateZ: coords.z,
    rotateX: orientation[0],
    rotateY: orientation[1],
    rotateZ: orientation[2],
    /* Since our 3D model is in real world meters, a scale transform needs to be
     * applied since the CustomLayerInterface expects units in MercatorCoordinates.
     */
    scale: coords.meterInMercatorCoordinateUnits()
  };
  const scaleVector = new THREE.Vector3(modelTransform.scale, -modelTransform.scale, modelTransform.scale);
  return {
    id: "3d-model",
    type: "custom",
    renderingMode: "3d",
    onAdd: function(map, gl) {
      this.map = map;
      this.camera = new THREE.Camera();
      this.scene = newScene();
      this.renderer = newRenderer(map, gl);
      // use the three.js GLTF loader to add the 3D model to the three.js scene
      new THREE.GLTFLoader()
        .load(modelPath, gltf => {
          gltf.scene.position.fromArray([coords.x, coords.y, coords.z]);
          gltf.scene.setRotationFromEuler(new THREE.Euler().fromArray(orientation));
          gltf.scene.scale.copy(scaleVector);
          this.scene.add(gltf.scene);
          const bbox = new THREE.Box3().setFromObject(gltf.scene);
          console.log(bbox);
          this.scene.add(new THREE.Box3Helper(bbox, 'blue'));
        });
    },
    render: function(gl, matrix) {
      this.camera.projectionMatrix = new THREE.Matrix4().fromArray(matrix);
      this.renderer.state.reset();
      this.renderer.render(this.scene, this.camera);
      // this.map.triggerRepaint();
    }
  };
}
It basically works, in that a model is loaded and drawn in the right location in the Mapbox world. However, instead of looking like this:
It now looks like this, a mangled mess that jitters around chaotically as the camera moves:
I'm not yet familiar enough with ThreeJS to have any idea what I did wrong.
Here's a side-by-side comparison of the old, functional code on the right, vs the new broken code on the left.
Further investigation
I suspect the cause may be to do with shrinking all the coordinates down to within the [0..1] range of the projected (Mercator) coordinate system and losing floating-point precision. When I scale the model up by 100 times, it renders like this: messy and glitchy, but at least recognisable as something.
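For comparison, here is roughly what the render step of the original approach does (a sketch based on the Mapbox custom-layer example, reusing the modelTransform fields defined above): the model keeps its meter-scale coordinates, and the Mercator translation and scale are folded into the projection matrix each frame, so the tiny [0..1] Mercator values are never baked into the geometry.
render: function(gl, matrix) {
  // model-to-Mercator transform rebuilt every frame from modelTransform
  const rotationX = new THREE.Matrix4().makeRotationX(modelTransform.rotateX);
  const rotationY = new THREE.Matrix4().makeRotationY(modelTransform.rotateY);
  const rotationZ = new THREE.Matrix4().makeRotationZ(modelTransform.rotateZ);
  const l = new THREE.Matrix4()
    .makeTranslation(modelTransform.translateX, modelTransform.translateY, modelTransform.translateZ)
    .scale(new THREE.Vector3(modelTransform.scale, -modelTransform.scale, modelTransform.scale))
    .multiply(rotationX)
    .multiply(rotationY)
    .multiply(rotationZ);
  const m = new THREE.Matrix4().fromArray(matrix);
  // the camera gets map matrix * model transform; the scene itself stays near the origin
  this.camera.projectionMatrix = m.multiply(l);
  this.renderer.state.reset();
  this.renderer.render(this.scene, this.camera);
  this.map.triggerRepaint();
}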
I am trying to move an Object3D group along a specific ellipse. I got it to move in a circle successfully using:
group.position.x = Math.cos(toRadians(loc+=5)) * radius;
group.position.z = Math.sin(toRadians(loc+=5)) * radius;
However, I want it to follow a specific 3-dimensional ellipse. I have made the ellipse successfully and visualized it here:
curve = new THREE.EllipseCurve(0,0,80,60,0,toRad(360),false);
path = new THREE.Path( curve.getPoints( 2000 ) );
var geo = path.createPointsGeometry( 50 );
ellipse = new THREE.Line(
geo,
new THREE.LineBasicMaterial( { color : 0xff0000 } )
);
DISPLAY.scene.add(ellipse);
ellipse.rotation.x=toRadians(94);
ellipse.rotation.y=toRadians(12);
And I try to animate it like this (outside my 'render' function):
var pathAnim = new PathAnimation(group, path, 0.4);
Or even like this: (within the render function)
group.position = path.getPoint(t += 0.01); // I've tried getPoints, getPointAt, etc.
It doesn't seem to work. Any ideas as to how I can get this working? Thanks
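One sketch of what might work, under two assumptions not confirmed by the code above: Object3D.position should be mutated (with set/copy) rather than reassigned, and the curve points are in the ellipse's local XY plane, so the rotation applied to the ellipse line has to be applied to each point as well.
let t = 0; // progress along the curve, 0..1
function moveGroupAlongEllipse() {
  t = (t + 0.001) % 1;
  const p = curve.getPointAt(t);               // THREE.Vector2 in the ellipse's local XY plane
  group.position.set(p.x, p.y, 0);             // mutate position instead of reassigning it
  group.position.applyEuler(ellipse.rotation); // apply the same rotation given to the ellipse line
}
// call moveGroupAlongEllipse() once per frame inside the render function
Calling this each frame would then move the group along the rotated ellipse, assuming the ellipse line and the group share the same parent.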