ThreeJS / Mesh coordinates always 0 (glTF object)

I have this object and I have a problem: the mesh coordinates are always 0 and I don't know how to fix it.
I want to add multiple sprites to this object, but I can't with these coordinates (research from here).
THREE.DRACOLoader.setDecoderPath('Scripts/libs/draco/gltf/');
THREE.DRACOLoader.setDecoderConfig({ type: 'js' });

let loader = new THREE.GLTFLoader();
loader.setDRACOLoader(new THREE.DRACOLoader());

loader.load('/3D_Cars/Dacia_1410_Tuned/components/scene.gltf', function (gltf) {
    carModel = gltf.scene.children[0];

    gltf.scene.traverse(function (child) {
        if (child.isMesh) {
            console.log(child.name, " ", child.matrixWorld.getPosition().x + ',' + child.matrixWorld.getPosition().y + ',' + child.matrixWorld.getPosition().z);
            var v = new THREE.Vector3(0, 0, 0);

            domEvents.addEventListener(carModel.getObjectByName(child.name), 'click', function (event) {
                app.RenameAsCarParts(child.name)
                console.log(child.name)
            }, false)

            //if (child.name == 'rim_RSL1_Aluminium_0') {
            //    child.position.set(100, 50, 3); //testing
            //    var spritey = makeTextSpriteNew(" Hello ",
            //        { fontsize: 15, textColor: { r: 0, g: 0, b: 0, a: 1.0 } });
            //    scene.add(spritey);
            //}
        }
    });

    carModel.getObjectByName('Body_paint_0').material = bodyMaterial;
    carModel.rotation.set(Math.PI / 2, 0, 0);
    carModel.rotateY(Math.PI);
    scene.add(carModel);
});
Console screenshot:
Thanks!
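For reference, a minimal sketch of how a mesh's world-space position is usually read in recent three.js releases, reusing the loader callback above (note that Matrix4.getPosition() no longer exists there, and Object3D.getWorldPosition(target) expects a target vector and up-to-date world matrices):

// A minimal sketch: update the world matrices first, then read each mesh's
// world position instead of calling matrixWorld.getPosition().
gltf.scene.updateMatrixWorld(true);

gltf.scene.traverse(function (child) {
    if (child.isMesh) {
        const worldPos = new THREE.Vector3();
        child.getWorldPosition(worldPos); // copies the translation out of child.matrixWorld
        console.log(child.name, worldPos.x, worldPos.y, worldPos.z);
    }
});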

Related

Collada Kinematics Tweening to desired position with three.js and tween.js

I want to replicate the existing example of loading Collada kinematics and using them with my own model. To do so, I have created the following class:
import { Object3D, MathUtils } from "three";
import { ColladaLoader } from "three/addons/loaders/ColladaLoader.js";
import yumi_path from "../assets/dae/yumi.dae";
import { Tween, Easing, update } from "@tweenjs/tween.js";

export class YuMiMotion {
    constructor(scene, joint_values) {
        this.scene = scene;
        this.tgt_jnt_vals = joint_values;
        this.loader = new ColladaLoader();
        this.yumi_model = new Object3D();
        this.kinematics;
        this.kinematicsTween;
        this.tweenParameters = {};
        this.loadYuMi();
        this.startMotion();
    }

    startMotion = () => {
        if (this.kinematics == undefined) {
            console.log("Kinematics still not loaded!");
            this.anim_frame_id1 = requestAnimationFrame(this.startMotion);
        } else {
            console.log(this.kinematics);
            this.setupTween();
            cancelAnimationFrame(this.anim_frame_id1);
            this.animate();
        }
    }

    animate = () => {
        update();
        this.anim_frame_id2 = requestAnimationFrame(this.animate);
    }

    loadYuMi = async () => {
        const onLoad = (result, yumi) => {
            const model = result.scene.children[0];
            yumi.add(model.clone(true));
            yumi.traverse(function (child) {
                if (child.isMesh) {
                    child.material.flatShading = true;
                }
            });
            this.kinematics = result.kinematics;
        };

        await this.loader.load(
            yumi_path,
            (collada) => onLoad(collada, this.yumi_model),
            undefined,
            function (error) {
                console.log("An error happened", error);
            }
        );

        this.yumi_model.position.set(0.0, 0.0, -6.0);
        this.yumi_model.rotation.set(-Math.PI / 2, 0.0, -Math.PI / 2);
        this.yumi_model.scale.set(20, 20, 20);
        this.scene.add(this.yumi_model);
    }

    setupTween = () => {
        const duration = MathUtils.randInt(1000, 5000);
        const target = {};

        for (const prop in this.tgt_jnt_vals) {
            const joint = this.kinematics.joints[prop];
            const old = this.tweenParameters[prop];
            const position = old ? old : joint.zeroPosition;
            this.tweenParameters[prop] = position;
            target[prop] = this.tgt_jnt_vals[prop]; // MathUtils.randInt( joint.limits.min, joint.limits.max );
            // console.log('target:', prop, this.tgt_jnt_vals[prop]);
        }

        this.kinematicsTween = new Tween(this.tweenParameters).to(target, duration).easing(Easing.Quadratic.Out);

        this.kinematicsTween.onUpdate((object) => {
            for (const prop in this.kinematics.joints) {
                if (prop in this.kinematics.joints) {
                    if (!this.kinematics.joints[prop].static) {
                        this.kinematics.setJointValue(prop, object[prop]);
                    }
                }
            }
        });

        // console.log("Tween Parameters", this.tweenParameters);
        // console.log("kinematics tween", this.kinematicsTween);
        this.kinematicsTween.start();
        setTimeout(this.setupTween, duration);
    }
}
And I'm calling this class as follows:
let target_position = {
    gripper_l_joint: 0.01,
    gripper_l_joint_m: 0.01,
    gripper_r_joint: 0.01,
    gripper_r_joint_m: 0.01,
    yumi_joint_1_l: 10.0,
    yumi_joint_1_r: 10.0,
    yumi_joint_2_l: 20.0,
    yumi_joint_2_r: 20.0,
    yumi_joint_3_l: 30.0,
    yumi_joint_3_r: 30.0,
    yumi_joint_4_l: 40.0,
    yumi_joint_4_r: 40.0,
    yumi_joint_5_l: 50.0,
    yumi_joint_5_r: 50.0,
    yumi_joint_6_l: 60.0,
    yumi_joint_6_r: 60.0,
    yumi_joint_7_l: 70.0,
    yumi_joint_7_r: 70.0,
};
new YuMiMotion(this.scene, target_position);
I have created a repo to reproduce my example here.
Can you please tell me how I can move my model properly to the desired joint positions using Three.js and Tween.js? Thanks in advance.
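For comparison, here is a minimal sketch of tweening a single joint with @tweenjs/tween.js, assuming the kinematics object returned by ColladaLoader (which exposes getJointValue/setJointValue) and a hypothetical tweenJoint helper:

import { Tween, Easing, update } from "@tweenjs/tween.js";

// Hypothetical helper: tween one named joint from its current value to a target.
function tweenJoint(kinematics, jointName, targetValue, duration = 2000) {
    const state = { value: kinematics.getJointValue(jointName) };
    return new Tween(state)
        .to({ value: targetValue }, duration)
        .easing(Easing.Quadratic.Out)
        .onUpdate(() => kinematics.setJointValue(jointName, state.value))
        .start();
}

// Drive all active tweens from the render loop.
function animate(time) {
    requestAnimationFrame(animate);
    update(time);
}
animate();

The two essentials are starting the tween only after result.kinematics exists and calling update() every frame, both of which the class above already does; if the model still does not move, it is worth checking that the target values respect joint.limits (the official Collada kinematics example picks its targets between limits.min and limits.max).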

How can I show the distance a polyline has covered while animating it in Mapbox GL JS?

I am using Mapbox GL JS to capture frame by frame video of the animation of a geoJson (similar to what is described here: https://docs.mapbox.com/mapbox-gl-js/example/animate-a-line/).
The strategies for encoding Mapbox animations into mp4 are described here:
https://github.com/mapbox/mapbox-gl-js/issues/5297 and https://github.com/mapbox/mapbox-gl-js/pull/10172.
I would like to show the distance the polyline has covered as I draw each frame. I need the distance to be in the GL itself (as opposed to, for example, an HTML element on top of the canvas), since that's where I'm capturing the video from.
Can someone describe a performant strategy for doing this?
Aside from tearing apart the Mapbox GL JS, why not simply try drawing directly onto the mapbox canvas?
In this example, there are two canvases, created identically.
With the second, there is another requestAnimationFrame loop that adds an overlay.
I've also shown how it can be recorded with MediaRecorder.
const canvas1 = document.getElementById('canvas1')
const canvas2 = document.getElementById('canvas2')
const video = document.getElementById('output')
const status = document.getElementById('status')

let x = 0 // Value to be displayed

const setupCanvas = (canvas) => {
  canvas.height = 300
  canvas.width = 300
  const ctx = canvas.getContext('2d')
  ctx.fillStyle = 'black'
  ctx.fillRect(300, 100, 100, 100)

  const animateCanvas = () => {
    x += 2
    if (x > 300) x = 10
    ctx.fillStyle = 'rgba(20,20,20,1)'
    ctx.fillRect(0, 0, canvas.width, canvas.height)
    ctx.beginPath()
    ctx.arc(x, 100, 20, 0, 2 * Math.PI)
    ctx.fillStyle = 'rgba(250,0,0,1)'
    ctx.fill()
    requestAnimationFrame(animateCanvas)
  }
  animateCanvas()
}

const addOverlay = (canvas) => {
  const canvasCtx = canvas.getContext('2d')
  function animateOverlay() {
    canvasCtx.font = '48px serif'
    canvasCtx.fillStyle = 'white'
    canvasCtx.fillText(`X: ${x}`, 10, 50)
    requestAnimationFrame(animateOverlay)
  }
  animateOverlay()
}

const initVideoCapture = () => {
  const videoStream = canvas2.captureStream(30)
  const mediaRecorder = new MediaRecorder(videoStream)
  let chunks = []

  mediaRecorder.ondataavailable = function (e) {
    chunks.push(e.data)
  }

  mediaRecorder.onstop = function (e) {
    const blob = new Blob(chunks, { 'type': 'video/mp4' })
    chunks = []
    video.src = URL.createObjectURL(blob)
  }

  mediaRecorder.start()
  status.textContent = 'Recording...'
  setTimeout(function () {
    mediaRecorder.stop()
    status.textContent = 'Complete'
  }, 5000)
}

setupCanvas(canvas1)
setupCanvas(canvas2)
addOverlay(canvas2)
initVideoCapture()
<canvas id="canvas1"></canvas>
<canvas id="canvas2"></canvas>
<video id="output" autoplay controls></video>
<p>Status: <span id="status">Loading</span></p>
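To tie this back to Mapbox specifically, here is a hedged sketch of compositing the map's GL canvas and a 2D text overlay into one recordable canvas. It assumes the map was created with preserveDrawingBuffer: true (otherwise the GL buffer may read back blank), and distanceCovered is a hypothetical value kept up to date elsewhere (for example with Turf, as in the other answer):

// Hedged sketch: draw the current map frame plus a text overlay into one
// canvas, then record that canvas instead of the map canvas directly.
const mapCanvas = map.getCanvas() // requires preserveDrawingBuffer: true on the map
const composite = document.createElement('canvas')
composite.width = mapCanvas.width
composite.height = mapCanvas.height
const compositeCtx = composite.getContext('2d')

function drawComposite() {
  compositeCtx.drawImage(mapCanvas, 0, 0) // current map frame
  compositeCtx.font = '48px serif'
  compositeCtx.fillStyle = 'white'
  compositeCtx.fillText(`${distanceCovered.toFixed(2)} km`, 10, 50) // overlay text
  requestAnimationFrame(drawComposite)
}
drawComposite()

// composite.captureStream(30) can then be handed to MediaRecorder exactly as above.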
Try this:
// show the distance a polyline has covered while animating it in mapbox gl js
var map = new mapboxgl.Map({
  container: 'map',
  style: 'mapbox://styles/mapbox/streets-v9',
  center: [-74.5, 40],
  zoom: 9,
})

var geojson = {
  type: 'Feature',
  properties: {},
  geometry: {
    type: 'LineString',
    coordinates: [
      [-74.5, 40],
      [-74.45, 40.7],
      [-74.36, 40.8],
      [-74.36, 41.2],
    ],
  },
}

map.on('load', function () {
  map.addLayer({
    id: 'route',
    type: 'line',
    source: {
      type: 'geojson',
      data: geojson,
    },
    layout: {
      'line-join': 'round',
      'line-cap': 'round',
    },
    paint: {
      'line-color': '#888',
      'line-width': 8,
    },
  })

  var lineDistance = turf.lineDistance(geojson, 'kilometers')
  var arc = []
  var maxTravelTime = 0

  for (var i = 0; i < lineDistance; i += lineDistance / 200) {
    var segment = turf.along(geojson, i, 'kilometers')
    arc.push(segment.geometry.coordinates)
  }

  map.addLayer({
    id: 'point',
    type: 'symbol',
    source: {
      type: 'geojson',
      data: {
        type: 'FeatureCollection',
        features: [
          {
            type: 'Feature',
            properties: {},
            geometry: {
              type: 'Point',
              coordinates: arc[0],
            },
          },
        ],
      },
    },
    layout: {
      'icon-image': 'car',
      'icon-rotate': ['get', 'bearing'],
      'icon-rotation-alignment': 'map',
      'icon-allow-overlap': true,
      'icon-ignore-placement': true,
    },
  })

  function animate() {
    map.getSource('point').setData(geojson)
    if (maxTravelTime < lineDistance) {
      maxTravelTime += lineDistance / 200
    } else {
      maxTravelTime = 0
    }
    requestAnimationFrame(animate)
  }

  animate()
})
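The snippet above computes lineDistance but never renders the covered distance. A minimal sketch of tracking it per frame, reusing the arc array and the 'point' source from above and assuming Turf.js v5+ is available globally as turf (older versions expose turf.lineDistance instead of turf.length):

// Sketch: count how many arc points have been consumed and measure the
// partial line travelled so far.
var counter = 1

function distanceCovered() {
  var travelled = turf.lineString(arc.slice(0, Math.max(counter, 2)))
  return turf.length(travelled, { units: 'kilometers' }) // km covered so far
}

function animateWithDistance() {
  counter = Math.min(counter + 1, arc.length)
  map.getSource('point').setData({
    type: 'Feature',
    properties: { distance: distanceCovered().toFixed(2) + ' km' },
    geometry: { type: 'Point', coordinates: arc[counter - 1] },
  })
  if (counter < arc.length) requestAnimationFrame(animateWithDistance)
}

animateWithDistance()

Because the distance is stored in the feature's properties, it can be drawn inside the GL canvas by adding 'text-field': ['get', 'distance'] to the point layer's layout, so it appears in the captured frames rather than in a separate HTML element.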

Error when importing a .gltf file into threejs (in expo)

When importing a .gltf file into threejs (in expo), I get the following error:
Error: Event {
"isTrusted": false,
}
I am using the GLTFLoader from three/examples/jsm/loaders/GLTFLoader.js in an Expo React Native project with the following code:
loader.load(
  "../body.gltf",
  (gltf) => {
    console.log("Object: ", gltf);
    scene.add(gltf.scene);
  },
  (progress) => console.log("Progress: ", progress),
  (err) => console.log("Error: ", err)
);
Is there something in my permissions I am not aware of, is it something in Expo, or is it something else?
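One thing worth noting: a relative path such as "../body.gltf" generally cannot be fetched at runtime in React Native, since GLTFLoader's internal FileLoader has no bundler to resolve it against. A minimal sketch of resolving the file through expo-asset first (this assumes "gltf" has been added to resolver.assetExts in metro.config.js):

import { Asset } from "expo-asset";

(async () => {
  // Resolve the bundled module to a URI the loader can actually fetch.
  const asset = Asset.fromModule(require("../body.gltf"));
  await asset.downloadAsync();

  loader.load(
    asset.localUri || asset.uri,
    (gltf) => scene.add(gltf.scene),
    undefined,
    (err) => console.log("Error: ", err)
  );
})();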
Loading assets:
useEffect(() => {
  (async () => {
    const assets = [
      Asset.fromModule(require("./assets/abductors.gltf")),
      Asset.fromModule(require("./assets/abs.gltf")),
      Asset.fromModule(require("./assets/adductors.gltf")),
      Asset.fromModule(require("./assets/biceps.gltf")),
      Asset.fromModule(require("./assets/bracheoradialis.gltf")),
      Asset.fromModule(require("./assets/calves.gltf")),
      Asset.fromModule(require("./assets/chest.gltf")),
      Asset.fromModule(require("./assets/feet.gltf")),
      Asset.fromModule(require("./assets/flexors.gltf")),
      Asset.fromModule(require("./assets/forearms.gltf")),
      Asset.fromModule(require("./assets/glutes.gltf")),
      Asset.fromModule(require("./assets/hamstrings.gltf")),
      Asset.fromModule(require("./assets/hands.gltf")),
      Asset.fromModule(require("./assets/head.gltf")),
      Asset.fromModule(require("./assets/lats.gltf")),
      Asset.fromModule(require("./assets/obliques.gltf")),
      Asset.fromModule(require("./assets/pelvic.gltf")),
      Asset.fromModule(require("./assets/quads.gltf")),
      Asset.fromModule(require("./assets/rotators.gltf")),
      Asset.fromModule(require("./assets/serratus.gltf")),
      Asset.fromModule(require("./assets/shoulders.gltf")),
      Asset.fromModule(require("./assets/tibalis.gltf")),
      Asset.fromModule(require("./assets/transverse.gltf")),
      Asset.fromModule(require("./assets/traps.gltf")),
      Asset.fromModule(require("./assets/triceps.gltf"))
    ];

    let body = new THREE.Scene();
    for (let i = 0; i < assets.length; i++) {
      await assets[i].downloadAsync();
      const loader = new GLTFLoader();
      loader.load(
        assets[i].uri || "",
        (gltf) => {
          assets[i] = gltf.scene;
          body.add(gltf.scene);
        },
        (xhr) => {
          console.log(`${(xhr.loaded / xhr.total) * 100}% loaded`);
        },
        (error) => {
          console.error("An error happened", error);
        }
      );
    }
    setBody(body);
  })();
}, []);
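A note on the loop above: loader.load is fire-and-forget, so setBody(body) runs before any model has finished loading. A minimal sketch of an awaited variant, using the loadAsync method that GLTFLoader inherits from THREE.Loader in recent three.js releases (this would replace the loop body inside the async function):

const loader = new GLTFLoader();
const body = new THREE.Scene();

for (const asset of assets) {
  await asset.downloadAsync();
  // loadAsync resolves once the file is parsed, so body is fully
  // populated before setBody runs.
  const gltf = await loader.loadAsync(asset.localUri || asset.uri);
  body.add(gltf.scene);
}

setBody(body);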
Showing them:
const _onContextCreate = async (gl) => {
  const { drawingBufferWidth: width, drawingBufferHeight: height } = gl;

  let renderer = new ExpoTHREE.Renderer({ gl });
  renderer.setSize(width, height);
  renderer.setClearColor(0xffffff);

  let camera = new THREE.PerspectiveCamera(80, width / height, 0.01, 1000);
  camera.position.z = 5;

  let scene = new THREE.Scene();

  const light = new THREE.PointLight(0xffffff, 1, 500);
  light.position.set(5, 10, 10);
  scene.add(light);

  scene.add(body);
  body.position.set(0, -2, -5);
  body.scale.set(1.2, 1.2, 1.2);
  body.rotation.set(0, 0, 0);

  [...]
};

return (
  <Model
    _onContextCreate={_onContextCreate}
    body={body}
    setActiveMuscles={setActiveMuscles}
    onChangeStroke={onChangeStroke}
  />
);

Chartjs animate x-axis displaying data incorrectly in mac device

I am working with Chart.js. I am trying to animate the chart from right to left (or left to right) on load.
var canvas = document.getElementById('chart_canvas');
var ctx = canvas.getContext('2d');

// Generate random data to plot
var DATA_POINT_NUM = 10;
var data = {
  labels: [],
  datasets: [
    {
      data: [],
    },
  ]
}

for (var i = 0; i < DATA_POINT_NUM; i++) {
  data.datasets[0].data.push(Math.random() * 10);
  data.labels.push(String.fromCharCode(65 + i));
}

var oldDraw = Chart.controllers.line.prototype.draw;
Chart.controllers.line.prototype.draw = function (animationFraction) {
  var animationConfig = this.chart.options.animation;
  if (animationConfig.xAxis === true) {
    var ctx = this.chart.chart.ctx;
    var hShift = (1 - animationFraction) * ctx.canvas.width;
    ctx.save();
    ctx.setTransform(1, 0, 0, 1, hShift, 0);
    if (animationConfig.yAxis === true) {
      oldDraw.call(this, animationFraction);
    } else {
      oldDraw.call(this, 1);
    }
    ctx.restore();
  } else if (animationConfig.yAxis === true) {
    oldDraw.call(this, animationFraction);
  } else {
    oldDraw.call(this, 1);
  }
}

var lineChart = new Chart(ctx, {
  type: 'line',
  data: data,
  options: {
    animation: {
      duration: 5000,
      xAxis: true,
      yAxis: true,
    }
  }
});
Example 1
The above code works fine on Windows, but I'm facing an issue on Mac devices. While animating from left to right, the data displays incorrectly: it moves upward from the x-axis. How can I fix this issue?
I am attaching a screenshot.
Screenshot
Please change setTransform to transform. setTransform replaces the canvas's current transformation matrix outright, which discards the devicePixelRatio scaling Chart.js applies on high-DPI (Retina) Mac displays, whereas transform multiplies the horizontal shift onto the existing matrix and keeps that scaling intact.
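To illustrate the difference with plain Canvas 2D calls, independent of Chart.js:

// setTransform REPLACES the current matrix, so any scaling already applied
// (for example a 2x Retina scale) is lost:
ctx.setTransform(1, 0, 0, 1, hShift, 0);

// transform MULTIPLIES onto the current matrix, so the existing scaling is
// kept and only the horizontal shift is added:
ctx.transform(1, 0, 0, 1, hShift, 0);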
Try the following code
var canvas = document.getElementById('chart_canvas');
var ctx = canvas.getContext('2d');

// Generate random data to plot
var DATA_POINT_NUM = 10;
var data = {
  labels: [],
  datasets: [
    {
      data: [],
    },
  ]
}

for (var i = 0; i < DATA_POINT_NUM; i++) {
  data.datasets[0].data.push(Math.random() * 10);
  data.labels.push(String.fromCharCode(65 + i));
}

var oldDraw = Chart.controllers.line.prototype.draw;
Chart.controllers.line.prototype.draw = function (animationFraction) {
  var animationConfig = this.chart.options.animation;
  if (animationConfig.xAxis === true) {
    var ctx = this.chart.chart.ctx;
    var hShift = (1 - animationFraction) * ctx.canvas.width;
    ctx.save();
    ctx.transform(1, 0, 0, 1, hShift, 0);
    if (animationConfig.yAxis === true) {
      oldDraw.call(this, animationFraction);
    } else {
      oldDraw.call(this, 1);
    }
    ctx.restore();
  } else if (animationConfig.yAxis === true) {
    oldDraw.call(this, animationFraction);
  } else {
    oldDraw.call(this, 1);
  }
}

var lineChart = new Chart(ctx, {
  type: 'line',
  data: data,
  options: {
    animation: {
      duration: 5000,
      xAxis: true,
      yAxis: true,
    }
  }
});

Three JS - Uncaught TypeError: Cannot read property 'x' of undefined

I'm trying to create a morphing object based on a number of particles. I have 4 objects: 2 normal three.js shapes (a cube and a sphere) and 2 OBJ objects.
When I hover over the name of an object, the particles morph into that shape.
The problem is that when I hover over the name of an OBJ object, the console returns the following error at the last line, where I read newParticles.vertices[i].x, etc.:
Uncaught TypeError: Cannot read property 'x' of undefined
Code:
// Particle Vars
var particleCount = numberOfParticles;

let spherePoints,
    cubePoints,
    rocketPoints,
    spacemanPoints;

var particles = new Geometry(),
    sphereParticles = new Geometry(),
    cubeParticles = new Geometry(),
    rocketParticles = new Geometry(),
    spacemanParticles = new Geometry();

var pMaterial = new PointsMaterial({
  color: particleColor,
  size: particleSize,
  map: new TextureLoader().load(particleImage),
  blending: AdditiveBlending,
  transparent: true
});

// Objects
var geometry = new SphereGeometry( 5, 30, 30 );
spherePoints = GeometryUtils.randomPointsInGeometry(geometry, particleCount)

var geometry = new BoxGeometry( 9, 9, 9 );
cubePoints = GeometryUtils.randomPointsInGeometry(geometry, particleCount)

// Custom (OBJ) Objects
const codepenAssetUrl = 'https://s3-us-west-2.amazonaws.com/s.cdpn.io/605067/';

var objLoader = new OBJLoader();
objLoader.setPath('./upload/');
objLoader.load( 'Nymph.obj', function ( object ) {
  object.traverse( function ( child ) {
    if ( child instanceof Mesh ) {
      let scale = 0.2; // was 2.1 for the rocket
      let area = new Box3();
      area.setFromObject( child );
      let yOffset = (area.max.y * scale) / 2;

      child.geometry.scale(scale, scale, scale);
      rocketPoints = GeometryUtils.randomPointsInBufferGeometry(child.geometry, particleCount);
      createVertices(rocketParticles, rocketPoints, yOffset, 2);
    }
  });
});

var objLoader = new OBJLoader();
objLoader.setPath(codepenAssetUrl);
objLoader.load( 'Astronaut.obj', function ( object ) {
  object.traverse( function ( child ) {
    if ( child instanceof Mesh ) {
      let scale = 4.6;
      let area = new Box3();
      area.setFromObject( child );
      let yOffset = (area.max.y * scale) / 2;

      child.geometry.scale(scale, scale, scale);
      spacemanPoints = GeometryUtils.randomPointsInBufferGeometry(child.geometry, particleCount);
      createVertices(spacemanParticles, spacemanPoints, yOffset, 3);
    }
  });
});

// Particles
for (var p = 0; p < particleCount; p++) {
  var vertex = new Vector3();
  vertex.x = 0;
  vertex.y = 0;
  vertex.z = 0;
  particles.vertices.push(vertex);
}

createVertices(sphereParticles, spherePoints, null, null)
createVertices(cubeParticles, cubePoints, null, 1)

function createVertices (emptyArray, points, yOffset = 0, trigger = null) {
  for (var p = 0; p < particleCount; p++) {
    var vertex = new Vector3();
    vertex.x = points[p]['x'];
    vertex.y = points[p]['y'] - yOffset;
    vertex.z = points[p]['z'];
    emptyArray.vertices.push(vertex);
  }
  if (trigger !== null) {
    triggers[trigger].setAttribute('data-disabled', false)
  }
}

var particleSystem = new Points(
  particles,
  pMaterial
);
particleSystem.sortParticles = true;

// Add the particles to the scene
scene.add(particleSystem);

// Animate
const normalSpeed = (defaultAnimationSpeed / 100),
      fullSpeed = (morphAnimationSpeed / 100)

let animationVars = {
  speed: normalSpeed
}

function animate() {
  particleSystem.rotation.y += animationVars.speed;
  particles.verticesNeedUpdate = true;

  window.requestAnimationFrame( animate );
  renderer.render( scene, camera );
}

animate();
setTimeout(toSphere, 500);

function toSphere () {
  handleTriggers(0);
  morphTo(sphereParticles);
}

function toCube () {
  handleTriggers(1);
  morphTo(cubeParticles);
}

function toRocket () {
  handleTriggers(2);
  morphTo(rocketParticles);
}

function toSpaceman () {
  handleTriggers(3);
  morphTo(spacemanParticles);
}

function morphTo (newParticles, color = '0xffffff') {
  TweenMax.to(animationVars, .3, { ease: Power4.easeIn, speed: fullSpeed, onComplete: slowDown });
  particleSystem.material.color.setHex(color);

  for (var i = 0; i < particles.vertices.length; i++) {
    TweenMax.to(particles.vertices[i], 4, {
      ease: Elastic.easeOut.config(1, 0.75),
      x: newParticles.vertices[i].x,
      y: newParticles.vertices[i].y,
      z: newParticles.vertices[i].z
    });
  }
}
P.S. note that I'm using webpack.
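One likely (though unconfirmed) cause: the OBJ-based particle sets are filled asynchronously by OBJLoader, so rocketParticles/spacemanParticles can still be empty, or shorter than particles.vertices, when the morph is triggered. A minimal defensive sketch, using a hypothetical morphToSafe wrapper around the existing morphTo:

// Sketch: skip the morph if the target particle set has not finished loading.
function morphToSafe(newParticles, color = 0xffffff) {
  if (newParticles.vertices.length < particles.vertices.length) {
    console.warn('Morph target not ready yet (OBJ still loading?)');
    return;
  }
  morphTo(newParticles, color);
}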
