Threejs update single material on glb with multiple objects - three.js

I have a .glb model which has multiple objects and materials. I'm updating the children that have a certain material at runtime, but as there are nearly 1,000 children with this material it's choking a little. I'm looking for a more elegant solution if it exists.
AFRAME.registerComponent('apply-texture', {
schema: {
texture: { type: 'string' }
},
init: function() {
var data = this.data;
var el = this.el;
var loader = new THREE.TextureLoader();
var alphamap = loader.load("./images/PETAL-DATA.jpg");
el.object3D.traverse(function(child) {
if (child.isMesh && child.material.name == "Material_to_update") {
var texture = loader.load(data.texture,
function ( texture ) {
child.material.map = texture;
child.material.alphaMap = alphamap;
child.material.map.encoding = THREE.sRGBEncoding;
child.material.side = THREE.DoubleSide;
child.material.color.set("#FFFFFF");
child.material.alphaTest = 0.5;
child.material.opacity = 0.8;
child.material.needsUpdate = true;
},
function ( xhr ) {
console.log( (xhr.loaded / xhr.total * 100) + '% loaded' );
},
function ( xhr ) {
console.log( 'An error happened' );
}
);
}
As all the child objects are using the same material I'm hoping there's a way of just updating the material itself rather than the 1,000 children.

Instead of creating 1000 image loaders, use a system which will provide the loaded texture to each component:
<script src="https://aframe.io/releases/1.1.0/aframe.min.js"></script>
<script>
// register system
AFRAME.registerSystem("image-provider", {
init: function() {
// load some external image
this.image = new THREE.TextureLoader().load(
"https://i.imgur.com/wjobVTN.jpg"
);
},
getImage() {
// the components will grab the image via this function
return this.image;
}
});
// register the component
AFRAME.registerComponent("image-provider", {
init: function() {
// grab the system reference
const system = this.system;
// wait until the model is loaded
this.el.addEventListener("model-loaded", e => {
// grab the mesh
const mesh = this.el.getObject3D("mesh");
// apply the texture from the system
mesh.traverse(node => {
if (node.material) {
node.material.map = system.getImage();
node.material.needsUpdate = true;
}
});
});
}
});
</script>
<body>
<div style="z-index: 99999; position: fixed; top: 2.5%">
<p>
Model by Kelli Ray
</p>
</div>
<a-scene background="color: #FAFAFA">
<a-assets>
<a-asset-item id="model" src="https://cdn.glitch.com/994b27e1-a801-47b8-9b52-9547b8f25fc1%2Fgoogle%20poly.gltf"></a-asset-item>
<a-mixin id="fish" gltf-model="#model" scale="0.001 0.001 0.001" image-provider></a-mixin>
</a-assets>
<a-text position="-1.5 2 -3" color="black" value="original"></a-text>
<a-entity gltf-model="#model" position="-1 1 -3" scale="0.001 0.001 0.001"></a-entity>
<a-entity position="0 1 -3" mixin="fish"></a-entity>
<a-entity position="1 1 -3" mixin="fish"></a-entity>
<a-entity position="2 1 -3" mixin="fish"></a-entity>
<a-entity position="3 1 -3" mixin="fish"></a-entity>
</a-scene>
If You have multiple identical objects, you should look into Instanced Meshes - which allow you to place multiple objects while creating only one draw call. Check out this SO thread. You can check it out here (source)

"I guess what I'm trying to do is avoid having to traverse the model and just make an update to 1 material in the model, which in turn updates all objects that have that material applied"
If multiple mesh are actually sharing the same exact material (meaning they have the same uuid), if you update a property of the material then it'll update on all mesh using that material. I'm guessing they're not all sharing the same material though.
From what you've said, I think you just need to make your updates to the first mesh's material, create a variable referencing that material, then traverse your model and apply that material to every applicable mesh. For memory sake, it would also be good to .dispose() the materials that your replacing.

Related

How to animate gtlf models in Augmented reality in Three.js

Helle, I'm new to Three.js and I'm trying to show an animated gltf/glb model in my AR project.
There are a lot of examples on how to animate gltf models in a 3d environment but i can't get it to work in my AR project.
I can import the 3d model and show it in AR. But the models refuses to animate.
I would appreciate some help.
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<meta
name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0"
/>
<title>WebXr 3d model demo</title>
<!-- three.js -->
<script src="https://unpkg.com/three#0.126.0/build/three.js"></script>
<script src="https://unpkg.com/three#0.126.0/examples/js/loaders/GLTFLoader.js"></script>
</head>
<body>
<!-- Starting an immersive WebXR session -->
<button onclick="activateXR()">Start AR</button>
<script>
async function activateXR() {
// Add a canvas element and initialize a WebGL context that is compatible with WebXR.
const canvas = document.createElement("canvas");
document.body.appendChild(canvas);
const gl = canvas.getContext("webgl", { xrCompatible: true });
//Step 2
const scene = new THREE.Scene();
let clock = new THREE.Clock();
let mixer;
var hemiLight = new THREE.HemisphereLight(0xffffff, 0x444444);
hemiLight.position.set(0, 300, 0);
scene.add(hemiLight);
var dirLight = new THREE.DirectionalLight(0xffffff);
dirLight.position.set(75, 300, 0);
scene.add(dirLight);
//step 3
// Set up the WebGLRenderer, which handles rendering to the session's base layer.
const renderer = new THREE.WebGLRenderer({
alpha: true,
preserveDrawingBuffer: true,
canvas: canvas,
context: gl,
});
renderer.autoClear = false;
// The API directly updates the camera matrices.
// Disable matrix auto updates so three.js doesn't attempt
// to handle the matrices independently.
const camera = new THREE.PerspectiveCamera();
camera.matrixAutoUpdate = false;
// step 4
// Initialize a WebXR session using "immersive-ar".
const session = await navigator.xr.requestSession("immersive-ar", {
requiredFeatures: ["hit-test"],
});
session.updateRenderState({
baseLayer: new XRWebGLLayer(session, gl),
});
// A 'local' reference space has a native origin that is located
// near the viewer's position at the time the session was created.
const referenceSpace = await session.requestReferenceSpace("local");
// Create another XRReferenceSpace that has the viewer as the origin.
const viewerSpace = await session.requestReferenceSpace("viewer");
// Perform hit testing using the viewer as origin.
const hitTestSource = await session.requestHitTestSource({
space: viewerSpace,
});
//Use the model loader from the previous step to load a targeting reticle and a sunflower from the web.
const loader = new THREE.GLTFLoader();
let reticle;
loader.load(
"https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf",
function (gltf) {
reticle = gltf.scene;
reticle.visible = false;
scene.add(reticle);
}
);
let key;
loader.load("key/key.glb", function (gltf) {
const model = gltf.scene;
mixer = new THREE.AnimationMixer(model);
mixer.clipAction(gltf.animations[0]).play();
key = gltf.scene;
});
animate();
// XRSession receives select events when the user completes a primary action. In an AR session, this corresponds to a
// tap on the screen.
session.addEventListener("select", (event) => {
if (key) {
const clone = key.clone();
clone.position.copy(reticle.position);
scene.add(clone);
}
});
//step 5
// Create a render loop that allows us to draw on the AR view.
const onXRFrame = (time, frame) => {
// Queue up the next draw request.
session.requestAnimationFrame(onXRFrame);
// Bind the graphics framebuffer to the baseLayer's framebuffer
gl.bindFramebuffer(
gl.FRAMEBUFFER,
session.renderState.baseLayer.framebuffer
);
// Retrieve the pose of the device.
// XRFrame.getViewerPose can return null while the session attempts to establish tracking.
const pose = frame.getViewerPose(referenceSpace);
if (pose) {
// In mobile AR, we only have one view.
const view = pose.views[0];
const viewport = session.renderState.baseLayer.getViewport(view);
renderer.setSize(viewport.width, viewport.height);
// Use the view's transform matrix and projection matrix to configure the THREE.camera.
camera.matrix.fromArray(view.transform.matrix);
camera.projectionMatrix.fromArray(view.projectionMatrix);
camera.updateMatrixWorld(true);
const hitTestResults = frame.getHitTestResults(hitTestSource);
if (hitTestResults.length > 0 && reticle) {
const hitPose = hitTestResults[0].getPose(referenceSpace);
reticle.visible = true;
reticle.position.set(
hitPose.transform.position.x,
hitPose.transform.position.y,
hitPose.transform.position.z
);
reticle.updateMatrixWorld(true);
}
// Render the scene with THREE.WebGLRenderer.
renderer.render(scene, camera);
}
};
function animate() {
requestAnimationFrame(animate);
const delta = clock.getDelta();
renderer.render(scene, camera);
}
session.requestAnimationFrame(onXRFrame);
}
</script>
</body>
</html>
The code + glb file
https://github.com/weewautersmaxim/arDemo
In your animate function, it looks like you need something along the lines of:
if (mixer) {
mixer.update(delta);
}
Otherwise the animation mixer doesn't know that time is passing.
Finally was able to solve my issue.
Firstly I load the model in the addEventListener.
I also had to change the animation code inside the load function.
The 'animate' function also needed extra code.
If anyone is interested in the final code. Here is a link to my github repository. Everything is explained there.
https://github.com/Maxim-Weewauters/WebXR_Animations

Update texture with three.js

I've got a textured model in three.js and I want to be able to swap out the texture defined in the .gltf file when the page loads. I've looked here for inspiration.
"images": [
{
"uri": "flat_baseColor.png"
},
// etc
So to update the texture I do
var images = [
"./textures/01.jpg",
// "./textures/01.jpg",
];
var texture = new THREE.TextureLoader().load( images[0] );
var my_material = new THREE.MeshBasicMaterial({map: texture});
// load the model
var loader = new GLTFLoader().setPath( 'models/gltf/' ); // trex
loader.load( 'creature_posed.gltf', function ( gltf )
{
gltf.scene.traverse( function ( child )
{
if ( child.isMesh )
{
// The textures go for a double flip, I have no idea why
// Texture compensation
texture.flipX = false;
texture.flipY = false;
child.material = my_material;
texture.needsUpdate = true;
}
} );
var model = gltf.scene;
Only the texture is considerably pale. :(
I've tested it against itself, so it's not the texture). What have a missed out?
When loading textures you'll need to pay attention to colorspace: if the texture has color data (like .map or .emissiveMap) it's probably sRGB.
texture.encoding = THREE.sRGBEncoding;
See GLTFLoader docs and color management in three.js. This assumes that renderer.outputEncoding = THREE.sRGBEncoding as well.
three.js r113

Can I put a THREE.js material on a gltf model?

Im currently working on my portfolio in A-frame. I'm trying to change the material of the objects to a wireframe material, i'm using the wireframe material (MeshBasicMaterial) of THREE.js to do that. If I put this material on objects like "a-box" or "a-sphere" it works, but i need to put this wireframe material on my 3D gltf model. Is this possible?
This is my script to call the material:
AFRAME.registerComponent('wireframe', {
dependencies: ['material'],
init: function () {
this.el.components.material.material.wireframe = true;
}
});
<a-box position="-1 0.5 -3" rotation="0 45 0" material="color: blue" wireframe></a-box>
It is possible to modify the material on a gltf.
One way to do it is to drill into the THREEjs level of the mesh inside the gltf, and create a new material and assign it a wireframe property.
AFRAME.registerComponent('tree-manager', {
init: function () {
let el = this.el;
let comp = this;
let data = this.data;
comp.scene = el.sceneEl.object3D;
comp.counter = 0;
comp.treeModels = [];
comp.modelLoaded = false;
// After gltf model has loaded, modify it materials.
el.addEventListener('model-loaded', function(ev){
let mesh = el.getObject3D('mesh');
if (!mesh){return;}
//console.log(mesh);
mesh.traverse(function(node){
if (node.isMesh){
let mat = new THREE.MeshStandardMaterial;
let color = new THREE.Color(0xaa5511);
mat.color = color;
mat.wireframe = true;
node.material = mat;
}
});
comp.modelLoaded = true;
});
}
});
<a-entity id="tree" gltf-model="#tree-gltf" scale="5 5 5" tree-manager></a-entity>
Here is a glitch that shows how.

How to dynamically update THREE.js PointCloud overlay in Autodesk Forge

I am working on a markup extension for autodesk forge. I want to be able to click on a position, and have a markup dynamically appear as a pointcloud.
Below is the load function for the extenision. The first time that I load the pointcloud it works, but when I try to add verticies to the geometry, and render that, the new points don't appear. However, the raycaster is able to detect the point. I know this because when I click on a location for the second time, I get a log telling me that the raycaster intercepted the pointcloud (even though that point is not rendered on the screen).
ClickableMarkup.prototype.load = function () {
const self = this;
/* Initizialize */
console.log('Start loading clickableMarkup extension');
this.camera = this.viewer.navigation.getCamera(); // Save camera instance
console.log(this.camera);
this.initCameraInfo(); // Populate cameraInfo array
this.overlayManager.addScene(this.sceneName); // Add scene to overlay manager
this.scene = this.viewer.impl.overlayScenes[this.sceneName].scene; // Save reference to the scene
/* Create pointCloud */
this.geometry = new THREE.Geometry();
this.cameraInfo.forEach( function(e) {
// console.log(` > add ${e.position}`)
self.geometry.vertices.push(e.position);
}
);
this.geometry.computeBoundingBox();
// const material = new THREE.PointCloudMaterial( { size: 50, color: 0Xff0000, opacity: 100, sizeAttenuation: true } );
const texture = THREE.ImageUtils.loadTexture(this.pointCloudTextureURL);
this.material = new THREE.ShaderMaterial({
vertexColors: THREE.VertexColors,
opacity: this.prefs.POINT_CLOUD_OPACITY,
fragmentShader: this.fragmentShader,
vertexShader: this.vertexShader,
depthWrite: true,
depthTest: true,
uniforms: {
size: {type: "f", value: self.size},
tex: {type: "t", value: texture}
}
});
this.points = new THREE.PointCloud( this.geometry, this.material );
/* Add the pointcloud to the scene */
this.overlayManager.addMesh(this.points, this.sceneName); /* >>> THIS WORKS SO IT RENDERS THE POINTCLOUD AT THE BEGINNING OF LAOD <<< */
/* Set up event listeners */
document.addEventListener('click', event => {
event.preventDefault();
this.setRaycasterIntersects(event); // Fill this.intersects with pointcloud indices that are currently selected
if (this.intersects.length > 0) {
console.log('Raycaster hit index: ' + this.hitIndex + JSON.stringify(this.intersects[0]));
this.setCameraView(this.hitIndex);
} else {
/* >>>> THE PROBLEM IS HERE - IT DOESN'T RENDER THE NEW POINTCLOUD POINTS <<<< */
const mousePos = this.screenToWorld(event);
if (mousePos) { // Only add point to scene if the user clicked on the building
const vertexMousePos = new THREE.Vector3(mousePos.x, mousePos.y, mousePos.z);
this.geometry.vertices.push(vertexMousePos);
this.geometry.verticesNeedUpdate = true;
this.geometry.computeBoundingBox();
this.points = new THREE.PointCloud(this.geometry, this.material);
this.overlayManager.addMesh(this.points, this.sceneName);
this.viewer.impl.invalidate(true); // Render the scene again
}
}
}, false);
console.log('ClickableMarkup extension is loaded!');
return true;
};
How do I make new pointcloud verticies render?
It looks like there might be a typo in the problematic code.
this.overlayManager.addMesh(this.points, 'this.sceneName');
Should probably be
this.overlayManager.addMesh(this.points, this.sceneName);

Need to render textures uploaded by user in input files with Canvas renderer three.js

I've loaded a Blender model using the three.js library and want to allow the users to change the texture of some faces through an input field in a form. I don't have any problem when I use the WebGLRenderer, and it works fine in Chrome, but it doesn't work with the canvas renderer when the texture coming from the input is in data:image... format. Seems if I load a full path image from the server it works fine. Does anybody know if there's a way to load textures this way and render them with the canvasrenderer?
Thank you.
I add here the code after I set the camera, lights and detect it the browswer detects webgl or not to use the WebGLRenderer or the CanvasRenderer.
First I load the model from blender:
var loader = new THREE.JSONLoader();
loader.load('assets/models/mimaquina9.js', function (geometry, mat) {
//I set the overdraw property to 1 for each material like i show here for 16
mat[16].overdraw = 1;
mesh = new THREE.Mesh(geometry, new THREE.MeshFaceMaterial(mat) );
mesh.scale.x = 5;
mesh.scale.y = 5;
mesh.scale.z = 5;
scene.add(mesh);
}, 'assets/images');
render();
//To render, I try to make an animation in case WebGL is available and just render one frame in case of using the canvas renderer.
function render() {
if(webgl){
if (mesh) {
mesh.rotation.y += 0.02;
}
// render using requestAnimationFrame
requestAnimationFrame(render);
webGLRenderer.render(scene, camera);
}
else if(canvas){
camera.position.x = 30;
camera.position.y = 20;
camera.position.z = 40;
camera.lookAt(new THREE.Vector3(0, 10, 0));
setTimeout(function (){
//something you want delayed
webGLRenderer.render(scene, camera);
}, 1000);
}
}
$('#datafile').change(function(e)
{
e.preventDefault();
var f = e.target.files[0];
if(f && window.FileReader)
{
var reader = new FileReader();
reader.onload = function(evt) {
console.log(evt);
mesh.material.materials[16].map = THREE.ImageUtils.loadTexture(evt.target.result);
if(canvas && !webgl){
//I read that might be a problem of using Lambert materials, so I tried this commented line without success
//mesh.material.materials[16] = new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture(evt.target.result)});
//If I uncomment the next line, it displays the texture fine when rendering after.
//mesh.material.materials[16].map = THREE.ImageUtils.loadTexture("assets/images/foto.jpg");
render();
}
}
reader.readAsDataURL(f);
}
});
Thanks once more.
evt.target.result is a DataURL so you should assign that to a image.src. Something like this should work:
var image = document.createElement( 'img' );
image.src = evt.target.result;
mesh.material.materials[16].map = new THREE.Texture( image );

Resources