Json 3D model only using 1 pixel of a texture - three.js

I'm having this weird issue: my 3D object is only sampling 1 pixel (the bottom-left one) of my texture. This is how I'm loading the object:
// Load the frame model (JSON) and build a textured, morph-capable mesh from it.
loaderFrame.load('./models/filterFrame/filterFrame.json',(geometry) =>
{
// Unlit material; TextureLoader resolves the image asynchronously and
// updates the map once it has loaded.
const mat = new THREE.MeshBasicMaterial({
map: new THREE.TextureLoader().load('./models/filterFrame/textura_polar.jpeg'),
transparent: true,
morphTargets: true
});
// NOTE(review): redundant — transparent is already set in the constructor above.
mat.transparent = true;
// mat.morphTargets = true;
frameMesh = new THREE.Mesh(geometry, mat);
frameMesh.scale.multiplyScalar(frameOptions.frameScale);
// Disable frustum culling — presumably because morph targets can move
// vertices outside the geometry's original bounds. TODO confirm.
frameMesh.frustumCulled = false;
// NOTE(review): 'transparent' is a material flag, not a mesh property — this
// line has no effect (it is already set on the material).
frameMesh.transparent = true;
frameMesh.renderOrder = 0;
}
);

This is because your loaded object doesn't have proper UV mapping. If UVs are nonexistent, or if they're all 0, 0, then it's only going to sample from the bottom-left corner of your texture.
To fix this, open your model in a 3D editor and make sure the UVs are properly positioned across the texture plane. I don't know what your model looks like, but here's a basic example:

Related

Three.js: texture loaded improperly

I use some data to create a model in three.js. It can load texture, but with a weird problem. I load the texture in this way.
`function createMesh(geom, imageFile){
var loader = new THREE.TextureLoader();
texture = loader.load(imageFile);
var mat = new THREE.MeshLambertMaterial({
side: THREE.DoubleSide,
});
mat.map = texture;
var mesh = new THREE.Mesh(geom, mat);
return mesh;}
var geom = new THREE.Geometry();
geom.vertices = vertices;
geom.faces = faces;
geom.computeFaceNormals();
var myModel = createMesh(geom, './tex1.jpg');
scene.add(myModel);`
Here is the screenshot before the texture is loaded.
Here is the screenshot after the texture is loaded.
My texture file (2048×2048 .jpg):
I have tested to load the texture in a common cube, and it works. So I can't figure out why the texture can't be loaded in my model. Any suggestions? Thank you very much!
For customized geometry in Three.js, we need to do UV mapping to define how texture is mapped to the geometry.
Firstly, load the texture by var material = new THREE.MeshPhongMaterial( { map: THREE.ImageUtils.loadTexture('texture.jpg') } );
Then here is how I did UV mapping:
create n arrays, each corresponding to a sub-image of the texture and containing 4 points which define the boundaries of the sub-image. (0, 0) represents the lower-left corner and (1, 1) represents the upper-right corner.
clear the existing UV mapping with geometry.faceVertexUvs[0] = [];
Map the sub-image of the texture to each triangle face of the geometry.
geometry.faceVertexUvs[0][0] = [ sub_image_1[0], sub_image_1[1], sub_image_1[3] ];
geometry.faceVertexUvs[0][1] = [ sub_image_1[1], sub_image_1[2], sub_image_1[3] ];
Finally, mesh = new THREE.Mesh(geometry, material);

Mangled rendering when transforming scene coordinates instead of camera coordinates

I've been learning how to integrate ThreeJS with Mapbox, using this example. It struck me as weird that the approach is to leave the loaded model in its own coordinate system, and transform the camera location on render. So I attempted to rewrite the code, so that the GLTF model is transformed when loaded, then the ThreeJS camera is just synchronised with the Mapbox camera, with no further modifications.
The code now looks like this:
function newScene() {
  // Fresh scene lit by two opposing directional lights so the model is
  // illuminated from both sides.
  const scene = new THREE.Scene();
  for (const y of [-70, 70]) {
    const light = new THREE.DirectionalLight(0xffffff);
    light.position.set(0, y, 100).normalize();
    scene.add(light);
  }
  return scene;
}
function newRenderer(map, gl) {
  // Draw into Mapbox's own canvas and GL context so three.js output
  // composites with the map.
  const renderer = new THREE.WebGLRenderer({
    antialias: true,
    canvas: map.getCanvas(),
    context: gl,
  });
  // Mapbox owns the framebuffer; never clear it from three.js.
  renderer.autoClear = false;
  return renderer;
}
// create a custom layer for a 3D model per the CustomLayerInterface
// Build a Mapbox CustomLayerInterface that loads a GLTF model, bakes the
// map transform into the model's scene graph at load time, and renders it
// with a camera whose projection matrix comes straight from Mapbox.
export function addModel(modelPath, origin, altitude = 0, orientation = [Math.PI / 2, 0, 0]) {
// Anchor point of the model in mercator [0..1] coordinates.
const coords = mapboxgl.MercatorCoordinate.fromLngLat(origin, altitude);
// transformation parameters to position, rotate and scale the 3D model onto the map
const modelTransform = {
translateX: coords.x,
translateY: coords.y,
translateZ: coords.z,
rotateX: orientation[0],
rotateY: orientation[1],
rotateZ: orientation[2],
/* Since our 3D model is in real world meters, a scale transform needs to be
* applied since the CustomLayerInterface expects units in MercatorCoordinates.
*/
scale: coords.meterInMercatorCoordinateUnits()
};
// Y is negated — presumably to flip between the three.js and mercator
// Y-axis conventions. TODO confirm.
const scaleVector = new THREE.Vector3(modelTransform.scale, -modelTransform.scale, modelTransform.scale)
return {
id: "3d-model",
type: "custom",
renderingMode: "3d",
onAdd: function(map, gl) {
this.map = map;
// Bare Camera: its projectionMatrix is overwritten every frame in render().
this.camera = new THREE.Camera();
this.scene = newScene();
this.renderer = newRenderer(map, gl);
// use the three.js GLTF loader to add the 3D model to the three.js scene
new THREE.GLTFLoader()
.load(modelPath, gltf => {
// Apply translate/rotate/scale to the model itself rather than to the
// camera at render time.
// NOTE(review): placing the model at mercator-scale coordinates may lose
// float precision — suspected cause of the mangled rendering described
// below. TODO confirm.
gltf.scene.position.fromArray([coords.x, coords.y, coords.z]);
gltf.scene.setRotationFromEuler(new THREE.Euler().fromArray(orientation));
gltf.scene.scale.copy(scaleVector);
this.scene.add(gltf.scene);
// Debug aid: log and visualize the transformed model's bounding box.
const bbox = new THREE.Box3().setFromObject(gltf.scene);
console.log(bbox);
this.scene.add(new THREE.Box3Helper(bbox, 'blue'));
});
},
render: function(gl, matrix) {
// Drive the three.js camera directly with Mapbox's projection matrix.
this.camera.projectionMatrix = new THREE.Matrix4().fromArray(matrix);
// Reset GL state that Mapbox may have changed before three.js draws.
this.renderer.state.reset();
this.renderer.render(this.scene, this.camera);
// this.map.triggerRepaint();
}
}
}
It basically works, in that a model is loaded and drawn in the right location in the Mapbox world. However, instead of looking like this:
It now looks like this, a mangled mess that jitters around chaotically as the camera moves:
I'm not yet familiar enough with ThreeJS to have any idea what I did wrong.
Here's a side-by-side comparison of the old, functional code on the right, vs the new broken code on the left.
Further investigation
I suspect possibly the cause is to do with shrinking all the coordinates down to within the [0..1] range of the projected coordinate system, and losing mathematical precision, perhaps. When I scale the model up by 100 times, it renders like this - messy and glitchy, but at least recognisable as something.

Three.js: Add a texture to an object just on the outside

I'm very new with Three.js and I'm trying to make a ring:
http://www.websuvius.it/atma/myring/preview.html
I have a background texture ( the silver one ) and another one with a text.
I want the text only on the ring external face.
This is part of my code:
// Load the ring OBJ and render it with two stacked materials:
// a silver background texture plus a transparent text overlay.
var loader = new THREE.OBJLoader( manager );
var textureLoader = new THREE.TextureLoader( manager );
loader.load( 'assets/3d/ring.obj', function ( event ) {
var object = event;
// OBJLoader returns a group; the ring geometry is its first child.
var geometry = object.children[ 0 ].geometry;
var materials = [];
var backgroundTexture = textureLoader.load('img/texture/silver.jpg');
// flipY disabled — presumably to match the OBJ exporter's UV convention.
// TODO confirm against the exporter settings.
backgroundTexture.flipY = false;
var background = new THREE.MeshBasicMaterial({
map: backgroundTexture,
color: 0xffffff
});
materials.push(background);
var customTexture = textureLoader.load('img/text.png');
customTexture.flipY = false;
// Text layer drawn on top; the PNG's alpha lets the silver show through.
var custom = new THREE.MeshBasicMaterial({
map: customTexture,
transparent: true,
opacity: 1,
color: 0xffffff
});
materials.push(custom);
// Creates a parent object with one child mesh per material, all sharing
// the same geometry.
mesh = THREE.SceneUtils.createMultiMaterialObject(geometry, materials);
mesh.position.y=-50;
scene.add(mesh);
}, onProgress, onError );
Is it possible?
Thanks
The reason behind your issue appears to be in your .obj file. Judging from a quick glance at the texture coordinates stored in the file, the inside of the ring uses the same part of the texture image as the outside of the ring.
Increasing the transparent parts of the image won't help. Neither will the attempts to stop the texture from repeating. Those would help if the texture coordinates were larger than 1 but this is not your case unfortunately.
However, there are several solutions:
Split the object in a 3D modeling software to two objects - outside and inside of the ring - and apply the texture only to the first one.
Adjust the UV coordinates of the object in a 3D modeling software.
Adjust the UV coordinates of the vertices programmatically after loading the object to Three.JS

How to apply a texture to a custom geometry in Three.js

I successfully applied a texture to a cube geometry with this:
// A wood-textured cube; double-sided so back faces render as well.
var geometry = new THREE.CubeGeometry(10, 10, 10);
var meshMaterial = new THREE.MeshPhongMaterial({
  map: THREE.ImageUtils.loadTexture('/app/images/wood.jpg'),
  side: THREE.DoubleSide,
  transparent: false,
});
var mesh = new THREE.Mesh(geometry, meshMaterial);
With this I get a nice textured cube like this:
Now I want to apply the same texture (512x512 jpg image) to a custom model I'm loading from an STL and this is what I get (in this case a pyramid):
This is the code:
// Load the custom model and shade it with the same wood texture as the cube.
loader.load(jsonParam.url, function (geometry) {
// NOTE(review): if the loaded geometry has no UV coordinates, the map samples
// a single texel — presumably the cause of the "averaged color" look described
// below. TODO confirm by inspecting the geometry's UVs.
var meshMaterial = new THREE.MeshPhongMaterial({ transparent: false, map: THREE.ImageUtils.loadTexture('/app/images/wood.jpg') });
meshMaterial.side = THREE.DoubleSide;
var mesh = new THREE.Mesh(geometry, meshMaterial);
// Receives shadows but does not cast them.
mesh.castShadow = false;
mesh.receiveShadow = true;
scene.add(mesh);
});
Why is the texture not being applied, and why do I get only what seems to be an average of the texture colors?
You need UV mapping.
You can either edit the model in modelling software to add UV coordinates or maybe generate them as in the answers posted here.
I suppose another option would be to create your own shader that maps the texture to the model surface without using UV coordinates.

Three.js: mesh does not receive shadow and is not detected by intersectObjects

I have a WebGL Geometry Shape in Three.JS that is made by ExtrudeGeometry.
My problem is why it doesn't receive shadow (which is powered by THREE.SpotLight) or why Raycaster.intersectObject doesn't detect that!?
My shape is something like below:
// Extrude the 2D shape into a 3D geometry.
var geometry = new THREE.ExtrudeGeometry(shape, extrudeSettings);
// Builds a parent object with two child meshes sharing this geometry:
// a solid fill and a wireframe overlay.
var mesh = THREE.SceneUtils.createMultiMaterialObject(geometry, [new THREE.MeshBasicMaterial({ color: color }), new THREE.MeshBasicMaterial({ color: color2, wireframe: true, transparent: false, wireframeLinewidth: 5 })]);
mesh.position.set(x, y, z);
mesh.rotation.set(rx, ry, rz);
mesh.scale.set(s, s, s);
// NOTE(review): these shadow flags are set on the parent object only — the
// child meshes created by createMultiMaterialObject do not inherit them.
mesh.castShadow = true;
mesh.receiveShadow = true;
scene.add(mesh);
Is this natural ExtrudeGeometry behavior, or something else?
This has nothing to do with the geometry.
THREE.SceneUtils.createMultiMaterialObject() achieves its effect by creating a parent object and two child meshes, each with the same geometry.
You need to set receiveShadow on the children, instead of the parent.
mesh.children[ 0 ].receiveShadow = true;
mesh.children[ 1 ].receiveShadow = true;
To get Raycaster.intersectObjects() to work on hierarchical objects, you need to pass in the recursive flag like so.
raycaster.intersectObjects( objects, true );
three.js r.59

Resources