Applying multiple textures to same geometry - three.js

I have a PlaneGeometry with some vertices raised higher than others, i.e. the vertices have different z values so the plane looks like terrain.
I want to apply different textures such as grass, sand and snow to different parts of the plane.
How can I assign a different material to each of those parts?
This is how PlaneGeometry is generated:
https://github.com/mrdoob/three.js/blob/master/src/geometries/PlaneGeometry.js
var indices = [];
var vertices = [];
var normals = [];
var uvs = [];

// generate vertices, normals and uvs
for ( iy = 0; iy < gridY1; iy ++ ) {
    var y = iy * segment_height - height_half;
    for ( ix = 0; ix < gridX1; ix ++ ) {
        var x = ix * segment_width - width_half;
        vertices.push( x, - y, 0 );
        normals.push( 0, 0, 1 );
        uvs.push( ix / gridX );
        uvs.push( 1 - ( iy / gridY ) );
    }
}

// indices
for ( iy = 0; iy < gridY; iy ++ ) {
    for ( ix = 0; ix < gridX; ix ++ ) {
        var a = ix + gridX1 * iy;
        var b = ix + gridX1 * ( iy + 1 );
        var c = ( ix + 1 ) + gridX1 * ( iy + 1 );
        var d = ( ix + 1 ) + gridX1 * iy;
        // faces
        indices.push( a, b, d );
        indices.push( b, c, d );
    }
}

// build geometry
this.setIndex( indices );
this.addAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
this.addAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
this.addAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) );
This is how it should be done:
// Loading in texture:
var loader = new THREE.TextureLoader();
var texture = loader.load('grass.jpg');

// Creating material:
var material = new THREE.MeshBasicMaterial({
    map: texture,
    overdraw: true,
    wireframe: false
});

var materials = [];
materials.push(material);
mesh = new THREE.Mesh(geometry, materials);

// Applying this one to faces that need a certain material:
// https://threejs.org/docs/#api/en/core/BufferGeometry.addGroup
geometry.addGroup();
Basically, what I need is a way to apply a texture to just the first part of the plane; at the moment the texture is always applied to the whole geometry. Once that works, I can write a loop myself that assigns textures to the rest.
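For reference, here is a sketch of how the group-based approach could look, under my own assumptions (the sand.jpg/snow.jpg file names and the height thresholds are invented, and geometry is assumed to be an indexed BufferGeometry such as PlaneBufferGeometry): every 6 indices form one grid cell, so each cell can get its own group and a material index picked from its average height.

var loader = new THREE.TextureLoader();
var materials = [
    new THREE.MeshBasicMaterial( { map: loader.load( 'grass.jpg' ) } ), // index 0
    new THREE.MeshBasicMaterial( { map: loader.load( 'sand.jpg' ) } ),  // index 1 (assumed file name)
    new THREE.MeshBasicMaterial( { map: loader.load( 'snow.jpg' ) } )   // index 2 (assumed file name)
];

geometry.clearGroups();
var position = geometry.getAttribute( 'position' );
var index = geometry.getIndex();

// every 6 indices form one grid cell (two triangles)
for ( var start = 0; start < index.count; start += 6 ) {
    // average height (z, before any rotation) of the cell's six referenced vertices
    var z = 0;
    for ( var k = 0; k < 6; k ++ ) z += position.getZ( index.getX( start + k ) );
    z /= 6;
    var materialIndex = z > 20 ? 2 : z > 10 ? 1 : 0; // assumed thresholds
    geometry.addGroup( start, 6, materialIndex );
}

var mesh = new THREE.Mesh( geometry, materials );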

Related

three.js: Calculate faceVertexUvs on custom Geometry for texture mapping

I have this example here:
https://jsfiddle.net/NiklasKnaack/L1cqbdr9/82/
function createPlanetFace( radiusX, radiusY, radiusZ, localUp, resolution ) {
    const face = {};
    face.geometry = new THREE.Geometry();
    face.geometry.faceVertexUvs[ 0 ] = [];
    face.verticesOriginal = [];
    face.verticesNormalized = [];

    const axisA = new THREE.Vector3( localUp.y, localUp.z, localUp.x );
    const axisB = new THREE.Vector3().crossVectors( localUp, axisA );

    for ( let y = 0; y < resolution; y++ ) {
        for ( let x = 0; x < resolution; x++ ) {
            const index = x + y * resolution;

            const percent = new THREE.Vector2( x, y );
            percent.x /= ( resolution - 1 );
            percent.y /= ( resolution - 1 );

            const vertex = new THREE.Vector3();
            vertex.x = ( localUp.x + ( percent.x - 0.5 ) * 2 * axisA.x + ( percent.y - 0.5 ) * 2 * axisB.x ) * radiusX;
            vertex.y = ( localUp.y + ( percent.x - 0.5 ) * 2 * axisA.y + ( percent.y - 0.5 ) * 2 * axisB.y ) * radiusY;
            vertex.z = ( localUp.z + ( percent.x - 0.5 ) * 2 * axisA.z + ( percent.y - 0.5 ) * 2 * axisB.z ) * radiusZ;

            face.verticesOriginal[ index ] = new THREE.Vector3( vertex.x, vertex.y, vertex.z );

            vertex.normalize(); // create a sphere
            vertex.x += vertex.x * radiusX / 2;
            vertex.y += vertex.y * radiusY / 2;
            vertex.z += vertex.z * radiusZ / 2;

            face.verticesNormalized[ index ] = new THREE.Vector3( vertex.x, vertex.y, vertex.z );
            face.geometry.vertices[ index ] = vertex;

            //if ( index % 6 === 0 && index > 0 && x !== resolution - 1 && y !== resolution - 1 ) {
            if ( x !== resolution - 1 && y !== resolution - 1 ) {
                const triangle1 = new THREE.Face3( index, index + resolution + 1, index + resolution );
                const triangle2 = new THREE.Face3( index, index + 1, index + resolution + 1 );
                face.geometry.faces.push( triangle1, triangle2 );
            }
        }
    }

    //face.geometry.computeBoundingSphere();
    //face.geometry.computeVertexNormals();
    //face.geometry.computeFaceNormals();

    return face;
}
For this I would like to calculate the UVs so that the loaded texture is displayed correctly.
In principle, the createPlanetFace function creates a plane; from six of these planes a cube or sphere is built (see the example).
So far it works, except that the texture is not displayed because the UVs are missing.
After a lot of research and experimenting I get either errors in the console, a totally distorted texture, or no texture at all, which is why I erased my miserable attempts at calculating the UVs.
The examples I have found on this topic are all different, or at least most of them are. I have reached a point where I can't get any further and need your help.
Thank you in advance.
Here is a box unwrap I wrote a while ago for regular geometries:
function boxUnwrapUVs( geometry ) {
    for ( var i = 0; i < geometry.faces.length; i++ ) {
        var face = geometry.faces[ i ];
        var faceUVs = geometry.faceVertexUvs[ 0 ][ i ];
        var va = geometry.vertices[ geometry.faces[ i ].a ];
        var vb = geometry.vertices[ geometry.faces[ i ].b ];
        var vc = geometry.vertices[ geometry.faces[ i ].c ];
        var vab = new THREE.Vector3().copy( vb ).sub( va );
        var vac = new THREE.Vector3().copy( vc ).sub( va );
        // now we have 2 vectors to get the cross product of...
        var vcross = new THREE.Vector3().copy( vab ).cross( vac );
        // find the largest axis of the face normal...
        vcross.set( Math.abs( vcross.x ), Math.abs( vcross.y ), Math.abs( vcross.z ) );
        var majorAxis = vcross.x > vcross.y ? ( vcross.x > vcross.z ? 'x' : 'z' ) : ( vcross.y > vcross.z ? 'y' : 'z' );
        // take the other two axes from the largest axis
        var uAxis = majorAxis == 'x' ? 'y' : majorAxis == 'y' ? 'x' : 'x';
        var vAxis = majorAxis == 'x' ? 'z' : majorAxis == 'y' ? 'z' : 'y';
        faceUVs[ 0 ].set( va[ uAxis ], va[ vAxis ] );
        faceUVs[ 1 ].set( vb[ uAxis ], vb[ vAxis ] );
        faceUVs[ 2 ].set( vc[ uAxis ], vc[ vAxis ] );
    }
    geometry.elementsNeedUpdate = geometry.verticesNeedUpdate = true;
}
Is that helpful?
edit: I rewrote your example because it was too complicated for me to understand...
https://jsfiddle.net/manthrax/dL6kxuf2/1/
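For reference, one way the missing UVs could be filled in on the original createPlanetFace grid (a sketch of mine, not taken from the answer above or the fiddle): every vertex already has normalized grid coordinates, so those can be reused directly as UVs, pushed in the same order as the faces.

function addPlanetFaceUVs( face, resolution ) {
    const uv = ( x, y ) => new THREE.Vector2( x / ( resolution - 1 ), y / ( resolution - 1 ) );
    face.geometry.faceVertexUvs[ 0 ] = [];
    for ( let y = 0; y < resolution - 1; y++ ) {
        for ( let x = 0; x < resolution - 1; x++ ) {
            // same vertex order as the two Face3s created in createPlanetFace
            face.geometry.faceVertexUvs[ 0 ].push(
                [ uv( x, y ), uv( x + 1, y + 1 ), uv( x, y + 1 ) ],  // triangle1: index, index+res+1, index+res
                [ uv( x, y ), uv( x + 1, y ), uv( x + 1, y + 1 ) ]   // triangle2: index, index+1, index+res+1
            );
        }
    }
    face.geometry.uvsNeedUpdate = true;
}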

Change of the geometry type in the three.js

How can I change the geometry type from THREE.Spherical to THREE.SphereBufferGeometry in three.js?
https://codepen.io/anon/pen/ajyvyB
// sphere
var vector = new THREE.Vector3();
var spherical = new THREE.Spherical();

for ( var i = 0, l = objects.length; i < l; i ++ ) {
    var phi = Math.acos( -1 + ( 2 * i ) / l );
    var theta = Math.sqrt( l * Math.PI ) * phi;
    var object = new THREE.Object3D();
    spherical.set( 1000, phi, theta );
    object.position.setFromSpherical( spherical );
    vector.copy( object.position ).multiplyScalar( 2 );
    object.lookAt( vector );
    targets.sphere.push( object );
}
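This question has no answer attached, but one possible reading (a sketch under my own assumptions, not from the original post): a SphereBufferGeometry stores its vertices in a position buffer attribute, so the objects could be placed on those vertices instead of computing spherical coordinates by hand. The segment counts below are arbitrary, and note that SphereBufferGeometry duplicates vertices along the seam and at the poles, so several objects may land on the same spot.

var sphereGeometry = new THREE.SphereBufferGeometry( 1000, 16, 16 ); // radius matches the 1000 above
var position = sphereGeometry.getAttribute( 'position' );

for ( var i = 0, l = Math.min( objects.length, position.count ); i < l; i ++ ) {
    var object = new THREE.Object3D();
    // read the i-th vertex straight out of the buffer attribute
    object.position.set( position.getX( i ), position.getY( i ), position.getZ( i ) );
    vector.copy( object.position ).multiplyScalar( 2 );
    object.lookAt( vector );
    targets.sphere.push( object );
}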

three.js webgl custom shader sharing texture with new offset

I am splitting a 1024 x 1024 texture across 32 tiles of 32 x 32. I'm not sure whether it's possible to share the texture with a per-tile offset, or whether I need to create a new texture for each tile with the offset baked in.
To create the offset I am using a uniform value = 32 * i and updating the uniform on each loop iteration while creating the tiles, but all the tiles end up with the same offset. Basically I want the image to appear as one continuous image, not broken up into little tiles, yet the current output shows the same x/y offset on all 32 tiles. I'm using a vertex shader with three.js r71.
Would I need to create a new texture for each tile with the offset?
for ( j = 0; j < row; j ++ ) {
    for ( t = 0; t < col; t ++ ) {
        customUniforms.tX.value = tX;
        customUniforms.tY.value = tY;
        console.log( customUniforms.tX.value );
        customUniforms.tX.needsUpdate = true;
        customUniforms.tY.needsUpdate = true;
        mesh = new THREE.Mesh( geometry, mMaterial ); // or new material
    }
}

// vertex shader:
vec2 uvOffset = vUV + vec2( tX, tY );
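One likely cause of the identical offsets, though it isn't stated in the original post: if all tiles share the same ShaderMaterial instance, they also share one uniforms object, so only the last tX/tY values written survive. A minimal sketch that gives each tile its own uniforms (tileStep is an assumed name for the per-tile UV step, e.g. 32 / 1024):

var tileStep = 32 / 1024; // assumed: each 32 px tile covers this fraction of the 1024 px texture

for ( j = 0; j < row; j ++ ) {
    for ( t = 0; t < col; t ++ ) {
        // clone the uniforms so this tile's tX/tY are independent of the other tiles
        var tileUniforms = THREE.UniformsUtils.clone( customUniforms );
        tileUniforms.tX.value = t * tileStep;
        tileUniforms.tY.value = j * tileStep;

        var tileMaterial = new THREE.ShaderMaterial( {
            uniforms: tileUniforms,
            vertexShader: mMaterial.vertexShader,
            fragmentShader: mMaterial.fragmentShader
        } );

        mesh = new THREE.Mesh( geometry, tileMaterial );
        scene.add( mesh );
    }
}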
Image example:
Each image should have an offset of 10 or 20 px, but they are all the same... this is from using one texture.
As suggested, I have tried to manipulate the UVs on each object without luck; it seems to give every face the same UVs, for example on a 10x10 segment plane all faces end up identical:
var geometry = [
    [ new THREE.PlaneGeometry( w, w, 64, 64 ), 50 ],
    [ new THREE.PlaneGeometry( w, w, 40, 40 ), 500 ],
    [ new THREE.PlaneGeometry( w, w, 30, 30 ), 850 ],
    [ new THREE.PlaneGeometry( w, w, 16, 16 ), 1200 ]
];

geometry[ 0 ][ 0 ].faceVertexUvs[ 0 ] = [];

for ( var p = 0; p < geometry[ 0 ][ 0 ].faces.length; p++ ) {
    geometry[ 0 ][ 0 ].faceVertexUvs[ 0 ].push( [
        new THREE.Vector2( 0.0, 0.0 ),
        new THREE.Vector2( 0.0, 1 ),
        new THREE.Vector2( 1, 1 ),
        new THREE.Vector2( 1.0, 0.0 )
    ] );
}
Image of this result: you will notice all the faces get the same UVs when they shouldn't.
Update again:
I have to go through each face's vertices, since two triangles make a quad, to avoid the above issue. I think I may have this solved... will update.
Last update, hopefully:
Below is the source code, but I am lost on making the algorithm display the texture as expected.
/*
    j and t are the row and column loop variables over a 4x4 grid:
    row = 4; col = 4;
*/
for ( i = 0; i < geometry.length; i ++ ) {
    var mesh = new THREE.Mesh( geometry[ i ][ 0 ], customMaterial );
    mesh.geometry.computeBoundingBox();
    var max = mesh.geometry.boundingBox.max;
    var min = mesh.geometry.boundingBox.min;
    var offset = new THREE.Vector2( 0 - min.x * t * j + w, 0 - min.y * j + w ); // here is my issue
    var range = new THREE.Vector2( max.x - min.x * row * 2, max.y - min.y * col * 2 );
    mesh.geometry.faceVertexUvs[ 0 ] = [];
    var faces = mesh.geometry.faces;
    for ( p = 0; p < mesh.geometry.faces.length; p++ ) {
        var v1 = mesh.geometry.vertices[ faces[ p ].a ];
        var v2 = mesh.geometry.vertices[ faces[ p ].b ];
        var v3 = mesh.geometry.vertices[ faces[ p ].c ];
        mesh.geometry.faceVertexUvs[ 0 ].push( [
            new THREE.Vector2( ( v1.x + offset.x ) / range.x, ( v1.y + offset.y ) / range.y ),
            new THREE.Vector2( ( v2.x + offset.x ) / range.x, ( v2.y + offset.y ) / range.y ),
            new THREE.Vector2( ( v3.x + offset.x ) / range.x, ( v3.y + offset.y ) / range.y )
        ] );
    }
}
You will notice in the image below that the tile in red is seamless, while the other tiles are not aligned with the texture.
Here is the answer:
var offset = new THREE.Vector2(w - min.x-w+(w*t), w- min.y+w+(w*-j+w));
var range = new THREE.Vector2(max.x - min.x*7, max.y - min.y*7);
If you could simplify the answer, I will award a bounty too.
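Not from the original thread, but perhaps a simpler way to think about it: each tile covers 1/col of the texture horizontally and 1/row vertically, so a vertex's UV is its tile index plus its normalized position inside the tile, divided by the number of tiles. A sketch using the same min/max/t/j/row/col names as above:

// tile size in local plane coordinates
var size = new THREE.Vector2( max.x - min.x, max.y - min.y );

mesh.geometry.faceVertexUvs[ 0 ] = mesh.geometry.faces.map( function ( face ) {
    return [ face.a, face.b, face.c ].map( function ( vi ) {
        var v = mesh.geometry.vertices[ vi ];
        var localU = ( v.x - min.x ) / size.x; // 0..1 across this tile
        var localV = ( v.y - min.y ) / size.y;
        // t = tile column, j = tile row; depending on how the tiles are laid out,
        // j may need to be flipped, e.g. ( row - 1 - j + localV ) / row
        return new THREE.Vector2( ( t + localU ) / col, ( j + localV ) / row );
    } );
} );
mesh.geometry.uvsNeedUpdate = true;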

Dynamically change point color in Three.js PointCloud

I'm trying to render a matrix of points in Three.js, but I need to treat each particle in the cloud as an individual "pixel" whose color I can change on the fly. I figured out how to render the point cloud and can set the initial colors, but I cannot figure out how to change the color of each point after it's set.
I'm generating the point cloud like this:
function generateRegularPointcloud( color, width, length ) {
    var geometry = new THREE.Geometry();
    var numPoints = width * length;
    var colors = [];
    var k = 0;

    for ( var i = 0; i < width; i++ ) {
        for ( var j = 0; j < length; j++ ) {
            var u = i / width;
            var v = j / length;
            var x = u - 0.5;
            var y = 0;
            var z = v - 0.5;
            var v = new THREE.Vector3( x, y, z );
            var intensity = ( y + 0.1 ) * 7;
            colors[ 3 * k ] = color.r * intensity;
            colors[ 3 * k + 1 ] = color.g * intensity;
            colors[ 3 * k + 2 ] = color.b * intensity;
            geometry.vertices.push( v );
            colors[ k ] = ( color.clone().multiplyScalar( intensity ) );
            k++;
        }
    }

    geometry.colors = colors;
    geometry.computeBoundingBox();

    var material = new THREE.PointCloudMaterial( { size: pointSize, vertexColors: THREE.VertexColors } );
    var pointcloud = new THREE.PointCloud( geometry, material );

    return pointcloud;
}
My basic code is here: http://jsfiddle.net/dg34sbsk/
Any idea how to change each point color separately and dynamically? (Data for the colors will be coming in from a web service).
You can change the values directly with pointclouds[0].geometry.colors = ... and after that set pointclouds[0].geometry.colorsNeedUpdate = true.
To set a single point's color, just set the corresponding array element, e.g. pointclouds[0].geometry.colors[22] = new THREE.Color("rgb(255,0,0)");.
See this: http://jsfiddle.net/aboutqx/dg34sbsk/2/ . Click and you will see the color of one point change.
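As a rough sketch of how incoming data could be applied (the updateColors helper and the idea that the web service delivers an array of THREE.Color values are my own assumptions, not part of the answer):

function updateColors( pointcloud, newColors ) {
    // newColors: array of THREE.Color, one per point, e.g. built from the web service data
    var colors = pointcloud.geometry.colors;
    for ( var i = 0; i < colors.length && i < newColors.length; i++ ) {
        colors[ i ].copy( newColors[ i ] );
    }
    // tell three.js to re-upload the vertex colors on the next render
    pointcloud.geometry.colorsNeedUpdate = true;
}

// usage with the fiddle's setup:
// updateColors( pointclouds[ 0 ], colorsFromService );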

Three.js r60 Height Map

So I have a heightmap system which works well enough; however, since three.js updated to r60, which removed the Face4 object, I am having issues.
My code is something like this:
this.buildGeometry = function () {
    var geo, len, i, f, y;

    geo = new THREE.PlaneGeometry( 3000, 3000, 128, 128 );
    geo.dynamic = true;
    geo.applyMatrix( new THREE.Matrix4().makeRotationX( -Math.PI / 2 ) );

    this.getHeightData( 'heightmap.png', function ( data ) {
        len = geo.faces.length;
        for ( i = 0; i < len; i++ ) {
            f = geo.faces[ i ];
            if ( f ) {
                y = ( data[ i ].r + data[ i ].g + data[ i ].b ) / 2;
                geo.vertices[ f.a ].y = y;
                geo.vertices[ f.b ].y = y;
                geo.vertices[ f.c ].y = y;
                geo.vertices[ f.d ].y = y;
            }
        }
        geo.computeFaceNormals();
        geo.computeCentroids();
        mesh = new THREE.Mesh( geo, new THREE.MeshBasicMaterial( { color: 0xff0000 } ) );
        scene.add( mesh );
    } );
};
This worked well since each face was represented by one pixel. How is this done now that the faces are all triangulated?
Similarly, I use image maps for model positioning as well: each pixel matches the respective Face4 and a desired mesh is placed at its centroid. How can this be accomplished now?
I really miss being able to update the library and do not want to be stuck on r59 anymore =[
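A side note on the centroid part (my own sketch, not from the answer that follows): in r60 each former Face4 becomes two consecutive Face3 triangles, so pixel i corresponds to faces[2 * i] and faces[2 * i + 1], and a centroid can still be computed from a face's vertices:

// centroid of a Face3, computed from its three vertices
function faceCentroid( geometry, face ) {
    return new THREE.Vector3()
        .add( geometry.vertices[ face.a ] )
        .add( geometry.vertices[ face.b ] )
        .add( geometry.vertices[ face.c ] )
        .divideScalar( 3 );
}

// e.g. the centre of the quad that pixel i used to map to:
// var quadCenter = faceCentroid( geo, geo.faces[ 2 * i ] )
//     .add( faceCentroid( geo, geo.faces[ 2 * i + 1 ] ) )
//     .multiplyScalar( 0.5 );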
This approach works fine on recent versions (tested on r66).
Notice that genFn returns the height y given the current column and row and the maximum column and row (it's there for testing purposes; you can of course replace it with a proper array lookup or a grayscale image). 64x64 determines the mesh resolution and 1x1 the real-world dimensions.
var genFn = function ( x, y, X, Y ) {
    var dx = x / X;
    var dy = y / Y;
    return ( Math.sin( dx * 15 ) + Math.cos( dy * 5 ) ) * 0.05 + 0.025;
};

var geo = new THREE.PlaneGeometry( 1, 1, 64, 64 );
geo.applyMatrix( new THREE.Matrix4().makeRotationX( -Math.PI / 2 ) );

var iz, ix,
    gridZ1 = geo.widthSegments + 1,
    gridX1 = geo.heightSegments + 1;

for ( iz = 0; iz < gridZ1; ++iz ) {
    for ( ix = 0; ix < gridX1; ++ix ) {
        geo.vertices[ ix + gridX1 * iz ].y = genFn( ix, iz, gridX1, gridZ1 );
    }
}

geo.computeFaceNormals();
geo.computeVertexNormals();
geo.computeCentroids();

var mesh = new THREE.Mesh( geo, mtl );
scene.add( mesh );
