How to render edges as cylinders? - three.js

I've loaded an OBJ polyhedron and I've used EdgesGeometry() to extract its edges:
var edges = new THREE.LineSegments(new THREE.EdgesGeometry(child.geometry), new THREE.LineBasicMaterial( {color: 0x000000}) );
But I would like to render each edge as a cylinder with configurable radius. Something like this:

A customizable solution you can start from:
var edgesGeom = new THREE.EdgesGeometry(dodecahedronGeom); //EdgesGeometry is a BufferGeometry
var thickness = 0.25; // radius of a cylinder
for (var i = 0; i < edgesGeom.attributes.position.count - 1; i+=2){
// when you know that it's BufferGeometry, you can find vertices in this way
var startPoint = new THREE.Vector3(
edgesGeom.attributes.position.array[i * 3 + 0],
edgesGeom.attributes.position.array[i * 3 + 1],
edgesGeom.attributes.position.array[i * 3 + 2]
);
var endPoint = new THREE.Vector3(
edgesGeom.attributes.position.array[i * 3 + 3],
edgesGeom.attributes.position.array[i * 3 + 4],
edgesGeom.attributes.position.array[i * 3 + 5]
);
var cylLength = new THREE.Vector3().subVectors(endPoint, startPoint).length(); // find the length of a cylinder
var cylGeom = new THREE.CylinderBufferGeometry(thickness, thickness, cylLength, 16);
cylGeom.translate(0, cylLength / 2, 0);
cylGeom.rotateX(Math.PI / 2);
var cyl = new THREE.Mesh(cylGeom, new THREE.MeshLambertMaterial({color: "blue"}));
cyl.position.copy(startPoint);
cyl.lookAt(endPoint); // and do the trick with orientation
scene.add(cyl);
}
jsfiddle example

Here's a version of @prisoner849's excellent answer which returns a merged BufferGeometry for just the cylinders:
/** Convert an edges geometry to a set of cylinders w/ the given thickness. */
function edgesToCylinders(edgesGeometry, thickness) {
const {position} = edgesGeometry.attributes;
const {array, count} = position;
const r = thickness / 2;
const geoms = [];
for (let i = 0; i < count * 3 - 1; i += 6) {
const a = new THREE.Vector3(array[i], array[i + 1], array[i + 2]);
const b = new THREE.Vector3(array[i + 3], array[i + 4], array[i + 5]);
const vec = new THREE.Vector3().subVectors(b, a);
const len = vec.length();
const geom = new THREE.CylinderBufferGeometry(r, r, len, 8);
geom.translate(0, len / 2, 0);
geom.rotateX(Math.PI / 2);
geom.lookAt(vec);
geom.translate(a.x, a.y, a.z);
geoms.push(geom);
}
return THREE.BufferGeometryUtils.mergeBufferGeometries(geoms);
}
Usage:
const edgesGeom = new THREE.EdgesGeometry(dodecahedronGeom);
const cylindersGeom = edgesToCylinders(edgesGeom, 0.25);
const cylinders = new THREE.Mesh(
cylindersGeom,
new THREE.MeshLambertMaterial({color: "blue"})
);
scene.add(cylinders);
See updated fiddle.
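Note for newer three.js releases: the Buffer-suffixed geometry classes were merged into their plain counterparts and BufferGeometryUtils.mergeBufferGeometries was renamed to mergeGeometries, so the helper above would look roughly like this (a sketch assuming ES module imports, not tested against every release):
import * as THREE from 'three';
import * as BufferGeometryUtils from 'three/addons/utils/BufferGeometryUtils.js';

/** Same idea as above: one merged geometry containing a cylinder per edge. */
function edgesToCylinders(edgesGeometry, thickness) {
  const {array, count} = edgesGeometry.attributes.position;
  const r = thickness / 2;
  const geoms = [];
  for (let i = 0; i < count * 3; i += 6) {
    const a = new THREE.Vector3(array[i], array[i + 1], array[i + 2]);
    const b = new THREE.Vector3(array[i + 3], array[i + 4], array[i + 5]);
    const vec = new THREE.Vector3().subVectors(b, a);
    const len = vec.length();
    // CylinderBufferGeometry is now just CylinderGeometry
    const geom = new THREE.CylinderGeometry(r, r, len, 8);
    geom.translate(0, len / 2, 0);
    geom.rotateX(Math.PI / 2);
    geom.lookAt(vec);
    geom.translate(a.x, a.y, a.z);
    geoms.push(geom);
  }
  return BufferGeometryUtils.mergeGeometries(geoms);
}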

Related

How to convert `geometry.faceVertexUvs` in `THREE.js` current version?

const geometry = new THREE.SphereGeometry(100, 64, 64, Math.PI / 2, Math.PI, 0, Math.PI);
const uvs = geometry.faceVertexUvs[0];
const axis = 'x';
for (let i = 0; i < uvs.length; i += 1) {
for (let j = 0; j < 3; j += 1) {
uvs[i][j][axis] *= 0.5;
}
}
geometry.faceVertexUvs is deprecated.
How do I convert this code to the current THREE.js version?
The current approach uses BufferGeometry instead of Geometry. It stores each vertex attribute (position, normal, uv) in flat arrays, so you can fetch the UVs with BufferGeometry.getAttribute("uv").
Once you've retrieved the attribute, you'll have a BufferAttribute, and you can access its .array property to modify each individual component:
const geometry = new THREE.SphereGeometry(100, 64, 64, Math.PI / 2, Math.PI, 0, Math.PI);
const uvAttribute = geometry.getAttribute("uv");
const uvArray = uvAttribute.array;
// Loop through all UVs
// UVs have 2 components, so we jump by 2 on each iteration
for (let i = 0; i < uvArray.length; i += 2) {
uvArray[i + 0] = uvX; // uvX, uvY: whatever new values you want to assign
uvArray[i + 1] = uvY;
}
// Now we set the update flag to true so the GPU gets the new values
uvAttribute.needsUpdate = true;
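An equivalent way to write the original "scale every U by 0.5" loop with the BufferAttribute helper methods (a minimal sketch, assuming the same sphere geometry as in the question):
const geometry = new THREE.SphereGeometry(100, 64, 64, Math.PI / 2, Math.PI, 0, Math.PI);
const uvAttribute = geometry.getAttribute("uv");
for (let i = 0; i < uvAttribute.count; i++) {
  // getX/setX read and write the first component (u) of each 2-component UV
  uvAttribute.setX(i, uvAttribute.getX(i) * 0.5);
}
uvAttribute.needsUpdate = true;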

Three.js Buffer Geometry Animation

I have a Three.js scene that basically spreads out a bunch of triangles over a given area.
geometry = new THREE.BufferGeometry();
geometry.dynamic = true;
positions = new Float32Array(triangles * 3 * 3);
const normals = new Float32Array(triangles * 3 * 3);
const colors = new Float32Array(triangles * 3 * 3);
const color = new THREE.Color();
const n = 200,
n2 = n / 2; // triangle's spread distance
const d = 1000,
d2 = d / 2; // individual triangle size
const pA = new THREE.Vector3();
const pB = new THREE.Vector3();
const pC = new THREE.Vector3();
const cb = new THREE.Vector3();
const ab = new THREE.Vector3();
for (let i = 0; i < positions.length; i += 9) {
// position
const x = Math.random() * n - n2;
const y = Math.random() * n - n2;
const z = Math.random() * n - n2;
const ax = x + Math.random() * d - d2;
const ay = y + Math.random() * d - d2;
const az = z + Math.random() * d - d2;
const bx = x + Math.random() * d - d2;
const by = y + Math.random() * d - d2;
const bz = z + Math.random() * d - d2;
const cx = x + Math.random() * d - d2;
const cy = y + Math.random() * d - d2;
const cz = z + Math.random() * d - d2;
positions[i] = ax;
positions[i + 1] = ay;
positions[i + 2] = az;
positions[i + 3] = bx;
positions[i + 4] = by;
positions[i + 5] = bz;
positions[i + 6] = cx;
positions[i + 7] = cy;
positions[i + 8] = cz;
if (i === 0) console.log(positions);
// flat face normals
pA.set(ax, ay, az);
pB.set(bx, by, bz);
pC.set(cx, cy, cz);
cb.subVectors(pC, pB);
ab.subVectors(pA, pB);
cb.cross(ab);
cb.normalize();
const nx = cb.x;
const ny = cb.y;
const nz = cb.z;
normals[i] = nx;
normals[i + 1] = ny;
normals[i + 2] = nz;
normals[i + 3] = nx;
normals[i + 4] = ny;
normals[i + 5] = nz;
normals[i + 6] = nx;
normals[i + 7] = ny;
normals[i + 8] = nz;
// colors
const vx = x / n + 0.5;
const vy = y / n + 0.5;
const vz = z / n + 0.5;
color.setRGB(vx, vy, vz);
colors[i] = color.r;
colors[i + 1] = color.g;
colors[i + 2] = color.b;
colors[i + 3] = color.r;
colors[i + 4] = color.g;
colors[i + 5] = color.b;
colors[i + 6] = color.r;
colors[i + 7] = color.g;
colors[i + 8] = color.b;
}
geometry.setAttribute(
"position",
new THREE.BufferAttribute(positions, 3)
);
geometry.setAttribute("normal", new THREE.BufferAttribute(normals, 3));
geometry.setAttribute("color", new THREE.BufferAttribute(colors, 3));
geometry.computeBoundingSphere();
let material = new THREE.MeshPhongMaterial({
color: 0xaaaaaa,
specular: 0xffffff,
shininess: 250,
side: THREE.DoubleSide,
vertexColors: true,
});
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
What I would like to do is have the triangles start close together, and expand in every direction randomly.
How can I create an animation loop that updates the triangles position?
I have been using this example code from three.js's website:
https://github.com/mrdoob/three.js/blob/master/examples/webgl_interactive_buffergeometry.html
Edit: I was able to make the triangles expand with this.mesh.scale.z += 0.005, but the triangles themselves grow as well. Is there a way to hold the triangle size the same but change the area they cover?
You could use morph targets.
Made a codepen available here:
https://codepen.io/cdeep/pen/WNENOmK
The code is inspired from the three.js example:
https://threejs.org/examples/?q=morph#webgl_morphtargets.
While assigning the initial position for each vertex, also assign the final position of where the vertex must end up after expanding.
In essence,
const positions = new Float32Array( triangles * 3 * 3 );
const morphedPositions = new Float32Array(triangles * 3 * 3);
...
...
geometry.morphAttributes.position = [];
geometry.morphAttributes.position[ 0 ] = new THREE.BufferAttribute( morphedPositions, 3);
mesh.morphTargetInfluences[ 0 ] = morphValue;
Animate morphValue to control how far the position attribute has moved toward morphedPositions; a minimal loop for that is sketched below.
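A minimal sketch of such a loop, assuming the mesh, renderer, scene and camera set up earlier (speed is just an illustrative constant, not from the codepen):
const clock = new THREE.Clock();
const speed = 0.25; // fraction of the full expansion per second (illustrative)
let morphValue = 0;

function animate() {
  requestAnimationFrame(animate);
  // ramp morphValue from 0 (triangles packed together) to 1 (fully spread out)
  morphValue = Math.min(1, morphValue + clock.getDelta() * speed);
  mesh.morphTargetInfluences[0] = morphValue;
  renderer.render(scene, camera);
}
animate();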

How to make a not squared texture fit in a "background-size:cover" way to a geometry plane in Three.js?

I want my texture to have the same behaviour as the "background-size: cover" CSS property.
I'd like to work with UV coordinates.
I looked at this answer and started to work on a solution: Three.js Efficiently Mapping Uvs to Plane
I'm trying to give my planes the same dimensions and positions as some divs in my DOM.
This is what I want:
And this is the result I get with this code: the dimensions and position are good, and the ratio of my texture looks good too, but it seems like there's a scale issue:
let w = domElmt.clientWidth / window.innerHeight;
let h = domElmt.clientHeight / window.innerHeight;
geometry = new THREE.PlaneGeometry(w, h);
var uvs = geometry.faceVertexUvs[ 0 ];
uvs[ 0 ][ 0 ].set( 0, h );
uvs[ 0 ][ 1 ].set( 0, 0 );
uvs[ 0 ][ 2 ].set( w, h );
uvs[ 1 ][ 0 ].set( 0, 0 );
uvs[ 1 ][ 1 ].set( w, 0 );
uvs[ 1 ][ 2 ].set( w, h );
tex = new THREE.TextureLoader().load('image.jpg');
tex.wrapS = tex.wrapT = THREE.RepeatWrapping;
material = new THREE.MeshBasicMaterial( { map: tex } );
mesh = new THREE.Mesh( geometry, material );
Should I play with the repeat attribute of my texture, or can I fully achieve this behaviour using UVs? Thank you
https://en.wikipedia.org/wiki/UV_mapping
UV mapping values range from 0 to 1, inclusive, and represent a percentage mapping across your texture image.
You're using a ratio of the div size vs the window size, which is likely much smaller than 1, and would result in the "zoomed in" effect you're seeing.
For example, if your w and h result in the value 0.5, then the furthest top-right corner of the mapped texture will be the exact center of the image.
background-size: cover:
Scales the image as large as possible without stretching the image. If the proportions of the image differ from the element, it is cropped either vertically or horizontally so that no empty space remains.
In other words, it will scale the image based on the short side and crop the rest. So let's assume you have a nice 128x512 image and a 64x64 space. cover would scale the width of 128 down to 64 (a scale factor of 0.5), which means only 64 / 0.5 = 128 of the image's 512 vertical pixels remain visible. Now your w would still be 1, but your h will be 128 / 512 = 0.25. Your texture will now fit to the width and crop the height.
You'll need to perform this calculation for each image-to-container size relationship to find the proper UVs, keeping in mind that the scaling is always relative to the short side.
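For illustration, that per-image calculation could be sketched as a small helper (the function name and return shape are mine, not from the post):
// Given the container (plane) size and the image size, return the fraction of
// the image that stays visible on each axis for a "cover" fit.
function coverUvScale(containerW, containerH, imageW, imageH) {
  // "cover" scales by the larger container/image ratio, then crops the other axis
  const scale = Math.max(containerW / imageW, containerH / imageH);
  return {
    u: containerW / (imageW * scale), // visible fraction of the image width
    v: containerH / (imageH * scale), // visible fraction of the image height
  };
}

// The 128x512 image in a 64x64 space from the example above:
// scale = 0.5, so coverUvScale(64, 64, 128, 512) => { u: 1, v: 0.25 }
These fractions are the w/h values discussed above; centring the crop means offsetting each axis by (1 - fraction) / 2, which is exactly what the repeat/offset answer below does.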
You don't need to generate UVs; you can just use texture.repeat and texture.offset:
const aspectOfPlane = planeWidth / planeHeight;
const aspectOfImage = image.width / image.height;
let yScale = 1;
let xScale = aspectOfPlane / aspectOfImage;
if (xScale > 1) { // it doesn't cover so based on x instead
xScale = 1;
yScale = aspectOfImage / aspectOfPlane;
}
texture.repeat.set(xScale, yScale);
texture.offset.set((1 - xScale) / 2, (1 - yScale) / 2);
'use strict';
/* global THREE */
async function main() {
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas});
const fov = 75;
const aspect = 2; // the canvas default
const near = 0.1;
const far = 50;
const camera = new THREE.PerspectiveCamera(fov, aspect, near, far);
camera.position.z = 4;
const scene = new THREE.Scene();
const loader = new THREE.TextureLoader();
function loadTexture(url) {
return new Promise((resolve, reject) => {
loader.load(url, resolve, undefined, reject);
});
}
const textures = await Promise.all([
"https://i.imgur.com/AyOufBk.jpg",
"https://i.imgur.com/ZKMnXce.png",
"https://i.imgur.com/TSiyiJv.jpg",
"https://i.imgur.com/v38pV.jpg",
].map(loadTexture));
const geometry = new THREE.PlaneBufferGeometry(1, 1);
const material = new THREE.MeshBasicMaterial({map: textures[0]});
const planeMesh = new THREE.Mesh(geometry, material);
scene.add(planeMesh);
let texIndex = 0;
function setTexture() {
const texture = textures[texIndex];
texIndex = (texIndex + 1) % textures.length;
// pick a random width and height for the plane
const planeWidth = rand(1, 4);
const planeHeight = rand(1, 4);
planeMesh.scale.set(planeWidth, planeHeight, 1);
const image = texture.image;
const aspectOfPlane = planeWidth / planeHeight;
const aspectOfImage = image.width / image.height;
let yScale = 1;
let xScale = aspectOfPlane / aspectOfImage;
if (xScale > 1) { // it doesn't cover so based on x instead
xScale = 1;
yScale = aspectOfImage / aspectOfPlane;
}
texture.repeat.set(xScale, yScale);
texture.offset.set((1 - xScale) / 2, (1 - yScale) / 2);
material.map = texture;
}
setTexture();
setInterval(setTexture, 1000);
function resizeRendererToDisplaySize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
function render(time) {
time *= 0.001;
if (resizeRendererToDisplaySize(renderer)) {
const canvas = renderer.domElement;
camera.aspect = canvas.clientWidth / canvas.clientHeight;
camera.updateProjectionMatrix();
}
renderer.render(scene, camera);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
function rand(min, max) {
if (max === undefined) {
max = min;
min = 0;
}
return Math.random() * (max - min) + min;
}
main();
body {
margin: 0;
}
#c {
width: 100vw;
height: 100vh;
display: block;
}
<canvas id="c"></canvas>
<script src="https://threejsfundamentals.org/threejs/resources/threejs/r105/three.min.js"></script>
texture.repeat and texture.offset are really just applied to the UVs so if you really want UVs it's
newU = u * repeat.x + offset.x;
newV = v * repeat.y + offset.y;
so using the code above
offsetX = (1 - xScale) / 2;
offsetY = (1 - yScale) / 2;
u0 = offsetX;
v0 = offsetY;
u1 = offsetX + xScale;
v1 = offsetY + yScale;
so
var uvs = geometry.faceVertexUvs[ 0 ];
uvs[ 0 ][ 0 ].set( u0, v1 );
uvs[ 0 ][ 1 ].set( u0, v0 );
uvs[ 0 ][ 2 ].set( u1, v1 );
uvs[ 1 ][ 0 ].set( u0, v0 );
uvs[ 1 ][ 1 ].set( u1, v0 );
uvs[ 1 ][ 2 ].set( u1, v1 );

buffergeometry with more than 40k vertices

I was loading more than 40k vertices in a BufferGeometry.
It worked, but the geometry was not rendered entirely. After decomposing the geometry into separate chunks of 40k vertices, it worked. I was using version r86.
Is this something related to the hardware I am using, or to three.js?
Please find below an adaptation of the code I used to create the BufferGeometry (normalgeom is a Geometry passed as a parameter):
var chunkLength = normalgeom.vertices.length;
var positions = new Float32Array(chunkLength * 3);
var indices = new Uint16Array(chunkLength);
for (var i = 0; i < chunkLength; i++)
{
var posInNormalGeom = i;
positions[i * 3] = normalgeom.vertices[posInNormalGeom].x;
positions[i * 3 + 1] = normalgeom.vertices[posInNormalGeom].y;
positions[i * 3 + 2] = normalgeom.vertices[posInNormalGeom].z;
indices[i] = i;
}
var buffGeom = new THREE.BufferGeometry();
buffGeom.addAttribute('position', new THREE.BufferAttribute(positions, 3));
buffGeom.setIndex(new THREE.BufferAttribute(new Uint16Array(indices), 1));
Afterwards, I was creating LineSegments from the buffer
var lineSegs = new THREE.LineSegments(buffGeom, material);
scene.add(lineSegs);
Update: after a comment from @TheJim01, the code for splitting the geometry is the following:
function makebuffered(normalgeom)
{
var retArrays = new Array();
var chunkLength = normalgeom.vertices.length;
console.log("nr vertices:" + chunkLength);
var remainingVertices = chunkLength;
var processedVertices = 0;
if(chunkLength > 40000)
{
chunkLength = 40000;
}
while(remainingVertices > 0)
{
if(remainingVertices <= chunkLength)
{
chunkLength = remainingVertices;
}
var positions = new Float32Array(chunkLength * 3);
var indices = new Uint32Array(chunkLength);
for (var i = 0; i < chunkLength; i++)
{
var posInNormalGeom = processedVertices + i;
positions[i * 3] = normalgeom.vertices[posInNormalGeom].x;
positions[i * 3 + 1] = normalgeom.vertices[posInNormalGeom].y;
positions[i * 3 + 2] = normalgeom.vertices[posInNormalGeom].z;
indices[i] = i;
}
var buffGeom = new THREE.BufferGeometry();
buffGeom.addAttribute('position', new THREE.BufferAttribute(positions, 3));
buffGeom.setIndex(new THREE.BufferAttribute(new Uint32Array(indices), 1));
retArrays.push(buffGeom);
remainingVertices -= chunkLength;
processedVertices += chunkLength;
}
return retArrays;
}
You have 847666 vertices, which leads to 847666 indices, which is well beyond the bounds of a Uint16Array (max = 65535).
To alleviate this, use Uint32Array instead. All modern browsers (of any consequence) support 32-bit arrays for WebGL buffers.
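Mirroring the updated splitting code above, the only change the original snippet needs is the typed array backing the index:
var indices = new Uint32Array(chunkLength); // was Uint16Array, which tops out at 65535
for (var i = 0; i < chunkLength; i++) {
  indices[i] = i;
}
buffGeom.setIndex(new THREE.BufferAttribute(indices, 1));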

Three.js r60 Height Map

So I have a heightmap system which works well enough; however, since THREE.js updated to r60 and removed the Face4 object, I am having issues.
My code is something like this:
this.buildGeometry = function(){
var geo, len, i, f, y;
geo = new THREE.PlaneGeometry(3000, 3000, 128, 128);
geo.dynamic = true;
geo.applyMatrix(new THREE.Matrix4().makeRotationX(-Math.PI / 2));
this.getHeightData('heightmap.png', function (data) {
len = geo.faces.length;
for(i=0;i<len;i++){
f = geo.faces[i];
if( f ){
y = (data[i].r + data[i].g + data[i].b) / 2;
geo.vertices[f.a].y = y;
geo.vertices[f.b].y = y;
geo.vertices[f.c].y = y;
geo.vertices[f.d].y = y;
}
}
geo.computeFaceNormals();
geo.computeCentroids();
mesh = new THREE.Mesh(geo, new THREE.MeshBasicMaterial({color:0xff0000}) );
scene.add(mesh);
});
};
This works well since a pixel represents each face. How is this done now that the faces are all triangulated?
Similarly, I use image maps for model positioning: each pixel matches the respective Face4, and a desired mesh is placed at its centroid. How can this be accomplished now?
I really miss being able to update the library and do not want to be stuck in r59 anymore =[
This approach works fine on the recent versions (tested on r66).
Notice that genFn returns the height y given the current col and row and the maxCol and maxRow (it's just for testing purposes; you can of course replace it with a proper array lookup or values read from a grayscale image). 64x64 determines the mesh resolution and 1x1 the real-world dimensions.
var genFn = function(x, y, X, Y) {
var dx = x/X;
var dy = y/Y;
return (Math.sin(dx*15) + Math.cos(dy * 5) ) * 0.05 + 0.025;
};
var geo = new THREE.PlaneGeometry(1, 1, 64, 64);
geo.applyMatrix(new THREE.Matrix4().makeRotationX(-Math.PI / 2));
var iz, ix,
gridZ1 = geo.widthSegments +1,
gridX1 = geo.heightSegments+1;
for (iz = 0; iz < gridZ1; ++iz) {
for (ix = 0; ix < gridX1; ++ix) {
geo.vertices[ ix + gridX1*iz ].y = genFn(ix, iz, gridX1, gridZ1);
}
}
geo.computeFaceNormals();
geo.computeVertexNormals();
geo.computeCentroids();
var mesh = new THREE.Mesh(
geo,
mtl
);
scene.add(mesh);
