THREE.js planeGeometry clipped inside the walls

I want to make a 3D building using Three.js. For example, I made 6 walls and a floor with a checkerboard texture. I used clippingPlanes from wall1 and wall4 to clip the floor:
floor1.material.clippingPlanes = [plane1,plane4];
I built my clipping planes (plane1 and plane4) from my walls (wall1 and wall4). For example, here is the code for my wall4 PlaneGeometry and plane4:
var wallGeometry4 = new THREE.PlaneGeometry(40, Floor_Height, 1, 1);
var wall4 = createMesh(wallGeometry4, "brick_diffuse.jpg", THREE.DoubleSide, 1024, 1024);
unit1.add(wall4);
wall4.position.x = -10;
wall4.position.y = 0;
wall4.position.z = -20;
wall4.rotation.y = 1.5 * Math.PI;
wall4.add(new THREE.EdgesHelper(wall4, 0x000000));
var plane4 = new THREE.Plane();
var normal4 = new THREE.Vector3();
var point4 = new THREE.Vector3();
normal4.set(0, 0, -1).applyQuaternion(wall4.quaternion);
point4.copy(wall4.position);
plane4.setFromNormalAndCoplanarPoint(normal4, point4);
But I see an empty area between wall5 and wall6, because plane4 (the one used for clipping the floor) isn't the same size as wall4; the plane extends across the whole scene. How can I change the size of my plane so it clips correctly? Or is there another way to keep the floor bounded by the walls?

One way to achieve this, as suggested, is to use ShapeGeometry.
When you create your walls, save the x and z coordinates of their start and end points in an array so they form a closed loop of Vector2 points. Then you can create a custom shape from these points using ShapeGeometry.
points = [{x:0,y:0},{x:0,y:10},{x:10,y:10},{x:10,y:0},{x:0,y:0}]
function getShapeFromPoints(points) {
  const shape = new THREE.Shape();
  shape.moveTo(points[0].x, points[0].y);
  for (let i = 1; i < points.length; i++) {
    shape.lineTo(points[i].x, points[i].y);
  }
  return shape;
}
function createPlaneFromPoints(points) {
  const planeMaterial = getPlaneMaterial();
  const shape = getShapeFromPoints(points);
  const geometry = new THREE.ShapeBufferGeometry(shape);
  geometry.rotateX(degreeToRadians(-90));
  const mesh = new THREE.Mesh(geometry, planeMaterial);
  return mesh;
}
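The snippet assumes two helpers, getPlaneMaterial() and degreeToRadians(), that are not part of three.js. A minimal sketch of what they might look like, plus a call to createPlaneFromPoints with the point loop from above (the helper bodies are illustrative assumptions, not from the answer):
// Hypothetical helpers assumed by the snippet above.
function getPlaneMaterial() {
  // Any material works here; a checkerboard texture map could be used instead of a flat color.
  return new THREE.MeshBasicMaterial({ color: 0x888888, side: THREE.DoubleSide });
}
function degreeToRadians(degrees) {
  return degrees * Math.PI / 180;
}
// Usage: build the floor from the wall-corner loop and add it to the scene.
const floor = createPlaneFromPoints(points);
scene.add(floor);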
Hope that helps you!

Related

ShapePath auto closing

I'm having an issue with ShapePath where the shape is automatically closed when it is converted to geometry.
var path = new THREE.ShapePath();
path.moveTo(0, 0);
path.lineTo(0,50);
path.lineTo(50,50);
var shapes = path.toShapes(true);
for ( var j = 0; j < shapes.length; j ++ ) {
  var shape = shapes[ j ];
  console.log(shape);
  var material = new THREE.MeshLambertMaterial( {
    color: Math.random() * 0xffffff
  });
  var geometry = new THREE.ShapeBufferGeometry( shape );
  var mesh = new THREE.Mesh( geometry, material );
  scene.add(mesh);
}
JS Fiddle
What I'm expecting is an L-style shape; instead I'm getting a triangle rendered.
I really just want to extrude the path to a solid L shape.
Any ideas on the best approach?
You can get your intended result if you create an enclosing contour of the L shape, as shown in the following example: https://jsfiddle.net/zbeLk6jw/1/
var path = new THREE.ShapePath();
path.moveTo(0, 0);
path.lineTo(0,50);
path.lineTo(10,50);
path.lineTo(10,10);
path.lineTo(25,10);
path.lineTo(25,0);
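If you then want a solid L rather than a flat shape, one option (a sketch, not part of the fiddle) is to pass the closed shape to ExtrudeGeometry; note that older three.js releases name the extrusion depth option amount, while newer ones use depth:
var shapes = path.toShapes(true);
// Extrude the closed L contour into a solid; use `amount` instead of `depth` on older releases.
var extrudeGeometry = new THREE.ExtrudeGeometry(shapes[0], {
  depth: 10,
  bevelEnabled: false
});
var solidL = new THREE.Mesh(extrudeGeometry, new THREE.MeshLambertMaterial({ color: 0x4488cc }));
scene.add(solidL);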

how to move multiple objects after scene.add(object);

I am using Three.js to create an animation of 100 cubes.
For each cube I set its geometry and material and then add it to the scene with:
scene.add(cube);
I have been storing the positions of the cubes in three arrays: cube_x[99] cube_y[99] cube_z[99]
I do not know how to tell the renderer their positions as they are modified. The following works for one cube when I have only one cube in the scene:
cube.position.x=300;
cube.position.y=000;
I tried making cube an array to hold the xyz positions of 100 cubes, but I cannot seem to call scene.add(cube[i]);
How can I update the positions of 100 cubes after scene.add(cube)?
How do I let three.js know which cube I am trying to move?
Thanks - javascript beginner
You would basically store a reference to each cube in an array:
// reference array
var cubes = [];
// create cube mesh
var cube = new THREE.Mesh(new THREE.BoxGeometry(), someMaterial);
// push a reference to array
cubes.push(cube);
// add same cube to scene
scene.add(cube);
Now you can iterate over each cube and move it:
cubes.forEach(function(cube) {
cube.position.z -= 0.1; // move for example in Z direction
});
You would do this within your animation loop.
// Setup a simple demo scene
var scene = new THREE.Scene();
// Store our cubes somewhere; maxDist is used for random generation, camera clipping and position reset
var cubes = [], maxDist = 500, r = Math.random, materials = [];
var cam = new THREE.PerspectiveCamera(40, innerWidth / innerHeight, 0.1, maxDist);
var renderer = new THREE.WebGLRenderer({antialias:true});
renderer.setPixelRatio(devicePixelRatio);
renderer.setSize(innerWidth, innerHeight);
document.body.appendChild(renderer.domElement);
for(var i = 0; i < 7; i++) materials.push(new THREE.MeshBasicMaterial({color: (r()*0xffffff)&0xffffff|0x999999}));
// Add and store some cubes
for(i = 0; i < 3000; i++) {
var cube = new THREE.Mesh(new THREE.BoxBufferGeometry(), materials[i % materials.length]);
cubes.push(cube); // store to our array
cube.position.set(r()*maxDist*2-maxDist, r()*maxDist*2-maxDist, r()*maxDist*2-maxDist); // random positions
}
scene.add(...cubes); //es6, add to scene
cam.position.z = maxDist;
// Animate cubes
function loop(time) {
for(i = 0, cube; i < cubes.length; i++) { // for each cube do:
cube = cubes[i];
cube.position.z -= 1; // move on Z in this demo
if (cube.position.z < -maxDist) cube.position.z = maxDist; // reset position for demo
}
cam.rotation.z = Math.sin(time*0.0003); // add some camera action
cam.rotation.y = Math.sin(time*0.0005)*0.3; // add some camera action
renderer.render(scene, cam); // render everything
requestAnimationFrame(loop)
};
requestAnimationFrame(loop)
body {margin:0;overflow:hidden}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js"></script>

Merge geometries, but use a single material

Somewhat new to Three.js and 3d libraries in general.
I merged two geometries (a quarter cylinder and a plane) using this code:
var planeGeo = new THREE.PlaneGeometry(planeW, planeD / 2, 199, 399);
var planeMesh = new THREE.Mesh(planeGeo);
planeMesh.updateMatrix();
var cylinderGeo = new THREE.CylinderGeometry(100, 100, planeW, 199, 399, true, 0, Math.PI / 2);
cylinderGeo.rotateZ(Math.PI / 2).translate(0, 200, -100);
var cylinderMesh = new THREE.Mesh(cylinderGeo);
cylinderMesh.updateMatrix();
var singleGeometry = new THREE.Geometry();
singleGeometry.merge(planeMesh.geometry, planeMesh.matrix);
singleGeometry.merge(cylinderMesh.geometry, cylinderMesh.matrix);
var testmaterial = new THREE.MeshPhongMaterial({ color: 0x666666 });
mesh = new THREE.Mesh(singleGeometry, testmaterial);
scene.add(mesh);
I then would like to use a single material (png) over the entire thing. This code doesn't work:
textureLoader.load('data/test.png', function (texture) {
material = new THREE.MeshLambertMaterial({
map: texture
});
});
Later in the block with the merging...
mesh = new THREE.Mesh(singleGeometry, material);
scene.add(mesh);
This results in:
I would like the end result to be a single PNG draped over the entire merged geometry, but I can't find anything that suggests this is a normal thing to do. Is there a better way to achieve that result than merging geometries? Or am I just looking in the wrong places?
A poor man's solution to achieve this, using the shape supplied in your post, is the following:
https://jsfiddle.net/87wg5z27/44/
Using code from this answer: https://stackoverflow.com/a/20774922/4977165
It sets the UVs based on the bounding box of the geometry, leaving out the z-coordinate (= 0). That's why the texture is a little stretched at the top; you can correct that manually, or maybe it's sufficient for you.
geometry.computeBoundingBox();
var max = geometry.boundingBox.max,
    min = geometry.boundingBox.min;
var offset = new THREE.Vector2(0 - min.x, 0 - min.y);
var range = new THREE.Vector2(max.x - min.x, max.y - min.y);
var faces = geometry.faces;
geometry.faceVertexUvs[0] = [];
for (var i = 0; i < faces.length; i++) {
  var v1 = geometry.vertices[faces[i].a],
      v2 = geometry.vertices[faces[i].b],
      v3 = geometry.vertices[faces[i].c];
  geometry.faceVertexUvs[0].push([
    new THREE.Vector2((v1.x + offset.x) / range.x, (v1.y + offset.y) / range.y),
    new THREE.Vector2((v2.x + offset.x) / range.x, (v2.y + offset.y) / range.y),
    new THREE.Vector2((v3.x + offset.x) / range.x, (v3.y + offset.y) / range.y)
  ]);
}
geometry.uvsNeedUpdate = true;
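Applied to the question's code, the remapping above would run on singleGeometry after the two merge() calls; building the mesh inside the loader callback also avoids using the material before the texture has loaded. A sketch using the question's variable names, where remapUVsByBoundingBox is a hypothetical wrapper around the code above:
// remapUVsByBoundingBox stands for the bounding-box UV code shown above, wrapped in a function.
remapUVsByBoundingBox(singleGeometry);
textureLoader.load('data/test.png', function (texture) {
  var material = new THREE.MeshLambertMaterial({ map: texture });
  mesh = new THREE.Mesh(singleGeometry, material);
  scene.add(mesh); // created in the callback, so the map is loaded before first render
});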

THREE.js (r60) PointLight not reflected by a special plane object (heightmapped from image)

UPDATE: The cause of the problem has been found; see the Update section at the end of the question.
I have a complex app using THREE.js (r60) which adds a special plane object to the main scene. The plane geometry is determined by heightmapping from an internally-supplied base64 uri image (size 16x16, 32x32 or 64x64 pixels). The scene has two static lights (ambient and directional) and one moveable point light which switches on and off.
In the complex app the point light is not reflected by the plane object. (Point light is toggled by pressing "R" key or button).
I have made a first JSFiddle example using THREE.js latest version (r70) where the lights work fine.
[Update] I have now made a second JSFiddle example using the older THREE.js library (r60); it also works OK.
I suspect the problem in the complex app (r60) may have something to do with system capacity and/or timing/sequencing. Capacity is definitely an issue because other, simpler scene objects (boxes and cylinders) show individual responses or non-responses to the point light that vary from one run of the app to the next, seemingly depending on the overall level of system activity (CPU, memory usage). These simpler objects may reflect in one run but not in the next. But the heightmapped plane object is consistently non-reflective to the point light. These behaviors are observed on (i) a Win7 laptop and (ii) an Android KitKat tablet.
The heightmapping process may be part of the cause. I say this because when I comment out the heightmapped plane and activate a simple similar plane object (with randomly assigned z-levels) the latter plane behaves as expected (i.e. it reflects point light).
I guess that the usual approach now would be to upgrade my complex app to r70 (not a trivial step) and then start disabling chunks of the app to narrow down the cause. However it may be that the way in which heightmapping is implemented (e.g. with a callback) is a factor in explaining the failure of the heightmapped plane to reflect point light.
[RE-WRITTEN] So I would be grateful if anyone could take a look at the code in the correctly working, previously cited (r70) JSFiddle example and point out any glaring design faults which (if applied in more complex, heavily loaded apps) might lead to failure of the height-mapped plane to reflect point light.
Full code (JavaScript only, not HTML or CSS) of the (r70) JSFiddle:
//... Heightmap from Image file
//... see http://danni-three.blogspot.co.uk/2013/09/threejs-heightmaps.html
var camera, scene, renderer;
var lpos_x = -60,lpos_y = 20,lpos_z = 100;
var mz = 1;
var time = 0, dt = 0;
var MyPlane, HPlane;
base64_imgData = "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEAeAB4AAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAASAAAATgAAAAAAAAB4AAAAAQAAAHgAAAABUGFpbnQuTkVUIHYzLjUuMTAA/9sAQwANCQoLCggNCwsLDw4NEBQhFRQSEhQoHR4YITAqMjEvKi4tNDtLQDQ4RzktLkJZQkdOUFRVVDM/XWNcUmJLU1RR/9sAQwEODw8UERQnFRUnUTYuNlFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFRUVFR/8AAEQgAIAAgAwEiAAIRAQMRAf/EAB8AAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKC//EALUQAAIBAwMCBAMFBQQEAAABfQECAwAEEQUSITFBBhNRYQcicRQygZGhCCNCscEVUtHwJDNicoIJChYXGBkaJSYnKCkqNDU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6g4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2drh4uPk5ebn6Onq8fLz9PX29/j5+v/EAB8BAAMBAQEBAQEBAQEAAAAAAAABAgMEBQYHCAkKC//EALURAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEHYXETIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX29/j5+v/aAAwDAQACEQMRAD8A19Z8SXdu5KOMKxBAFOi1uTUdNJguxFcAchv6Vz2so/mzKc8sc8VX8MyQjUVWYNweCO9AEsOuX8s+xrqWQh8DJ4rsJCphSN3Czsm7ArG1bT7fSFe7EZJzuX3J6VQsdRnvryJ2+/wooA6O501JY7yRh0U8Vyg1WzsghhsAkqnBO4nd713t8NsEqIhJfqRXEahotxPJlISDnOaANzWvL1rR4JiTG6ryorG0C2aDUI02lhu6kVZ02wvLVSpYtu65yRXQaZYvDL5rhRx2oA//2Q==";
init();
animate();
//==================================================================
function init() {
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 10);
camera.position.x = 1300;
camera.position.y = 400;
camera.position.z = 0;
camera.lookAt(0, 0, 0);
scene.add(camera);
scene.add(new THREE.AmbientLight(0x001900));
SunLight = new THREE.DirectionalLight(0xff0000,.3,20000);//...color, intensity, range.
SunLight.position.set(0, 3000, -8000);
scene.add(SunLight);
//POINT LIGHT
PL_color = 0x0000ff;
PL_intensity = 10;
PL_range_to_zero_intensity = 1200;
PL = new THREE.PointLight(PL_color, PL_intensity, PL_range_to_zero_intensity);
scene.add(PL);
PL_pos_x = -100;
PL_pos_y = -100;
PL_pos_z = 120;
PL.position.set(PL_pos_x, PL_pos_y, PL_pos_z);
//INDICATOR SPHERE
var s_Geometry = new THREE.SphereGeometry(5, 20, 20);
var s_Material = new THREE.MeshBasicMaterial({
color: 0xaaaaff
});
i_Sphere = new THREE.Mesh(s_Geometry, s_Material);
i_Sphere.position.set(PL_pos_x, PL_pos_y, PL_pos_z);
scene.add(i_Sphere);
//Plane02
var Plane02Geo = new THREE.PlaneGeometry(50, 50); //...
var Plane02Material = new THREE.MeshPhongMaterial({
side: THREE.DoubleSide
}, {
color: 0xaaaaaa
});
Plane02 = new THREE.Mesh(Plane02Geo, Plane02Material);
Plane02.position.set(0, 0, -120);
scene.add(Plane02);
//PEAS
xxx = SOW_F_Make_peas();
//RENDERER
renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.shadowMapEnabled = true;
renderer.shadowMapSoft = false;
document.body.appendChild(renderer.domElement);
// controls
controls = new THREE.OrbitControls(camera, renderer.domElement);
xxx = SOW_F_Make_Heightmap_Object_from_Image_File(scene, camera);
} //...EOFunction Init
//==================================================================
function animate() {
dt = 0.1;
time += dt;
if (time < 10000) {
requestAnimationFrame(animate);
//move point light & indicator sphere
speed = 16;
if (Math.abs(PL_pos_z) > 400) mz = (-1)* mz;
PL_pos_x += 0.01 * speed * mz;
PL_pos_y += 0.05 * speed * mz;
PL_pos_z -= 0.2 * speed * mz;
PL.position.set(PL_pos_x, PL_pos_y, PL_pos_z);
i_Sphere.position.set(PL_pos_x, PL_pos_y, PL_pos_z);
renderer.render(scene, camera);
} else alert("Time=" + time + "Finished");
}
//==================================================================
function SOW_F_Make_Heightmap_Object_from_Image_File(givenScene, givenCamera) {
//... Read a Heightmap from a coloured image file
//... into a (pre-defined global) plane object called HPlane
MyImage = new Image();
MyImage.onload = function () {
var MyPlane_width = 1000;//6000; //...MyPlane width or height are in scene units and do not have to match image width or height
var MyPlane_height = 1000;//6000;
var MyPlane_w_segs = MyImage.naturalWidth - 1; //... important that this mapping is correct for texture 1 pixel :: 1 segment.
var MyPlane_h_segs = MyImage.naturalHeight - 1; //... important that this mapping is correct for texture 1 pixel :: 1 segment.
var Hgeometry = new THREE.PlaneGeometry(MyPlane_width, MyPlane_height, MyPlane_w_segs, MyPlane_h_segs);
//var texture = THREE.ImageUtils.loadTexture( '/images/Tri_VP_Texturemap.jpg' );
var texture = THREE.ImageUtils.loadTexture( base64_imgData );
//... Choose texture or color
//var Hmaterial = new THREE.MeshLambertMaterial( { map: texture, side: THREE.DoubleSide} );//....fails
var Hmaterial = new THREE.MeshPhongMaterial( {
color: 0x111111 , side: THREE.DoubleSide } ); //... works OK
HPlane = new THREE.Mesh(Hgeometry, Hmaterial);
//...get Height Data from Image
var scale = 0.6;//1//6; //0.25;
var Height_data = DA_getHeightData(MyImage, scale);
//... set height of vertices
X_offset = 0;
Y_offset = 0;
Z_offset = -100; //...this will (after rotation) add to the vertical height dimension (+ => up).
for (var iii = 0; iii < HPlane.geometry.vertices.length; iii++) {
//HPlane.geometry.vertices[iii].x = X_offset;
//HPlane.geometry.vertices[iii].y = Y_offset;
HPlane.geometry.vertices[iii].z = Z_offset + Height_data[iii];
}
//----------------------------------------------------------------------
//... Must do it in this order...Faces before Vertices
//... see WestLangley's response in http://stackoverflow.com/questions/13943907/my-object-isnt-reflects-the-light-in-three-js
HPlane.rotation.x = (-(Math.PI) / 2); //... rotate MyPlane -90 degrees on X
//alert("Rotated");
HPlane.geometry.computeFaceNormals(); //... for Lambert & Phong materials
HPlane.geometry.computeVertexNormals(); //... for Lambert & Phong materials
/*
HPlane.updateMatrixWorld();
HPlane.matrixAutoUpdate = false;
HPlane.geometry.verticesNeedUpdate = true;
*/
givenScene.add(HPlane);
HPlane.position.set(0, -150, 0);//... cosmetic
//return HPlane; //... not necessary, given that HPlane is global.
} ; //... End of MyImage.onload = function ()
//===============================================================
//... *** IMPORTANT ***
//... Only NOW do we command the script to actually load the image source
//... This .src statement will load the image from file into MyImage object
//... and invoke the pre-associated MyImage.OnLoad function
//... cause cross-origin problem: MyImage.src = '/images/Tri_VP_Heightmap_64x64.jpg'; //...if image file is local to this html file.
MyImage.src = base64_imgData;//... uses image data provided in the script to avoid Cross-origin file source restrictions.
} //... End of function SOW_F_Make_Heightmap_Object_from_Image_File
//===========================================================================
function DA_getHeightData(d_img, scale) {
//... This is used by function SOW_F_Make_Heightmap_Object_from_Image_File.
//if (scale == undefined) scale=1;
var canvas = document.createElement('canvas');
canvas.width = d_img.width; //OK
canvas.height = d_img.height;
var context = canvas.getContext('2d');
var size = d_img.width * d_img.height;
var data = new Float32Array(size);
context.drawImage(d_img, 0, 0);
for (var ii = 0; ii < size; ii++) {
data[ii] = 0;
}
var imgData = context.getImageData(0, 0, d_img.width, d_img.height);
var pix = imgData.data; //... Uint(8) UnClamped Array[1024] for a 16x16 = 256 pixel image = 4 slots per pixel.
var jjj = 0;
//... presumably each pix cell can have value 0 to 255
for (var iii = 0; iii < pix.length; iii += 4) {
var all = pix[iii] + pix[iii + 1] + pix[iii + 2];
//... I guess RGBA and we don't use the fourth cell (A, = Alpha channel)
jjj++;
data[jjj] = all * scale / 3; //...original code used 12 not 3 ??? and divided by scale.
//console.log (iii, all/(3*scale), data[jjj]);
}
return data;
} //... end of function DA_getHeightData(d_img,scale)
//==================================================================================================
function SOW_F_Get_A_Plane(givenScene, givenCamera) {
//...MyPlane width or height are in scene units and do not have to match image width or height
var MyPlane_width = 1000;
var MyPlane_height = 1000;
var MyPlane_w_segs = 64; //...
var MyPlane_h_segs = 64; //...
geometry = new THREE.PlaneGeometry(MyPlane_width, MyPlane_height, MyPlane_w_segs, MyPlane_h_segs);
//var material = new THREE.MeshLambertMaterial( { color: 0xeeee00, side: THREE.DoubleSide} );
var material = new THREE.MeshPhongMaterial({
color: 0xeeee00,side: THREE.DoubleSide
}); //... OK
MyPlane = new THREE.Mesh(geometry, material);
givenScene.add(MyPlane);
MyPlane.rotation.x = (-(Math.PI) / 2); // rotate it -90 degrees on X
MyPlane.position.set(0, 100, 0);
MyPlane.geometry.computeFaceNormals(); //...for Lambert & Phong materials
MyPlane.geometry.computeVertexNormals(); //...for Lambert & Phong materials
/*
MyPlane.geometry.verticesNeedUpdate = true;
MyPlane.updateMatrixWorld();
MyPlane.matrixAutoUpdate = false;
*/
} //... EOF SOW_F_Get_A_Plane
//====================================================================
function SOW_F_Make_peas()
{
//----------------- Make an array of spheres -----------------------
Pea_geometry = new THREE.SphereGeometry(5,16,16);
//Pea_material = new THREE.MeshNormalMaterial({ shading: THREE.SmoothShading});
Pea_material = new THREE.MeshPhongMaterial({ color: 0xaa5522});
// global...
num_peas = 1200;
for (var iii = 0; iii < num_peas; iii++)
{
//...now global
ob_Pea = new THREE.Mesh(Pea_geometry, Pea_material);
ob_Pea.position.set(
400 * Math.random() - 150,
300 * Math.random() - 150,
1200 * Math.random() - 150);
scene.add(ob_Pea);//TEST
}
}
UPDATE
It appears the problem is a result of sequencing. See this new JSFiddle (r70). The point light is created in init() but is not added to the scene, or is immediately removed from the scene after being added. Then the various mesh objects are created. When the point light is added back to the scene (in the animate loop) it is too late: the mesh objects will not be illuminated by it.
A procedural solution is simply to not remove point lights from the scene if they are to be used later. If one needs to be "extinguished" temporarily, just turn down its intensity and turn it up again later, e.g.
myPointLight.intensity = 0.00
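A minimal sketch of that pattern, using the PL light from the code above (the toggle function name is illustrative, not part of the original app):
// Keep the point light in the scene for the whole session; only vary its intensity.
var PL_saved_intensity = PL.intensity;
function TogglePointLight() {
  if (PL.intensity > 0) {
    PL_saved_intensity = PL.intensity;  // remember the "on" level
    PL.intensity = 0.0;                 // "extinguish" without scene.remove(PL)
  } else {
    PL.intensity = PL_saved_intensity;  // restore, e.g. from the "R" key handler
  }
}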

THREE.JS UV Mapping on ExtrudeGeometry

I need to apply a texture on a ExtrudeGeometry object.
The shape is a circle and the extrude path is composed of 2 vectors:
One for the top.
One for the bottom.
I didn't choose CylinderGeometry because I need to place the top/bottom sections of my geometry at precise positions, and because the created geometry will not always be purely vertical (like an oblique cylinder, for example).
Here is a picture of a section (one top vector, one bottom vector and a shape extruded between these 2 vectors).
and a picture of the texture I'm trying to apply.
All I want to do is to wrap this picture on the vertical sides of my object just one time.
Here is my code :
var biVectors = [ new THREE.Vector3( this.startVector.x, this.startVector.y, this.startVector.z ),
                  new THREE.Vector3( this.endVector.x, this.endVector.y, this.endVector.z ) ];
var wellSpline = new THREE.SplineCurve3( biVectors );
var extrudeSettings = {
  steps: 1,
  material: 0,
  extrudeMaterial: 1,
  extrudePath: wellSpline
};
var pts = [];
for (var i = 0; i <= this.segments; i++) {
  var theta = (i / this.segments) * Math.PI * 2;
  pts.push( new THREE.Vector3( Math.cos(theta) * this.diameter, Math.sin(theta) * this.diameter, 0 ) );
}
var shape = new THREE.Shape( pts );
var geometry = new THREE.ExtrudeGeometry( shape, extrudeSettings );
var texture = THREE.ImageUtils.loadTexture( 'textures/sampleTexture2.jpg' );
texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
texture.flipY = false;
var material = new THREE.MeshBasicMaterial( { map: texture } );
var slice = new THREE.Mesh( geometry, material );
var faceNormals = new THREE.FaceNormalsHelper( slice );
console.log("face normals: ", faceNormals);
myCanvas.scene.add( faceNormals );
slice.parentObject = this;
myCanvas.scene.add( slice );
this.object3D = slice;
Now, as you can see, the mapping is not correct at all.
I've read a lot of information about this problem the last 3 days. But I'm running out of options as I'm new to THREE.JS.
I think I have to redefine the UV coordinates but I have no clue how to do this.
It seems that wrapping a texture around a cylinder-like object is anything but easy in THREE.js.
Can someone please help me on this issue ?