Image in texture, adjusting the UV, three.js

I am stuck!
I am trying to map an image onto a surface in three.js. It has something to do with UV mapping, which I have tried to understand.
I create the shape using:
var i, verts = [];
for (i = 0; i < 6; i++) {
    verts.push(createVert(i, Hex.FLAT));
}
hexShape = new THREE.Shape(verts);
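(createVert is not shown in the question; a simplified, hypothetical sketch of a helper like it, for a unit flat-topped hex, might be:)
// Hypothetical sketch of createVert -- the real helper is not shown.
// Returns corner i of a unit hexagon as a THREE.Vector2.
function createVert(i, orientation) {
    var angle = (Math.PI / 3) * i; // 60 degrees per corner
    return new THREE.Vector2(Math.cos(angle), Math.sin(angle));
}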
I have read http://paulyg.f2s.com/uv.htm, Custom UVgenerator Three.js for extrudedgeometry, UV mapping in THREE.js, THREE.js generate UV coordinate.
Those examples use a square; I am using a hex made of triangles.
I am loading the image:
if (true) {
    // legacy API: ImageUtils.loadTexture is a static method, so no `new`
    texture = THREE.ImageUtils.loadTexture("data/mylogo.png", undefined, onText, onErr);
} else {
    // newer API: TextureLoader.load(url, onLoad, onProgress, onError)
    var textLoader = new THREE.TextureLoader();
    textLoader.load("data/hemp_logoB.png", onText, undefined, onErr);
}
function onErr(err) {
    console.log('ERR ', err);
}
function onText(text) {
    console.log('TEXTLOADED ', text);
    var t = text;
    var img = t.image;
    console.log(img.height, img.width);
    texture = text;
    texture.needsUpdate = true;
    update();
}
I then extrude the hex:
geo = new THREE.ExtrudeGeometry(hexShape, extrudeSettings);
I have even given the face and the sides different materials:
var mats = [imgMaterial, sideMaterial];
var mat = new THREE.MeshFaceMaterial(mats);
mesh = new THREE.Mesh(geo, mat);
I am seeing just a tiny dot in the center of the hex that is the image. On the shape (geo) I have 44 faces.**
I think I need a uvGenerator or custom UVs. The image is 256x256, but I can make it any size.*** The documentation uses square surfaces; this is a hex surface.
Ideas???
** Should it be 24? 6 + 6 + 12: top + bottom + (6 * 2) sides.
*** mylogo.png was generated from an SVG file; I would like to use SVG for the image on the surface.
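(A common explanation for this symptom is that ExtrudeGeometry generates UVs in the shape's own coordinate units rather than normalized 0..1, so the texture only covers one unit of the hex. A minimal sketch of remapping the generated UVs to the geometry's bounding box, assuming the legacy Geometry API; untested, and the side faces may still need separate treatment:)
// Normalize the generated UVs into 0..1 so the texture spans the hex.
geo.computeBoundingBox();
var bb = geo.boundingBox;
var sizeX = bb.max.x - bb.min.x;
var sizeY = bb.max.y - bb.min.y;
geo.faceVertexUvs[0].forEach(function (faceUvs, fi) {
    var face = geo.faces[fi];
    [face.a, face.b, face.c].forEach(function (vi, j) {
        var v = geo.vertices[vi];
        faceUvs[j].set((v.x - bb.min.x) / sizeX, (v.y - bb.min.y) / sizeY);
    });
});
geo.uvsNeedUpdate = true;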

Related

Three.js / React-three-fiber | How to spread PointsMaterial evenly on a BufferGeometry (custom SphereGeometry) and compare vertices with image data

I am trying to recreate the GitHub landing-page globe (visible at https://github.com/ if you're not logged in) with three.js and react-three-fiber, following the write-up GitHub published (https://github.blog/2020-12-21-how-we-built-the-github-globe/). My goal is to achieve the same result, with the only difference being that I use three.js PointsMaterial and hide those points which would have the same coordinates as water on an image of the earth.
Sorry for bad English or misspellings (English is not my native language), and this is my first Stack Overflow question too. So if something is unclear or I wasn't specific enough, let me know and I'll try my best to correct it. Thanks in advance for any help!
My questions are:
How do you spread those three.js points along different latitudes, in this case from the south to the north pole?
How would you compare, for example, image color / alpha values with the points and decide whether a point should be visible (land) or not (water)?
I played around with the code I found in the GitHub write-up above for a few days, but can't figure out how to translate it so I could use it:
for (let lat = -90; lat <= 90; lat += 180 / rows) {
    // especially this part: what do DEG2RAD and GLOBE_RADIUS mean?
    const radius = Math.cos(Math.abs(lat) * DEG2RAD) * GLOBE_RADIUS;
    const circumference = radius * Math.PI * 2;
    const dotsForLat = circumference * dotDensity;
    for (let x = 0; x < dotsForLat; x++) {
        const long = -180 + x * 360 / dotsForLat;
        if (!this.visibilityForCoordinate(long, lat)) continue;
        // Setup and save circle matrix data
    }
}
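(DEG2RAD and GLOBE_RADIUS are not defined in the snippet; presumably they are the usual degrees-to-radians factor and the sphere radius. A minimal sketch of those constants plus a latitude/longitude-to-position helper; the helper and its conventions are assumptions, not from the GitHub post:)
// Assumed constants:
const DEG2RAD = Math.PI / 180; // degrees -> radians
const GLOBE_RADIUS = 1;        // sphere radius in world units

// Hypothetical helper: convert lat/long (degrees) to a point on the sphere.
function latLongToVector3(lat, long) {
    const phi = (90 - lat) * DEG2RAD;     // polar angle from the +Y axis
    const theta = (long + 180) * DEG2RAD; // azimuth around the Y axis
    return new THREE.Vector3(
        -GLOBE_RADIUS * Math.sin(phi) * Math.cos(theta),
        GLOBE_RADIUS * Math.cos(phi),
        GLOBE_RADIUS * Math.sin(phi) * Math.sin(theta)
    );
}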
Currently I manage to get some data from an image of our planet by creating a non-rendered canvas, creating a context for it, painting the image into it, and reading the values out with .getImageData(). I was able to create a halo by using a for loop for the vertex positions of the BufferGeometry, but I guess you'll see that in the image / code provided below.
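(Building on that .getImageData() result, a sketch of what the visibilityForCoordinate check from the snippet above could look like, assuming an equirectangular earth image whose land pixels are opaque; the alpha threshold is an assumption:)
// Hypothetical visibilityForCoordinate: map long/lat onto the
// equirectangular image and test the pixel's alpha channel.
function visibilityForCoordinate(long, lat, imgData) {
    const x = Math.floor(((long + 180) / 360) * imgData.width);
    const y = Math.floor(((90 - lat) / 180) * imgData.height);
    const i = (y * imgData.width + x) * 4; // 4 bytes per pixel (RGBA)
    return imgData.data[i + 3] > 90;       // assumed alpha threshold
}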
[Image: current progress]
import { useEffect } from "react";
import * as THREE from "three";

// Had some trouble setting up loaders in Next.js, so I used
// three.js via react-three-fiber.
const Box = () => {
  // Load the image and draw it to a non-rendered canvas to read its values.
  useEffect(() => {
    const textureLoader = new THREE.TextureLoader();
    textureLoader.load("/globe/earth.png", (texture) => {
      const width = texture.image.width;
      const height = texture.image.height;
      const img = texture.image;
      const canvas = document.createElement("canvas");
      const ctx = canvas.getContext("2d");
      canvas.width = width;
      canvas.height = height;
      // Flip vertically so canvas Y matches texture V.
      ctx.scale(1, -1);
      ctx.drawImage(img, 0, 0, width, height * -1);
      const imgData = ctx.getImageData(0, 0, canvas.width, canvas.height);
      console.log(imgData);
    });
  }, []);

  // Creating geometry / material.
  const count = 500;
  const vertices = new Float32Array(count * 3);
  // Looping to fill the vertex positions (this produces the halo).
  for (let x = 0; x < count * 3; x++) {
    const value = Math.cos(Math.abs(x));
    vertices[x] = value;
  }
  const material = new THREE.PointsMaterial({ color: "white", size: 0.005 });
  const geometry = new THREE.BufferGeometry();
  geometry.setAttribute("position", new THREE.BufferAttribute(vertices, 3));

  // Returning to render on the canvas in the index file.
  return <points material={material} geometry={geometry}></points>;
};

export default Box;
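(Tying the sketches together: one way to replace the cosine loop with the latitude-ring distribution from the GitHub snippet. Untested; rows and dotDensity are assumed values, latLongToVector3 / visibilityForCoordinate are the sketches from above, and imgData would come from the getImageData call in the effect:)
// Fill the position buffer with evenly spaced latitude rings.
const positions = [];
const rows = 180;      // assumed
const dotDensity = 12; // assumed: dots per unit of circumference
for (let lat = -90; lat <= 90; lat += 180 / rows) {
    const radius = Math.cos(Math.abs(lat) * DEG2RAD) * GLOBE_RADIUS;
    const circumference = radius * Math.PI * 2;
    const dotsForLat = circumference * dotDensity;
    for (let x = 0; x < dotsForLat; x++) {
        const long = -180 + (x * 360) / dotsForLat;
        if (!visibilityForCoordinate(long, lat, imgData)) continue; // skip water
        const p = latLongToVector3(lat, long);
        positions.push(p.x, p.y, p.z);
    }
}
const vertices = new Float32Array(positions);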

THREE.js planeGeometry clipped inside the walls

I want to make a 3D building using three.js. For example, I made 6 walls and a floor with a checkerboard texture. I used clippingPlanes for wall1 and wall4:
floor1.material.clippingPlanes = [plane1,plane4];
I made my planes (plane1 and plane4) from my walls (wall1 and wall4). For example, my wall4 PlaneGeometry and plane4 code is here:
var wallGeometry4 = new THREE.PlaneGeometry(40, Floor_Height, 1, 1);
var wall4 = createMesh(wallGeometry4, "brick_diffuse.jpg", THREE.DoubleSide, 1024, 1024);
unit1.add(wall4);
wall4.position.x = -10;
wall4.position.y = 0;
wall4.position.z = -20;
wall4.rotation.y = 1.5 * Math.PI;
wall4.add(new THREE.EdgesHelper(wall4, 0x000000));
var plane4 = new THREE.Plane();
var normal4 = new THREE.Vector3();
var point4 = new THREE.Vector3();
normal4.set(0, 0, -1).applyQuaternion(wall4.quaternion);
point4.copy(wall4.position);
plane4.setFromNormalAndCoplanarPoint(normal4, point4);
But I see an empty area between wall5 and wall6, because plane4 (used for clipping the floor) isn't the same size as wall4. I think plane4 spans the whole scene. How do I change the size of my plane so it clips correctly? Or is there any way to keep the floor bounded by the walls?
One way to achieve this, as suggested, is to use ShapeGeometry.
When you are creating your walls you can save the x and z coordinates of their start and end points in an array, forming a closed loop of Vector2 points. Then you can create a new custom shape from these points using ShapeGeometry (stand-ins for the assumed helpers follow the code below).
const points = [{x: 0, y: 0}, {x: 0, y: 10}, {x: 10, y: 10}, {x: 10, y: 0}, {x: 0, y: 0}];
function getShapeFromPoints(points) {
    const shape = new THREE.Shape();
    shape.moveTo(points[0].x, points[0].y);
    for (let i = 1; i < points.length; i++) {
        shape.lineTo(points[i].x, points[i].y);
    }
    return shape;
}
function createPlaneFromPoints(points) {
    const planeMaterial = getPlaneMaterial();
    const shape = getShapeFromPoints(points);
    const geometry = new THREE.ShapeBufferGeometry(shape);
    geometry.rotateX(degreeToRadians(-90)); // lay the shape flat
    const mesh = new THREE.Mesh(geometry, planeMaterial);
    return mesh;
}
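(getPlaneMaterial and degreeToRadians are not shown in the answer; minimal stand-in versions, my guesses rather than the original author's code, plus a usage line:)
// Stand-ins for the two helpers the answer assumes.
function getPlaneMaterial() {
    return new THREE.MeshBasicMaterial({ color: 0xcccccc, side: THREE.DoubleSide });
}
function degreeToRadians(degrees) {
    return degrees * (Math.PI / 180);
}

// Usage with the points loop above:
const floor = createPlaneFromPoints(points);
scene.add(floor);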
Hope that helps you!

Texture map to control pixel colors in a THREE.Points mesh

I'm fairly new to Three.js, but I'm trying to use a texture map to control pixel colors in a THREE.Points mesh. My desired outcome is that vertex 0 will be colored as image pixel x=0, y=0; vertex 1 will be colored as pixel x=1, y=0; and so on.
All my attempts result in each vertex point being rendered with the whole image (instead of using one color according to the coordinates).
Here is a snippet of my code.
... Define Geometry ...
var texture = new THREE.TextureLoader().load("img/MyImage.jpg");
var mat = new THREE.PointsMaterial({ size: 2.5, vertexColors: THREE.VertexColors, map: texture });
var points = new THREE.Points(geo, mat);
If you don't feel like you have to use a texture (maybe that is possible with a similar approach), you could just read the pixel data of the image and use that to set colors, similar to WestLangley's suggestion.
I'm not sure if a mismatch between the number of colors and the number of vertices is going to be a problem, but it's working fine for me. Good luck!
var img = new Image();
img.src = "Image1.png";
img.onload = function () {
    // apply the image to a canvas so we can read pixels
    var canvas = document.createElement("canvas");
    canvas.width = img.width;
    canvas.height = img.height;
    var ctx = canvas.getContext('2d');
    ctx.drawImage(img, 0, 0, img.width, img.height);
    var colors = [];
    // geometry of choice
    var geo = new THREE.SphereGeometry(50, 50);
    for (var i = 0; i < img.width; i++) {
        for (var j = 0; j < img.height; j++) {
            // pd - pixel data array in RGBA format, e.g. [53, 53, 130, 255]
            var pd = ctx.getImageData(i, j, 1, 1).data;
            // not pretty, but works
            colors.push(new THREE.Color("rgb(" + pd[0] + "," + pd[1] + "," + pd[2] + ")"));
        }
    }
    geo.colors = colors;
    var material = new THREE.PointsMaterial({ size: 10, vertexColors: THREE.VertexColors });
    var point = new THREE.Points(geo, material);
    scene.add(point);
};
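(A side note on the snippet above: calling getImageData once per pixel gets slow for larger images; a sketch of the same loop reading the whole pixel buffer once instead:)
// Variant: fetch the full RGBA buffer once and index into it.
var data = ctx.getImageData(0, 0, img.width, img.height).data;
for (var i = 0; i < img.width; i++) {
    for (var j = 0; j < img.height; j++) {
        var k = (j * img.width + i) * 4; // 4 bytes per pixel (RGBA)
        colors.push(new THREE.Color("rgb(" + data[k] + "," + data[k + 1] + "," + data[k + 2] + ")"));
    }
}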

Get the color of a pixel by its xyz coordinates

I need to get the color of an image texture on a mesh at a given xyz point (mouse click + ray cast). How can I achieve it in THREE.js?
I know I could use gl.readPixels from plain webgl, but it's not a valid option for me.
Thanks!
So I ended up using a separate canvas, in which I load the image texture, translate the three.js coordinates into canvas ones, and read the pixel. Something like this:
// point is a THREE.Vector3
var getColor = function (point, callback) {
    var img = new Image();
    img.src = 'assets/img/myImage.png';
    img.onload = function () {
        // get the x/y coords from the point
        var xyCoords = convertVector3ToXY(point);
        // create a canvas to manipulate the image
        var canvas = document.createElement('canvas');
        canvas.width = img.width;
        canvas.height = img.height;
        var ctx = canvas.getContext('2d');
        ctx.drawImage(img, 0, 0, img.width, img.height);
        // get the pixel data and call back with it
        var pixelData = ctx.getImageData(xyCoords.x, xyCoords.y, 1, 1).data;
        callback(pixelData);
    };
};
Thanks
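(convertVector3ToXY is the author's own helper and isn't shown. A hypothetical alternative, assuming the click point comes from a THREE.Raycaster intersection, which also carries the UV at the hit point for meshes with UVs:)
// Hypothetical: derive pixel coordinates from the intersection's UV
// instead of converting the Vector3 directly.
function uvToPixelCoords(intersect, img) {
    return {
        x: Math.floor(intersect.uv.x * img.width),
        // flip V: UV origin is bottom-left, canvas origin is top-left
        y: Math.floor((1 - intersect.uv.y) * img.height)
    };
}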

threejs merge plane geometries or create a texture from multiple images

I have a set of plane geometries, each having their own texture from a URL. At any point during navigation (zoom/pan) the container (THREE.Object3D) contains 26 plane geometries. How can I merge them into a single big plane so that I can apply a heightmap to all tiles in the merged geometry?
Or how could I get all the textures from the 36 images (currently the map property of a MeshPhongMaterial) into a single geometry?
EDIT:
Currently I create a big geometry as suggested by Daniel, and put a texture on it which is a combined texture of a set of images via canvas:
context = canvas.getContext('2d');
var img = new Image();
img.src = url; // url of the texture tile
context.drawImage(img, xpos, ypos);
This is done for every image. Each image has a url, xpos, and ypos. The images are preloaded, and after every image has loaded a callback is called which creates the geometry (plane) and adds the texture from the canvas:
var texture = new THREE.Texture(canvas);
texture.needsUpdate = true;
var material = new THREE.MeshPhongMaterial({ map : texture });
var geom = new THREE.PlaneGeometry(canvas.width, canvas.height, 57, 40);
var mesh = new THREE.Mesh(geom, material);
scene.add( mesh );
I also update the z vertices of the geometry with the height values.
You may create one "big" plane and apply a merged texture to it,
or you could manually set the vertex Y-values of each plane to match the corresponding part of the height map, so you have a set of separate planes that all share the same "base" height map.
Regards,
Daniel
Using the TerrainLoader class for three.js and a canvas texture, I was able to achieve it:
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, mywidth / myheight, 0.01, 10000);
camera.position.set(0, 0, 240);
scene.add(new THREE.AmbientLight(0xeeeeee));
renderer = new THREE.WebGLRenderer();
renderer.setSize(mywidth, myheight);

var group = new THREE.Object3D();
var canvas = document.createElement('canvas');
canvas.width = mywidth;
canvas.height = myheight;
var ctx = canvas.getContext('2d');

/* adjust width, height, segments based on the height data */
/* create a plane geometry */
var geom = new THREE.PlaneGeometry(800, 692, 40, 55);

/*
read height values from the .bin file using TerrainLoader
and update the Z values of the plane
*/
var terrainLoader = new THREE.TerrainLoader(); // third-party loader, not in three.js core
heightdata = 'assets/heightmap.bin';
terrainLoader.load(heightdata, function (data) {
    /* bin file generated from a USGS SRTM tile */
    for (var i = 0, l = geom.vertices.length; i < l; i++) {
        geom.vertices[i].z = data[i] / 200; /* simple scale */
    }
    geom.verticesNeedUpdate = true;
});

/*
Create a texture from the canvas created above, with the width and
height of the scene. Be careful here: the texture image may not be
the same size as the height map, but it should have the same aspect
ratio. At least that was the situation in my case: the geometry width
and the texture width are in the same ratio, so I need a small
heightmap array from the .bin but a much bigger texture.
*/
var texture = new THREE.Texture(canvas);
texture.needsUpdate = true; // upload the canvas contents
var material = new THREE.MeshPhongMaterial({ map: texture });
var mesh = new THREE.Mesh(geom, material);
group.add(mesh);
scene.add(group);
Now, to draw multiple images to the canvas, check this page. I will just duplicate the code from the HTML5 Canvas Image Loader Tutorial:
function loadImages(sources, callback) {
    var images = {};
    var loadedImages = 0;
    var numImages = 0;
    // get the number of sources
    for (var src in sources) {
        numImages++;
    }
    for (var src in sources) {
        images[src] = new Image();
        images[src].onload = function () {
            if (++loadedImages >= numImages) {
                callback(images);
            }
        };
        images[src].src = sources[src];
    }
}
var sources = {
    darthVader: 'http://www.html5canvastutorials.com/demos/assets/darth-vader.jpg',
    yoda: 'http://www.html5canvastutorials.com/demos/assets/yoda.jpg'
};
You can call the loadImages function after updating the plane geometry from the heightmap in the code above:
loadImages(sources, function (images) {
    // context = the 2d context of the canvas used for the three.js texture
    context.drawImage(images.darthVader, 100, 30, 200, 137);
    context.drawImage(images.yoda, 350, 55, 93, 104);
    texture.needsUpdate = true; // re-upload the canvas after drawing
});
