Three.js incorrect texture mapping orientation

I'm trying to apply a simple texture mapping to a cube, but when the texture is applied it's rotated 90° CCW.
Just for my tests I'm using the same UV mapping for all faces:
window.head = function(){
    var txSplitX = 1/64;
    var txSplitY = 1/32;
    var mesh;
    var cube = new THREE.CubeGeometry( 100, 100, 100 );
    var material = new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture('assets/charSkin/Superman.png') } );
    var faceTx = [
        new THREE.Vector2(0.125, 0.75),
        new THREE.Vector2(0.25, 0.75),
        new THREE.Vector2(0.25, 0.5),
        new THREE.Vector2(0.125, 0.5)
    ];
    var j = 0, k = 1;
    for(var i = 0; i < 6; i++){
        cube.faceVertexUvs[0][j] = [faceTx[0], faceTx[1], faceTx[3]];
        cube.faceVertexUvs[0][k] = [faceTx[1], faceTx[2], faceTx[3]];
        j += 2;
        k += 2;
    }
    this.mesh = new THREE.Mesh(cube, material);
}
The result I get:
http://securilabs.free.fr/images/result.png
My texture:
http://securilabs.free.fr/images/Superman.png
How can I get the correct orientation of the texture on each face ?
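A hedged guess at a fix, not from the original post: a 90° rotation of the mapped texture usually means the per-face UV order just needs to be cycled by one position. A minimal sketch, assuming the same faceTx corners as above:
// Hedged sketch: cycling the UV corners by one step rotates the mapped texture
// by 90°; the direction of the cycle decides CW vs. CCW, so try the reverse
// order if this guess rotates the wrong way.
var rotatedTx = [ faceTx[3], faceTx[0], faceTx[1], faceTx[2] ];
var j = 0, k = 1;
for (var i = 0; i < 6; i++) {
    cube.faceVertexUvs[0][j] = [ rotatedTx[0], rotatedTx[1], rotatedTx[3] ];
    cube.faceVertexUvs[0][k] = [ rotatedTx[1], rotatedTx[2], rotatedTx[3] ];
    j += 2;
    k += 2;
}
cube.uvsNeedUpdate = true; // flag the UV change for the renderer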

Related

In three.js how to position image texture similar to 'contain' in css?

My image texture is positioned relative to the center of 3D space instead of the mesh, and I don't quite understand what determines its size.
Here is an example showing how the same image is positioned on different meshes:
https://imgur.com/glHE97L
I'd like the image to be in the center of the mesh, and its size set similarly to 'contain' in CSS.
The mesh is a flat plane created using ShapeBufferGeometry:
const shape = new THREE.Shape( edgePoints );
const geometry = new THREE.ShapeBufferGeometry( shape );
To see any image I have to set:
texture.repeat.set(0.001, 0.001);
Not sure if that matters, but after creating the mesh I then set its position and rotation:
mesh.position.copy( position[0] );
mesh.rotation.set( rotation[0], rotation[1], rotation[2] );
I've tried setting those:
mesh.updateMatrixWorld( true );
mesh.geometry.computeBoundingSphere();
mesh.geometry.verticesNeedUpdate = true;
mesh.geometry.elementsNeedUpdate = true;
mesh.geometry.morphTargetsNeedUpdate = true;
mesh.geometry.uvsNeedUpdate = true;
mesh.geometry.normalsNeedUpdate = true;
mesh.geometry.colorsNeedUpdate = true;
mesh.geometry.tangentsNeedUpdate = true;
texture.needsUpdate = true;
I've played with wrapS / wrapT and offset.
I've checked the UVs; I don't yet fully understand this concept, but they seem fine. Example of the UVs for one mesh (I understand these are XY coordinates, and they seem to reflect the actual corners of my mesh):
uv: Float32BufferAttribute
array: Float32Array(8) [ -208, 188, 338, 188, 338, 12, -208, 12 ]
I've tried setting:
texture.repeat.set(imgHeight/geometryHeight/1000, imgWidth/geometryWidth/1000);
This is how THREE.ShapeGeometry() computes UV coordinates:
https://github.com/mrdoob/three.js/blob/e622cc7890e86663011d12ec405847baa4068515/src/geometries/ShapeGeometry.js#L157
But you can re-compute them to put them in the [0..1] range.
Here is an example; click the button to re-compute the UVs of the shape geometry:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.set(0, 0, 10);
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var grid = new THREE.GridHelper(10, 10);
grid.rotation.x = Math.PI * 0.5;
scene.add(grid);
var points = [
new THREE.Vector2(0, 5),
new THREE.Vector2(-5, 4),
new THREE.Vector2(-3, -3),
new THREE.Vector2(2, -5),
new THREE.Vector2(5, 0)
];
var shape = new THREE.Shape(points);
var shapeGeom = new THREE.ShapeBufferGeometry(shape);
var shapeMat = new THREE.MeshBasicMaterial({
map: new THREE.TextureLoader().load("https://threejs.org/examples/textures/uv_grid_opengl.jpg")
});
var mesh = new THREE.Mesh(shapeGeom, shapeMat);
scene.add(mesh);
btnRecalc.addEventListener("click", onClick);
var box3 = new THREE.Box3();
var size = new THREE.Vector3();
var v3 = new THREE.Vector3(); // for re-use
function onClick(event) {
    box3.setFromObject(mesh); // get AABB of the shape mesh
    box3.getSize(size); // get size of that box
    var pos = shapeGeom.attributes.position;
    var uv = shapeGeom.attributes.uv;
    for (let i = 0; i < pos.count; i++) {
        v3.fromBufferAttribute(pos, i);
        v3.subVectors(v3, box3.min).divide(size); // cast world uvs to range 0..1
        uv.setXY(i, v3.x, v3.y);
    }
    uv.needsUpdate = true; // set it to true to make changes visible
}
renderer.setAnimationLoop(() => {
    renderer.render(scene, camera);
});
body {
overflow: hidden;
margin: 0;
}
<script src="https://threejs.org/build/three.min.js"></script>
<button id="btnRecalc" style="position: absolute;">Re-calculate UVs</button>
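If the goal is CSS-style 'contain' rather than stretching, the same idea can also account for the image's aspect ratio. A hedged sketch, not part of the original answer; imgWidth and imgHeight are assumed to be the texture's pixel dimensions:
// Hedged sketch: map UVs so the image keeps its aspect ratio and stays centered,
// roughly like CSS "contain".
function containUvs(geom, imgWidth, imgHeight) {
    geom.computeBoundingBox();
    var box = geom.boundingBox;
    var size = new THREE.Vector3();
    box.getSize(size);
    var meshAspect = size.x / size.y;
    var imgAspect = imgWidth / imgHeight;
    // Displayed image size in local units, fitted inside the shape's bounds.
    var dispW = meshAspect > imgAspect ? size.y * imgAspect : size.x;
    var dispH = meshAspect > imgAspect ? size.y : size.x / imgAspect;
    var pos = geom.attributes.position;
    var uv = geom.attributes.uv;
    var v = new THREE.Vector3();
    for (var i = 0; i < pos.count; i++) {
        v.fromBufferAttribute(pos, i);
        // 0.5 centers the picture; UVs outside [0..1] fall outside the image and
        // depend on texture.wrapS / wrapT (e.g. THREE.ClampToEdgeWrapping).
        uv.setXY(i,
            (v.x - (box.min.x + size.x / 2)) / dispW + 0.5,
            (v.y - (box.min.y + size.y / 2)) / dispH + 0.5);
    }
    uv.needsUpdate = true;
}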

Three.js - Create new mesh from certain faces/vertices of another mesh

I've been struggling for several days with a particular Three.js issue, and I cannot find any way to do it. This is my case:
1) I have a floating mesh, formed by several triangulated faces. This mesh is created from the geometry returned by a loader, after obtaining its vertices and faces using getAttribute('position'): How to smooth mesh triangles in STL loaded BufferGeometry
2) What I want to do now is to "project" the bottom face against the floor.
3) Later, with this new face added, create the resulting mesh by filling the space between the 3 vertices of both faces.
I already have trouble in step 2... To create a new face I'm supposed to have its 3 vertices already added to geometry.vertices. I did that, cloning the original face vertices. I use the geometry.vertices.push() results to know their new indexes, and later I use those indexes (-1) to finally create the new face. But its shape is weird, and so are the positions and the size. I think I'm not getting the world/scene/vector position equivalence right :P
I tried applying this, with no luck:
How to get the absolute position of a vertex in three.js?
Converting World coordinates to Screen coordinates in Three.js using Projection
http://barkofthebyte.azurewebsites.net/post/2014/05/05/three-js-projecting-mouse-clicks-to-a-3d-scene-how-to-do-it-and-how-it-works
I discovered that if I directly clone the full original face and simply add it to the mesh, the face is added but in the same position, so I cannot then change its vertices to place it on the floor (or at least not without modifying the original face vertices!). I mean, I can change their x, y, z properties, but they are on a scale so small that it doesn't match the original mesh dimensions.
Could someone help me get this concept right?
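For what it's worth, a minimal hedged sketch of the local-to-world conversion this question is about, assuming a classic THREE.Geometry on original_mesh and face being one of its faces:
// Hedged sketch: geometry vertices live in the mesh's local space;
// matrixWorld maps them into world space.
original_mesh.updateMatrixWorld(true);
var worldVertex = original_mesh.geometry.vertices[face.a]
    .clone()
    .applyMatrix4(original_mesh.matrixWorld);
// worldVertex.y can now be compared against the floor height.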
EDIT: source code
// Create geometry
var geo = new THREE.Geometry();
var geofaces = [];
var geovertices = [];
original_geometry.updateMatrixWorld();
for(var index in original_geometry.faces){
    // Get the original face; its vertexNormals are used to know its 3 vertices
    // (note: these are normals, not positions; see the EDIT below)
    var face = original_geometry.faces[index];
    var vertexNormals = face.vertexNormals;
    // Create 3 new vertices, add them to the array and then create a new face using the vertex indexes
    var vertexIndexes = [null, null, null];
    for (var i = 0, l = vertexNormals.length; i < l; i++) {
        var vectorClone = vertexNormals[i].clone();
        vectorClone.applyMatrix4( original_geometry.matrixWorld );
        //vectorClone.unproject(camera); // JUST TESTING
        //vectorClone.normalize(); // JUST TESTING
        var vector = new THREE.Vector3(vectorClone.x, vectorClone.z, vectorClone.y);
        //vector.normalize(); // JUST TESTING
        //vector.project(camera); // JUST TESTING
        //vector.unproject(camera); // JUST TESTING
        vertexIndexes[i] = geovertices.push( vector ) - 1;
    }
    var newFace = new THREE.Face3( vertexIndexes[0], vertexIndexes[1], vertexIndexes[2] );
    geofaces.push(newFace);
}
// Assign filled arrays to the geometry
geo.faces = geofaces;
geo.vertices = geovertices;
geo.mergeVertices();
geo.computeVertexNormals();
geo.computeFaceNormals();
// Create a new mesh with resulting geometry and add it to scene (in this case, to the original mesh to keep the positions)
new_mesh = new THREE.Mesh( geo, new THREE.MeshFaceMaterial(material) ); // material is defined elsewhere
new_mesh.position.set(0, -100, 0);
original_mesh.add( new_mesh );
I created a fully operational JSFiddle with the case, to try things and see the problem more clearly. With this STL (smaller than my local example) I cannot even see the badly cloned faces added to the scene. Maybe they are too small or out of focus.
Take a look at the calculateProjectedMesh() function; this is where I tried to clone and place the bottom faces (already detected because they have a different materialIndex):
JSFiddle: https://jsfiddle.net/tc39sgo1/
var container;
var stlPath = 'https://dl.dropboxusercontent.com/s/p1xp4lhy4wxmf19/Handle_Tab_floating.STL';
var camera, controls, scene, renderer, model;
var mouseX = 0,
mouseY = 0;
var test = true;
var meshPlane = null, meshStl = null, meshCube = null, meshHang = null;
var windowHalfX = window.innerWidth / 2;
var windowHalfY = window.innerHeight / 2;
/*THREE.FrontSide = 0;
THREE.BackSide = 1;
THREE.DoubleSide = 2;*/
var materials = [];
materials.push( new THREE.MeshPhongMaterial({color : 0x00FF00, side:0, shading: THREE.FlatShading, transparent: true, opacity: 0.9, overdraw : true, wireframe: false}) );
materials.push( new THREE.MeshPhongMaterial({color : 0xFF0000, transparent: true, opacity: 0.8, side:0, shading: THREE.FlatShading, overdraw : true, metal: false, wireframe: false}) );
materials.push( new THREE.MeshPhongMaterial({color : 0x0000FF, side:2, shading: THREE.FlatShading, overdraw : true, metal: false, wireframe: false}) );
var lineMaterial = new THREE.LineBasicMaterial({ color: 0x0000ff, transparent: true, opacity: 0.05 });
init();
animate();
function webglAvailable() {
try {
var canvas = document.createElement('canvas');
return !!(window.WebGLRenderingContext && (
canvas.getContext('webgl') || canvas.getContext('experimental-webgl')));
} catch (e) {
return false;
}
}
function init() {
container = document.createElement('div');
document.body.appendChild(container);
camera = new THREE.PerspectiveCamera(25, window.innerWidth / window.innerHeight, 0.1, 100000000);
camera.position.x = 1500;
camera.position.z = -2000;
camera.position.y = 1000;
controls = new THREE.OrbitControls(camera);
// scene
scene = new THREE.Scene();
var ambient = new THREE.AmbientLight(0x101030); //0x101030
scene.add(ambient);
var directionalLight = new THREE.DirectionalLight(0xffffff, 2);
directionalLight.position.set(0, 3, 0).normalize();
scene.add(directionalLight);
var directionalLight = new THREE.DirectionalLight(0xffffff, 2);
directionalLight.position.set(0, 1, -2).normalize();
scene.add(directionalLight);
if (webglAvailable()) {
renderer = new THREE.WebGLRenderer();
} else {
renderer = new THREE.CanvasRenderer();
}
renderer.setClearColor( 0xCDCDCD, 1 );
// renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement);
document.addEventListener('mousemove', onDocumentMouseMove, false);
window.addEventListener('resize', onWindowResize, false);
createPlane(500, 500);
createCube(500);
loadStl();
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function onDocumentMouseMove(event) {
mouseX = (event.clientX - windowHalfX) / 2;
mouseY = (event.clientY - windowHalfY) / 2;
}
function animate() {
requestAnimationFrame(animate);
render();
}
function render() {
renderer.render(scene, camera);
}
function createPlane(width, height) {
var planegeometry = new THREE.PlaneBufferGeometry(width, height, 0, 0);
var material = new THREE.MeshLambertMaterial({
color: 0xFFFFFF,
side: THREE.DoubleSide
});
planegeometry.computeBoundingBox();
planegeometry.center();
meshPlane = new THREE.Mesh(planegeometry, material);
meshPlane.rotation.x = 90 * (Math.PI/180);
//meshPlane.position.y = -height/2;
scene.add(meshPlane);
}
function createCube(size) {
var geometry = new THREE.BoxGeometry( size, size, size );
geometry.computeFaceNormals();
geometry.mergeVertices();
geometry.computeVertexNormals();
geometry.center();
var material = new THREE.MeshPhongMaterial({
color: 0xFF0000,
opacity: 0.04,
transparent: true,
wireframe: true,
side: THREE.DoubleSide
});
meshCube = new THREE.Mesh(geometry, material);
meshCube.position.y = size/2;
scene.add(meshCube);
}
function loadStl() {
var loader = new THREE.STLLoader();
loader.load( stlPath, function ( geometry ) {
// Convert BufferGeometry to Geometry
var geometry = new THREE.Geometry().fromBufferGeometry( geometry );
geometry.computeBoundingBox();
geometry.computeVertexNormals();
geometry.center();
var faces = geometry.faces;
for(var index in faces){
var face = faces[index];
var faceNormal = face.normal;
var axis = new THREE.Vector3(0,-1,0);
var angle = Math.acos(axis.dot(faceNormal));
var angleReal = (angle / (Math.PI/180));
if(angleReal <= 70){
face.materialIndex = 1;
}
else{
face.materialIndex = 0;
}
}
geometry.computeFaceNormals();
geometry.computeVertexNormals();
meshStl = new THREE.Mesh(geometry, new THREE.MeshFaceMaterial(materials));
meshStl.position.x = 0;
meshStl.position.y = 400;
scene.add( meshStl );
// Once loaded, calculate projections mesh
calculateProjectedMesh();
});
}
function calculateProjectedMesh(){
var geometry = meshStl.geometry;
var faces = geometry.faces;
var vertices = geometry.vertices;
var geometry_projected = new THREE.Geometry();
var faces_projected = [];
var vertices_projected = [];
meshStl.updateMatrixWorld();
for(var index in faces){
var face = faces[index];
// These are the bottom faces
if(face.materialIndex == 1){
var vertexIndexes = [face.a, face.b, face.c];
for (var i = 0, l = vertexIndexes.length; i < l; i++) {
var relatedVertice = vertices[ vertexIndexes[i] ];
var vectorClone = relatedVertice.clone();
console.warn(vectorClone);
vectorClone.applyMatrix4( meshStl.matrixWorld );
////////////////////////////////////////////////////////////////
// TEST: draw line
var geometry = new THREE.Geometry();
geometry.vertices.push(new THREE.Vector3(vectorClone.x, vectorClone.y, vectorClone.z));
//geometry.vertices.push(new THREE.Vector3(vectorClone.x, vectorClone.y, vectorClone.z));
geometry.vertices.push(new THREE.Vector3(vectorClone.x, meshPlane.position.y, vectorClone.z));
var line = new THREE.Line(geometry, lineMaterial);
scene.add(line);
console.log("line added");
////////////////////////////////////////////////////////////////
vectorClone.y = 0;
var vector = new THREE.Vector3(vectorClone.x, vectorClone.y, vectorClone.z);
vertexIndexes[i] = vertices_projected.push( vector ) - 1;
}
var newFace = new THREE.Face3( vertexIndexes[0], vertexIndexes[1], vertexIndexes[2] );
newFace.materialIndex = 2;
faces_projected.push(newFace);
}
}
geometry_projected.faces = faces_projected;
geometry_projected.vertices = vertices_projected;
geometry_projected.mergeVertices();
console.info(geometry_projected);
meshHang = new THREE.Mesh(geometry_projected, new THREE.MeshFaceMaterial(materials));
var newY = -(2 * meshStl.position.y) + 0;
var newY = -meshStl.position.y;
meshHang.position.set(0, newY, 0);
meshStl.add( meshHang );
}
EDIT: Finally!! I got it! To clone the original faces I must access their 3 original vertices using "a", "b" and "c" properties, which are indexes referencing Vector3 instances in the "vertices" array of the original geometry.
I cloned the 3 vertices, flattening the Z position to zero, used their new indexes to create the new face, and added it to the projection mesh (in blue).
I'm also adding lines as a visual union between both faces. Now I'm ready for step 3, but I think this is complex enough to close this question.
Thanks for the updateMatrixWorld clue! It was vital to achieve my goal ;)
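A condensed, hedged sketch of that approach, reusing the names from the fiddle above (an illustration, not the exact final code):
// Hedged sketch: clone each bottom face via its a/b/c vertex indexes, move the
// copies into world space, flatten them onto the floor, and rebuild Face3s
// from the new indexes.
meshStl.updateMatrixWorld();
faces.forEach(function (face) {
    if (face.materialIndex !== 1) return;            // only the detected bottom faces
    var ids = [face.a, face.b, face.c].map(function (idx) {
        var v = vertices[idx].clone().applyMatrix4(meshStl.matrixWorld);
        v.y = 0;                                     // project onto the floor plane
        return vertices_projected.push(v) - 1;       // index inside the new geometry
    });
    var projected = new THREE.Face3(ids[0], ids[1], ids[2]);
    projected.materialIndex = 2;
    faces_projected.push(projected);
});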
try this
original_geometry.updateMatrixWorld();
var vertexIndexes = [null, null, null];
for (var i = 0, l = vertexNormals.length; i < l; i++) {
var position = original_geometry.geometry.vertices[i].clone();
position.applyMatrix4( original_geometry.matrixWorld );
var vector = new THREE.Vector3(position.x, position.y, position.z)
vertexIndexes[i] = geovertices.push( vector ) - 1;
}

threejs merge plane geometries or create a texture from multiple images

I have a set of plane geometries, each having their own texture from a URL. At any point during navigation (zoom/pan) the container (THREE.Object3D) contains 26 plane geometries. How can I merge them into a single big plane so that I can apply a heightmap to all tiles in the merged geometry?
Or how could I get all the textures from the 36 images (currently set as the map property of a MeshPhongMaterial) onto a single geometry?
EDIT:
Currently I create a big geometry as suggested by Daniel, and apply a texture to it that is a combination of a set of images, built via a canvas:
context = canvas.getContext( '2d' );
var img = new Image();
img.src = url; // url of the texture tile
context.drawImage(img, xpos, ypos);
This is done for every image. Each image has a url, xpos and ypos. The images are preloaded, and after every image has loaded a callback is called which creates the geometry (plane) and adds the texture from the canvas.
var texture = new THREE.Texture(canvas);
texture.needsUpdate = true;
var material = new THREE.MeshPhongMaterial({ map : texture });
var geom = new THREE.PlaneGeometry(canvas.width, canvas.height, 57, 40);
var mesh = new THREE.Mesh(geom, material);
scene.add( mesh );
I also update the z values of the geometry's vertices with the height values.
You may create one "big" plane and apply a merged texture to it,
or you could manually set the vertex Y-values of each plane to match the corresponding part of the height map, so you have a set of separate planes that all share the same "base" height map (a sketch of this second option follows after this answer).
regards,
daniel
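A hedged sketch of that second suggestion, not from the original thread: it assumes old-style THREE.PlaneGeometry tiles, a shared row-major height array covering the whole grid, and made-up tile layout parameters (tileCol, tileRow, segX, segY, gridWidth); adjust to the real data layout.
// Hedged sketch: copy one tile's worth of samples from a shared height map
// onto a single plane's vertices. The local z axis of an unrotated
// PlaneGeometry is its "height" axis (the Y-values mentioned above once the
// plane is laid flat).
function applyTileHeights(planeGeom, heights, gridWidth, tileCol, tileRow, segX, segY) {
    // A PlaneGeometry with segX x segY segments has (segX+1) x (segY+1) vertices, row-major.
    for (var y = 0; y <= segY; y++) {
        for (var x = 0; x <= segX; x++) {
            var local = y * (segX + 1) + x;
            var global = (tileRow * segY + y) * gridWidth + (tileCol * segX + x);
            planeGeom.vertices[local].z = heights[global] / 200; // same scale as the answer below
        }
    }
    planeGeom.verticesNeedUpdate = true;
}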
Using the TerrainLoader class for three.js and a canvas texture, I was able to achieve this:
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, mywidth / myheight, 0.01, 10000);
camera.position.set(0, 0, 240);
scene.add(new THREE.AmbientLight(0xeeeeee));
renderer = new THREE.WebGLRenderer();
renderer.setSize(mywidth, myheight);
var group = new THREE.Object3D();
var canvas = document.createElement( 'canvas' );
canvas.width = mywidth;
canvas.height = myheight;
var ctx = canvas.getContext('2d');
/* adjust width, height, segments based on height data;*/
/* create a plane geometry */
var geom = new THREE.PlaneGeometry(800, 692, 40, 55);
/*
read height values from .bin file uing terrainLoader
and update the Z values of the plane
*/
var terrainLoader = new THREE.TerrainLoader(); /* assumes the TerrainLoader plugin is included */
var heightdata = 'assets/heightmap.bin';
terrainLoader.load(heightdata, function(data) {
    /* bin file generated from a USGS SRTM tile */
    for (var i = 0, l = geom.vertices.length; i < l; i++) {
        geom.vertices[i].z = data[i] / 200; /* simple scale */
    }
    geom.verticesNeedUpdate = true;
});
/*
Create a texture from the canvas created above, with the width and
height of the scene. Be careful here: the texture image may not be
the same size as the height map, but it should have the same aspect
ratio. At least that was the situation in my case: the geometry width
and the texture width are in the same ratio, so I need a small
heightmap array from the .bin file but a much bigger texture.
*/
var texture = new THREE.Texture(canvas);
var material = new THREE.MeshPhongMaterial({ map : texture });
var mesh = new THREE.Mesh(geom, material);
group.add( mesh );
scene.add(group);
Now, to draw multiple images to the canvas, check the HTML5 Canvas Image Loader Tutorial; I will just duplicate the code from it:
function loadImages(sources, callback) {
    var images = {};
    var loadedImages = 0;
    var numImages = 0;
    // get num of sources
    for (var src in sources) {
        numImages++;
    }
    for (var src in sources) {
        images[src] = new Image();
        images[src].onload = function() {
            if (++loadedImages >= numImages) {
                callback(images);
            }
        };
        images[src].src = sources[src];
    }
}
var sources = {
darthVader: 'http://www.html5canvastutorials.com/demos/assets/darth-vader.jpg',
yoda: 'http://www.html5canvastutorials.com/demos/assets/yoda.jpg'
};
You can call the loadImages function after updating the plane geometry from the heightmap in the code above.
loadImages(sources, function(images) {
    // context: the 2d context of the canvas used as the three.js texture
    context.drawImage(images.darthVader, 100, 30, 200, 137);
    context.drawImage(images.yoda, 350, 55, 93, 104);
});
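One hedged addition: after the drawImage calls, the canvas-backed texture has to be flagged for re-upload (as the asker did earlier), otherwise the drawn tiles never reach the GPU. A sketch, assuming texture is the THREE.Texture created from that canvas:
loadImages(sources, function (images) {
    context.drawImage(images.darthVader, 100, 30, 200, 137);
    context.drawImage(images.yoda, 350, 55, 93, 104);
    texture.needsUpdate = true; // re-upload the updated canvas to the GPU
});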

ExtrudeGeometry with diagonal line

I have created an ExtrudeGeometry object using the three.js lib; code below:
var extrudeSettings = {};
extrudeSettings.bevelEnabled = false;
extrudeSettings.bevelSegments = 0;
extrudeSettings.steps =2;
//create a shape
var rectShape;
......
//add the shape to scene
var geometry = new THREE.ExtrudeGeometry(shape, extrudeSettings);
var mesh = new THREE.Mesh(geometry, new THREE.MeshLambertMaterial({ color: color,wireframe:true }));
mesh.position.set(100, 100, 0 );
mesh.rotation.x = -Math.PI / 2;
mesh.scale.set(s, s, s);
scene.add(mesh);
If I set the wireframe property to true, the diagonal lines of the object show up. How can I avoid this? Is there some property I should set? Thanks!
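No answer is recorded in this thread, but one common approach is to drop wireframe: true and overlay only the hard edges with THREE.EdgesGeometry, which omits the triangulation diagonals below a threshold angle. A hedged sketch (the threshold and line material are assumptions, and depending on the three.js version you may need to convert between Geometry and BufferGeometry first):
// Hedged sketch: render the extruded mesh normally, then add an edge overlay.
// EdgesGeometry skips edges shared by nearly coplanar triangles, so the
// diagonals of the triangulated faces are not drawn.
var geometry = new THREE.ExtrudeGeometry(shape, extrudeSettings);
var mesh = new THREE.Mesh(geometry, new THREE.MeshLambertMaterial({ color: color }));
var edges = new THREE.LineSegments(
    new THREE.EdgesGeometry(geometry, 1),              // 1 degree threshold angle
    new THREE.LineBasicMaterial({ color: 0x000000 })
);
mesh.add(edges); // keep the outline aligned with the mesh transform
mesh.position.set(100, 100, 0);
mesh.rotation.x = -Math.PI / 2;
mesh.scale.set(s, s, s);
scene.add(mesh);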

How do I make a material show up on a sphere

// set the scene size
var WIDTH = 1650,
HEIGHT = 700;
// set some camera attributes
var VIEW_ANGLE = 100,
ASPECT = WIDTH / HEIGHT,
NEAR = 0.1,
FAR = 10000;
// get the DOM element to attach to
// - assume we've got jQuery to hand
var $container = $('#container');
// create a WebGL renderer, camera
// and a scene
var renderer = new THREE.WebGLRenderer();
var camera = new THREE.PerspectiveCamera( VIEW_ANGLE,
ASPECT,
NEAR,
FAR );
//camera.lookAt(new THREE.Vector3( 0, 0, 0 ));
var scene = new THREE.Scene();
// the camera starts at 0,0,0 so pull it back
camera.position.x = 200;
camera.position.y = 200;
camera.position.z = 300;
// start the renderer
renderer.setSize(WIDTH, HEIGHT);
// attach the render-supplied DOM element
$container.append(renderer.domElement);
// create the sphere's material
var sphereMaterial = new THREE.MeshLambertMaterial(
{
color: 0xCC0000
});
// set up the sphere vars
var radius = 60, segments = 20, rings = 20;
// create a new mesh with sphere geometry -
// we will cover the sphereMaterial next!
var sphere = new THREE.Mesh(
new THREE.SphereGeometry(radius, segments, rings),
img);
// add the sphere to the scene
scene.add(sphere);
// and the camera
scene.add(camera);
// create a point light
var pointLight = new THREE.PointLight( 0xFFFFFF );
// set its position
pointLight.position.x = 50;
pointLight.position.y = 100;
pointLight.position.z = 180;
// add to the scene
scene.add(pointLight);
// add a base plane
var planeGeo = new THREE.PlaneGeometry(500, 500,8, 8);
var planeMat = new THREE.MeshLambertMaterial({color: 0x666699});
var plane = new THREE.Mesh(planeGeo, planeMat);
plane.position.x = 160;
plane.position.y = 0;
plane.position.z = 20;
//rotate it to correct position
plane.rotation.x = -Math.PI/2;
scene.add(plane);
// add 3D img
var img = new THREE.MeshBasicMaterial({
map:THREE.ImageUtils.loadTexture('cube.png')
});
img.map.needsUpdate = true;
// draw!
renderer.render(scene, camera);
I've put the var img as the material on the sphere, but every time I render it, it automatically changes color...
How do I make it just show the image as I want, and not all the colors?
I would possibly put the img on a plane instead.
You need to use the callback of the loadTexture function. If you refer to the source of THREE.ImageUtils you will see that the loadTexture function is defined as
loadTexture: function ( url, mapping, onLoad, onError ) {
    var image = new Image();
    var texture = new THREE.Texture( image, mapping );
    var loader = new THREE.ImageLoader();
    loader.addEventListener( 'load', function ( event ) {
        texture.image = event.content;
        texture.needsUpdate = true;
        if ( onLoad ) onLoad( texture );
    } );
    loader.addEventListener( 'error', function ( event ) {
        if ( onError ) onError( event.message );
    } );
    loader.crossOrigin = this.crossOrigin;
    loader.load( url, image );
    return texture;
},
Here, only the url parameter is required. The texture mapping can be passed in as null. The last two parameters are the callbacks. You can pass in a function for each one which will be called at the respective load and error events. The problem you are experiencing is most likely caused by the fact that three.js is attempting to apply your material before your texture is properly loaded. In order to remedy this, you should only do this inside a function passed as the onLoad callback (which will only be called after the image has been loaded up). Also, you should always create your material before you apply it to an object.
So you should change your code from:
// set up the sphere vars
var radius = 60, segments = 20, rings = 20;
// create a new mesh with sphere geometry -
// we will cover the sphereMaterial next!
var sphere = new THREE.Mesh(
new THREE.SphereGeometry(radius, segments, rings),
img);
// add the sphere to the scene
scene.add(sphere);
to something like:
var sphereGeo, sphereTex, sphereMat, sphereMesh;
var radius = 60, segments = 20, rings = 20;
sphereGeo = new THREE.SphereGeometry(radius, segments, rings);
sphereTex = THREE.ImageUtils.loadTexture('cube.png', null, function () {
    sphereMat = new THREE.MeshBasicMaterial({map: sphereTex});
    sphereMesh = new THREE.Mesh(sphereGeo, sphereMat);
    scene.add(sphereMesh);
});
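As a hedged side note, newer three.js releases deprecate THREE.ImageUtils.loadTexture in favour of THREE.TextureLoader, which takes the same kind of onLoad callback. A minimal sketch of the equivalent flow:
// Hedged sketch using THREE.TextureLoader (newer three.js releases).
var loader = new THREE.TextureLoader();
loader.load('cube.png', function (texture) {
    var material = new THREE.MeshBasicMaterial({ map: texture });
    var sphere = new THREE.Mesh(
        new THREE.SphereGeometry(radius, segments, rings),
        material
    );
    scene.add(sphere);
    renderer.render(scene, camera); // re-render once the texture is ready
});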
