Three.js Updating DataTextures with a renderTarget's image data - three.js

I am working on a motion detection program in Three.js which uses the difference between the current and previous frame. For now, before the subtraction, the current and the previous frame are both blurred using a Three.EffectComposer each.
The main problem is: Instead of having to blur the previous frame again, I want to use the previously blurred "current" frame as the texture in the subtraction process.
The closest I have managed to do is by using the function below to update the image.data of the Three.DataTexture. It was used in the render()-function after the Blurring composer is rendered, but before the subtraction is rendered.
Both of them were rendered to the screen with Three.CopyShader.
// Reads back the pixel data of a drawable source (here the WebGL canvas)
// by copying it through an offscreen 2D canvas.
// Returns a Uint8Array of RGBA bytes (4 per pixel).
function getData(image) {
var canvas = document.createElement('canvas');
canvas.width = image.width;
canvas.height = image.height;
var context = canvas.getContext('2d');
// drawImage + getImageData forces a GPU -> CPU readback every frame;
// this is the expensive step the question is trying to avoid.
context.drawImage(image, 0, 0);
return new Uint8Array(context.getImageData(0, 0, image.width, image.height).data);
}
Where the “image” is the renderer.domElement. This method feels quite inefficient and I needed to render the Blur-pass to the screen, which caused the result to flicker.
Edit 1 The current code is shown below, it blurs the current and previous images and then calculates the difference. The animate()-function is the point of interest.
<!DOCTYPE html>
<html lang="en">
<head>
<title>Three.js Webcam Test</title>
<meta charset="utf-8">
<!-- <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
-->
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script>
<script src="//ajax.googleapis.com/ajax/libs/jqueryui/1.10.3/jquery-ui.min.js"></script>
<script src="lib/three.min.js"></script>
<!-- Effect composer scripts -->
<script src = "postprocessing/EffectComposer.js"></script>
<script src = "postprocessing/MaskPass.js"></script>
<script src = "postprocessing/RenderPass.js"></script>
<script src = "postprocessing/TexturePass.js"></script>
<script src = "postprocessing/ShaderPass.js"></script>
<script src = "postprocessing/SavePass.js"></script>
<script src = "shaders/CopyShader.js"></script>
<script src = "shaders/ColorifyShader.js"></script>
<script src = "shaders/DilationShader.js"></script>
<script src = "shaders/ErosionShader.js"></script>
<script src = "shaders/HorizontalBlurShader.js"></script>
<script src = "shaders/VerticalBlurShader.js"></script>
<script src = "shaders/BlendShader.js"></script>
<script src = "shaders/passThroughShader.js"></script>
<script src = "shaders/framediffShader.js"></script>
<script src = "shaders/PawaskarPostShader.js"></script>
<!-- ----------------------- -->
<script src="lib/Projector.js"></script>
<script src="lib/CanvasRenderer.js"></script>
<script src="lib/webcam.js"></script>
<script src="lib/perspective.js"></script>
<script src="lib/stats.min.js"></script>
<script src="lib/rStats.js"></script>
<script src="lib/rStats.extras.js"></script>
<script type="text/javascript" src="lib/dat.gui.min.js"></script>
<link href="css/style.css" rel="stylesheet" type="text/css">
<link rel="stylesheet" href="http://bootswatch.com/lumen/bootstrap.min.css">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap-theme.min.css">
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/js/bootstrap.min.js"></script>
</head>
<body>
<h1>Motion Detection (in progress)</h1>
<p>Press P to print the current frame</p>
<div id="WebGL-output">
</div>
<div id="camera" class="camera">
<div class="label"></div>
<video id="theVideo" autoplay width="640" height="480" class="webcam"></video>
<canvas id="theCanvas" width="640" height="480" class="hidden"></canvas>
</div>
<script>
// Fixed processing resolution; must match the <video>/<canvas> dimensions.
var SCREEN_HEIGHT = 480;
var SCREEN_WIDTH = 640;
// GUI-tunable settings for the motion-detection pipeline (bound via dat.GUI).
var values = {
detectmotion: true,
softness: 0.17,
threshold: 0.11,
color: "#ffae23",
usecolor: false,
postprocess: false,
postprocessmethod: 0,
preprocess: true,
detectedges: false,
framedifference: false,
binarydifference: false,
bufferlength: 1
};
// Core rendering/texture state shared between init() and animate().
// NOTE(review): `bufferLenght` (sic), prevTexture2/3, prevTextureBuffer and
// several other names below are declared but never used in the visible code.
var stats, container, video, renderer, currTexture, uniforms, camera, scene, prevTexture, prevTexture2, prevTexture3, prevTextureBuffer = [], bufferLenght,
videoContext,
prevTime;
var rS, glS, tS;
var postProcessFilters = [];
// Composer/shader handles for the preprocess (blur) and subtraction stages.
var prepScene, prepRenderTarget, prepComposer, prepPrevComposer, hBlur, vBlur, temporalShader, prevTemporalShader, prevBlur;
var modelScene, modelRenderTarget, modelComposer, passShader;
var subtractScene, subtractRenderTarget,subtractComposer, subtractShader;
//GUI variables
var gui, cPostProcessMethod, doPostProcess = false;
var frameNumber;
/** TEST **/
// SavePass holding the blurred current frame; read by the subtract shader.
var BlurSave;
/**
 * Builds the whole pipeline: webcam video -> DataTexture -> blur composer
 * (current and previous frame) -> background-subtraction composer ->
 * morphological postprocess -> screen. Then starts the animate() loop.
 * NOTE(review): `mesh` is added to both `scene` and `prepScene`; in three.js
 * an object has a single parent, so the second add() re-parents it — confirm
 * `scene` is not rendered elsewhere expecting the quad.
 */
function init(){
frameNumber = 0;
/* INIT */
scene = new THREE.Scene();
camera = new THREE.Camera();
scene.add(camera);
// Start the first available webcam source into the <video> element.
webcam.updateSources(function(s){
webcam.start('theVideo',s[0]);
});
var size = SCREEN_WIDTH * SCREEN_HEIGHT;
video = document.getElementById( 'theVideo' );
videoContext = document.getElementById('theCanvas').getContext('2d');
//The textures
// Empty DataTextures; animate() fills .image.data from the 2D canvas.
currTexture = new THREE.DataTexture([],SCREEN_WIDTH,SCREEN_HEIGHT);
prevTexture = new THREE.DataTexture([],SCREEN_WIDTH,SCREEN_HEIGHT);
prevBlur = new THREE.DataTexture([], SCREEN_WIDTH, SCREEN_HEIGHT);
// LinearFilter avoids mipmap requirements for non-power-of-two sizes.
currTexture.minFilter = prevTexture.minFilter = prevBlur.minFilter= THREE.LinearFilter;
prevTime = -1;
renderer = new THREE.WebGLRenderer();
renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
renderer.domElement.width = SCREEN_WIDTH;
renderer.domElement.height = SCREEN_HEIGHT;
// Composers manage their own clears; keep the renderer from auto-clearing.
renderer.autoClear = false;
document.body.insertBefore(renderer.domElement, document.body.childNodes[0]);
uniforms = {
currentTexture: { type: "t", value: currTexture },
mirrorImage: { type: "i", value: 0}
}
var geometry = new THREE.PlaneBufferGeometry(1, 1);
var material = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: THREE.passThroughShader.vertexShader,
fragmentShader: THREE.passThroughShader.fragmentShader
} );
// A plane with the current video context as texture
var mesh = new THREE.Mesh(geometry,material);
mesh.material.depthTest = false;
mesh.material.depthWrite = false;
scene.add(mesh);
// COPY SHADER, used to render the current context to the screen
var effectCopy = new THREE.ShaderPass(THREE.CopyShader);
effectCopy.renderToScreen = true;
/** Preprocess stage **/
prepScene = new THREE.Scene();
prepScene.add( new THREE.AmbientLight( 0xffffff ) );
prepScene.add(mesh) // add the current quad
var renderTargetParameters = {minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBFormat, stencilBuffer: false};
//blur shaders
// Separable box/gaussian blur: horizontal then vertical pass, each offset
// by one texel; toggled by the "preprocess" GUI flag.
hBlur = new THREE.ShaderPass(THREE.HorizontalBlurShader);
hBlur.uniforms["h"].value = 1 / SCREEN_WIDTH;
hBlur.enabled = values['preprocess'];
vBlur = new THREE.ShaderPass(THREE.VerticalBlurShader);
vBlur.uniforms["v"].value = 1 / SCREEN_HEIGHT;
vBlur.enabled = values['preprocess'];
// SavePass captures the blurred current frame into its own render target.
BlurSave = new THREE.SavePass(new THREE.WebGLRenderTarget(SCREEN_WIDTH, SCREEN_HEIGHT, renderTargetParameters));
//preprocess scene render pass
var renderModelPrep = new THREE.RenderPass(prepScene, camera);
var prevPassShader1 = new THREE.ShaderPass(THREE.passThroughShader);
prevPassShader1.uniforms["mirrorImage"].value = 1;
//Preprocess of the current image
//It is this prepComposer's rendertarget value I want to use in the next loop
prepComposer = new THREE.EffectComposer(renderer, new THREE.WebGLRenderTarget(SCREEN_WIDTH, SCREEN_HEIGHT, renderTargetParameters));
prepComposer.addPass(renderModelPrep);
prepComposer.addPass(prevPassShader1);
prepComposer.addPass(hBlur);
prepComposer.addPass(vBlur);
prepComposer.addPass(BlurSave);
//
// subtractComposer.addPass(effectCopy);
//Preprocess of the previous image
//Want to skip this stage
var prevPassShader = new THREE.ShaderPass(THREE.passThroughShader, prevTexture);
prevPassShader.uniforms["currentTexture"].value = prevTexture;
prevPassShader.uniforms["mirrorImage"].value = 1;
var prevBlurSave = new THREE.SavePass(new THREE.WebGLRenderTarget(SCREEN_WIDTH, SCREEN_HEIGHT, renderTargetParameters));
prepPrevComposer = new THREE.EffectComposer(renderer, new THREE.WebGLRenderTarget(SCREEN_WIDTH, SCREEN_HEIGHT, renderTargetParameters));
prepPrevComposer.addPass(renderModelPrep);
prepPrevComposer.addPass(prevPassShader);
prepPrevComposer.addPass(hBlur);
prepPrevComposer.addPass(vBlur);
prepPrevComposer.addPass(prevBlurSave);
/**------------------**/
/**---------------------------**/
/** Background Subtraction stage **/
subtractScene = new THREE.Scene();
subtractScene.add( new THREE.AmbientLight( 0xffffff ) );
var renderTargetParameters3 = {minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBAFormat, stencilBuffer: false};
//Background Subtraction shaders
// The subtraction shader samples the two SavePass targets directly.
subtractShader = new THREE.ShaderPass(THREE.framediffShader);
subtractShader.uniforms['currentTexture'].value = BlurSave.renderTarget; // from the preprocess
subtractShader.uniforms['previousTexture'].value = prevBlurSave.renderTarget; //modelled background
//Background subtraction scene render pass
var renderSubtract = new THREE.RenderPass(subtractScene, camera);
//Background subtraction Composer
subtractComposer = new THREE.EffectComposer(renderer, new THREE.WebGLRenderTarget(SCREEN_WIDTH, SCREEN_HEIGHT, renderTargetParameters3));
subtractComposer.addPass(renderSubtract);
subtractComposer.addPass(subtractShader);
//subtractComposer.addPass(effectCopy2);
/**------------------------------**/
/** Postprocessing stage **/
//Dilation
var dilationFilter = new THREE.ShaderPass(THREE.DilationShader);
dilationFilter.enabled = values['postprocess'];
postProcessFilters.push(dilationFilter);
//Erosion
var erosionFilter = new THREE.ShaderPass(THREE.ErosionShader);
erosionFilter.enabled = values['postprocess'];
postProcessFilters.push(erosionFilter);
//Pawaskar's postprocess filter
var pawaskarFilter = new THREE.ShaderPass(THREE.PawaskarPostShader);
pawaskarFilter.uniforms['threshold'].value = values['threshold'];
pawaskarFilter.enabled = values['postprocess'];
postProcessFilters.push(pawaskarFilter);
subtractComposer.addPass(pawaskarFilter);
//Opening
// NOTE(review): opening is erode-then-dilate and closing dilate-then-erode,
// but the same two ShaderPass instances are reused for both; each pass runs
// twice in sequence here.
subtractComposer.addPass(erosionFilter);
subtractComposer.addPass(dilationFilter);
//Closing
subtractComposer.addPass(dilationFilter);
subtractComposer.addPass(erosionFilter);
//The final result rendered to the screen
subtractComposer.addPass(effectCopy);
/**----------------------**/
animate();
}
/**
 * Per-frame loop: copies the webcam frame into currTexture (keeping the old
 * frame in prevTexture), then runs the two blur composers and the
 * subtraction composer. Frames are only captured when video.currentTime
 * advances, so textures are not re-uploaded for duplicate frames.
 */
function animate()
{
if(video.readyState === video.HAVE_ENOUGH_DATA ){
var time = video.currentTime;
if(time !== prevTime){
//Because a firefox bug when drawImage is used, need to catch NS_ERROR_NOT_AVAILABLE
try {
videoContext.drawImage(video, 0, 0,SCREEN_WIDTH,SCREEN_HEIGHT); //update the video
// Skip the very first frame, when currTexture has no data yet.
if(currTexture.image.data.length){
//var imgData = getData(renderer.domElement);
//var imgData = renderer.domElement.toDataURL();
// var gl = renderer.getContext();
//gl.readPixels( 0, 0, SCREEN_WIDTH, SCREEN_HEIGHT, gl.RGBA, gl.UNSIGNED_BYTE, prevBlur.image.data );
//prevBlur.image.data = imgData;
// prevBlur.needsUpdate = true;
/** I want to update the prevBlur texture with the BlurSave.renderTarget! **/
// Hand the current frame's buffer over to prevTexture (shared
// reference, not a copy — safe because currTexture is reassigned
// a fresh Uint8Array just below).
prevTexture.image.data = currTexture.image.data;
prevTexture.needsUpdate = true; //updates the previous texture in the shader
}
currTexture.image.data = new Uint8Array(videoContext.getImageData(0,0,SCREEN_WIDTH, SCREEN_HEIGHT).data);
currTexture.needsUpdate = true; //updates the current texture in the shader
prevTime = time;
}catch (e) {
if (e.name == "NS_ERROR_NOT_AVAILABLE") {
console.error(e);
} else {
throw e;
}
}
}
}
// Render order matters: both preprocess composers must fill their SavePass
// targets before the subtraction composer samples them.
prepComposer.render(0.05);
prepPrevComposer.render(0.05);
subtractComposer.render(0.05);
frameNumber++;
requestAnimationFrame(animate);
}
/**
 * Reads back the pixel data of a drawable source (canvas/image/video) by
 * drawing it onto an offscreen 2D canvas.
 * @param {HTMLCanvasElement|HTMLImageElement|HTMLVideoElement} image - source with width/height
 * @returns {Uint8Array} RGBA bytes, 4 per pixel
 */
function getData(image) {
var w = image.width;
var h = image.height;
var offscreen = document.createElement('canvas');
offscreen.width = w;
offscreen.height = h;
var ctx = offscreen.getContext('2d');
ctx.drawImage(image, 0, 0);
var pixels = ctx.getImageData(0, 0, w, h).data;
return new Uint8Array(pixels);
}
/**
 * Keyup handler: when "P" (keyCode 80) is released, snapshots the WebGL
 * canvas as a data URL and appends it to the document as an <img>.
 * @param {KeyboardEvent} e
 */
function copyCanvas(e) {
// Only react to the "P" key.
if (e.which !== 80) {
return;
}
var snapshot = renderer.domElement.toDataURL();
// create a new image and add to the document
var imgNode = document.createElement("img");
imgNode.src = snapshot;
document.body.appendChild(imgNode);
}
window.onload = init;
window.addEventListener("keyup", copyCanvas);
</script>
</body>
</html>
How can I update the prevBlur.image.data with the current image data of the BlurSave.rendertarget?
Are there any other way to update a shader's Sampler2D uniform with the value from a WebGLRenderTarget’s image data from a previous time-step?

If you want to post-process your scene with a subtraction shader that uses the difference between the current and previous frames, you can do something like the following:
First create two render targets rt1 and rt2. Set currentRT = rt1 and prevRT = rt2.
Then in your render loop, (1) render to currentRT, then (2) pass currentRT and prevRT as uniforms to your subtraction shader and render to the screen, then (3) swap the render targets.
three.js r.70

Related

this.geometry is undefined when using tree-mesh-bvh accelerated raycasting and ifc.js

In a simple IFC.js application, loading IFC models (and fragments for that matter) fails with TypeError: this.geometry is undefined when accelerating the raycast using three-mesh-bvh. Removing that makes the application work again. I have tried with plenty of different IFC models, all of which work when removing the acceleration. Even the test models (Schependomlaan.ifc for example) fail when loading them.
Source of the app: https://github.com/gjkf/simple-ifc
app.js:
import { AmbientLight, AxesHelper, DirectionalLight, GridHelper, PerspectiveCamera, Scene, WebGLRenderer } from 'three';
import { acceleratedRaycast, computeBoundsTree, disposeBoundsTree } from 'three-mesh-bvh';
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls';
import { IFCLoader } from 'web-ifc-three/IFCLoader';
//Creates the Three.js scene
const scene = new Scene();
//Object to store the size of the viewport
const size = {
width: window.innerWidth,
height: window.innerHeight,
};
//Creates the camera (point of view of the user)
const camera = new PerspectiveCamera(75, size.width / size.height);
camera.position.z = 15;
camera.position.y = 13;
camera.position.x = 8;
//Creates the lights of the scene
const lightColor = 0xffffff;
const ambientLight = new AmbientLight(lightColor, 0.5);
scene.add(ambientLight);
const directionalLight = new DirectionalLight(lightColor, 1);
directionalLight.position.set(0, 10, 0);
directionalLight.target.position.set(-5, 0, 0);
scene.add(directionalLight);
scene.add(directionalLight.target);
//Sets up the renderer, fetching the canvas of the HTML
const threeCanvas = document.getElementById("three-canvas");
const renderer = new WebGLRenderer({ canvas: threeCanvas, alpha: true });
renderer.setSize(size.width, size.height);
renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
//Creates grids and axes in the scene
const grid = new GridHelper(50, 30);
scene.add(grid);
const axes = new AxesHelper();
axes.material.depthTest = false;
axes.renderOrder = 1;
scene.add(axes);
//Creates the orbit controls (to navigate the scene)
const controls = new OrbitControls(camera, threeCanvas);
controls.enableDamping = true;
controls.target.set(-2, 0, 0);
//Animation loop
const animate = () => {
controls.update();
renderer.render(scene, camera);
requestAnimationFrame(animate);
};
animate();
//Adjust the viewport to the size of the browser
window.addEventListener("resize", () => {
(size.width = window.innerWidth), (size.height = window.innerHeight);
camera.aspect = size.width / size.height;
camera.updateProjectionMatrix();
renderer.setSize(size.width, size.height);
});
//Sets up the IFC loading
const ifcLoader = new IFCLoader();
ifcLoader.ifcManager.useWebWorkers(true, "worker/IFCWorker.js");
ifcLoader.ifcManager.setWasmPath("../wasm/");
ifcLoader.ifcManager.applyWebIfcConfig({
USE_FAST_BOOLS: true,
COORDINATE_TO_ORIGIN: true,
});
// BUG FIX: setupThreeMeshBVH expects (computeBoundsTree, disposeBoundsTree,
// acceleratedRaycast) in that order. Passing acceleratedRaycast first wires
// the wrong functions onto BufferGeometry/Mesh, which is what caused
// "TypeError: this.geometry is undefined" during raycasts.
ifcLoader.ifcManager.setupThreeMeshBVH(
computeBoundsTree,
disposeBoundsTree,
acceleratedRaycast
);
const input = document.getElementById("file-input");
input.addEventListener(
"change",
async (changed) => {
const ifcURL = URL.createObjectURL(changed.target.files[0]);
// ifcLoader.load(ifcURL, (ifcModel) => scene.add(ifcModel));
const model = await ifcLoader.loadAsync(ifcURL, (e) => console.log(e));
console.log(model);
scene.add(model);
},
false
);
index.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="styles.css">
<title>IFC.js</title>
</head>
<body>
<input type="file" id="file-input" accept=".ifc, .ifcXML, .ifcZIP">
<canvas id="three-canvas"></canvas>
<script src="bundle.js"></script>
</body>
</html>
I made this simple application to nail down the problem, but in a different (Angular) project I would really need the accelerated raycasting to ensure the experience is smooth.
The problem was with your call to "setupThreeMeshBVH()"
This is a proper call with a proper sequence👇
ifcLoader.ifcManager.setupThreeMeshBVH(
computeBoundsTree,
disposeBoundsTree,
acceleratedRaycast
);
In your code, you had given "computeBoundsTree" as the second parameter whereas it should be first.

How to make particles in threejs take the shape of a model/object -onscroll

I am relatively new to ThreeJS.
I want to make a website very similar to this one:
Web de CraftedBYGC
In which I make a system of particles that as the user scrolls they disperse and rejoin in a form.
The truth is that I am very lost about where to start.
I would greatly appreciate some help.
I tried creating a particle mesh from a geometry and moving the camera on scroll to roughly simulate the effect, but the result was very poor. I need to find another method, and I am not sure what to try next.
Below is a snippet of the working example:
// Scene scaffolding: renderer, scene and a camera placed at (-30, 40, 50)
// (initRenderer/initCamera come from the example's util.js).
var webGLRenderer = initRenderer();
var scene = new THREE.Scene();
var camera = initCamera(new THREE.Vector3(-30, 40, 50));
// call the render function
var step = 0;
var knot;
// setup the control gui
// dat.GUI-backed knot parameters; redraw() rebuilds the particle cloud
// whenever a slider changes.
var controls = new function () {
// we need the first child, since it's a multimaterial
this.radius = 13;
this.tube = 1.7;
this.radialSegments = 156;
this.tubularSegments = 12;
this.redraw = function () {
// remove the old plane
if (knot) scene.remove(knot);
// create a new one
// NOTE(review): controls.p and controls.q are never defined, so
// Math.round(undefined) passes NaN into TorusKnotGeometry — verify the
// knot parameters render as intended in this three.js version.
var geom = new THREE.TorusKnotGeometry(controls.radius, controls.tube, Math.round(controls.radialSegments), Math.round(controls.tubularSegments), Math.round(controls.p), Math.round(controls.q));
knot = createPoints(geom);
// add it to the scene.
scene.add(knot);
};
};
var gui = new dat.GUI();
gui.add(controls, 'radius', 0, 40).onChange(controls.redraw);
gui.add(controls, 'tube', 0, 40).onChange(controls.redraw);
gui.add(controls, 'radialSegments', 0, 400).step(1).onChange(controls.redraw);
gui.add(controls, 'tubularSegments', 1, 20).step(1).onChange(controls.redraw);
// Build the initial knot and start the loop.
controls.redraw();
render();
// from THREE.js examples
function generateSprite() {
var canvas = document.createElement('canvas');
canvas.width = 16;
canvas.height = 16;
var context = canvas.getContext('2d');
var gradient = context.createRadialGradient(canvas.width / 2, canvas.height / 2, 0, canvas.width / 2, canvas.height / 2, canvas.width / 2);
gradient.addColorStop(0, 'rgba(255,255,255,1)');
gradient.addColorStop(0.2, 'rgba(0,255,255,1)');
gradient.addColorStop(0.4, 'rgba(0,0,64,1)');
gradient.addColorStop(1, 'rgba(0,0,0,1)');
context.fillStyle = gradient;
context.fillRect(0, 0, canvas.width, canvas.height);
var texture = new THREE.Texture(canvas);
texture.needsUpdate = true;
return texture;
}
/**
 * Wraps a geometry in an additive-blended, sprite-mapped THREE.Points cloud
 * so each vertex renders as a glowing particle.
 * @param {THREE.Geometry} geom - geometry whose vertices become particles
 * @returns {THREE.Points}
 */
function createPoints(geom) {
var pointOptions = {
color: 0xffffff,
size: 3,
transparent: true,
blending: THREE.AdditiveBlending,
map: generateSprite(),
depthWrite: false // instead of sortParticles
};
var material = new THREE.PointsMaterial(pointOptions);
return new THREE.Points(geom, material);
}
/**
 * Animation loop: spins the knot at a constant rate and redraws the scene.
 */
function render() {
// `step` accumulates each frame, so rotation speed is constant.
knot.rotation.y = step += 0.01;
// render using requestAnimationFrame
requestAnimationFrame(render);
webGLRenderer.render(scene, camera);
}
body {
margin: 0;
overflow: none
}
<!DOCTYPE html>
<html>
<head>
<title>Example 07.11 - 3D Torusknot</title>
<script type="text/javascript" charset="UTF-8" src="https://www.smartjava.org/ltjs3/libs/three/three.js"></script>
<script src="https://www.smartjava.org/ltjs3/libs/three/controls/TrackballControls.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dat-gui/0.7.9/dat.gui.min.js"></script>
<script src="https://www.smartjava.org/ltjs3/src/js/util.js"></script>
</head>
<body>
<div id="webgl-output">
</body>
</html>
This is working on a TorusKnot, but you can apply the particles to a model, too. All you have to do is change the source.

Pan is the only working thing for OrbitControls

I am trying to program a solar system that you can orbit around and pan, so I added OrbitControls to my project. Panning works fine, but for some reason rotating and zooming are not working. I have tried copying other people's examples and cannot figure out what is wrong with mine. I don't know whether it could just be my computer, but I have no reason to believe it is.
<!DOCTYPE html>
<html>
<head>
<title>Example 01.02 - First Scene</title>
<script type="text/javascript" src="../libs/three.js"></script>
<script type="text/javascript" src="../libs/jquery-1.9.0.js"></script>
<script type="text/javascript" src="../libs/stats.js"></script>
<script type="text/javascript" src="../libs/dat.gui.js"></script>
<script type="text/javascript" src="../libs/OrbitControls.js"></script>
<script type="text/javascript" src="../libs/chroma.js"></script>
<script type="text/javascript" src="../libs/dat.gui.js"></script>
<style>
body{
/* set margin to 0 and overflow to hidden, to go fullscreen */
margin: 0;
overflow: hidden;
}
</style>
</head>
<body>
<div id="Stats-output">
</div>
<!-- Div which will hold the Output -->
<div id="WebGL-output">
</div>
<!-- Javascript code that runs our Three.js examples -->
<script type="text/javascript">
// once everything is loaded, we run our Three.js stuff.
$(function () {
var stats = initStats();
var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight;
var clock = new THREE.Clock();
// create a scene, that will hold all our elements such as objects, cameras and lights.
var scene = new THREE.Scene();
// create a camera, which defines where we're looking at.
var camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 5000);
// position and point the camera to the center of the scene
camera.position.x = -150;
camera.position.y = 200;
camera.position.z = 150;
camera.lookAt(scene.position);
// create a render and set the size
renderer = new THREE.WebGLRenderer( {antialias:true} );
renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
ccontrols = new THREE.OrbitControls( camera, renderer.domElement );
ccontrols.damping = 0.2;
ccontrols.addEventListener( 'change', render );
var ambiLight = new THREE.AmbientLight(0x747474)
scene.add(ambiLight);
var pointLight = new THREE.PointLight(0xffffff);
pointLight.position.set(0, 0, 0);
pointLight.distance = 100;
scene.add(pointLight);
//create the light box
var lightboxgeom = new THREE.SphereGeometry(3000,50,50);
var lightboxmat = new THREE.MeshPhongMaterial({map: THREE.ImageUtils.loadTexture("./TexturesAndModels/lightbox.png"), side: THREE.BackSide});
var lightbox = new THREE.Mesh(lightboxgeom, lightboxmat);
//sun and glow
var sun = createMesh(new THREE.SphereGeometry(20, 20, 20), "texture_sun.jpg", "texture_sun_DISP.jpg", "texture_sun_SPEC.jpg");
sun.shading = THREE.NoShading;
var spriteMaterial = new THREE.SpriteMaterial(
{
map: new THREE.ImageUtils.loadTexture( "TexturesAndModels/GLOW.png" ),
useScreenCoordinates: false, alignment: THREE.SpriteAlignment.center,
color: 0xffcc00, transparent: false, blending: THREE.AdditiveBlending
});
var sprite = new THREE.Sprite( spriteMaterial );
sprite.scale.set(100, 100, 1.0);
sun.add(sprite);
//mercury
var mercury = createMesh(new THREE.SphereGeometry(1.50, 20, 20), "texture_mercury.jpg", "texture_mercury_DISP.jpg", "texture_mercury_SPEC.jpg", "texture_mercury_NRM.jpg");
//venus
var venus = createMesh(new THREE.SphereGeometry(3.80, 20, 20), "texture_venus_surface.jpg", "texture_venus_surface_DISP.jpg", "texture_venus_surface_SPEC.jpg", "texture_venus_surface_NRM.jpg");
//earth and clouds
var earth = createMesh(new THREE.SphereGeometry(4.00, 20, 20), "ColorMap.jpg", "Bump2.jpg", "SpecMask2.png", "ColorMap_NRM.jpg");
//mars
var mars = createMesh(new THREE.SphereGeometry(2.10, 20, 20), "texture_mars.jpg", "texture_mars_DISP.jpg", "texture_mars_SPEC.jpg", "texture_mars_NRM.jpg");
//Jupiter
var jupiter = createMesh(new THREE.SphereGeometry(18.7, 20, 20), "texture_jupiter.jpg", "texture_jupiter_DISP.jpg", "texture_jupiter_SPEC.jpg", "texture_jupiter_NRM.jpg");
//saturn
var saturn = createMesh(new THREE.SphereGeometry(18, 20, 20), "texture_saturn.jpg", "texture_saturn_DISP.jpg", "texture_saturn_SPEC.jpg", "texture_saturn_NRM.jpg");
//uranus
var uranus = createMesh(new THREE.SphereGeometry(15, 20, 20), "texture_uranus.jpg", "texture_uranus_DISP.jpg", "texture_uranus_SPEC.jpg", "texture_uranus_NRM.jpg");
//neptune
var neptune = createMesh(new THREE.SphereGeometry(14, 20, 20), "texture_neptune.jpg", "texture_neptune_DISP.jpg", "texture_neptune_SPEC.jpg", "texture_neptune_NRM.jpg");
// position the planets
sun.position.x=0;
sun.position.y=0;
sun.position.z=0;
earth.position.y=0;
mars.position.y=0;
venus.position.y=0;
mercury.position.y=0;
saturn.position.y=0;
jupiter.position.y=0;
uranus.position.y=0;
neptune.position.y=0;
// add the planets to the scene
scene.add(lightbox);
scene.add(earth);
scene.add(sun);
scene.add(mercury);
scene.add(venus);
scene.add(mars);
scene.add(jupiter);
scene.add(saturn);
scene.add(uranus);
// BUG FIX: the original added `saturn` a second time (a no-op, since an
// object can only be in the scene once) and never added `neptune`, so
// neptune was positioned every frame in render() but never visible.
scene.add(neptune);
// add the output of the renderer to the html element
$("#WebGL-output").append(renderer.domElement);
var r = 0;
var step = 0;
var controls = new function() {
this.timeScale = 1;
}
var gui = new dat.GUI();
gui.add(controls, 'timeScale', 0, 10);
// render the scene
render();
/**
 * Builds a Phong-shaded mesh with a diffuse texture and optional bump,
 * specular and normal maps, all loaded from ./TexturesAndModels/.
 * @param {THREE.Geometry} geom - geometry to wrap (spheres here)
 * @param {string} imageFile - diffuse texture file name
 * @param {string} [bump] - optional bump map file name
 * @param {string} [spec] - optional specular map file name
 * @param {string} [normal] - optional normal map file name
 * @returns {THREE.Mesh}
 */
function createMesh(geom, imageFile, bump, spec, normal) {
var texture = THREE.ImageUtils.loadTexture("./TexturesAndModels/" + imageFile)
var mat = new THREE.MeshPhongMaterial();
mat.map = texture;
if (bump) {
var bump = THREE.ImageUtils.loadTexture("./TexturesAndModels/" + bump)
mat.bumpMap = bump;
mat.bumpScale = 0.2;
}
if(spec) {
// BUG FIX: path previously read "./TexturesandModels/" (lowercase 'a'),
// inconsistent with every other load here; on case-sensitive servers or
// filesystems the specular maps would 404 and silently be dropped.
var spec = THREE.ImageUtils.loadTexture("./TexturesAndModels/" + spec)
mat.specularMap = spec;
}
if(normal) {
var norm = THREE.ImageUtils.loadTexture("./TexturesAndModels/" + normal)
mat.normalMap = norm;
}
var mesh = new THREE.Mesh(geom, mat);
return mesh;
}
/**
 * Animation loop: advances the orbital angle `r`, repositions each planet on
 * a circle in the XZ plane (radius = orbital distance, angular speed scaled
 * per planet), spins each planet, then redraws.
 */
function render() {
stats.update();
earth.position.x = Math.sin(r*0.1)*150;
earth.position.z = Math.cos(r*0.1)*150;
// `step` accumulates here and is reused (scaled) for all other rotations.
earth.rotation.y = step += controls.timeScale * 0.02;
mercury.position.x = Math.sin(r*0.4)*58;
mercury.position.z = Math.cos(r*0.4)*58;
mercury.rotation.y = step/58.7;
venus.position.x = Math.sin(r*0.1625)*108;
venus.position.z = Math.cos(r*0.1625)*108;
venus.rotation.y = step/243;
mars.position.x = Math.sin(r*0.05)*228;
mars.position.z = Math.cos(r*0.05)*228;
mars.rotation.y = step*1.026;
jupiter.position.x = Math.sin(r*.008)*483;
jupiter.position.z = Math.cos(r*.008)*483;
jupiter.rotation.y = step*2.44;
saturn.position.x = Math.sin(r*.003)*886;
saturn.position.z = Math.cos(r*.003)*886;
saturn.rotation.y = step*2.35;
uranus.position.x = Math.sin(r*.001)*1784;
uranus.position.z = Math.cos(r*.001)*1784;
uranus.rotation.y = step*1.34;
neptune.position.x = Math.sin(r*.0006)*2794;
neptune.position.z = Math.cos(r*.0006)*2794;
neptune.rotation.y = step*1.26;
// Advance the shared orbital angle by 2 degrees per frame at timeScale 1.
r+=controls.timeScale * (Math.PI/180*2);
renderer.render(scene, camera);
requestAnimationFrame(render);
}
/**
 * Creates a Stats.js FPS meter pinned to the top-left corner and mounts it
 * into the #Stats-output container.
 * @returns {Stats} the configured stats instance
 */
function initStats() {
var stats = new Stats();
stats.setMode(0); // 0: fps, 1: ms
// Pin the widget to the top-left of the viewport.
var widgetStyle = stats.domElement.style;
widgetStyle.position = 'absolute';
widgetStyle.left = '0px';
widgetStyle.top = '0px';
$("#Stats-output").append(stats.domElement);
return stats;
}
});
</script>
</body>
</html>

How to render geojson with three.js and d3.js

I use three.js r49, d3.v2.js and d3-threeD.js i want to render my city(Mashhad) with three.js
However, I have a problem: I can render European countries with this code, but I cannot render my city using my existing GeoJSON file.
d3.js Ref
d3-threeD.js Ref
these are my codes:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>GEO Bar</title>
<link rel="stylesheet" href="">
<script type="text/javascript" src="./scripts/jquery-1.7.2.js"></script>
<script type="text/javascript" src="./scripts/d3.v2.js"></script>
<script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/three.js/r49/Three.min.js"></script>
<script type="text/javascript" src="./scripts/d3-threeD.js"></script>
<script type="text/javascript" src="./scripts/constants.js"></script>
<script type="text/javascript" src="./scripts/util-geo.js"></script>
<script type="text/javascript" src="./scripts/script.js"></script>
<style type="text/css">
.btns{
position: fixed;
right: 100px;
top: 100px;
}
.btn{
width: 40px;
height: 60px;
background-color: #999;
margin-top: 5px;
text-align: center;
color: #111;
}
.btn > div{
margin-bottom: 5px;
cursor: pointer;
}
</style>
</head>
<body>
<div id="chart"></div>
<div class="btns">
<div class="btn x">
<div class="plus">+</div>X
<div class="minus">-</div>
</div>
<div class="btn y">
<div class="plus">+</div>Y
<div class="minus">-</div>
</div>
<div class="btn z">
<div class="plus">+</div>Z
<div class="minus">-</div>
</div>
</div>
</body>
</html>
constants.js codes:
// Projection tuning constants consumed by util-geo.js (geons.geoConfig):
// translate offsets (pixels) and scale for the d3 mercator projection.
var appConstants = {
TRANSLATE_0 : -360,
TRANSLATE_1 : 10040,
SCALE : 60000
}
util-geo.js codes:
var geons = {};
geons.geoConfig = function() {
this.TRANSLATE_0 = appConstants.TRANSLATE_0;
this.TRANSLATE_1 = appConstants.TRANSLATE_1;
this.SCALE = appConstants.SCALE;
this.mercator = d3.geo.mercator();
this.path = d3.geo.path().projection(this.mercator);
this.setupGeo = function() {
var translate = this.mercator.translate();
translate[0] = this.TRANSLATE_0;
translate[1] = this.TRANSLATE_1;
this.mercator.translate(translate);
this.mercator.scale(this.SCALE);
}
console.log(2,this.path);
}
// geoConfig contains the configuration for the geo functions
geo = new geons.geoConfig();
script.js codes:
(function($) {
$(function(){
// get the correct geo for d3s
geo.setupGeo();
var
dataURL = "area.json",
scene,
renderer,
camera;
btnXPlus = $(".btns .x .plus"),
btnXMinus = $(".btns .x .minus"),
btnYPlus = $(".btns .y .plus"),
btnYMinus = $(".btns .y .minus"),
btnZPlus = $(".btns .z .plus"),
btnZMinus = $(".btns .z .minus"),
translate = geo.mercator.translate();
// get the data
$.getJSON( dataURL, function(data, textStatus, jqXHR) {
initScene();
addGeoObject();
renderer.render( scene, camera );
// Set up the three.js scene. This is the most basic setup without
// any special stuff
// Sets up the basic three.js scene: 600x600 renderer, perspective camera
// looking down at the origin, a point light, and a large ground plane the
// geojson meshes will sit on. Assigns the outer `renderer`, `camera` and
// `scene` variables.
function initScene() {
// set the scene size
var
WIDTH = 600,
HEIGHT = 600;
// set some camera attributes
var
VIEW_ANGLE = 45,
ASPECT = WIDTH / HEIGHT,
NEAR = 1, FAR = 10000;
// create a WebGL renderer, camera, and a scene
renderer = new THREE.WebGLRenderer({antialias:true});
camera = new THREE.PerspectiveCamera(VIEW_ANGLE, ASPECT, NEAR, FAR);
scene = new THREE.Scene();
// add and position the camera at a fixed position
scene.add(camera);
camera.position.z = 550;
camera.position.x = 0;
camera.position.y = 550;
console.log("scene.position: ",scene.position);
camera.lookAt( scene.position );
// start the renderer, and black background
renderer.setSize(WIDTH, HEIGHT);
renderer.setClearColor(0x321);
// add the render target to the page
$("#chart").append(renderer.domElement);
// add a light at a specific position
var pointLight = new THREE.PointLight(0xFFFFFF);
scene.add(pointLight);
pointLight.position.x = 800;
pointLight.position.y = 800;
pointLight.position.z = 800;
// add a base plane on which we'll render our map
var planeGeo = new THREE.PlaneGeometry(10000, 10000, 10, 10);
var planeMat = new THREE.MeshLambertMaterial({color: 0x666699});
var plane = new THREE.Mesh(planeGeo, planeMat);
// rotate it to correct position
plane.rotation.x = -Math.PI/2;
scene.add(plane);
console.log(1);
}
// Add the loaded GIS object (in GeoJSON format) to the map: convert each
// feature to an extruded three.js mesh. Colour encodes the `bev_dichth`
// property; extrusion height encodes `aant_inw`.
function addGeoObject() {
    // keep track of rendered objects
    var meshes = [];
    var colors = [];
    var averageValues = [];
    var totalValues = [];
    // keep track of min and max, used to color the objects
    // (-1 acts as a "not set yet" sentinel)
    var maxValueAverage = 0;
    var minValueAverage = -1;
    // keep track of max and min of total value
    var maxValueTotal = 0;
    var minValueTotal = -1;
    // First pass: convert each feature to a three.js path and collect the
    // value ranges needed for colouring/extruding in the second pass.
    for (var i = 0; i < data.features.length; i++) {
        console.log(data.features[i]);
        var geoFeature = data.features[i];
        var feature = geo.path(geoFeature);
        console.log("feature:", feature);
        // we only need to convert it to a three.js path
        var mesh = transformSVGPathExposed(feature);
        console.log(mesh);
        meshes.push(mesh);
        // Property used to pick the colour later on. BUG FIX: radix 10 is
        // now explicit so parseInt can never misinterpret the input.
        var value = parseInt(geoFeature.properties.bev_dichth, 10);
        if (value > maxValueAverage) maxValueAverage = value;
        if (value < minValueAverage || minValueAverage === -1) minValueAverage = value;
        averageValues.push(value);
        // and we get the max values to determine height later on.
        value = parseInt(geoFeature.properties.aant_inw, 10);
        if (value > maxValueTotal) maxValueTotal = value;
        if (value < minValueTotal || minValueTotal === -1) minValueTotal = value;
        totalValues.push(value);
    }
    console.log(averageValues, totalValues, colors);
    // Second pass: extrude each path to a height and give it a colour,
    // both normalised against the ranges gathered above.
    for (var i = 0; i < averageValues.length; i++) {
        // colour scale: 0..255 depending on where this value sits in the range
        var scale = ((averageValues[i] - minValueAverage) / (maxValueAverage - minValueAverage)) * 255;
        var mathColor = gradient(Math.round(scale), 255);
        console.log(scale, mathColor);
        var material = new THREE.MeshLambertMaterial({
            color: mathColor
        });
        console.log("material:", material);
        // extrusion height: 0..100 depending on the total-value range
        var extrude = ((totalValues[i] - minValueTotal) / (maxValueTotal - minValueTotal)) * 100;
        var shape3d = meshes[i].extrude({amount: Math.round(extrude), bevelEnabled: false});
        console.log("extrude: ", extrude, "shape3d:", shape3d);
        // create a mesh based on material and extruded shape
        var toAdd = new THREE.Mesh(shape3d, material);
        // rotate and position the elements nicely in the center
        toAdd.rotation.x = Math.PI / 2;
        toAdd.translateX(-490);
        toAdd.translateZ(50);
        toAdd.translateY(extrude / 2);
        console.log("toAdd: ", toAdd);
        scene.add(toAdd);
    }
}
// Simple gradient function: map a value in [0, maxLength] to a packed
// 0xRRGGBB integer running from green (low) to red (high).
//
// BUG FIX: the parameter used to be named `color` while the body read
// `length` — which resolved to the global `window.length` instead of the
// argument, so every call produced the same colour. The parameter is now
// named `length`, matching what the call site passes (Math.round(scale)).
// The dead commented-out duplicate of this function has been removed.
function gradient(length, maxLength) {
    var i = (length * 255 / maxLength);
    var r = i;
    var g = 255 - i;
    var b = 0;
    // pack as 0xRRGGBB (red in the high byte)
    var rgb = b | (g << 8) | (r << 16);
    console.log("rgb: ", rgb);
    return rgb;
}
// Parse a "#RRGGBB" or shorthand "#RGB" hex colour into a packed integer
// with red in the LOW byte and blue in the high byte (note: this is the
// reverse of gradient()'s packing). Returns null for unparseable input.
function hexToRgb(hex) {
    // Expand shorthand form (e.g. "03F") to full form (e.g. "0033FF")
    var expanded = hex.replace(/^#?([a-f\d])([a-f\d])([a-f\d])$/i, function(m, r, g, b) {
        return r + r + g + g + b + b;
    });
    var parts = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(expanded);
    if (!parts) {
        return null;
    }
    var red = parseInt(parts[1], 16);
    var green = parseInt(parts[2], 16);
    var blue = parseInt(parts[3], 16);
    return red | (green << 8) | (blue << 16);
}
});
// Camera nudge buttons: each click moves the camera 10 units along one
// axis, logs the new coordinate, and re-renders the otherwise static
// scene. The six near-identical handlers are generated by one factory.
//
// BUG FIX: the Y-minus handler used to decrement camera.position.x while
// logging camera.position.y (a copy/paste slip); it now moves the Y axis
// as its button label says.
function nudgeCamera(axis, delta) {
    return function() {
        camera.position[axis] += delta;
        console.log(camera.position[axis]);
        renderer.render(scene, camera);
    };
}
btnXPlus.click(nudgeCamera('x', 10));
btnXMinus.click(nudgeCamera('x', -10));
btnYPlus.click(nudgeCamera('y', 10));
btnYMinus.click(nudgeCamera('y', -10));
btnZPlus.click(nudgeCamera('z', 10));
btnZMinus.click(nudgeCamera('z', -10));
});
})(jQuery);
My GeoJSON file: omidgharib.ir/area.json

ShaderMaterial not work with MeshFaceMaterial

I use ShaderMaterial to create a realistic material for my cube, but it does not work well. As you can see, only the first face renders correctly. Even when I use a different texture for each face, it still does not work. I don't know what the problem is with my code.
Here is my code:
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>Test_material</title>
<style type="text/css" media="all">
canvas {
border: 1px solid #000;
}
</style>
</head>
<body>
<div id='container'></div>
<script type="text/javascript" src="js/threejs/Three.js">
</script>
<script type="text/javascript" src="js/lib/util.js">
</script>
<script type="text/javascript" src="js/threejs/TrackballControls.js"></script>
<script>
// Scene scaffolding for the ShaderMaterial cube demo, plus asynchronous
// preloading of the six faces' normal/diffuse/specular maps. init() runs
// once every texture is ready.
var WIDTH = 400;
var HEIGHT = 400;
var container = document.getElementById('container');

var camera = new THREE.PerspectiveCamera(25, WIDTH / HEIGHT, 1, 10000);
camera.position.set(0, 300, 100);

var animate;
var scene = new THREE.Scene();
scene.add(camera);
camera.lookAt(scene.position);

var controls = new THREE.TrackballControls(camera);

// custom shape
// BUG FIX: materialArr was an implicit global (missing `var`). Note that
// init() below declares its own local materialArr, so this one is
// effectively unused — kept only for interface compatibility.
var materialArr = [];

// BUG FIX: the '..._0' texture entries were listed twice in the original
// object literal; duplicate keys silently overwrite each other, so the
// duplicates are removed here with no behavioural change.
var data = new utils.Storage.ResourceStorage();
data.load({
    'textures': {
        'track_normal_0': 'images/pattern/track_normal_0.jpg',
        'track_diffuse_0': 'images/pattern/track_diffuse_0.jpg',
        'track_specular_0': 'images/pattern/track_specular_0.jpg',
        'track_normal_1': 'images/pattern/track_normal_1.jpg',
        'track_diffuse_1': 'images/pattern/track_diffuse_1.jpg',
        'track_specular_1': 'images/pattern/track_specular_1.jpg',
        'track_normal_2': 'images/pattern/track_normal_2.jpg',
        'track_diffuse_2': 'images/pattern/track_diffuse_2.jpg',
        'track_specular_2': 'images/pattern/track_specular_2.jpg',
        'track_normal_3': 'images/pattern/track_normal_3.jpg',
        'track_diffuse_3': 'images/pattern/track_diffuse_3.jpg',
        'track_specular_3': 'images/pattern/track_specular_3.jpg',
        'track_normal_4': 'images/pattern/track_normal_4.jpg',
        'track_diffuse_4': 'images/pattern/track_diffuse_4.jpg',
        'track_specular_4': 'images/pattern/track_specular_4.jpg',
        'track_normal_5': 'images/pattern/track_normal_5.jpg',
        'track_diffuse_5': 'images/pattern/track_diffuse_5.jpg',
        'track_specular_5': 'images/pattern/track_specular_5.jpg',
    },
    'onReady': init
});
// Build one ShaderMaterial per cube face from the preloaded normal /
// diffuse / specular maps, assemble the cube, and start the render loop.
function init(){
// NOTE(review): this local shadows the module-level materialArr above.
var materialArr = [];
for (var i=0; i< 6; i++){
// Built-in normal-map shader from the (legacy) three.js ShaderLib.
var shader = THREE.ShaderLib['normalmap'];
// The normal texture is cloned so each face gets its own instance;
// needsUpdate asks three.js to (re)upload the texture data.
var normalTexture = data.getTexture('track_normal_' + i).clone();
normalTexture.needsUpdate = true;
var diffuseTexture = data.getTexture('track_diffuse_' + i);
diffuseTexture.needsUpdate = true;
var specularTexture = data.getTexture('track_specular_' + i);
specularTexture.needsUpdate = true;
// Clone the shared uniform set so the six materials don't overwrite
// each other's texture bindings.
var uniforms = THREE.UniformsUtils.clone(shader.uniforms);
uniforms['tNormal'].value = normalTexture;
uniforms['tDiffuse'].value = diffuseTexture;
uniforms['tSpecular'].value = specularTexture;
uniforms['enableDiffuse'].value = true;
uniforms['enableSpecular'].value = true;
uniforms['uSpecularColor'].value.setHex(0x00cc00);
uniforms['uDiffuseColor'].value.setHex(0x0000ff);
// uniforms['uShininess'].value = 100;
// NOTE(review): the trailing " " makes each vertex shader source string
// unique — presumably to defeat three.js shader-program caching so each
// face gets its own program; verify against the three.js version in use.
var material = new THREE.ShaderMaterial({
fragmentShader: shader.fragmentShader,
vertexShader: shader.vertexShader + " ",
uniforms: uniforms,
lights: true
});
material.perPixel = true;
material.needsUpdate = true;
materialArr.push(material);
}
// One material per face; normal mapping requires tangents on the geometry.
var geometry = new THREE.CubeGeometry(40, 40, 40);
geometry.computeTangents();
var material = new THREE.MeshFaceMaterial(materialArr);
for (var i=0; i< material.materials.length; i++){
material.materials[i].needsUpdate = true;
}
// NOTE(review): `mesh` is an implicit global (no `var`).
mesh =new THREE.Mesh(geometry, material);
scene.add(mesh);
var renderer = new THREE.WebGLRenderer({antialias: true});
renderer.setSize(WIDTH, HEIGHT);
container.appendChild(renderer.domElement);
var dirLight = new THREE.DirectionalLight(0xffffff);
scene.add(dirLight);
var ambientLight = new THREE.AmbientLight(0xcddacc);
scene.add(ambientLight);
renderer.render(scene, camera);
// Render loop: trackball controls need a per-frame update + re-render.
animate = function (){
requestAnimationFrame(animate);
controls.update();
renderer.render(scene, camera);
};
animate();
}
</script>
</body>
</html>
My result
I would suggest using a single material and tweaking the UVs of the geometry instead. Take a look at the minecraft example to see how to do that.

Resources