WebGL: Count the number of rendered vertices

Using the WebGL API, is there a way to count the number of vertices rendered within a given canvas? I've seen some tools that attempt to accomplish this task but some are giving strange results (e.g. Three.js' renderer.info.render is reporting my scene has 10,134.3 triangles).
Any help with using the raw WebGL API to count the number of rendered vertices (and, ideally, points and lines) would be greatly appreciated.

WebGL can't do this for you, but you can add your own augmentation.
The most obvious way is just to track your own usage: instead of calling gl.drawXXX directly, call a functionThatTracksDrawingCountsXXX wrapper and track the values yourself.
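For example, a minimal sketch of that manual-tracking approach (the names here are illustrative, not from any library):

// Route every draw you issue through a wrapper that accumulates counts.
const drawStats = { vertCount: 0 };
function drawArraysTracked(gl, mode, first, count) {
  drawStats.vertCount += count; // accumulate before delegating
  gl.drawArrays(mode, first, count);
}
// Then call drawArraysTracked(gl, gl.TRIANGLES, 0, 36) instead of
// gl.drawArrays(...), and read/reset drawStats once per frame.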
You can also augment the WebGL context itself. Example:
// copy this part into a file like `augmented-webgl.js`
// and include it in your page
(function() {
  // NOTE: since WebGL constants are um, constant
  // we could statically init this.
  let primMap;

  function addCount(ctx, type, count) {
    const ctxInfo = ctx.info;
    const primInfo = primMap[type];
    ctxInfo.vertCount += count;
    ctxInfo.primCount[primInfo.ndx] += primInfo.fn(count);
  }

  WebGLRenderingContext.prototype.drawArrays = (function(oldFn) {
    return function(type, offset, count) {
      addCount(this, type, count);
      oldFn.call(this, type, offset, count);
    };
  }(WebGLRenderingContext.prototype.drawArrays));

  WebGLRenderingContext.prototype.drawElements = (function(oldFn) {
    return function(type, count, indexType, offset) {
      addCount(this, type, count);
      oldFn.call(this, type, count, indexType, offset);
    };
  }(WebGLRenderingContext.prototype.drawElements));

  HTMLCanvasElement.prototype.getContext = (function(oldFn) {
    return function(type, ...args) {
      const ctx = oldFn.call(this, type, ...args);  // spread the args, don't pass the array
      if (ctx && type === "webgl") {
        if (!primMap) {
          primMap = {};
          primMap[ctx.POINTS]         = { ndx: 0, fn: count => count, };
          primMap[ctx.LINE_LOOP]      = { ndx: 1, fn: count => count, };
          primMap[ctx.LINE_STRIP]     = { ndx: 1, fn: count => count - 1, };
          primMap[ctx.LINES]          = { ndx: 1, fn: count => count / 2 | 0, };
          primMap[ctx.TRIANGLE_STRIP] = { ndx: 2, fn: count => count - 2, };
          primMap[ctx.TRIANGLE_FAN]   = { ndx: 2, fn: count => count - 2, };
          primMap[ctx.TRIANGLES]      = { ndx: 2, fn: count => count / 3 | 0, };
        }
        ctx.info = {
          vertCount: 0,
          primCount: [0, 0, 0],
        };
      }
      return ctx;
    };
  }(HTMLCanvasElement.prototype.getContext));
}());
// ---- cut above ----
const $ = document.querySelector.bind(document);

const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, 1, 0.1, 1000);
const renderer = new THREE.WebGLRenderer({canvas: $('canvas')});

const geometry = new THREE.BoxGeometry(1, 1, 1);
const items = [];
for (let i = 0; i < 50; ++i) {
  let item;
  switch (rand(0, 3) | 0) {
    case 0:
    case 1: {
      const material = new THREE.MeshBasicMaterial({
        color: rand(0xFFFFFF) | 0,
        wireframe: rand(0, 3) > 2,
      });
      item = new THREE.Mesh(geometry, material);
      break;
    }
    case 2: {
      const pmat = new THREE.PointsMaterial({
        color: rand(0xFFFFFF) | 0,
      });
      item = new THREE.Points(geometry, pmat);
      break;
    }
    default:
      throw "oops";
  }
  item.position.x = rand(-10, 10);
  item.position.y = rand(-10, 10);
  item.position.z = rand(0, -50);
  scene.add(item);
  items.push(item);
}
camera.position.z = 5;

const countElem = $('#count');

function render(time) {
  time *= 0.001;
  resize();
  // animate the items
  items.forEach((item, ndx) => {
    item.rotation.x = time * 1.2 + ndx * 0.01;
    item.rotation.y = time * 1.1;
  });
  // turn on/off a random item
  items[rand(items.length) | 0].visible = Math.random() > .5;
  renderer.render(scene, camera);
  // get the current counts
  const info = renderer.context.info;
  countElem.textContent = ` VERTS: ${info.vertCount}
POINTS: ${info.primCount[0]}
LINES: ${info.primCount[1]}
TRIANGLES: ${info.primCount[2]}`;
  // zero out the counts
  renderer.context.info.vertCount = 0;
  renderer.context.info.primCount = [0, 0, 0];
  requestAnimationFrame(render);
}
requestAnimationFrame(render);

function rand(min, max) {
  if (max === undefined) {
    max = min;
    min = 0;
  }
  return Math.random() * (max - min) + min;
}

function resize() {
  const canvas = renderer.domElement;
  const width = canvas.clientWidth;
  const height = canvas.clientHeight;
  if (canvas.width !== width || canvas.height !== height) {
    renderer.setSize(width, height, false);
    camera.aspect = width / height;  // was camera.aspectRatio, which three.js ignores
    camera.updateProjectionMatrix();
  }
}
body { border: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
#ui { position: absolute; left: 1em; top: 1em; background: rgba(0,0,0,.5); color: white; padding: .5em; width: 10em; }
<canvas></canvas>
<div id="ui">
<pre id="count"></pre>
</div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/92/three.min.js"></script>
Of course you might want to add support for drawArraysInstanced etc... and support for WebGL2.
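For example, a sketch of the instanced case (it belongs inside the same IIFE so addCount is in scope, and the getContext wrapper would also need to accept type === "webgl2"; strip/fan primitive totals are only approximated here):

if (typeof WebGL2RenderingContext !== 'undefined') {
  WebGL2RenderingContext.prototype.drawArraysInstanced = (function(oldFn) {
    return function(mode, first, count, instanceCount) {
      // each instance re-submits `count` vertices
      addCount(this, mode, count * instanceCount);
      oldFn.call(this, mode, first, count, instanceCount);
    };
  }(WebGL2RenderingContext.prototype.drawArraysInstanced));
}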

We removed the number of processed vertices from renderer.info.render since the important measurement is the number of rendered primitives (triangles, points, lines). Please read https://github.com/mrdoob/three.js/pull/13404 and the related issues/PRs for more information. If you still want to know how many vertices were processed, you need to count manually. WebGL can't do this for you.
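For example, one way to count manually in three.js (a sketch; it assumes BufferGeometry meshes and ignores instancing and draw ranges):

let verts = 0;
scene.traverse((obj) => {
  // visible meshes only; indexed geometry submits index.count vertices
  if (obj.visible && obj.isMesh && obj.geometry) {
    const g = obj.geometry;
    verts += g.index !== null ? g.index.count : g.attributes.position.count;
  }
});
console.log('vertices submitted this frame:', verts);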

Related

Cannon.js and Three.js – Character Control with Physics

I am trying to build a little world with a Third Person Controller.
For the character, I followed this tutorial by SimonDev and just changed the model from a FBX to GLTF.
Now, I try to implement a Physics World with Cannon.js.
I got it to the point where the collider body is positioned at the starting point of the model, but it stays there after I move the model. I need the collider body to stay attached to the model.
I know that I should move the collider body and update the character model to that position, but I cannot get it to work. This is my current code. Maybe there is a simple solution, but I am new to Cannon.js, so any help is appreciated. Thank you!
class BasicCharacterController {
  constructor(experience, params) {
    this.experience = experience;
    this._Init(params);
  }

  _Init(params) {
    this._params = params;
    this._decceleration = new THREE.Vector3(-0.0005, -0.0001, -5.0);
    this._acceleration = new THREE.Vector3(1, 0.25, 50.0);
    this._velocity = new THREE.Vector3(0, 0, 0);
    this._position = new THREE.Vector3();
    this._animations = {};
    this._input = new BasicCharacterControllerInput();
    this._stateMachine = new CharacterFSM(
        new BasicCharacterControllerProxy(this._animations));
    this._LoadModels();
  }

  _LoadModels() {
    this.physicsCharacterShape = new CANNON.Box(new CANNON.Vec3(0.5, 1, 0.5));
    this.physicsCharacterBody = new CANNON.Body({
      mass: 0,
      shape: this.physicsCharacterShape,
      position: new CANNON.Vec3(0, 0, 0)
    });
    this.experience.physicsWorld.addBody(this.physicsCharacterBody);

    this.gltfLoader = new GLTFLoader();
    this.gltfLoader.setPath('./sources/assets/');
    this.gltfLoader.load('VSLBINA_TPOSE_GLTF.gltf', (gltf) => {
      gltf.scene.traverse(c => {
        c.castShadow = true;
      });
      this._target = gltf.scene;
      this._params.scene.add(this._target);
      this._target.position.copy(this.physicsCharacterBody.position);
      this._target.quaternion.copy(this.physicsCharacterBody.quaternion);

      this._mixer = new THREE.AnimationMixer(this._target);
      this._manager = new THREE.LoadingManager();
      this._manager.onLoad = () => {
        this._stateMachine.SetState('idle');
      };

      const _OnLoad = (animName, anim) => {
        const clip = anim.animations[0];
        const action = this._mixer.clipAction(clip);
        this._animations[animName] = {
          clip: clip,
          action: action,
        };
      };

      const loader = new GLTFLoader(this._manager);
      loader.setPath('./sources/assets/');
      loader.load('VSLBINA_WALKING_GLTF.gltf', (a) => { _OnLoad('walk', a); });
      loader.load('VSLBINA_IDLE_GLTF.gltf', (a) => { _OnLoad('idle', a); });
    });
  }

  get Position() {
    return this._position;
  }

  get Rotation() {
    if (!this._target) {
      return new THREE.Quaternion();
    }
    return this._target.quaternion;
  }

  Update(timeInSeconds) {
    if (!this._stateMachine._currentState) {
      return;
    }
    this._stateMachine.Update(timeInSeconds, this._input);

    const velocity = this._velocity;
    const frameDecceleration = new THREE.Vector3(
        velocity.x * this._decceleration.x,
        velocity.y * this._decceleration.y,
        velocity.z * this._decceleration.z
    );
    frameDecceleration.multiplyScalar(timeInSeconds);
    frameDecceleration.z = Math.sign(frameDecceleration.z) * Math.min(
        Math.abs(frameDecceleration.z), Math.abs(velocity.z));
    velocity.add(frameDecceleration);

    const controlObject = this._target;
    const _Q = new THREE.Quaternion();
    const _A = new THREE.Vector3();
    const _R = controlObject.quaternion.clone();
    const acc = this._acceleration.clone();

    if (this._input._keys.shift) {
      acc.multiplyScalar(2.0);
    }
    if (this._input._keys.forward) {
      velocity.z += acc.z * timeInSeconds;
    }
    if (this._input._keys.backward) {
      velocity.z -= acc.z * timeInSeconds;
    }
    if (this._input._keys.left) {
      _A.set(0, 1, 0);
      _Q.setFromAxisAngle(_A, 4.0 * Math.PI * timeInSeconds * this._acceleration.y);
      _R.multiply(_Q);
    }
    if (this._input._keys.right) {
      _A.set(0, 1, 0);
      _Q.setFromAxisAngle(_A, 4.0 * -Math.PI * timeInSeconds * this._acceleration.y);
      _R.multiply(_Q);
    }
    controlObject.quaternion.copy(_R);

    const oldPosition = new THREE.Vector3();
    oldPosition.copy(controlObject.position);

    const forward = new THREE.Vector3(0, 0, 1);
    forward.applyQuaternion(controlObject.quaternion);
    forward.normalize();

    const sideways = new THREE.Vector3(1, 0, 0);
    sideways.applyQuaternion(controlObject.quaternion);
    sideways.normalize();

    sideways.multiplyScalar(velocity.x * timeInSeconds);
    forward.multiplyScalar(velocity.z * timeInSeconds);

    controlObject.position.add(forward);
    controlObject.position.add(sideways);
    this._position.copy(controlObject.position);

    if (this._mixer) {
      this._mixer.update(timeInSeconds);
    }

    // Physics Collider Body
    // if (this._target) {
    //   this._target.position.copy(this.physicsCharacterBody.position);
    //   this._target.quaternion.copy(this.physicsCharacterBody.quaternion);
    // }
  }
}
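For what it's worth, the usual pattern (a sketch under assumptions, not a verified fix: the body needs a non-zero mass, since mass: 0 makes a Cannon.js body static and immovable, and physicsWorld.step(...) must run every frame) is to move the body from the input and then mirror the body's transform onto the model, which is essentially what the commented-out block at the end of Update() tries to do:

// Hypothetical sync step using the question's names, run once per frame
// after the physics world has been stepped:
SyncModelToBody() {
  if (!this._target) { return; }
  this._target.position.copy(this.physicsCharacterBody.position);
  this._target.quaternion.copy(this.physicsCharacterBody.quaternion);
}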

How do you correctly include three.js and GLTFLoader.js?

I'm rewriting my question because Stack Overflow thought my post was spam (because I included 6000+ lines of code). I'm trying to make a web app that tracks the user's face and puts a 3D object over the face like a "filter". The thing is, I don't want this app to have any external dependencies except 1 (at least for scripts/packages/modules/whatever). Therefore, I copied minified three.js (from https://cdn.jsdelivr.net/npm/three@0.120.1/build/three.min.js) into a <script> tag in the HTML file, as well as the GLTFLoader.js script from the GitHub repository in examples/jsm/loaders/.
I started with Jeeliz's face filter repository, and I'm trying to implement GLTFLoader.js, but when I use
const loader = new GLTFLoader();
it gives me a "GLTFLoader is not defined" error, as well as console messages saying the ES6 module was not imported.
When I use
const loader = new THREE.GLTFLoader();
it says it's not a constructor, so I lean towards the former being the correct way to construct the loader.
I appreciate any help in advance! I mostly code in Python or C++ and I'm still a beginner, but I've tinkered with JavaScript a few times so I thought I could handle this. I posted this question once but stackoverflow crashed after saying the post is spam, so I'm including a link to the html file:
https://www.dropbox.com/s/p86xchlldr7j2vf/index-mine.html?dl=0
Code:
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta http-equiv="content-language" content="en-EN" />
<title>Filter Web App</title>
<!-- INCLUDE FACEFILTER SCRIPT -->
<script src="https://raw.githubusercontent.com/jeeliz/jeelizFaceFilter/master/dist/jeelizFaceFilter.js">
</script>
<!-- INCLUDE THREE.JS -->
<script src="https://cdn.jsdelivr.net/npm/three@0.120.1/build/three.min.js">
</script>
<!-- INCLUDE RESIZER -->
<script src="https://raw.githubusercontent.com/jeeliz/jeelizFaceFilter/master/helpers/JeelizResizer.js">
</script>
<!-- INCLUDE GLTFLOADER.JS -->
<script src="https://raw.githubusercontent.com/mrdoob/three.js/dev/examples/jsm/loaders/GLTFLoader.js">
</script>
<!-- INCLUDE JEELIZ THREE.JS HELPER -->
<script>
/*
Helper for Three.js
*/
const punkThreeHelper = (function(){
// internal settings:
const _settings = {
rotationOffsetX: 0.0, // negative -> look upper. in radians
pivotOffsetYZ: [0.2, 0.6],// YZ of the distance between the center of the cube and the pivot
detectionThreshold: 0.8, // sensibility, between 0 and 1. Less -> more sensitive
detectionHysteresis: 0.02,
//tweakMoveYRotateX: 0,//0.5, // tweak value: move detection window along Y axis when rotate the face around X (look up <-> down)
cameraMinVideoDimFov: 35 // Field of View for the smallest dimension of the video in degrees
};
// private vars:
let _threeRenderer = null,
_threeScene = null,
_threeVideoMesh = null,
_threeVideoTexture = null,
_threeTranslation = null;
let _maxFaces = -1,
_isMultiFaces = false,
_detectCallback = null,
_isVideoTextureReady = false,
_isSeparateThreeCanvas = false,
_faceFilterCv = null,
_videoElement = null,
_isDetected = false,
_scaleW = 1,
_canvasAspectRatio = -1;
const _threeCompositeObjects = [];
let _gl = null,
_glVideoTexture = null,
_glShpCopyCut = null,
_glShpCopyCutVideoMatUniformPointer = null;
let _videoTransformMat2 = null;
// private funcs:
function destroy(){
_isVideoTextureReady = false;
_threeCompositeObjects.splice(0);
if (_threeVideoTexture){
_threeVideoTexture.dispose();
_threeVideoTexture = null;
}
}
function create_threeCompositeObjects(){
for (let i=0; i<_maxFaces; ++i){
// COMPOSITE OBJECT WHICH WILL TRACK A DETECTED FACE
const threeCompositeObject = new THREE.Object3D();
threeCompositeObject.frustumCulled = false;
threeCompositeObject.visible = false;
_threeCompositeObjects.push(threeCompositeObject);
_threeScene.add(threeCompositeObject);
}
}
function create_videoScreen(){
const videoScreenVertexShaderSource = "attribute vec2 position;\n\
uniform mat2 videoTransformMat2;\n\
varying vec2 vUV;\n\
void main(void){\n\
gl_Position = vec4(position, 0., 1.);\n\
vUV = 0.5 + videoTransformMat2 * position;\n\
}";
const videoScreenFragmentShaderSource = "precision lowp float;\n\
uniform sampler2D samplerVideo;\n\
varying vec2 vUV;\n\
void main(void){\n\
gl_FragColor = texture2D(samplerVideo, vUV);\n\
}";
if (_isSeparateThreeCanvas){
const compile_shader = function(source, type, typeString) {
const glShader = _gl.createShader(type);
_gl.shaderSource(glShader, source);
_gl.compileShader(glShader);
if (!_gl.getShaderParameter(glShader, _gl.COMPILE_STATUS)) {
alert("ERROR IN " + typeString + " SHADER: " + _gl.getShaderInfoLog(glShader));
return null;
}
return glShader;
};
const glShaderVertex = compile_shader(videoScreenVertexShaderSource, _gl.VERTEX_SHADER, 'VERTEX');
const glShaderFragment = compile_shader(videoScreenFragmentShaderSource, _gl.FRAGMENT_SHADER, 'FRAGMENT');
_glShpCopyCut = _gl.createProgram();
_gl.attachShader(_glShpCopyCut, glShaderVertex);
_gl.attachShader(_glShpCopyCut, glShaderFragment);
_gl.linkProgram(_glShpCopyCut);
const samplerVideo = _gl.getUniformLocation(_glShpCopyCut, 'samplerVideo');
_glShpCopyCutVideoMatUniformPointer = _gl.getUniformLocation(_glShpCopyCut, 'videoTransformMat2');
return;
}
// init video texture with red:
_threeVideoTexture = new THREE.DataTexture( new Uint8Array([255,0,0]), 1, 1, THREE.RGBFormat);
_threeVideoTexture.needsUpdate = true;
// CREATE THE VIDEO BACKGROUND:
const videoMaterial = new THREE.RawShaderMaterial({
depthWrite: false,
depthTest: false,
vertexShader: videoScreenVertexShaderSource,
fragmentShader: videoScreenFragmentShaderSource,
uniforms:{
samplerVideo: {value: _threeVideoTexture},
videoTransformMat2: {
value: _videoTransformMat2
}
}
});
const videoGeometry = new THREE.BufferGeometry()
const videoScreenCorners = new Float32Array([-1,-1, 1,-1, 1,1, -1,1]);
// handle both new and old THREE.js versions:
const setVideoGeomAttribute = (videoGeometry.setAttribute || videoGeometry.addAttribute).bind(videoGeometry);
setVideoGeomAttribute( 'position', new THREE.BufferAttribute( videoScreenCorners, 2 ) );
videoGeometry.setIndex(new THREE.BufferAttribute(new Uint16Array([0,1,2, 0,2,3]), 1));
_threeVideoMesh = new THREE.Mesh(videoGeometry, videoMaterial);
that.apply_videoTexture(_threeVideoMesh);
_threeVideoMesh.renderOrder = -1000; // render first
_threeVideoMesh.frustumCulled = false;
_threeScene.add(_threeVideoMesh);
} //end create_videoScreen()
function detect(detectState){
_threeCompositeObjects.forEach(function(threeCompositeObject, i){
_isDetected = threeCompositeObject.visible;
const ds = detectState[i];
if (_isDetected && ds.detected < _settings.detectionThreshold-_settings.detectionHysteresis){
// DETECTION LOST
if (_detectCallback) _detectCallback(i, false);
threeCompositeObject.visible = false;
} else if (!_isDetected && ds.detected > _settings.detectionThreshold+_settings.detectionHysteresis){
// FACE DETECTED
if (_detectCallback) _detectCallback(i, true);
threeCompositeObject.visible = true;
}
}); //end loop on all detection slots
}
function update_poses(ds, threeCamera){
// tan( <horizontal FoV> / 2 ):
const halfTanFOVX = Math.tan(threeCamera.aspect * threeCamera.fov * Math.PI/360); //tan(<horizontal FoV>/2), in radians (threeCamera.fov is vertical FoV)
_threeCompositeObjects.forEach(function(threeCompositeObject, i){
if (!threeCompositeObject.visible) return;
const detectState = ds[i];
// tweak Y position depending on rx:
//const tweak = _settings.tweakMoveYRotateX * Math.tan(detectState.rx);
const cz = Math.cos(detectState.rz), sz = Math.sin(detectState.rz);
// relative width of the detection window (1-> whole width of the detection window):
const W = detectState.s * _scaleW;
// distance between the front face of the cube and the camera:
const DFront = 1 / ( 2 * W * halfTanFOVX );
// D is the distance between the center of the unit cube and the camera:
const D = DFront + 0.5;
// coords in 2D of the center of the detection window in the viewport:
const xv = detectState.x * _scaleW;
const yv = detectState.y * _scaleW;
// coords in 3D of the center of the cube (in the view coordinates system):
const z = -D; // minus because view coordinate system Z goes backward
const x = xv * D * halfTanFOVX;
const y = yv * D * halfTanFOVX / _canvasAspectRatio;
// set position before pivot:
threeCompositeObject.position.set(-sz*_settings.pivotOffsetYZ[0], -cz*_settings.pivotOffsetYZ[0], -_settings.pivotOffsetYZ[1]);
// set rotation and apply it to position:
threeCompositeObject.rotation.set(detectState.rx+_settings.rotationOffsetX, detectState.ry, detectState.rz, "ZYX");
threeCompositeObject.position.applyEuler(threeCompositeObject.rotation);
// add translation part:
_threeTranslation.set(x, y+_settings.pivotOffsetYZ[0], z+_settings.pivotOffsetYZ[1]);
threeCompositeObject.position.add(_threeTranslation);
}); //end loop on composite objects
}
//public methods:
const that = {
// launched with the same spec object than callbackReady. set spec.threeCanvasId to the ID of the threeCanvas to be in 2 canvas mode:
init: function(spec, detectCallback){
destroy();
_maxFaces = spec.maxFacesDetected;
_glVideoTexture = spec.videoTexture;
_videoTransformMat2 = spec.videoTransformMat2;
_gl = spec.GL;
_faceFilterCv = spec.canvasElement;
_isMultiFaces = (_maxFaces>1);
_videoElement = spec.videoElement;
// enable 2 canvas mode if necessary:
let threeCanvas = null;
if (spec.threeCanvasId){
_isSeparateThreeCanvas = true;
// adjust the threejs canvas size to the threejs canvas:
threeCanvas = document.getElementById(spec.threeCanvasId);
threeCanvas.setAttribute('width', _faceFilterCv.width);
threeCanvas.setAttribute('height', _faceFilterCv.height);
} else {
threeCanvas = _faceFilterCv;
}
if (typeof(detectCallback) !== 'undefined'){
_detectCallback = detectCallback;
}
// init THREE.JS context:
_threeRenderer = new THREE.WebGLRenderer({
context: (_isSeparateThreeCanvas) ? null : _gl,
canvas: threeCanvas,
alpha: (_isSeparateThreeCanvas || spec.alpha) ? true : false,
preserveDrawingBuffer: true // to make image capture possible
});
_threeScene = new THREE.Scene();
_threeTranslation = new THREE.Vector3();
create_threeCompositeObjects();
create_videoScreen();
// handle device orientation change:
window.addEventListener('orientationchange', function(){
setTimeout(punkfacefilter.resize, 1000);
}, false);
const returnedDict = {
videoMesh: _threeVideoMesh,
renderer: _threeRenderer,
scene: _threeScene
};
if (_isMultiFaces){
returnedDict.faceObjects = _threeCompositeObjects
} else {
returnedDict.faceObject = _threeCompositeObjects[0];
}
return returnedDict;
}, //end that.init()
detect: function(detectState){
const ds = (_isMultiFaces) ? detectState : [detectState];
// update detection states:
detect(ds);
},
get_isDetected: function() {
return _isDetected;
},
render: function(detectState, threeCamera){
const ds = (_isMultiFaces) ? detectState : [detectState];
// update detection states then poses:
detect(ds);
update_poses(ds, threeCamera);
if (_isSeparateThreeCanvas){
// render the video texture on the faceFilter canvas:
_gl.viewport(0, 0, _faceFilterCv.width, _faceFilterCv.height);
_gl.useProgram(_glShpCopyCut);
_gl.uniformMatrix2fv(_glShpCopyCutVideoMatUniformPointer, false, _videoTransformMat2);
_gl.activeTexture(_gl.TEXTURE0);
_gl.bindTexture(_gl.TEXTURE_2D, _glVideoTexture);
_gl.drawElements(_gl.TRIANGLES, 3, _gl.UNSIGNED_SHORT, 0);
} else {
// reinitialize the state of THREE.JS because JEEFACEFILTER have changed stuffs:
// -> can be VERY costly !
_threeRenderer.state.reset();
}
// trigger the render of the THREE.JS SCENE:
_threeRenderer.render(_threeScene, threeCamera);
},
sortFaces: function(bufferGeometry, axis, isInv){ // sort faces long an axis
// Useful when a bufferGeometry has alpha: we should render the last faces first
const axisOffset = {X:0, Y:1, Z:2}[axis.toUpperCase()];
const sortWay = (isInv) ? -1 : 1;
// fill the faces array:
const nFaces = bufferGeometry.index.count/3;
const faces = new Array(nFaces);
for (let i=0; i<nFaces; ++i){
faces[i] = [bufferGeometry.index.array[3*i], bufferGeometry.index.array[3*i+1], bufferGeometry.index.array[3*i+2]];
}
// compute centroids:
const aPos = bufferGeometry.attributes.position.array;
const centroids = faces.map(function(face, faceIndex){
return [
(aPos[3*face[0]]+aPos[3*face[1]]+aPos[3*face[2]])/3, // X
(aPos[3*face[0]+1]+aPos[3*face[1]+1]+aPos[3*face[2]+1])/3, // Y
(aPos[3*face[0]+2]+aPos[3*face[1]+2]+aPos[3*face[2]+2])/3, // Z
face
];
});
// sort centroids:
centroids.sort(function(ca, cb){
return (ca[axisOffset]-cb[axisOffset]) * sortWay;
});
// reorder bufferGeometry faces:
centroids.forEach(function(centroid, centroidIndex){
const face = centroid[3];
bufferGeometry.index.array[3*centroidIndex] = face[0];
bufferGeometry.index.array[3*centroidIndex+1] = face[1];
bufferGeometry.index.array[3*centroidIndex+2] = face[2];
});
}, //end sortFaces
get_threeVideoTexture: function(){
return _threeVideoTexture;
},
apply_videoTexture: function(threeMesh){
if (_isVideoTextureReady){
return;
}
threeMesh.onAfterRender = function(){
// Replace _threeVideoTexture.__webglTexture by the real video texture:
try {
_threeRenderer.properties.update(_threeVideoTexture, '__webglTexture', _glVideoTexture);
_threeVideoTexture.magFilter = THREE.LinearFilter;
_threeVideoTexture.minFilter = THREE.LinearFilter;
_isVideoTextureReady = true;
} catch(e){
console.log('WARNING in punkThreeHelper: the glVideoTexture is not fully initialized');
}
delete(threeMesh.onAfterRender);
};
},
// create an occluder, IE a transparent object which writes on the depth buffer:
create_threejsOccluder: function(occluderURL, callback){
const occluderMesh = new THREE.Mesh();
new THREE.BufferGeometryLoader().load(occluderURL, function(occluderGeometry){
const mat = new THREE.ShaderMaterial({
vertexShader: THREE.ShaderLib.basic.vertexShader,
fragmentShader: "precision lowp float;\n void main(void){\n gl_FragColor=vec4(1.,0.,0.,1.);\n }",
uniforms: THREE.ShaderLib.basic.uniforms,
colorWrite: false
});
occluderMesh.renderOrder = -1; //render first
occluderMesh.material = mat;
occluderMesh.geometry = occluderGeometry;
if (typeof(callback)!=='undefined' && callback) callback(occluderMesh);
});
return occluderMesh;
},
set_pivotOffsetYZ: function(pivotOffset) {
_settings.pivotOffsetYZ = pivotOffset;
},
create_camera: function(zNear, zFar){
const threeCamera = new THREE.PerspectiveCamera(1, 1, (zNear) ? zNear : 0.1, (zFar) ? zFar : 100);
that.update_camera(threeCamera);
return threeCamera;
},
update_camera: function(threeCamera){
// compute aspectRatio:
const canvasElement = _threeRenderer.domElement;
const cvw = canvasElement.width;
const cvh = canvasElement.height;
_canvasAspectRatio = cvw / cvh;
// compute vertical field of view:
const vw = _videoElement.videoWidth;
const vh = _videoElement.videoHeight;
const videoAspectRatio = vw / vh;
const fovFactor = (vh > vw) ? (1.0 / videoAspectRatio) : 1.0;
const fov = _settings.cameraMinVideoDimFov * fovFactor;
console.log('INFO in punkThreeHelper - update_camera(): Estimated vertical video FoV is', fov);
// compute X and Y offsets in pixels:
let scale = 1.0;
if (_canvasAspectRatio > videoAspectRatio) {
// the canvas is more in landscape format than the video, so we crop top and bottom margins:
scale = cvw / vw;
} else {
// the canvas is more in portrait format than the video, so we crop right and left margins:
scale = cvh / vh;
}
const cvws = vw * scale, cvhs = vh * scale;
const offsetX = (cvws - cvw) / 2.0;
const offsetY = (cvhs - cvh) / 2.0;
_scaleW = cvw / cvws;
// apply parameters:
threeCamera.aspect = _canvasAspectRatio;
threeCamera.fov = fov;
console.log('INFO in punkThreeHelper.update_camera(): camera vertical estimated FoV is', fov, 'deg');
threeCamera.setViewOffset(cvws, cvhs, offsetX, offsetY, cvw, cvh);
threeCamera.updateProjectionMatrix();
// update drawing area:
_threeRenderer.setSize(cvw, cvh, false);
_threeRenderer.setViewport(0, 0, cvw, cvh);
}, //end update_camera()
resize: function(w, h, threeCamera){
_threeRenderer.domElement.width = w;
_threeRenderer.domElement.height = h;
punkfacefilter.resize();
if (threeCamera){
that.update_camera(threeCamera);
}
}
}
return that;
})();
// Export ES6 module:
try {
module.exports = punkThreeHelper;
} catch(e){
console.log('punkThreeHelper ES6 Module not exported');
window.punkThreeHelper = punkThreeHelper;
}
</script>
<!-- INCLUDE DEMO SCRIPT -->
<script>
let THREECAMERA = null;
// callback: launched if a face is detected or lost.
function detect_callback(faceIndex, isDetected) {
if (isDetected) {
console.log('INFO in detect_callback(): DETECTED');
} else {
console.log('INFO in detect_callback(): LOST');
}
}
// build the 3D. called once when punk Face Filter is OK
function init_threeScene(spec) {
const threeStuffs = punkThreeHelper.init(spec, detect_callback);
// CREATE A CUBE
const loader = new GLTFLoader();
loader.load( '/your-glb-file2.glb', function ( gltf ) {
threeStuffs.faceObject.add( gltf.scene );
} );
//CREATE THE CAMERA
THREECAMERA = punkThreeHelper.create_camera();
}
// entry point:
function main(){
punkResizer.size_canvas({
canvasId: 'jeeFaceFilterCanvas',
callback: function(isError, bestVideoSettings){
init_faceFilter(bestVideoSettings);
}
})
}
function init_faceFilter(videoSettings){
punkfacefilter.init({
followZRot: true,
canvasId: 'jeeFaceFilterCanvas',
NNCPath: '/', // root of NN_DEFAULT.json file
maxFacesDetected: 1,
callbackReady: function(errCode, spec){
if (errCode){
console.log('AN ERROR HAPPENS. ERR =', errCode);
return;
}
console.log('INFO: punkfacefilter IS READY');
init_threeScene(spec);
},
// called at each render iteration (drawing loop):
callbackTrack: function(detectState){
punkThreeHelper.render(detectState, THREECAMERA);
}
}); //end punkfacefilter.init call
}
window.addEventListener('load', main);
</script>
<style>
a {color: #eee; text-decoration: none}
a:hover {color: blue;}
body {overflow: auto; overflow-y: auto;
background-color: white;
background-attachment: fixed;
background-position: center;
background-size: contain;
margin: 0px;}
#jeeFaceFilterCanvas {
z-index: 10;
position: absolute;
max-height: 100%;
max-width: 100%;
left: 50%;
top: 50%;
width: 100vmin;
transform: translate(-50%, -50%) rotateY(180deg);
}
#threeCanvas{
z-index: 11;
position: absolute;
max-height: 100%;
max-width: 100%;
left: 50%;
top: 50%;
width: 100vmin;
transform: translate(-50%, -50%) rotateY(180deg);
}
@media (max-width: 787px) {
#jeeFaceFilterCanvas {
right: 0px;
top: 60px;
transform: rotateY(180deg);
}
}
</style>
</head>
<body>
<canvas width="600" height="600" id='jeeFaceFilterCanvas'></canvas>
</body>
</html>
/your-glb-file2.glb is a valid 3D file in the same directory as this page. I made this code from the HTML file I linked, but loaded the scripts from URLs via src, unlike the real HTML file I linked.
The
// CREATE A CUBE
const loader = new GLTFLoader();
loader.load( '/your-glb-file2.glb', function ( gltf ) {
  threeStuffs.faceObject.add( gltf.scene );
} );
near the end is the problem. The app correctly puts a cube over my face like I want it to when it’s
// CREATE A CUBE
const cubeGeometry = new THREE.BoxGeometry(1,1,1);
const cubeMaterial = new THREE.MeshNormalMaterial();
const threeCube = new THREE.Mesh(cubeGeometry, cubeMaterial);
threeCube.frustumCulled = false;
threeStuffs.faceObject.add(threeCube);
I tried constructing the loader both as a "sub"-function of three.js (THREE.GLTFLoader) and on its own, but it isn't working either way, even though putting a cube over my face worked when the cube was built with three.js' native functions.
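For reference, GLTFLoader.js in examples/jsm/ is an ES6 module, so it cannot be pulled in with a plain <script> tag; it has to be imported from a module script. Note also that raw.githubusercontent.com serves files as text/plain with a nosniff header, so browsers refuse to execute them as scripts; a CDN like jsDelivr works. A sketch (assuming both files come from the same r120 release so the loader's relative imports resolve):

<script type="module">
  import * as THREE from 'https://cdn.jsdelivr.net/npm/three@0.120.1/build/three.module.js';
  import { GLTFLoader } from 'https://cdn.jsdelivr.net/npm/three@0.120.1/examples/jsm/loaders/GLTFLoader.js';

  const loader = new GLTFLoader();
  loader.load('/your-glb-file2.glb', (gltf) => {
    threeStuffs.faceObject.add(gltf.scene); // as in the working cube version
  });
</script>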

Three JS device motion glitch

I'm trying to get into WebXR programming. I'm trying to make a simple wrapper which allows for a VR headset and also a Google Cardboard style viewer for smartphones. I got the VR headset working well so far, but I have two issues when using the smartphone in landscape mode. Portrait mode works fine.
1. The camera breaks when going above the horizon. (fixed)
2. Tilting the phone doesn't tilt; it pans left/right halfway.
Codesandbox.io code: https://codesandbox.io/s/webxr-7vw5q6
Codesandbox.io app: https://7vw5q6.csb.app/
Update
I managed to fix the jumping and flipped image in landscape mode by adding some code.
if (rotType == "YZX") {
  if (orientation_g >= 0) {
    screenOrientation = -90;
  } else {
    screenOrientation = 90;
    orientation_a = orientation_a + 180;
  }
}
However, I still have the issue when rolling the device left or right.
// Variables
// ---------
let camera, renderer, scene, loop;
let container;
let controls;
let controller1, controller2;
let teleportmarker, raycaster, INTERSECTION;
let baseReferenceSpace;
let tempMatrix = new THREE.Matrix4();
let effect;
let action;
// fakeVR
let fakeVR = false;
let orientation_a, orientation_b, orientation_g;
/* landscape fix test */
let alphaOffset = 0;
let screenOrientation = 0;
/* ====================================================================================================
* Controller
* ==================================================================================================== */
class Controller {
constructor(i) {
this.controllers = this.createController(i);
this.controllerGrip = this.createControllerGrip(i);
if(i == 0) { this.controllers.name = "Right"; }
if(i == 1) { this.controllers.name = "Left"; }
this.axes = new THREE.Vector2();
this.viewDirection = camera.getWorldDirection( new THREE.Vector3() );
const scope = this;
raycaster = new THREE.Raycaster();
this.initInputListenerXR();
this.group = new THREE.Group();
this.teleportmarker = new THREE.Mesh(
new THREE.RingGeometry(0.2, 0.25, 32).rotateX(-Math.PI / 2),
new THREE.MeshBasicMaterial({color: 0xFF00FF})
);
scene.add(this.teleportmarker);
}
createController(i) {
const controllers = renderer.xr.getController(i);
if (0) {
this.group.add(controllers);
controllers.visible = true;
}
return controllers;
}
createControllerGrip(i) {
const controllerModelFactory = new THREE.XRControllerModelFactory();
const controllerGrip = renderer.xr.getControllerGrip(i);
controllerGrip.add(controllerModelFactory.createControllerModel(controllerGrip));
return controllerGrip;
}
initInputListenerXR() {
const listenerFor = name => event => {
const cb = this._eventListeners[name];
if (cb) {
const uuid = event.target.uuid;
const cont = this.controllers;
if (cont && cont.uuid === uuid) cb(idx);
}
};
this._addSelectListener('selectstart', this.onSelectStart);
this._addSelectListener('selectend', this.onSelectEnd);
this._addSelectListener('connected', function(event) {
this.controllers.add(this.buildController(event.data));
this.controllers.children[0].visible = false;
});
this._addSelectListener( 'disconnected', function () {
this.controllers.remove(this.controllers.children[0]);
});
}
_addSelectListener(eventName, listener) {
this.controllers.addEventListener(eventName, listener.bind(this));
}
onSelectStart() {
console.log(this.controllers.name + ' was pressed.');
this.controllers.userData.isSelecting = true;
}
onSelectEnd() {
console.log(this.controllers.name + ' was released.');
this.controllers.userData.isSelecting = false;
if ( INTERSECTION ) {
const offsetPosition = { x: - INTERSECTION.x, y: - INTERSECTION.y, z: - INTERSECTION.z, w: 1 };
const offsetRotation = new THREE.Quaternion();
const transform = new XRRigidTransform( offsetPosition, offsetRotation );
const teleportSpaceOffset = baseReferenceSpace.getOffsetReferenceSpace( transform );
renderer.xr.setReferenceSpace( teleportSpaceOffset );
}
}
update() {
INTERSECTION = undefined;
if ( controller1.controllers.userData.isSelecting === true ) {
tempMatrix.identity().extractRotation( controller1.controllers.matrixWorld );
raycaster.ray.origin.setFromMatrixPosition( controller1.controllers.matrixWorld );
raycaster.ray.direction.set( 0, 0, - 1 ).applyMatrix4( tempMatrix );
const intersects = raycaster.intersectObjects([floor]);
if (intersects.length > 0) {
INTERSECTION = intersects[0].point;
}
}
else if ( controller2.controllers.userData.isSelecting === true ) {
tempMatrix.identity().extractRotation( controller2.controllers.matrixWorld );
raycaster.ray.origin.setFromMatrixPosition( controller2.controllers.matrixWorld );
raycaster.ray.direction.set( 0, 0, - 1 ).applyMatrix4( tempMatrix );
const intersects = raycaster.intersectObjects([floor]);
if (intersects.length > 0) {
INTERSECTION = intersects[0].point;
}
}
if (INTERSECTION) this.teleportmarker.position.copy(INTERSECTION);
this.teleportmarker.visible = INTERSECTION !== undefined;
}
// updateArc() {
// }
buildController(data) {
switch(data.targetRayMode) {
// case 'screen':
// return;
case 'tracked-pointer':
const geometry = new THREE.BufferGeometry();
geometry.setAttribute('position', new THREE.Float32BufferAttribute([0,0,0,0,0,-1],3));
geometry.setAttribute('color', new THREE.Float32BufferAttribute([0.5,0.5,0.5,0,0,0],3));
const material = new THREE.LineBasicMaterial({vertexColors:true,blending:THREE.AdditiveBlending});
return new THREE.Line(geometry,material);
case 'gaze':
const gaze_geometry = new THREE.RingGeometry(0.02,0.04,32).translate(0,0,-1);
const gaze_material = new THREE.MeshBasicMaterial({opacity:0.5,transparent:true});
return new THREE.Mesh(gaze_geometry,gaze_material);
}
}
setAction(button, functionName) {
}
action(button, functionName) {
}
}
/* ====================================================================================================
* Resizer
* ==================================================================================================== */
class Resizer {
constructor(container, camera, renderer) {
this.setSize(container, camera, renderer);
window.addEventListener('resize', () => {
this.setSize(container, camera, renderer);
this.onResize();
});
}
onResize() {
}
setSize(container, camera, renderer) {
camera.aspect = container.clientWidth / container.clientHeight;
camera.updateProjectionMatrix();
renderer.setSize(container.clientWidth, container.clientHeight);
renderer.setPixelRatio(window.devicePixelRatio);
}
}
/* ====================================================================================================
* Camera
* ==================================================================================================== */
class Camera extends THREE.PerspectiveCamera {
constructor() {
super();
this.onCreate();
}
onCreate() {
new THREE.PerspectiveCamera(35, 1, 0.1, 100);
}
}
/* ====================================================================================================
* Model
* ==================================================================================================== */
class Model extends THREE.Group {
constructor(data) {
super();
this.modelUrl = data;
this.onCreate();
}
onCreate() {
const dracoLoader = new THREE.DRACOLoader();
dracoLoader.setDecoderPath( 'vendor/three/examples/js/libs/draco/' );
dracoLoader.setDecoderConfig({ type: 'js' });
new THREE.GLTFLoader().
setDRACOLoader( dracoLoader ).
load(this.modelUrl,
gltf => {
this.updateTransform();
this.add(gltf.scene);
console.log(this);
});
}
updateMaterials(model) {
model.traverse(child => {
child.material = new THREE.MeshNormalMaterial();
});
}
updateTransform() {
}
dispose() {
}
rotate(x, y, z) {
this.rotation.x = THREE.MathUtils.degToRad(x);
this.rotation.y = THREE.MathUtils.degToRad(y);
this.rotation.z = THREE.MathUtils.degToRad(z);
}
scale(x, y, z) {
this.scale.set(x, y, z);
}
}
/* ====================================================================================================
* Loop
* ==================================================================================================== */
const clock = new THREE.Clock();
class Loop {
constructor(camera, scene, renderer) {
this.updatables = [];
}
start() {
renderer.setAnimationLoop(() => {
this.tick();
if(fakeVR) {
effect.render( scene, camera );
} else {
renderer.render(scene, camera);
}
});
}
stop() {
renderer.setAnimationLoop(null);
}
tick() {
const delta = clock.getDelta();
controller1.update();
controller2.update();
}
}
/* ====================================================================================================
* Scene
* ==================================================================================================== */
class Scene extends THREE.Scene {
constructor() {
super();
this.onCreate();
}
onCreate() {
new THREE.Scene();
this.background = new THREE.Color('skyblue');
}
createLights() {
const ambientLight = new THREE.HemisphereLight(
'white',
'darkslategrey',
5
);
const mainLight = new THREE.DirectionalLight('white', 4);
mainLight.position.set(100, 100, 100);
return { ambientLight, mainLight };
}
}
/* ====================================================================================================
* Application
* ==================================================================================================== */
class App {
constructor(i) {
// Setup <body> CSS style
document.getElementsByTagName("body")[0].style.cssText = 'width: 100vw; height: 100vh; margin: 0; padding: 0; overflow: hidden;';
// Create VR scene <div>
const VRdiv = document.createElement('div');
VRdiv.id = "VRScene";
VRdiv.style.cssText = 'position: absolute; width: 100vw; height: 100vh; display: block;';
document.body.insertAdjacentElement('afterbegin', VRdiv);
// Controls
controls = new Controls();
// Setup Camera
camera = new Camera();
camera.position.set(0, 0, 0);
camera.up.set(0, 1, 0);
renderer = this.createRenderer();
scene = new Scene();
loop = new Loop(camera, scene, renderer);
container = document.querySelector('#VRScene');
container.append(renderer.domElement);
const { ambientLight, mainLight } = scene.createLights();
loop.updatables.push(controls);
scene.add(ambientLight, mainLight);
const resizer = new Resizer(container, camera, renderer);
this.init(i);
this.start();
}
init(i) {
this.setupXR(i);
}
setupXR(i) {
renderer.xr.addEventListener("sessionstart", () => (baseReferenceSpace = renderer.xr.getReferenceSpace()));
document.write(`
<button id='VRIcon' class='toggleVR' style=" position: fixed; bottom: 10px; left: 10px; outline: none; border:
none; background: none; width: 60px; z-index: 10000;" onclick='` + i
+ `.toggleVR()' title='Toggle VR Mode for Mobile Devices Only'>
<svg style="width: 100%; fill: white; stroke: rgba(0,0,0,0.25);" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" x="0px"
y="0px" viewBox="0 0 62.7 52.375" enable-background="new 0 0 62.7
41.9" xml:space="preserve"><path d="M53.4,5.5h-44c-2.1,0-3.7,1.7-3.7,3.7v22.6c0,2.1,1.7,3.7,3.7,3.7h13.4c1.1,0,2.1-0.6,2.5-1.6l3-7.5c1.2-2.6,4.9-2.5,6,0.1
l2.6,7.3c0.4,1,1.4,1.7,2.5,1.7h13.9c2.1,0,3.7-1.7,3.7-3.7V9.3C57.2,7.2,55.5,5.5,53.4,5.5z
M20.4,27c-3.2,0-5.7-2.6-5.7-5.7
s2.6-5.7,5.7-5.7s5.7,2.6,5.7,5.7S23.6,27,20.4,27z
M42.4,27c-3.2,0-5.7-2.6-5.7-5.7s2.6-5.7,5.7-5.7s5.7,2.6,5.7,5.7
S45.6,27,42.4,27z"/></svg>
</button>
<svg id="VROverlay" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="none
meet" width="100vw" height="100vh" viewBox="0, 0, 2000, 1000"
style="position: absolute; top: 0; left: 0; bottom: 0; right: 0;
z-index: 9999; display: none;"><g id="svgg"><path id="path0" d="M 0 0
L 0 1000 L 1000000 1000 L 1000000 0 L 0 0 z M 500.04492 15 C
636.69612 15.006191 768.82704 43.380704 892.76562 99.34375 C 896.20268 100.89576 898.95249 103.64562 900.50391 107.08398 C 1013.1637 356.78574 1013.1657 643.21219 900.50781 892.91602 C 898.9564 896.35438 896.20466 899.10424 892.76758 900.65625 C 768.82901 956.61724 636.69909 984.9898 499.95508 985 C 363.30182 984.99379 231.171 956.61724 107.23242 900.65625 C 103.79536 899.10424 101.04557 896.35438 99.494141 892.91602 C -13.163603 643.21219 -13.163603 356.78574 99.494141 107.08398 C 101.04557 103.64562 103.79536 100.89576 107.23242 99.34375 C 231.171 43.380704 363.3009 15.0062 500.04492 15 z M 1500.0449 15 C 1636.6961 15.006191 1768.827 43.380704 1892.7656 99.34375 C 1896.2026 100.89576 1898.9525 103.64562 1900.5039 107.08398 L 1900.5078 107.08398 C 2013.1656 356.78574 2013.1656 643.21219 1900.5078 892.91602 C 1898.9564 896.35438 1896.2047 899.10424 1892.7676 900.65625 C 1768.8291 956.61724 1636.6991 984.9898 1499.9551 985 C 1363.3019 984.99379 1231.1709 956.61724 1107.2324 900.65625 C 1103.7953 899.10424 1101.0455 896.35438 1099.4941 892.91602 C 986.8364 643.21219 986.8364 356.78574 1099.4941 107.08398 C 1101.0455 103.64562 1103.7953 100.89576 1107.2324 99.34375 C 1231.1709 43.380704 1363.3009 15.0062 1500.0449 15 z " stroke="none" fill="#000000" fill-rule="evenodd"></path></g></svg>
`);
if ('xr' in navigator) {
navigator.xr.isSessionSupported('immersive-vr').then(function(supported)
{
if(supported) {
renderer.xr.enabled = true;
new THREE.VRButton(renderer);
document.body.appendChild(THREE.VRButton.createButton(renderer));
document.getElementById('VRButton').style.display = 'block';
document.getElementById('VRIcon').style.display = 'block';
}
});
}
controller1 = new Controller(0);
controller2 = new Controller(1);
scene.add(controller1.controllers);
scene.add(controller1.controllerGrip);
scene.add(controller2.controllers);
scene.add(controller2.controllerGrip);
controls = new Controls();
action = new Action();
// loop.tick();
}
start() {
loop.start();
}
stop() {
loop.stop();
}
createRenderer() {
const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setPixelRatio( window.devicePixelRatio );
renderer.physicallyCorrectLights = true;
effect = new THREE.StereoEffect( renderer );
effect.setSize( window.innerWidth, window.innerHeight );
return renderer;
}
toggleVR() {
if(DeviceMotionEvent && typeof DeviceMotionEvent.requestPermission === "function") {
DeviceMotionEvent.requestPermission();
}
if (fakeVR) {
fakeVR = false;
document.getElementById('VROverlay').style.display = 'none';
window.removeEventListener("deviceorientation", this.handleOrientation);
} else {
fakeVR = true;
window.addEventListener("deviceorientation", this.handleOrientation);
document.getElementById('VROverlay').style.display = 'block';
}
loop.stop();
loop.start();
}
handleMotion(event) {
}
handleOrientation(event) {
if (window.screen.orientation) {
screenOrientation = window.screen.orientation.angle;
} else if (typeof window.orientation === "number") {
screenOrientation = window.orientation;
} else if (window.screen.mozOrientation) {
screenOrientation = {
"portrait-primary": 0,
"portrait-secondary": 180,
"landscape-primary": 90,
"landscape-secondary": 270,
}[window.screen.mozOrientation];
}
var eyem = new THREE.Quaternion().setFromEuler(new THREE.Euler(-Math.PI / 2, 0, 0));
var d2r = Math.PI / 180;
orientation_a = event.alpha;
orientation_b = event.beta;
orientation_g = event.gamma;
var rotType = (screenOrientation === 0 || screenOrientation === 180) ? "YXZ" : "YZX";
if(rotType == "YZX")
{
if(orientation_g >= 0) {
screenOrientation = -90;
} else {
screenOrientation = 90;
orientation_a = orientation_a + 180;
}
}
var rotm = new THREE.Quaternion().setFromEuler(
new THREE.Euler(orientation_b * d2r, orientation_a * d2r, -orientation_g * d2r, rotType)
);
var devm = new THREE.Quaternion().setFromEuler(
new THREE.Euler(0, -screenOrientation * d2r, 0)
);
rotm.multiply(devm).multiply(eyem); //rot = (rot x dev) x eye
camera.quaternion.copy(rotm);
document.getElementById("Orientation_a1").innerHTML = orientation_a.toFixed(3);
document.getElementById("Orientation_b1").innerHTML = orientation_b.toFixed(3);
document.getElementById("Orientation_g1").innerHTML = orientation_g.toFixed(3);
document.getElementById("Orientation_o1").innerHTML = screenOrientation;
document.getElementById("Orientation_a2").innerHTML = orientation_a.toFixed(3);
document.getElementById("Orientation_b2").innerHTML = orientation_b.toFixed(3);
document.getElementById("Orientation_g2").innerHTML = orientation_g.toFixed(3);
document.getElementById("Orientation_o2").innerHTML = screenOrientation;
}
}
ul {
  padding-inline-start: 15px;
}
li {
  list-style-type: none;
  overflow: hidden;
}
<script src="https://7vw5q6.csb.app/webxr.three.js"></script>
<div style="display: block; position: fixed; top: 10%; left: 10%; right: 60%; background: rgba(255,255,255,.65);">
<ul>
<li>X-axis (β): <span id="Orientation_b1">0</span><span>°</span></li>
<li>Y-axis (γ): <span id="Orientation_g1">0</span><span>°</span></li>
<li>Z-axis (α): <span id="Orientation_a1">0</span><span>°</span></li>
<li>Orientation: <span id="Orientation_o1">0</span><span>°</span></li>
</ul>
</div>
<div style="display: block; position: fixed; top: 10%; left: 60%; right: 10%; background: rgba(255,255,255,.65);">
<ul>
<li>X-axis (β): <span id="Orientation_b2">0</span><span>°</span></li>
<li>Y-axis (γ): <span id="Orientation_g2">0</span><span>°</span></li>
<li>Z-axis (α): <span id="Orientation_a2">0</span><span>°</span></li>
<li>Orientation: <span id="Orientation_o2">0</span><span>°</span></li>
</ul>
</div>
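For comparison, the DeviceOrientationControls example that used to ship with three.js composes the camera quaternion with the screen-orientation correction applied about the screen's Z axis rather than Y; a sketch adapted from it (not a verified fix for the rolling issue):

const zee = new THREE.Vector3(0, 0, 1);
const euler = new THREE.Euler();
const q0 = new THREE.Quaternion();
// -PI/2 around X: look out the back of the device, not the top
const q1 = new THREE.Quaternion(-Math.sqrt(0.5), 0, 0, Math.sqrt(0.5));

function setObjectQuaternion(quaternion, alpha, beta, gamma, orient) {
  euler.set(beta, alpha, -gamma, 'YXZ'); // device frame mapped to 'YXZ'
  quaternion.setFromEuler(euler);
  quaternion.multiply(q1);
  quaternion.multiply(q0.setFromAxisAngle(zee, -orient)); // screen rotation about Z
}

// usage, with angles converted to radians:
// setObjectQuaternion(camera.quaternion, alpha * d2r, beta * d2r, gamma * d2r, screenOrientation * d2r);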

Bubbles Background in AS3

I found 3 videos from SnorklTV.
https://www.youtube.com/watch?v=ZUM0i0DLKk0
https://www.youtube.com/watch?v=_44H68-QciU
https://www.youtube.com/watch?v=Z9NUkmQDB1k
There is a problem: GreenSock for AS3 still works, but it is no longer officially supported.
Error 1172: Definition com.greensock could not be found.
Code:
var bubbleMax: Number = 50;
var tl: TimelineMax = new TimelineMax();

function createBubble() {
  var Bubble: Bubble = new Bubble();
  Bubble.y = 380;
  Bubble.x = randomRange(25, 610);
  Bubble.alpha = 1;
  addChild(Bubble);

  var nestedTL: TimelineMax = new TimelineMax();
  var speed: Number = randomRange(1, 3);
  var wiggle: Number = randomRange(25, 50);
  wiggle = Math.random() > .5 ? wiggle : -wiggle;

  nestedTL.insert(TweenMax.to(Bubble, speed, {
    y: -40,
    ease: Quad.easeIn
  }));
  nestedTL.insert(TweenMax.to(Bubble, .5, {
    scaleX: speed,
    scaleY: speed,
    alpha: randomRange(.5, 1)
  }));
  nestedTL.insert(TweenMax.to(Bubble, speed * .25, {
    x: String(wiggle),
    repeat: randomRange(1, 4),
    yoyo: true
  }));
  tl.append(nestedTL, speed * -.89);
}

function randomRange(min: Number, max: Number): Number {
  return min + (Math.random() * (max - min));
}

function init() {
  for (var count: Number = 0; count < bubbleMax; count++) {
    createBubble();
  }
}
init();
Any ideas for how to do this without importing GreenSock?
import com.greensock.*;
import com.greensock.easing.*;

var bubbleMax: Number = 200;
var tl: TimelineMax = new TimelineMax();

function createBubble() {
  var Bubble: bubble = new bubble();
  Bubble.y = 430;
  Bubble.x = randomRange(25, 610);
  Bubble.alpha = 0;
  addChild(Bubble);
  Bubble.visible = true;

  var nestedTL: TimelineMax = new TimelineMax();
  var speed: Number = randomRange(1, 3);
  var wiggle: Number = randomRange(25, 50);
  wiggle = Math.random() > .5 ? wiggle : -wiggle;

  nestedTL.insert(TweenMax.to(Bubble, speed, {
    y: -40,
    ease: Quad.easeIn
  }));
  nestedTL.insert(TweenMax.to(Bubble, .5, {
    scaleX: speed,
    scaleY: speed,
    alpha: randomRange(.5, 1)
  }));
  nestedTL.insert(TweenMax.to(Bubble, speed * .25, {
    x: String(wiggle),
    repeat: randomRange(1, 4),
    yoyo: true
  }));
  tl.append(nestedTL, speed * -.89);
}

function randomRange(min: Number, max: Number): Number {
  return min + (Math.random() * (max - min));
}

function init() {
  for (var count: Number = 0; count < bubbleMax; count++) {
    createBubble();
  }
}
init();

Three JS - Uncaught TypeError: Cannot read property 'x' of undefined

I'm trying to create a morphing object based on a number of particles. I have 4 objects: 2 normal three.js shapes (a cube and a sphere) and 2 OBJ objects.
When I hover over the name of an object, the particles morph into that object.
The problem is that when I hover over the name of an OBJ object, the console reports the following error at the last line, where I read newParticles.vertices[i].x, etc.:
Uncaught TypeError: Cannot read property 'x' of undefined
Code:
// Particle Vars
var particleCount = numberOfParticles;
let spherePoints,
    cubePoints,
    rocketPoints,
    spacemanPoints;
var particles = new Geometry(),
    sphereParticles = new Geometry(),
    cubeParticles = new Geometry(),
    rocketParticles = new Geometry(),
    spacemanParticles = new Geometry();
var pMaterial = new PointsMaterial({
  color: particleColor,
  size: particleSize,
  map: new TextureLoader().load(particleImage),
  blending: AdditiveBlending,
  transparent: true
});

// Objects
var geometry = new SphereGeometry(5, 30, 30);
spherePoints = GeometryUtils.randomPointsInGeometry(geometry, particleCount)
var geometry = new BoxGeometry(9, 9, 9);
cubePoints = GeometryUtils.randomPointsInGeometry(geometry, particleCount)

// Custom (OBJ) Objects
const codepenAssetUrl = 'https://s3-us-west-2.amazonaws.com/s.cdpn.io/605067/';
var objLoader = new OBJLoader();
objLoader.setPath('./upload/');
objLoader.load('Nymph.obj', function (object) {
  object.traverse(function (child) {
    if (child instanceof Mesh) {
      let scale = 0.2; // was 2.1 for the rocket
      let area = new Box3();
      area.setFromObject(child);
      let yOffset = (area.max.y * scale) / 2;
      child.geometry.scale(scale, scale, scale);
      rocketPoints = GeometryUtils.randomPointsInBufferGeometry(child.geometry, particleCount);
      createVertices(rocketParticles, rocketPoints, yOffset, 2);
    }
  });
});
var objLoader = new OBJLoader();
objLoader.setPath(codepenAssetUrl);
objLoader.load('Astronaut.obj', function (object) {
  object.traverse(function (child) {
    if (child instanceof Mesh) {
      let scale = 4.6;
      let area = new Box3();
      area.setFromObject(child);
      let yOffset = (area.max.y * scale) / 2;
      child.geometry.scale(scale, scale, scale);
      spacemanPoints = GeometryUtils.randomPointsInBufferGeometry(child.geometry, particleCount);
      createVertices(spacemanParticles, spacemanPoints, yOffset, 3);
    }
  });
});

// Particles
for (var p = 0; p < particleCount; p++) {
  var vertex = new Vector3();
  vertex.x = 0;
  vertex.y = 0;
  vertex.z = 0;
  particles.vertices.push(vertex);
}
createVertices(sphereParticles, spherePoints, null, null)
createVertices(cubeParticles, cubePoints, null, 1)

function createVertices(emptyArray, points, yOffset = 0, trigger = null) {
  for (var p = 0; p < particleCount; p++) {
    var vertex = new Vector3();
    vertex.x = points[p]['x'];
    vertex.y = points[p]['y'] - yOffset;
    vertex.z = points[p]['z'];
    emptyArray.vertices.push(vertex);
  }
  if (trigger !== null) {
    triggers[trigger].setAttribute('data-disabled', false)
  }
}

var particleSystem = new Points(
  particles,
  pMaterial
);
particleSystem.sortParticles = true;

// Add the particles to the scene
scene.add(particleSystem);

// Animate
const normalSpeed = (defaultAnimationSpeed / 100),
      fullSpeed = (morphAnimationSpeed / 100)

let animationVars = {
  speed: normalSpeed
}

function animate() {
  particleSystem.rotation.y += animationVars.speed;
  particles.verticesNeedUpdate = true;
  window.requestAnimationFrame(animate);
  renderer.render(scene, camera);
}
animate();
setTimeout(toSphere, 500);

function toSphere() {
  handleTriggers(0);
  morphTo(sphereParticles);
}
function toCube() {
  handleTriggers(1);
  morphTo(cubeParticles);
}
function toRocket() {
  handleTriggers(2);
  morphTo(rocketParticles);
}
function toSpaceman() {
  handleTriggers(3);
  morphTo(spacemanParticles);
}

function morphTo(newParticles, color = '0xffffff') {
  TweenMax.to(animationVars, .3, {ease: Power4.easeIn, speed: fullSpeed, onComplete: slowDown});
  particleSystem.material.color.setHex(color);
  for (var i = 0; i < particles.vertices.length; i++) {
    TweenMax.to(particles.vertices[i], 4, {ease: Elastic.easeOut.config(1, 0.75),
      x: newParticles.vertices[i].x, y: newParticles.vertices[i].y, z: newParticles.vertices[i].z})
  }
}
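As an aside (an observation, not a confirmed diagnosis): rocketPoints and spacemanPoints are only populated inside the asynchronous OBJ load callbacks, so a morph that fires before loading finishes will index vertices that don't exist yet. A defensive guard at the top of morphTo would at least make that case explicit:

function morphTo(newParticles, color = '0xffffff') {
  // bail out if the morph target hasn't been populated yet
  if (newParticles.vertices.length < particles.vertices.length) {
    console.warn('morph target not loaded yet');
    return;
  }
  // ... rest of morphTo as above
}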
P.S. note that I'm using webpack.
