I'm getting into WebXR programming and am building a simple wrapper that supports both a VR headset and a Google Cardboard-style viewer for smartphones. The VR headset path works well so far, but I have two issues when using the smartphone in landscape mode. Portrait mode works fine.
The camera breaks when looking above the horizon. ** fixed **
Tilting (rolling) the phone doesn't tilt the view; instead it pans left/right about halfway.
Codesandbox.io code: https://codesandbox.io/s/webxr-7vw5q6
Codesandbox.io app: https://7vw5q6.csb.app/
Update
I managed to fix the jumping and flipped image in landscape mode by adding some code.
if(rotType == "YZX")
{
if(orientation_g >= 0) {
screenOrientation = -90;
} else {
screenOrientation = 90;
orientation_a = orientation_a + 180;
}
}
However, I still have the issue when rolling the device left or right.
// Variables
// ---------
let camera, renderer, scene, loop;
let container;
let controls;
let controller1, controller2;
let teleportmarker, raycaster, INTERSECTION;
let baseReferenceSpace;
let tempMatrix = new THREE.Matrix4();
let effect;
let action;
// fakeVR
let fakeVR = false;
let orientation_a, orientation_b, orientation_g;
/* landscape fix test */
let alphaOffset = 0;
let screenOrientation = 0;
/* ====================================================================================================
* Controller
* ==================================================================================================== */
class Controller {
constructor(i) {
this.controllers = this.createController(i);
this.controllerGrip = this.createControllerGrip(i);
if(i == 0) { this.controllers.name = "Right"; }
if(i == 1) { this.controllers.name = "Left"; }
this.axes = new THREE.Vector2();
this.viewDirection = camera.getWorldDirection( new THREE.Vector3() );
const scope = this;
raycaster = new THREE.Raycaster();
this.initInputListenerXR();
this.group = new THREE.Group();
this.teleportmarker = new THREE.Mesh(
new THREE.RingGeometry(0.2, 0.25, 32).rotateX(-Math.PI / 2),
new THREE.MeshBasicMaterial({color: 0xFF00FF})
);
scene.add(this.teleportmarker);
}
createController(i) {
const controllers = renderer.xr.getController(i);
if (0) {
this.group.add(controllers);
controllers.visible = true;
}
return controllers;
}
createControllerGrip(i) {
const controllerModelFactory = new THREE.XRControllerModelFactory();
const controllerGrip = renderer.xr.getControllerGrip(i);
controllerGrip.add(controllerModelFactory.createControllerModel(controllerGrip));
return controllerGrip;
}
initInputListenerXR() {
const listenerFor = name => event => {
const cb = this._eventListeners[name];
if (cb) {
const uuid = event.target.uuid;
const cont = this.controllers;
if (cont && cont.uuid === uuid) cb(idx);
}
};
this._addSelectListener('selectstart', this.onSelectStart);
this._addSelectListener('selectend', this.onSelectEnd);
this._addSelectListener('connected', function(event) {
this.controllers.add(this.buildController(event.data));
this.controllers.children[0].visible = false;
});
this._addSelectListener( 'disconnected', function () {
this.controllers.remove(this.controllers.children[0]);
});
}
_addSelectListener(eventName, listener) {
this.controllers.addEventListener(eventName, listener.bind(this));
}
onSelectStart() {
console.log(this.controllers.name + ' was pressed.');
this.controllers.userData.isSelecting = true;
}
onSelectEnd() {
console.log(this.controllers.name + ' was released.');
this.controllers.userData.isSelecting = false;
if ( INTERSECTION ) {
const offsetPosition = { x: - INTERSECTION.x, y: - INTERSECTION.y, z: - INTERSECTION.z, w: 1 };
const offsetRotation = new THREE.Quaternion();
const transform = new XRRigidTransform( offsetPosition, offsetRotation );
const teleportSpaceOffset = baseReferenceSpace.getOffsetReferenceSpace( transform );
renderer.xr.setReferenceSpace( teleportSpaceOffset );
}
}
update() {
INTERSECTION = undefined;
if ( controller1.controllers.userData.isSelecting === true ) {
tempMatrix.identity().extractRotation( controller1.controllers.matrixWorld );
raycaster.ray.origin.setFromMatrixPosition( controller1.controllers.matrixWorld );
raycaster.ray.direction.set( 0, 0, - 1 ).applyMatrix4( tempMatrix );
const intersects = raycaster.intersectObjects([floor]);
if (intersects.length > 0) {
INTERSECTION = intersects[0].point;
}
}
else if ( controller2.controllers.userData.isSelecting === true ) {
tempMatrix.identity().extractRotation( controller2.controllers.matrixWorld );
raycaster.ray.origin.setFromMatrixPosition( controller2.controllers.matrixWorld );
raycaster.ray.direction.set( 0, 0, - 1 ).applyMatrix4( tempMatrix );
const intersects = raycaster.intersectObjects([floor]);
if (intersects.length > 0) {
INTERSECTION = intersects[0].point;
}
}
if (INTERSECTION) this.teleportmarker.position.copy(INTERSECTION);
this.teleportmarker.visible = INTERSECTION !== undefined;
}
// updateArc() {
// }
buildController(data) {
switch(data.targetRayMode) {
// case 'screen':
// return;
case 'tracked-pointer':
const geometry = new THREE.BufferGeometry();
geometry.setAttribute('position', new THREE.Float32BufferAttribute([0,0,0,0,0,-1],3));
geometry.setAttribute('color', new THREE.Float32BufferAttribute([0.5,0.5,0.5,0,0,0],3));
const material = new THREE.LineBasicMaterial({vertexColors:true,blending:THREE.AdditiveBlending});
return new THREE.Line(geometry,material);
case 'gaze':
const gaze_geometry = new THREE.RingGeometry(0.02,0.04,32).translate(0,0,-1);
const gaze_material = new THREE.MeshBasicMaterial({opacity:0.5,transparent:true});
return new THREE.Mesh(gaze_geometry,gaze_material);
}
}
setAction(button, functionName) {
}
action(button, functionName) {
}
}
/* ====================================================================================================
* Resizer
* ==================================================================================================== */
class Resizer {
constructor(container, camera, renderer) {
this.setSize(container, camera, renderer);
window.addEventListener('resize', () => {
this.setSize(container, camera, renderer);
this.onResize();
});
}
onResize() {
}
setSize(container, camera, renderer) {
camera.aspect = container.clientWidth / container.clientHeight;
camera.updateProjectionMatrix();
renderer.setSize(container.clientWidth, container.clientHeight);
renderer.setPixelRatio(window.devicePixelRatio);
}
}
/* ====================================================================================================
* Camera
* ==================================================================================================== */
class Camera extends THREE.PerspectiveCamera {
constructor() {
super();
this.onCreate();
}
onCreate() {
new THREE.PerspectiveCamera(35, 1, 0.1, 100);
}
}
/* ====================================================================================================
* Model
* ==================================================================================================== */
class Model extends THREE.Group {
constructor(data) {
super();
this.modelUrl = data;
this.onCreate();
}
onCreate() {
const dracoLoader = new THREE.DRACOLoader();
dracoLoader.setDecoderPath( 'vendor/three/examples/js/libs/draco/' );
dracoLoader.setDecoderConfig({ type: 'js' });
new THREE.GLTFLoader().
setDRACOLoader( dracoLoader ).
load(this.modelUrl,
gltf => {
this.updateTransform();
this.add(gltf.scene);
console.log(this);
});
}
updateMaterials(model) {
model.traverse(child => {
child.material = new THREE.MeshNormalMaterial();
});
}
updateTransform() {
}
dispose() {
}
rotate(x, y, z) {
this.rotation.x = THREE.MathUtils.degToRad(x);
this.rotation.y = THREE.MathUtils.degToRad(y);
this.rotation.z = THREE.MathUtils.degToRad(z);
}
scale(x, y, z) {
this.scale.set(x, y, z);
}
}
/* ====================================================================================================
* Loop
* ==================================================================================================== */
const clock = new THREE.Clock();
class Loop {
constructor(camera, scene, renderer) {
this.updatables = [];
}
start() {
renderer.setAnimationLoop(() => {
this.tick();
if(fakeVR) {
effect.render( scene, camera );
} else {
renderer.render(scene, camera);
}
});
}
stop() {
renderer.setAnimationLoop(null);
}
tick() {
const delta = clock.getDelta();
controller1.update();
controller2.update();
}
}
/* ====================================================================================================
* Scene
* ==================================================================================================== */
class Scene extends THREE.Scene {
constructor() {
super();
this.onCreate();
}
onCreate() {
new THREE.Scene();
this.background = new THREE.Color('skyblue');
}
createLights() {
const ambientLight = new THREE.HemisphereLight(
'white',
'darkslategrey',
5
);
const mainLight = new THREE.DirectionalLight('white', 4);
mainLight.position.set(100, 100, 100);
return { ambientLight, mainLight };
}
}
/* ====================================================================================================
* Application
* ==================================================================================================== */
class App {
constructor(i) {
// Setup <body> CSS style
document.getElementsByTagName("body")[0].style.cssText = 'width: 100vw; height: 100vh; margin: 0; padding: 0; overflow: hidden;';
// Create VR scene <div>
const VRdiv = document.createElement('div');
VRdiv.id = "VRScene";
VRdiv.style.cssText = 'position: absolute; width: 100vw; height: 100vh; display: block;';
document.body.insertAdjacentElement('afterbegin', VRdiv);
// Controls
controls = new Controls();
// Setup Camera
camera = new Camera();
camera.position.set(0, 0, 0);
camera.up.set(0, 1, 0);
renderer = this.createRenderer();
scene = new Scene();
loop = new Loop(camera, scene, renderer);
container = document.querySelector('#VRScene');
container.append(renderer.domElement);
const { ambientLight, mainLight } = scene.createLights();
loop.updatables.push(controls);
scene.add(ambientLight, mainLight);
const resizer = new Resizer(container, camera, renderer);
this.init(i);
this.start();
}
init(i) {
this.setupXR(i);
}
setupXR(i) {
renderer.xr.addEventListener("sessionstart", () => (baseReferenceSpace = renderer.xr.getReferenceSpace()));
document.write(`
<button id='VRIcon' class='toggleVR' style=" position: fixed; bottom: 10px; left: 10px; outline: none; border:
none; background: none; width: 60px; z-index: 10000;" onclick='` + i
+ `.toggleVR()' title='Toggle VR Mode for Mobile Devices Only'>
<svg style="width: 100%; fill: white; stroke: rgba(0,0,0,0.25);" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" x="0px"
y="0px" viewBox="0 0 62.7 52.375" enable-background="new 0 0 62.7
41.9" xml:space="preserve"><path d="M53.4,5.5h-44c-2.1,0-3.7,1.7-3.7,3.7v22.6c0,2.1,1.7,3.7,3.7,3.7h13.4c1.1,0,2.1-0.6,2.5-1.6l3-7.5c1.2-2.6,4.9-2.5,6,0.1
l2.6,7.3c0.4,1,1.4,1.7,2.5,1.7h13.9c2.1,0,3.7-1.7,3.7-3.7V9.3C57.2,7.2,55.5,5.5,53.4,5.5z
M20.4,27c-3.2,0-5.7-2.6-5.7-5.7
s2.6-5.7,5.7-5.7s5.7,2.6,5.7,5.7S23.6,27,20.4,27z
M42.4,27c-3.2,0-5.7-2.6-5.7-5.7s2.6-5.7,5.7-5.7s5.7,2.6,5.7,5.7
S45.6,27,42.4,27z"/></svg>
</button>
<svg id="VROverlay" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="none
meet" width="100vw" height="100vh" viewBox="0, 0, 2000, 1000"
style="position: absolute; top: 0; left: 0; bottom: 0; right: 0;
z-index: 9999; display: none;"><g id="svgg"><path id="path0" d="M 0 0
L 0 1000 L 1000000 1000 L 1000000 0 L 0 0 z M 500.04492 15 C
636.69612 15.006191 768.82704 43.380704 892.76562 99.34375 C 896.20268 100.89576 898.95249 103.64562 900.50391 107.08398 C 1013.1637 356.78574 1013.1657 643.21219 900.50781 892.91602 C 898.9564 896.35438 896.20466 899.10424 892.76758 900.65625 C 768.82901 956.61724 636.69909 984.9898 499.95508 985 C 363.30182 984.99379 231.171 956.61724 107.23242 900.65625 C 103.79536 899.10424 101.04557 896.35438 99.494141 892.91602 C -13.163603 643.21219 -13.163603 356.78574 99.494141 107.08398 C 101.04557 103.64562 103.79536 100.89576 107.23242 99.34375 C 231.171 43.380704 363.3009 15.0062 500.04492 15 z M 1500.0449 15 C 1636.6961 15.006191 1768.827 43.380704 1892.7656 99.34375 C 1896.2026 100.89576 1898.9525 103.64562 1900.5039 107.08398 L 1900.5078 107.08398 C 2013.1656 356.78574 2013.1656 643.21219 1900.5078 892.91602 C 1898.9564 896.35438 1896.2047 899.10424 1892.7676 900.65625 C 1768.8291 956.61724 1636.6991 984.9898 1499.9551 985 C 1363.3019 984.99379 1231.1709 956.61724 1107.2324 900.65625 C 1103.7953 899.10424 1101.0455 896.35438 1099.4941 892.91602 C 986.8364 643.21219 986.8364 356.78574 1099.4941 107.08398 C 1101.0455 103.64562 1103.7953 100.89576 1107.2324 99.34375 C 1231.1709 43.380704 1363.3009 15.0062 1500.0449 15 z " stroke="none" fill="#000000" fill-rule="evenodd"></path></g></svg>
`);
if ('xr' in navigator) {
navigator.xr.isSessionSupported('immersive-vr').then(function(supported)
{
if(supported) {
renderer.xr.enabled = true;
new THREE.VRButton(renderer);
document.body.appendChild(THREE.VRButton.createButton(renderer));
document.getElementById('VRButton').style.display = 'block';
document.getElementById('VRIcon').style.display = 'block';
}
});
}
controller1 = new Controller(0);
controller2 = new Controller(1);
scene.add(controller1.controllers);
scene.add(controller1.controllerGrip);
scene.add(controller2.controllers);
scene.add(controller2.controllerGrip);
controls = new Controls();
action = new Action();
// loop.tick();
}
start() {
loop.start();
}
stop() {
loop.stop();
}
createRenderer() {
const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setPixelRatio( window.devicePixelRatio );
renderer.physicallyCorrectLights = true;
effect = new THREE.StereoEffect( renderer );
effect.setSize( window.innerWidth, window.innerHeight );
return renderer;
}
toggleVR() {
if(DeviceMotionEvent && typeof DeviceMotionEvent.requestPermission === "function") {
DeviceMotionEvent.requestPermission();
}
if (fakeVR) {
fakeVR = false;
document.getElementById('VROverlay').style.display = 'none';
window.removeEventListener("deviceorientation", this.handleOrientation);
} else {
fakeVR = true;
window.addEventListener("deviceorientation", this.handleOrientation);
document.getElementById('VROverlay').style.display = 'block';
}
loop.stop();
loop.start();
}
handleMotion(event) {
}
handleOrientation(event) {
if (window.screen.orientation) {
screenOrientation = window.screen.orientation.angle;
} else if (typeof window.orientation === "number") {
screenOrientation = window.orientation;
} else if (window.screen.mozOrientation) {
screenOrientation = {
"portrait-primary": 0,
"portrait-secondary": 180,
"landscape-primary": 90,
"landscape-secondary": 270,
}[window.screen.mozOrientation];
}
var eyem = new THREE.Quaternion().setFromEuler(new THREE.Euler(-Math.PI / 2, 0, 0));
var d2r = Math.PI / 180;
orientation_a = event.alpha;
orientation_b = event.beta;
orientation_g = event.gamma;
var rotType = (screenOrientation === 0 || screenOrientation === 180) ? "YXZ" : "YZX";
if(rotType == "YZX")
{
if(orientation_g >= 0) {
screenOrientation = -90;
} else {
screenOrientation = 90;
orientation_a = orientation_a + 180;
}
}
var rotm = new THREE.Quaternion().setFromEuler(
new THREE.Euler(orientation_b * d2r, orientation_a * d2r, -orientation_g * d2r, rotType)
);
var devm = new THREE.Quaternion().setFromEuler(
new THREE.Euler(0, -screenOrientation * d2r, 0)
);
rotm.multiply(devm).multiply(eyem); //rot = (rot x dev) x eye
camera.quaternion.copy(rotm);
document.getElementById("Orientation_a1").innerHTML = orientation_a.toFixed(3);
document.getElementById("Orientation_b1").innerHTML = orientation_b.toFixed(3);
document.getElementById("Orientation_g1").innerHTML = orientation_g.toFixed(3);
document.getElementById("Orientation_o1").innerHTML = screenOrientation;
document.getElementById("Orientation_a2").innerHTML = orientation_a.toFixed(3);
document.getElementById("Orientation_b2").innerHTML = orientation_b.toFixed(3);
document.getElementById("Orientation_g2").innerHTML = orientation_g.toFixed(3);
document.getElementById("Orientation_o2").innerHTML = screenOrientation;
}
}
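For reference, the device-orientation controls that ship with three.js avoid switching Euler orders per screen orientation: they always build a 'YXZ' Euler from alpha/beta/gamma and fold the screen rotation in as an extra quaternion around Z, which also keeps roll intact. A minimal sketch of that composition (variable names are illustrative, all angles in radians, not taken from the code above):
// Sketch of the DeviceOrientationControls-style quaternion composition.
const zee = new THREE.Vector3(0, 0, 1);
const euler = new THREE.Euler();
const q0 = new THREE.Quaternion();
const q1 = new THREE.Quaternion(-Math.sqrt(0.5), 0, 0, Math.sqrt(0.5)); // -PI/2 around the X axis

function setObjectQuaternion(quaternion, alpha, beta, gamma, orient) {
  euler.set(beta, alpha, -gamma, 'YXZ');                  // device reports intrinsic Z-X'-Y'', so 'YXZ' here
  quaternion.setFromEuler(euler);                         // orient the device
  quaternion.multiply(q1);                                // camera looks out the back of the device, not the top
  quaternion.multiply(q0.setFromAxisAngle(zee, -orient)); // compensate for screen orientation
}

// Usage inside a deviceorientation handler (degrees converted to radians):
// setObjectQuaternion(camera.quaternion,
//   THREE.MathUtils.degToRad(event.alpha || 0),
//   THREE.MathUtils.degToRad(event.beta || 0),
//   THREE.MathUtils.degToRad(event.gamma || 0),
//   THREE.MathUtils.degToRad(screenOrientation || 0));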
ul {
padding-inline-start: 15px;
}
li {
list-style-type: none;
overflow: hidden;
}
<script src="https://7vw5q6.csb.app/webxr.three.js"></script>
<div style="display: block; position: fixed; top: 10%; left: 10%; right: 60%; background: rgba(255,255,255,.65);">
<ul>
<li>X-axis (β): <span id="Orientation_b1">0</span><span>°</span></li>
<li>Y-axis (γ): <span id="Orientation_g1">0</span><span>°</span></li>
<li>Z-axis (α): <span id="Orientation_a1">0</span><span>°</span></li>
<li>Orientation: <span id="Orientation_o1">0</span><span>°</span></li>
</ul>
</div>
<div style="display: block; position: fixed; top: 10%; left: 60%; right: 10%; background: rgba(255,255,255,.65);">
<ul>
<li>X-axis (β): <span id="Orientation_b2">0</span><span>°</span></li>
<li>Y-axis (γ): <span id="Orientation_g2">0</span><span>°</span></li>
<li>Z-axis (α): <span id="Orientation_a2">0</span><span>°</span></li>
<li>Orientation: <span id="Orientation_o2">0</span><span>°</span></li>
</ul>
</div>
I'm rewriting my question because Stack Overflow thought my post was spam (because I included 6000+ lines of code). I'm trying to make a web app that tracks the user's face and puts a 3D object over the face like a "filter". The thing is, I don't want this app to have any external dependencies except one (at least for scripts/packages/modules). Therefore, I copied the minified three.js (from https://cdn.jsdelivr.net/npm/three@0.120.1/build/three.min.js) inline into the HTML file, as well as the GLTFLoader.js script from the GitHub repository in examples/jsm/loaders/.
I started with Jeeliz's face filter repository, and I'm trying to implement GLTFLoader.js, but when I use
const loader = new GLTFLoader();
it gives me a "GLTFLoader is not defined" error, as well as console messages saying the ES6 module was not imported.
When I use
const loader = new THREE.GLTFLoader();
it says it's not a constructor, so I lean towards the former being the correct way to construct the loader.
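For context, my understanding of the two loading styles is roughly this (a sketch only; paths are illustrative):
// Option A: the examples/jsm build is an ES module, so it only works from a
// <script type="module"> block:
import { GLTFLoader } from './GLTFLoader.js';
const loader = new GLTFLoader();

// Option B: the examples/js (non-module) build registers itself on the THREE
// namespace, which is what would make the second form valid after a plain
// <script src="GLTFLoader.js"> include:
// const loader = new THREE.GLTFLoader();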
I appreciate any help in advance! I mostly code in Python or C++ and I'm still a beginner, but I've tinkered with JavaScript a few times so I thought I could handle this. I posted this question once before, but Stack Overflow crashed after flagging the post as spam, so I'm including a link to the HTML file:
https://www.dropbox.com/s/p86xchlldr7j2vf/index-mine.html?dl=0
Code:
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta http-equiv="content-language" content="en-EN" />
<title>Filter Web App</title>
<!-- INCLUDE FACEFILTER SCRIPT -->
<script src='https://raw.githubusercontent.com/jeeliz/jeelizFaceFilter/master/dist/jeelizFaceFilter.js'>
</script>
<!-- INCLUDE THREE.JS -->
<script src='https://cdn.jsdelivr.net/npm/three@0.120.1/build/three.min.js'>
</script>
<!-- INCLUDE RESIZER -->
<script src='https://raw.githubusercontent.com/jeeliz/jeelizFaceFilter/master/helpers/JeelizResizer.js'>
</script>
<!-- INCLUDE GLTFLOADER.JS -->
<script src='https://raw.githubusercontent.com/mrdoob/three.js/dev/examples/jsm/loaders/GLTFLoader.js'>
</script>
<!-- INCLUDE JEELIZ THREE.JS HELPER -->
<script>
/*
Helper for Three.js
*/
const punkThreeHelper = (function(){
// internal settings:
const _settings = {
rotationOffsetX: 0.0, // negative -> look upper. in radians
pivotOffsetYZ: [0.2, 0.6],// YZ of the distance between the center of the cube and the pivot
detectionThreshold: 0.8, // sensibility, between 0 and 1. Less -> more sensitive
detectionHysteresis: 0.02,
//tweakMoveYRotateX: 0,//0.5, // tweak value: move detection window along Y axis when rotate the face around X (look up <-> down)
cameraMinVideoDimFov: 35 // Field of View for the smallest dimension of the video in degrees
};
// private vars:
let _threeRenderer = null,
_threeScene = null,
_threeVideoMesh = null,
_threeVideoTexture = null,
_threeTranslation = null;
let _maxFaces = -1,
_isMultiFaces = false,
_detectCallback = null,
_isVideoTextureReady = false,
_isSeparateThreeCanvas = false,
_faceFilterCv = null,
_videoElement = null,
_isDetected = false,
_scaleW = 1,
_canvasAspectRatio = -1;
const _threeCompositeObjects = [];
let _gl = null,
_glVideoTexture = null,
_glShpCopyCut = null,
_glShpCopyCutVideoMatUniformPointer = null;
let _videoTransformMat2 = null;
// private funcs:
function destroy(){
_isVideoTextureReady = false;
_threeCompositeObjects.splice(0);
if (_threeVideoTexture){
_threeVideoTexture.dispose();
_threeVideoTexture = null;
}
}
function create_threeCompositeObjects(){
for (let i=0; i<_maxFaces; ++i){
// COMPOSITE OBJECT WHICH WILL TRACK A DETECTED FACE
const threeCompositeObject = new THREE.Object3D();
threeCompositeObject.frustumCulled = false;
threeCompositeObject.visible = false;
_threeCompositeObjects.push(threeCompositeObject);
_threeScene.add(threeCompositeObject);
}
}
function create_videoScreen(){
const videoScreenVertexShaderSource = "attribute vec2 position;\n\
uniform mat2 videoTransformMat2;\n\
varying vec2 vUV;\n\
void main(void){\n\
gl_Position = vec4(position, 0., 1.);\n\
vUV = 0.5 + videoTransformMat2 * position;\n\
}";
const videoScreenFragmentShaderSource = "precision lowp float;\n\
uniform sampler2D samplerVideo;\n\
varying vec2 vUV;\n\
void main(void){\n\
gl_FragColor = texture2D(samplerVideo, vUV);\n\
}";
if (_isSeparateThreeCanvas){
const compile_shader = function(source, type, typeString) {
const glShader = _gl.createShader(type);
_gl.shaderSource(glShader, source);
_gl.compileShader(glShader);
if (!_gl.getShaderParameter(glShader, _gl.COMPILE_STATUS)) {
alert("ERROR IN " + typeString + " SHADER: " + _gl.getShaderInfoLog(glShader));
return null;
}
return glShader;
};
const glShaderVertex = compile_shader(videoScreenVertexShaderSource, _gl.VERTEX_SHADER, 'VERTEX');
const glShaderFragment = compile_shader(videoScreenFragmentShaderSource, _gl.FRAGMENT_SHADER, 'FRAGMENT');
_glShpCopyCut = _gl.createProgram();
_gl.attachShader(_glShpCopyCut, glShaderVertex);
_gl.attachShader(_glShpCopyCut, glShaderFragment);
_gl.linkProgram(_glShpCopyCut);
const samplerVideo = _gl.getUniformLocation(_glShpCopyCut, 'samplerVideo');
_glShpCopyCutVideoMatUniformPointer = _gl.getUniformLocation(_glShpCopyCut, 'videoTransformMat2');
return;
}
// init video texture with red:
_threeVideoTexture = new THREE.DataTexture( new Uint8Array([255,0,0]), 1, 1, THREE.RGBFormat);
_threeVideoTexture.needsUpdate = true;
// CREATE THE VIDEO BACKGROUND:
const videoMaterial = new THREE.RawShaderMaterial({
depthWrite: false,
depthTest: false,
vertexShader: videoScreenVertexShaderSource,
fragmentShader: videoScreenFragmentShaderSource,
uniforms:{
samplerVideo: {value: _threeVideoTexture},
videoTransformMat2: {
value: _videoTransformMat2
}
}
});
const videoGeometry = new THREE.BufferGeometry()
const videoScreenCorners = new Float32Array([-1,-1, 1,-1, 1,1, -1,1]);
// handle both new and old THREE.js versions:
const setVideoGeomAttribute = (videoGeometry.setAttribute || videoGeometry.addAttribute).bind(videoGeometry);
setVideoGeomAttribute( 'position', new THREE.BufferAttribute( videoScreenCorners, 2 ) );
videoGeometry.setIndex(new THREE.BufferAttribute(new Uint16Array([0,1,2, 0,2,3]), 1));
_threeVideoMesh = new THREE.Mesh(videoGeometry, videoMaterial);
that.apply_videoTexture(_threeVideoMesh);
_threeVideoMesh.renderOrder = -1000; // render first
_threeVideoMesh.frustumCulled = false;
_threeScene.add(_threeVideoMesh);
} //end create_videoScreen()
function detect(detectState){
_threeCompositeObjects.forEach(function(threeCompositeObject, i){
_isDetected = threeCompositeObject.visible;
const ds = detectState[i];
if (_isDetected && ds.detected < _settings.detectionThreshold-_settings.detectionHysteresis){
// DETECTION LOST
if (_detectCallback) _detectCallback(i, false);
threeCompositeObject.visible = false;
} else if (!_isDetected && ds.detected > _settings.detectionThreshold+_settings.detectionHysteresis){
// FACE DETECTED
if (_detectCallback) _detectCallback(i, true);
threeCompositeObject.visible = true;
}
}); //end loop on all detection slots
}
function update_poses(ds, threeCamera){
// tan( <horizontal FoV> / 2 ):
const halfTanFOVX = Math.tan(threeCamera.aspect * threeCamera.fov * Math.PI/360); //tan(<horizontal FoV>/2), in radians (threeCamera.fov is vertical FoV)
_threeCompositeObjects.forEach(function(threeCompositeObject, i){
if (!threeCompositeObject.visible) return;
const detectState = ds[i];
// tweak Y position depending on rx:
//const tweak = _settings.tweakMoveYRotateX * Math.tan(detectState.rx);
const cz = Math.cos(detectState.rz), sz = Math.sin(detectState.rz);
// relative width of the detection window (1-> whole width of the detection window):
const W = detectState.s * _scaleW;
// distance between the front face of the cube and the camera:
const DFront = 1 / ( 2 * W * halfTanFOVX );
// D is the distance between the center of the unit cube and the camera:
const D = DFront + 0.5;
// coords in 2D of the center of the detection window in the viewport:
const xv = detectState.x * _scaleW;
const yv = detectState.y * _scaleW;
// coords in 3D of the center of the cube (in the view coordinates system):
const z = -D; // minus because view coordinate system Z goes backward
const x = xv * D * halfTanFOVX;
const y = yv * D * halfTanFOVX / _canvasAspectRatio;
// set position before pivot:
threeCompositeObject.position.set(-sz*_settings.pivotOffsetYZ[0], -cz*_settings.pivotOffsetYZ[0], -_settings.pivotOffsetYZ[1]);
// set rotation and apply it to position:
threeCompositeObject.rotation.set(detectState.rx+_settings.rotationOffsetX, detectState.ry, detectState.rz, "ZYX");
threeCompositeObject.position.applyEuler(threeCompositeObject.rotation);
// add translation part:
_threeTranslation.set(x, y+_settings.pivotOffsetYZ[0], z+_settings.pivotOffsetYZ[1]);
threeCompositeObject.position.add(_threeTranslation);
}); //end loop on composite objects
}
//public methods:
const that = {
// launched with the same spec object than callbackReady. set spec.threeCanvasId to the ID of the threeCanvas to be in 2 canvas mode:
init: function(spec, detectCallback){
destroy();
_maxFaces = spec.maxFacesDetected;
_glVideoTexture = spec.videoTexture;
_videoTransformMat2 = spec.videoTransformMat2;
_gl = spec.GL;
_faceFilterCv = spec.canvasElement;
_isMultiFaces = (_maxFaces>1);
_videoElement = spec.videoElement;
// enable 2 canvas mode if necessary:
let threeCanvas = null;
if (spec.threeCanvasId){
_isSeparateThreeCanvas = true;
// adjust the threejs canvas size to the threejs canvas:
threeCanvas = document.getElementById(spec.threeCanvasId);
threeCanvas.setAttribute('width', _faceFilterCv.width);
threeCanvas.setAttribute('height', _faceFilterCv.height);
} else {
threeCanvas = _faceFilterCv;
}
if (typeof(detectCallback) !== 'undefined'){
_detectCallback = detectCallback;
}
// init THREE.JS context:
_threeRenderer = new THREE.WebGLRenderer({
context: (_isSeparateThreeCanvas) ? null : _gl,
canvas: threeCanvas,
alpha: (_isSeparateThreeCanvas || spec.alpha) ? true : false,
preserveDrawingBuffer: true // to make image capture possible
});
_threeScene = new THREE.Scene();
_threeTranslation = new THREE.Vector3();
create_threeCompositeObjects();
create_videoScreen();
// handle device orientation change:
window.addEventListener('orientationchange', function(){
setTimeout(punkfacefilter.resize, 1000);
}, false);
const returnedDict = {
videoMesh: _threeVideoMesh,
renderer: _threeRenderer,
scene: _threeScene
};
if (_isMultiFaces){
returnedDict.faceObjects = _threeCompositeObjects
} else {
returnedDict.faceObject = _threeCompositeObjects[0];
}
return returnedDict;
}, //end that.init()
detect: function(detectState){
const ds = (_isMultiFaces) ? detectState : [detectState];
// update detection states:
detect(ds);
},
get_isDetected: function() {
return _isDetected;
},
render: function(detectState, threeCamera){
const ds = (_isMultiFaces) ? detectState : [detectState];
// update detection states then poses:
detect(ds);
update_poses(ds, threeCamera);
if (_isSeparateThreeCanvas){
// render the video texture on the faceFilter canvas:
_gl.viewport(0, 0, _faceFilterCv.width, _faceFilterCv.height);
_gl.useProgram(_glShpCopyCut);
_gl.uniformMatrix2fv(_glShpCopyCutVideoMatUniformPointer, false, _videoTransformMat2);
_gl.activeTexture(_gl.TEXTURE0);
_gl.bindTexture(_gl.TEXTURE_2D, _glVideoTexture);
_gl.drawElements(_gl.TRIANGLES, 3, _gl.UNSIGNED_SHORT, 0);
} else {
// reinitialize the state of THREE.JS because JEEFACEFILTER have changed stuffs:
// -> can be VERY costly !
_threeRenderer.state.reset();
}
// trigger the render of the THREE.JS SCENE:
_threeRenderer.render(_threeScene, threeCamera);
},
sortFaces: function(bufferGeometry, axis, isInv){ // sort faces long an axis
// Useful when a bufferGeometry has alpha: we should render the last faces first
const axisOffset = {X:0, Y:1, Z:2}[axis.toUpperCase()];
const sortWay = (isInv) ? -1 : 1;
// fill the faces array:
const nFaces = bufferGeometry.index.count/3;
const faces = new Array(nFaces);
for (let i=0; i<nFaces; ++i){
faces[i] = [bufferGeometry.index.array[3*i], bufferGeometry.index.array[3*i+1], bufferGeometry.index.array[3*i+2]];
}
// compute centroids:
const aPos = bufferGeometry.attributes.position.array;
const centroids = faces.map(function(face, faceIndex){
return [
(aPos[3*face[0]]+aPos[3*face[1]]+aPos[3*face[2]])/3, // X
(aPos[3*face[0]+1]+aPos[3*face[1]+1]+aPos[3*face[2]+1])/3, // Y
(aPos[3*face[0]+2]+aPos[3*face[1]+2]+aPos[3*face[2]+2])/3, // Z
face
];
});
// sort centroids:
centroids.sort(function(ca, cb){
return (ca[axisOffset]-cb[axisOffset]) * sortWay;
});
// reorder bufferGeometry faces:
centroids.forEach(function(centroid, centroidIndex){
const face = centroid[3];
bufferGeometry.index.array[3*centroidIndex] = face[0];
bufferGeometry.index.array[3*centroidIndex+1] = face[1];
bufferGeometry.index.array[3*centroidIndex+2] = face[2];
});
}, //end sortFaces
get_threeVideoTexture: function(){
return _threeVideoTexture;
},
apply_videoTexture: function(threeMesh){
if (_isVideoTextureReady){
return;
}
threeMesh.onAfterRender = function(){
// Replace _threeVideoTexture.__webglTexture by the real video texture:
try {
_threeRenderer.properties.update(_threeVideoTexture, '__webglTexture', _glVideoTexture);
_threeVideoTexture.magFilter = THREE.LinearFilter;
_threeVideoTexture.minFilter = THREE.LinearFilter;
_isVideoTextureReady = true;
} catch(e){
console.log('WARNING in punkThreeHelper: the glVideoTexture is not fully initialized');
}
delete(threeMesh.onAfterRender);
};
},
// create an occluder, IE a transparent object which writes on the depth buffer:
create_threejsOccluder: function(occluderURL, callback){
const occluderMesh = new THREE.Mesh();
new THREE.BufferGeometryLoader().load(occluderURL, function(occluderGeometry){
const mat = new THREE.ShaderMaterial({
vertexShader: THREE.ShaderLib.basic.vertexShader,
fragmentShader: "precision lowp float;\n void main(void){\n gl_FragColor=vec4(1.,0.,0.,1.);\n }",
uniforms: THREE.ShaderLib.basic.uniforms,
colorWrite: false
});
occluderMesh.renderOrder = -1; //render first
occluderMesh.material = mat;
occluderMesh.geometry = occluderGeometry;
if (typeof(callback)!=='undefined' && callback) callback(occluderMesh);
});
return occluderMesh;
},
set_pivotOffsetYZ: function(pivotOffset) {
_settings.pivotOffsetYZ = pivotOffset;
},
create_camera: function(zNear, zFar){
const threeCamera = new THREE.PerspectiveCamera(1, 1, (zNear) ? zNear : 0.1, (zFar) ? zFar : 100);
that.update_camera(threeCamera);
return threeCamera;
},
update_camera: function(threeCamera){
// compute aspectRatio:
const canvasElement = _threeRenderer.domElement;
const cvw = canvasElement.width;
const cvh = canvasElement.height;
_canvasAspectRatio = cvw / cvh;
// compute vertical field of view:
const vw = _videoElement.videoWidth;
const vh = _videoElement.videoHeight;
const videoAspectRatio = vw / vh;
const fovFactor = (vh > vw) ? (1.0 / videoAspectRatio) : 1.0;
const fov = _settings.cameraMinVideoDimFov * fovFactor;
console.log('INFO in punkThreeHelper - update_camera(): Estimated vertical video FoV is', fov);
// compute X and Y offsets in pixels:
let scale = 1.0;
if (_canvasAspectRatio > videoAspectRatio) {
// the canvas is more in landscape format than the video, so we crop top and bottom margins:
scale = cvw / vw;
} else {
// the canvas is more in portrait format than the video, so we crop right and left margins:
scale = cvh / vh;
}
const cvws = vw * scale, cvhs = vh * scale;
const offsetX = (cvws - cvw) / 2.0;
const offsetY = (cvhs - cvh) / 2.0;
_scaleW = cvw / cvws;
// apply parameters:
threeCamera.aspect = _canvasAspectRatio;
threeCamera.fov = fov;
console.log('INFO in punkThreeHelper.update_camera(): camera vertical estimated FoV is', fov, 'deg');
threeCamera.setViewOffset(cvws, cvhs, offsetX, offsetY, cvw, cvh);
threeCamera.updateProjectionMatrix();
// update drawing area:
_threeRenderer.setSize(cvw, cvh, false);
_threeRenderer.setViewport(0, 0, cvw, cvh);
}, //end update_camera()
resize: function(w, h, threeCamera){
_threeRenderer.domElement.width = w;
_threeRenderer.domElement.height = h;
punkfacefilter.resize();
if (threeCamera){
that.update_camera(threeCamera);
}
}
}
return that;
})();
// Export ES6 module:
try {
module.exports = punkThreeHelper;
} catch(e){
console.log('punkThreeHelper ES6 Module not exported');
window.punkThreeHelper = punkThreeHelper;
}
</script>
<!-- INCLUDE DEMO SCRIPT -->
<script>
let THREECAMERA = null;
// callback: launched if a face is detected or lost.
function detect_callback(faceIndex, isDetected) {
if (isDetected) {
console.log('INFO in detect_callback(): DETECTED');
} else {
console.log('INFO in detect_callback(): LOST');
}
}
// build the 3D. called once when punk Face Filter is OK
function init_threeScene(spec) {
const threeStuffs = punkThreeHelper.init(spec, detect_callback);
// CREATE A CUBE
const loader = new GLTFLoader();
loader.load( '/your-glb-file2.glb', function ( gltf ) {
threeStuffs.faceObject.add( gltf.scene );
} );
//CREATE THE CAMERA
THREECAMERA = punkThreeHelper.create_camera();
}
// entry point:
function main(){
punkResizer.size_canvas({
canvasId: 'jeeFaceFilterCanvas',
callback: function(isError, bestVideoSettings){
init_faceFilter(bestVideoSettings);
}
})
}
function init_faceFilter(videoSettings){
punkfacefilter.init({
followZRot: true,
canvasId: 'jeeFaceFilterCanvas',
NNCPath: '/', // root of NN_DEFAULT.json file
maxFacesDetected: 1,
callbackReady: function(errCode, spec){
if (errCode){
console.log('AN ERROR HAPPENS. ERR =', errCode);
return;
}
console.log('INFO: punkfacefilter IS READY');
init_threeScene(spec);
},
// called at each render iteration (drawing loop):
callbackTrack: function(detectState){
punkThreeHelper.render(detectState, THREECAMERA);
}
}); //end punkfacefilter.init call
}
window.addEventListener('load', main);
</script>
<style>
a {color: #eee; text-decoration: none}
a:hover {color: blue;}
body {overflow: auto; overflow-y: auto;
background-color: white;
background-attachment: fixed;
background-position: center;
background-size: contain;
margin: 0px;}
#jeeFaceFilterCanvas {
z-index: 10;
position: absolute;
max-height: 100%;
max-width: 100%;
left: 50%;
top: 50%;
width: 100vmin;
transform: translate(-50%, -50%) rotateY(180deg);
}
#threeCanvas{
z-index: 11;
position: absolute;
max-height: 100%;
max-width: 100%;
left: 50%;
top: 50%;
width: 100vmin;
transform: translate(-50%, -50%) rotateY(180deg);
}
@media (max-width: 787px) {
#jeeFaceFilterCanvas {
right: 0px;
top: 60px;
transform: rotateY(180deg);
}
}
</style>
</head>
<body>
<canvas width="600" height="600" id='jeeFaceFilterCanvas'></canvas>
</body>
</html>
/your-glb-file2.glb is a valid 3D file in the same directory as this file. I made this code from the HTML file I linked, but src'd the scripts from URLs, unlike the real HTML file I linked.
The
// CREATE A CUBE
const loader = new GLTFLoader();
loader.load( '/your-glb-file2.glb', function ( gltf ) {
threeStuffs.faceObject.add( gltf.scene );
} );
near the end is the problem. The app correctly puts a cube over my face like I want it to when it’s
// CREATE A CUBE
const cubeGeometry = new THREE.BoxGeometry(1,1,1);
const cubeMaterial = new THREE.MeshNormalMaterial();
const threeCube = new THREE.Mesh(cubeGeometry, cubeMaterial);
threeCube.frustumCulled = false;
threeStuffs.faceObject.add(threeCube);
I tried constructing the loader both as a "sub"-function of three.js (THREE.GLTFLoader) and as a standalone GLTFLoader, but it doesn't work either way, even though the cube built with three.js' native geometry functions renders over my face just fine.
Using the WebGL API, is there a way to count the number of vertices rendered within a given canvas? I've seen some tools that attempt to accomplish this task but some are giving strange results (e.g. Three.js' renderer.info.render is reporting my scene has 10,134.3 triangles).
Any help with using the raw WebGL API to count the number of rendered vertices (and, ideally, points and lines) would be greatly appreciated.
WebGL can't do this for you, but you can add your own augmentation.
The most obvious way is just to track your own usage: instead of calling gl.drawXXX directly, call your own functionThatTracksDrawingCountsXXX and track the values yourself.
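For instance, a minimal sketch of that wrapper idea (all names here are made up):
// Track draw counts yourself instead of calling gl.drawArrays directly.
const drawStats = { drawCalls: 0, vertices: 0 };

function drawArraysTracked(gl, mode, first, count) {
  drawStats.drawCalls += 1;
  drawStats.vertices += count;
  gl.drawArrays(mode, first, count);
}

// e.g. drawArraysTracked(gl, gl.TRIANGLES, 0, 36);
// Reset drawStats at the start of every frame to get per-frame numbers.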
You can also augment the WebGL context itself. Example
// copy this part into a file like `augmented-webgl.js`
// and include it in your page
(function() {
// NOTE: since WebGL constants are um, constant
// we could statically init this.
let primMap;
function addCount(ctx, type, count) {
const ctxInfo = ctx.info;
const primInfo = primMap[type];
ctxInfo.vertCount += count;
ctxInfo.primCount[primInfo.ndx] += primInfo.fn(count);
}
WebGLRenderingContext.prototype.drawArrays = (function(oldFn) {
return function(type, offset, count) {
addCount(this, type, count);
oldFn.call(this, type, offset, count);
};
}(WebGLRenderingContext.prototype.drawArrays));
WebGLRenderingContext.prototype.drawElements = (function(oldFn) {
return function(type, count, indexType, offset) {
addCount(this, type, count);
oldFn.call(this, type, count, indexType, offset);
};
}(WebGLRenderingContext.prototype.drawElements));
HTMLCanvasElement.prototype.getContext = (function(oldFn) {
return function(type, ...args) {
const ctx = oldFn.call(this, type, ...args);
if (ctx && type === "webgl") {
if (!primMap) {
primMap = {};
primMap[ctx.POINTS] = { ndx: 0, fn: count => count, };
primMap[ctx.LINE_LOOP] = { ndx: 1, fn: count => count, };
primMap[ctx.LINE_STRIP]= { ndx: 1, fn: count => count - 1, };
primMap[ctx.LINES] = { ndx: 1, fn: count => count / 2 | 0, };
primMap[ctx.TRIANGLE_STRIP] = { ndx: 2, fn: count => count - 2, };
primMap[ctx.TRIANGLE_FAN] = { ndx: 2, fn: count => count - 2, };
primMap[ctx.TRIANGLES] = { ndx: 2, fn: count => count / 3 | 0, };
};
ctx.info = {
vertCount: 0,
primCount: [0, 0, 0],
};
}
return ctx;
}
}(HTMLCanvasElement.prototype.getContext));
}());
// ---- cut above ----
const $ = document.querySelector.bind(document);
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, 1, 0.1, 1000);
const renderer = new THREE.WebGLRenderer({canvas: $('canvas')});
const geometry = new THREE.BoxGeometry(1, 1, 1);
const items = [];
for (let i = 0; i < 50; ++i) {
let item;
switch(rand(0, 3) | 0) {
case 0:
case 1:
const material = new THREE.MeshBasicMaterial({
color: rand(0xFFFFFF) | 0,
wireframe: rand(0, 3) > 2,
});
item = new THREE.Mesh(geometry, material);
break;
case 2:
const pmat = new THREE.PointsMaterial({
color: rand(0xFFFFFF) | 0,
});
item = new THREE.Points(geometry, pmat);
break;
default:
throw "oops";
}
item.position.x = rand(-10, 10);
item.position.y = rand(-10, 10);
item.position.z = rand( 0, -50);
scene.add(item);
items.push(item);
}
camera.position.z = 5;
const countElem = $('#count');
function render(time) {
time *= 0.001;
resize();
// animate the items
items.forEach((item, ndx) => {
item.rotation.x = time * 1.2 + ndx * 0.01;
item.rotation.y = time * 1.1;
});
// turn on/off a random items
items[rand(items.length) | 0].visible = Math.random() > .5;
renderer.render(scene, camera);
// get the current counts
const info = renderer.context.info;
countElem.textContent = ` VERTS: ${info.vertCount}
POINTS: ${info.primCount[0]}
LINES: ${info.primCount[1]}
TRIANGLES: ${info.primCount[2]}`;
// zero out the count
renderer.context.info.vertCount = 0;
renderer.context.info.primCount = [0, 0, 0];
requestAnimationFrame(render);
}
requestAnimationFrame(render);
function rand(min, max) {
if (max === undefined) {
max = min;
min = 0;
}
return Math.random() * (max - min) + min;
}
function resize() {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
if (canvas.width !== width || canvas.height !== height) {
renderer.setSize(width, height, false);
camera.aspect = width / height;
camera.updateProjectionMatrix();
}
}
body { border: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
#ui { position: absolute; left: 1em; top: 1em; background: rgba(0,0,0,.5); color: white; padding: .5em; width: 10em; }
<canvas></canvas>
<div id="ui">
<pre id="count"></pre>
</div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/92/three.min.js"></script>
Of course you might want to add support for drawArraysInstanced etc... and support for WebGL2.
We removed the number of processed vertices from renderer.info.render since the important measurement is the number of rendered primitives (triangles, points, lines). Please read https://github.com/mrdoob/three.js/pull/13404 and the related issues/PRs for more information. If you still want to know how many vertices were processed, you have to count them manually; WebGL can't do this for you.
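If a rough per-frame total from the scene graph is enough, a simple sketch is to walk the scene and sum the position attributes of visible objects (this ignores draw ranges and instancing, and assumes BufferGeometry):
function countSceneVertices(scene) {
  let vertices = 0;
  scene.traverse((obj) => {
    // only count visible objects that actually carry geometry
    if (obj.visible && obj.geometry && obj.geometry.attributes &&
        obj.geometry.attributes.position) {
      vertices += obj.geometry.attributes.position.count;
    }
  });
  return vertices;
}

// console.log('vertices in scene:', countSceneVertices(scene));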
I want to create a draggable / resizable / rotatable component in Ionic 2. Pan and pinch events work great, but rotate has strange behaviour: if I touch the component with two fingers, without doing any rotation at all, I still get a rotation value of around 15 to 30 degrees, which makes the component rotate. I don't know whether this is a known issue or something to do with the sensitivity of the screen. The code I am using for the component is this:
import { Component, ElementRef, Input, Renderer2 } from '@angular/core';
import { DomController, Gesture } from 'ionic-angular';
const defaultScale: number = 1;
const defaultRotation: number = 0;
@Component({
selector: 'draggable',
template: `
<ng-content></ng-content>
`
})
export class DraggableComponent {
@Input()
private position: {
x: number;
y: number;
};
@Input()
private dimensions: {
width: number;
height: number;
};
@Input()
private transform: {
scale: number;
rotation: number;
};
@Input()
protected container: any;
private gesture: Gesture;
private deltaCenter: {
x: number;
y: number;
} = null;
// when pinch + rotate, we will have very quick successive event when we release
private updating: boolean = false;
constructor(
private element: ElementRef,
private renderer: Renderer2,
private domCtrl: DomController
) {}
ngOnDestroy() {
this.gesture.destroy();
}
ngAfterViewInit() {
this.renderer.setStyle(this.element.nativeElement, 'position', 'absolute');
this.renderer.setStyle(this.element.nativeElement, 'transform-origin', 'center');
if (this.dimensions) {
if (this.dimensions.width) {
this.renderer.setStyle(this.element.nativeElement, 'width', this.dimensions.width + 'px');
}
if (this.dimensions.height) {
this.renderer.setStyle(this.element.nativeElement, 'height', this.dimensions.height + 'px');
}
}
if (!this.transform) {
this.transform = {
scale: 1,
rotation: 0
};
}
this.gesture = new Gesture(this.element.nativeElement);
this.gesture.listen();
this.gesture.on('pinch', this.handleGesture.bind(this));
this.gesture.on('rotate', this.handleGesture.bind(this));
this.gesture.on('panmove', this.handleGesture.bind(this));
this.gesture.on('pinchend panend rotateend', this.gestureEnd.bind(this));
this.updateStyles();
}
private handleGesture(event: {center: {y: number, x: number}, scale: number, rotation: number}) {
if (this.updating) {
return;
}
// even without doing any kind of rotation, using 2 fingers will set event.rotation between 15 to 30 degrees
if (!this.deltaCenter) {
this.deltaCenter = {
y: this.position.y - event.center.y,
x: this.position.x - event.center.x
};
}
this.position.y = event.center.y;
this.position.x = event.center.x;
this.updateStyles(event.scale, event.rotation);
}
private gestureEnd(event: {scale: number, rotation: number}) {
if (this.updating) {
return;
}
this.updating = true;
this.position.y += this.deltaCenter.y;
this.position.x += this.deltaCenter.x;
this.transform.scale = this.transform.scale * event.scale;
this.transform.rotation = this.transform.rotation + event.rotation;
this.deltaCenter = null;
this.updateStyles();
setTimeout(() => {
this.updating = false;
}, 100);
}
private get cntEle(): HTMLElement {
let cntEle: HTMLElement = null;
if (!this.container) {
return null;
}
else if (this.container instanceof Node) {
return this.container as HTMLElement;
}
else if (this.container.getNativeElement) {
return this.container.getNativeElement();
}
return null;
}
private get containerBoundingClientRect(): ClientRect {
if (this.cntEle) {
return this.cntEle.getBoundingClientRect();
}
else if (this.container && 'top' in this.container) {
return this.container as ClientRect;
}
// bound to whole document
return {
top: 0,
left: 0,
bottom: document.documentElement.clientHeight,
right: document.documentElement.clientWidth,
width: document.documentElement.clientWidth,
height: document.documentElement.clientHeight
};
}
private get x(): number {
let x = this.position.x;
if (this.deltaCenter) {
x += this.deltaCenter.x;
}
if (x < this.containerBoundingClientRect.left) {
return this.containerBoundingClientRect.left;
}
else if (x > (this.containerBoundingClientRect.right - this.dimensions.width)) {
return this.containerBoundingClientRect.right - this.dimensions.width;
}
return x
}
private get y(): number {
let y = this.position.y;
if (this.deltaCenter) {
y += this.deltaCenter.y;
}
if (y < this.containerBoundingClientRect.top) {
return this.containerBoundingClientRect.top;
}
if (y > (this.containerBoundingClientRect.bottom - this.dimensions.height)) {
return this.containerBoundingClientRect.bottom - this.dimensions.height;
}
return y;
}
private updateStyles(scale: number = 1, rotation: number = 0) {
this.domCtrl.write(() => {
this.renderer.setStyle(this.element.nativeElement, 'top', this.y + 'px');
this.renderer.setStyle(this.element.nativeElement, 'left', this.x + 'px');
let transforms = [];
transforms.push(`scale(${this.transform.scale * scale})`);
transforms.push(`rotateZ(${this.transform.rotation + rotation}deg)`);
this.renderer.setStyle(this.element.nativeElement, 'transform', transforms.join(' '));
});
}
}
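One possible workaround (a sketch only, not tested against this component; the event shape and applyRotation are illustrative) is to capture the rotation reported by the first rotate event of each gesture and apply only the delta, so that merely placing two fingers yields roughly 0 degrees:
// Normalise the rotation so it is relative to the first multi-touch sample.
let startRotation = null;

function onRotate(event) {
  if (startRotation === null) {
    startRotation = event.rotation; // angle reported when both fingers land
  }
  const deltaRotation = event.rotation - startRotation;
  applyRotation(deltaRotation);     // illustrative: feed only the delta into updateStyles()
}

function onRotateEnd() {
  startRotation = null;             // reset for the next gesture
}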
In Appcelerator Titanium code I have a month ScrollableView, to which I have added week views, to which I have in turn added day views.
The expectation is that on the click event of a day I should be able to retrieve the properties of that date. However, in the singletap event I get a reference to the week view and cannot get at the child day view. How can I get a reference to the day view in the singletap/click event?
Code - Widget.js
var args = arguments[0] || {};
var Moment = require('alloy/moment');
var ROWS = 6;
var COLUMNS = 7;
_.defaults(args, {
// Data
current_date: Moment(),
active_dates: [],
min_date: Moment().subtract(6, 'months'),
max_date: Moment().add(6, 'months'),
// Style
backgroundColor: 'transparent',
dateBackgroundColor: 'transparent',
todayBackgroundColor: '#af80',
dateTextColor: '#fff',
todayTextColor: '#000',
activePinColor: '#f39911',
inactivePinColor: 'transparent',
selectedBackgroundColor: '#60f39911',
fontFamily: '',
// Behaviour
allowInactiveSelection: false,
fillMonth: false,
enablePastDays: false
});
var active_dates = args.active_dates ? getMomentDates(args.active_dates) : [];
var current_page = 0;
/////////////
// Methods //
/////////////
function refreshArrows() {
$.leftBtn.opacity = current_page <= 0 ? 0.4 : 1;
$.rightBtn.opacity = current_page >= $.monthScroll.views.length - 1 ? 0.4 : 1;
}
function getDayLabels() {
var days = Moment.weekdaysMin();
days.push(days.shift()); // Moment week has Sunday at index 0
_.each(days, function(day, i) {
var width = Math.floor($.calendar.rect.width / COLUMNS);
var $label = $.UI.create('Label', {
classes: ['dayLabel'],
width: width,
text: day.charAt(0),
left: i * width,
font: {
fontFamily: args.fontFamily
}
});
$.dayLabels.add($label);
});
}
function getMomentDates(dates) {
return _.map(dates, function(date) {
return Moment(date);
});
}
function isInMomentsList(date, dates) {
return _.find(dates, function(day) {
return date.isSame(day, 'day');
});
}
function getDayContainer(number) {
var $this = $.UI.create('View', {
classes: ['day'],
width: Math.floor($.monthScroll.rect.width / COLUMNS),
height: Math.floor($.monthScroll.rect.height / ROWS),
backgroundColor: args.dateBackgroundColor,
opacity: 1,
date: null,
active: null,
});
$this.add($.UI.create('Label', {
classes: ['dayNumber'],
color: '#fff',
text: number,
font: {
fontFamily: args.fontFamily
}
}));
$this.add($.UI.create('View', {
classes: ['dayDot'],
backgroundColor: 'transparent'
}));
return $this;
}
function setItemDate($item, date) {
$item.date = date;
$item.children[0].text = date.date();
}
function setItemActive($item, active) {
$item.active = active;
$item.children[1].backgroundColor = active ? args.activePinColor : args.inactivePinColor;
}
function setItemToday($item, is_today) {
$item.backgroundColor = is_today ? args.todayBackgroundColor : args.dateBackgroundColor;
$item.children[0].color = is_today ? args.todayTextColor : args.dateTextColor;
}
function setItemCurrent($item, current) {
$item.opacity = current ? 1 : 0.5;
}
function getMonthView(month, year) {
var month_rows = [];
var start_date = Moment().month(month).year(year).startOf('month').startOf('week');
var end_date = Moment().month(month).year(year).endOf('month').endOf('week');
// Month skeleton
var $month_view = $.UI.create('View', {
classes: ['month'],
month: month,
year: year,
backgroundColor: args.backgroundColor,
ready: false
});
// Month activity indicator
var $loader = Ti.UI.createActivityIndicator({
style: OS_IOS ? Ti.UI.iPhone.ActivityIndicatorStyle.BIG : Ti.UI.ActivityIndicatorStyle.BIG,
center: {
x: '50%',
y: '50%'
}
});
$month_view.add($loader);
$month_view.__loader = $loader;
$loader.show();
return $month_view;
}
function buildMonth($month_view, dates) {
if (!$month_view || $month_view.ready) return;
var start_date = Moment().month($month_view.month).year($month_view.year).startOf('month').startOf('week');
var end_date = Moment().month($month_view.month).year($month_view.year).endOf('month').endOf('week');
var $days_container = Ti.UI.createView({
height: Ti.UI.FILL,
width: Ti.UI.FILL
});
// Separators
for (var i = 0; i < ROWS; i++) {
$days_container.add($.UI.create('View', {
classes: ['hr'],
top: (i+1) * Math.floor($.monthScroll.rect.height / ROWS)
}));
}
// Add day containers
for (var d = 0; d < ROWS*COLUMNS; d++) {
var curday = Moment(start_date).add(d, 'days');
// If fillMonth is disabled, add only this month's days
if (curday.month() === $month_view.month || args.fillMonth == true) {
var $curview = getDayContainer(curday.date());
var row = Math.floor(d/COLUMNS);
var col = d % COLUMNS;
setItemDate($curview, curday);
setItemActive($curview, isInMomentsList(curday, dates));
setItemCurrent($curview, !curday.isBefore(Moment(), 'day') || (args.enablePastDays == true && (curday.month() === $month_view.month)));
setItemToday($curview, curday.isSame(Moment(), 'day'));
$curview.top = row * ($curview.height);
$curview.left = col * ($curview.width);
$days_container.add($curview);
}
}
$month_view.add($days_container);
$month_view.ready = true;
$month_view.__loader.hide();
}
function buildCalendar() {
$.main.removeEventListener('postlayout', buildCalendar);
// Add top labels
getDayLabels();
// Create the calendar views
var curmonth_index = -1; var i = 0;
for (var m = Moment(args.min_date); m.diff(Moment(args.max_date)) <= 0; m.add(1, 'months')) {
if (m.isSame(Moment(), 'month')) curmonth_index = i;
var monthview = getMonthView(m.month(), m.year());
$.monthScroll.addView(monthview);
i++;
}
$.monthScroll.currentPage = current_page = curmonth_index > 0 ? curmonth_index : 0;
refreshCalendarMonth(current_page);
refreshArrows();
}
function refreshCalendarMonth(m) {
var month_date = Moment().month($.monthScroll.views[m].month).year($.monthScroll.views[m].year);
$.monthName.text = month_date.format('MMMM').toUpperCase();
$.monthYear.text = month_date.format('YYYY');
buildMonth($.monthScroll.views[m], args.active_dates);
if (current_page - 1 > -1) buildMonth($.monthScroll.views[m-1], args.active_dates);
if (current_page + 1 < 12) buildMonth($.monthScroll.views[m+1], args.active_dates);
}
///////////////
// Listeners //
///////////////
$.main.addEventListener('postlayout', buildCalendar);
$.monthScroll.addEventListener('scroll', function(e) {
if (e.currentPage === current_page) return;
current_page = e.currentPage;
refreshArrows();
refreshCalendarMonth(current_page);
});
$.monthScroll.addEventListener('click', function(e) {
if (!e.source.date || (!e.source.active && !args.allowInactiveSelection) || (args.enablePastDays == false && e.source.date.isBefore(Moment(), 'day'))) return;
e.source.animate({ backgroundColor: args.selectedBackgroundColor, duration: 150, autoreverse: true });
$.trigger('selected', {
date: e.source.date,
active: e.source.active
});
});
$.leftBtn.addEventListener('click', function() {
$.monthScroll.movePrevious();
});
$.rightBtn.addEventListener('click', function() {
$.monthScroll.moveNext();
});
//////////
// Init //
//////////
$.monthName.font = {
fontFamily: args.fontFamily,
fontWeight: 'bold'
};
$.monthYear.font = {
fontFamily: args.fontFamily,
fontWeight: 'light'
};
Widget.xml
<Alloy>
<Window backgroundColor="#110ee1" class="container" exitOnClose="true" id="widget" title="DailyRead" top="0">
<View id="main">
<View class="bar" id="header">
<View class="hr" top="0"/>
<View class="ctrlBtn" id="leftBtn">
<ImageView id="leftArrow"/>
</View>
<View class="headerText">
<Label id="monthName"/>
<Label id="monthYear"/>
</View>
<View class="ctrlBtn" id="rightBtn">
<ImageView id="rightArrow"/>
</View>
<View bottom="0" class="hr"/>
</View>
<View class="sp1/2"/>
<View id="calendar">
<View id="dayLabels"/>
<View backgroundColor="#fff" class="hr" height="2"/>
<ScrollableView id="monthScroll"/>
</View>
</View>
</Window>
</Alloy>
As far as I understand the question, I would add more information to the day view. E.g. if you create everything in a loop, just add the month and week to the day view as properties. Then set all views except the day view to touchEnabled: false and add the click event on the day view. You can then read event.source.day / event.source.week / event.source.month inside the click event.
If that doesn't help, please add some example code to your question.
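A rough sketch of that idea against the widget above (the extra day/week/month properties and the touchEnabled flags are the only changes; property names are illustrative):
// Tag the day view with the data you want back, and make sure its children
// don't swallow the tap.
function getDayContainer(number, curday) {
  var $this = $.UI.create('View', {
    classes: ['day'],
    date: null,
    active: null,
    day: curday.date(),   // illustrative extra properties
    week: curday.week(),
    month: curday.month()
  });
  $this.add($.UI.create('Label', { classes: ['dayNumber'], text: number, touchEnabled: false }));
  $this.add($.UI.create('View', { classes: ['dayDot'], touchEnabled: false }));
  return $this;
}

// In the click listener, e.source is then always the day view:
$.monthScroll.addEventListener('click', function(e) {
  if (!e.source.date) return;
  Ti.API.info(e.source.day + ' / ' + e.source.week + ' / ' + e.source.month);
});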
In my project, I now have 3 discrete layers.
Background
Midground
Foreground
Each of these layers could have vastly different unit scales and potentially overlapping geometry, but I don't want them to render as if they occupy the same "space". I want them to render in their logical layer order, not their order in 3D space.
For example, let's say the Background is a narrow tube, 10 units in radius, that I put over the camera to achieve a tunnel-like effect. I then want to put a large cube as the Foreground, 100 units on each side, in the scene far from the camera.
In this scenario, the cube and the tunnel intersect and obscure each other. I'm looking for a way to render the entire tunnel, then render the entire cube, and then put that rendered cube on top of the rendered tunnel. And I want any alpha transparency in the cube's textures/shaders to be cleanly composited, showing the rendered tunnel behind transparent pixels.
So:
Am I describing a technique or feature that exists? If so, what's that called?
Can WebGL do this?
Can three.js do this?
Will this cause any massive performance drops over rendering a whole frame in one go?
How would I structure my rendering in three.js to set this up?
Still new to *GL graphics programming, so sorry if my vocabulary isn't accurate. Pedantic vocabulary corrections are appreciated, as they will help me google!
I think I managed this one, after some reverse engineering of this example:
http://mrdoob.github.com/three.js/examples/webgl_rtt.html
Basically
renderer.autoClear = false;
Then instead of rendering one scene like this:
render: function() {
renderer.render(scene, camera);
}
I manually clear before rendering multiple scenes:
render: function() {
renderer.clear()
renderer.render(background, camera);
renderer.render(midground, camera);
renderer.render(foreground, camera);
}
Still not totally sure of performance implications, however.
First off, you could render to 3 different canvases and just set their z-index and position so they overlap; the browser will composite them for you, as in the sketch below.
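A minimal sketch of that multi-canvas variant, assuming one THREE.WebGLRenderer per layer and the background/midground/foreground scenes plus shared camera from the question (the makeLayerRenderer helper name is made up):
function makeLayerRenderer(zIndex) {
    // alpha: true gives a transparent clear color so lower canvases show through.
    const renderer = new THREE.WebGLRenderer({ alpha: true, antialias: true });
    renderer.setSize(window.innerWidth, window.innerHeight);
    const el = renderer.domElement;
    el.style.position = 'absolute';
    el.style.left = '0';
    el.style.top = '0';
    el.style.zIndex = String(zIndex); // the browser composites the canvases in this order
    document.body.appendChild(el);
    return renderer;
}

const backRenderer = makeLayerRenderer(0);
const midRenderer = makeLayerRenderer(1);
const frontRenderer = makeLayerRenderer(2);

function render() {
    backRenderer.render(background, camera);
    midRenderer.render(midground, camera);
    frontRenderer.render(foreground, camera);
    requestAnimationFrame(render);
}
render();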
If you want to do it all in one canvas, then you basically just clear the depth buffer after drawing each layer:
drawStuffInBack();
// clear the depth (and stencil buffers)
gl.clear(gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT);
drawStuffInMiddle();
// clear the depth (and stencil buffers)
gl.clear(gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT);
drawStuffInFront();
The following is a working example that has a VisualLayers class for managing any number of layers; it uses the renderer.autoClear = false technique and the depth-clearing technique that Alex Wayne's and gman's answers hinted at.
This approach is nice because the renderOrder of objects is not modified (that's another approach) and thus it will not introduce other issues.
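For comparison, the renderOrder alternative mentioned above would look roughly like this (tunnelMesh and cubeMesh are stand-ins for the question's tunnel and cube); turning off depthTest is exactly the kind of side effect, e.g. broken self-occlusion within an object, that the layer system below avoids:
// Force the cube to draw after (and on top of) the tunnel within a single scene.
tunnelMesh.renderOrder = 0;
cubeMesh.renderOrder = 1;
// Without this the depth buffer would still hide the cube behind the tunnel walls.
cubeMesh.material.depthTest = false;
renderer.render(scene, camera);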
Try playing with the options in the UI to see what it does:
// @ts-check
////////////////////////
// LAYER SYSTEM
////////////////////////
/** @typedef {{name: string, backingScene: THREE.Scene, order: number}} Layer */
class VisualLayers {
/**
* @type {Array<Layer>}
* @private
*/
__layers = [];
constructor(
/** @private @type {THREE.WebGLRenderer} */ __renderer,
/** @private @type {typeof THREE.Scene} */ __Scene = THREE.Scene
) {
this.__renderer = __renderer;
this.__Scene = __Scene;
}
defineLayer(/** @type {string} */ name, /** @type {number=} */ order = 0) {
const layer = this.__getLayer(name);
// The default layer always has order 0.
const previousOrder = layer.order;
layer.order = name === "default" ? 0 : order;
// Sort only if order changed.
if (previousOrder !== layer.order)
this.__layers.sort((a, b) => a.order - b.order);
return layer;
}
/**
* Get a layer by name (if it doesn't exist, creates it with default order 0).
* @private
*/
__getLayer(/** @type {string} */ name) {
let layer = this.__layers.find((l) => l.name === name);
if (!layer) {
layer = { name, backingScene: new this.__Scene(), order: 0 };
layer.backingScene.autoUpdate = false;
this.__layers.push(layer);
}
return layer;
}
removeLayer(/** @type {string} */ name) {
const index = this.__layers.findIndex((l) => {
if (l.name === name) {
l.backingScene.children.length = 0;
return true;
}
return false;
});
if (index >= 0) this.__layers.splice(index, 1);
}
hasLayer(/** @type {string} */ name) {
return this.__layers.some((l) => l.name === name);
}
/** @readonly */
get layerCount() {
return this.__layers.length;
}
addObjectToLayer(
/** @type {THREE.Object3D} */ obj,
/** @type {string | string[]} */ layers
) {
if (Array.isArray(layers)) {
for (const name of layers) this.__addObjectToLayer(obj, name);
return;
}
this.__addObjectToLayer(obj, layers);
}
addObjectsToLayer(
/** @type {THREE.Object3D[]} */ objects,
/** @type {string | string[]} */ layers
) {
for (const obj of objects) {
this.addObjectToLayer(obj, layers);
}
}
/** @private @readonly */
__emptyArray = Object.freeze([]);
/** @private */
__addObjectToLayer(
/** @type {THREE.Object3D} */ obj,
/** @type {string} */ name
) {
const layer = this.__getLayer(name);
const proxy = Object.create(obj, {
children: { get: () => this.__emptyArray }
});
layer.backingScene.children.push(proxy);
}
removeObjectFromLayer(
/** @type {THREE.Object3D} */ obj,
/** @type {string | string[]} */ nameOrNames
) {
if (Array.isArray(nameOrNames)) {
for (const name of nameOrNames) {
const layer = this.__layers.find((l) => l.name === name);
if (!layer) continue;
this.__removeObjectFromLayer(obj, layer);
}
return;
}
const layer = this.__layers.find((l) => l.name === nameOrNames);
if (!layer) return;
this.__removeObjectFromLayer(obj, layer);
}
/** @private */
__removeObjectFromLayer(
/** @type {THREE.Object3D} */ obj,
/** @type {Layer} */ layer
) {
const children = layer.backingScene.children;
const index = children.findIndex(
(proxy) => /** @type {any} */ (proxy).__proto__ === obj
);
if (index >= 0) {
children[index] = children[children.length - 1];
children.pop();
}
}
removeObjectsFromAllLayers(/** @type {THREE.Object3D[]} */ objects) {
for (const layer of this.__layers) {
for (const obj of objects) {
this.__removeObjectFromLayer(obj, layer);
}
}
}
render(
/** @type {THREE.Camera} */ camera,
/** @type {(layerName: string) => void} */ beforeEach,
/** @type {(layerName: string) => void} */ afterEach
) {
for (const layer of this.__layers) {
beforeEach(layer.name);
this.__renderer.render(layer.backingScene, camera);
afterEach(layer.name);
}
}
}
//////////////////////
// VARS
//////////////////////
let camera, stats, geometry, material, object, object2, root;
let time = 0;
/** @type {THREE.Scene} */
let scene;
/** @type {THREE.WebGLRenderer} */
let renderer;
/** @type {VisualLayers} */
let visualLayers;
const clock = new THREE.Clock();
const greenColor = "#27ae60";
const options = {
useLayers: true,
showMiddleBox: true,
rotate: true,
layer2Order: 2
};
//////////////////////
// INIT
//////////////////////
~(function init() {
setup3D();
renderLoop();
})();
////////////////////////////////
// SETUP 3D
////////////////////////////////
function setup3D() {
const container = document.createElement("div");
container.id = "container";
document.body.appendChild(container);
// CAMERA
camera = new THREE.PerspectiveCamera(
70,
window.innerWidth / window.innerHeight,
1,
10000
);
camera.position.x = 0;
camera.position.z = 500;
camera.position.y = 0;
scene = new THREE.Scene();
// RENDERERS
renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true });
renderer.setClearColor(0x111111);
container.appendChild(renderer.domElement);
// LAYERS
visualLayers = new VisualLayers(renderer);
// Layers don't have to be defined. Adding an object to a layer will
// automatically create the layer with order 0. But let's define layers with
// order values.
visualLayers.defineLayer("layer1", 1);
visualLayers.defineLayer("layer2", 2);
visualLayers.defineLayer("layer3", 3);
// LIGHTS
const directionalLight = new THREE.DirectionalLight(0xffffff, 0.5);
directionalLight.position.set(300, 0, 300);
scene.add(directionalLight);
visualLayers.addObjectToLayer(directionalLight, [
"layer1",
"layer2",
"layer3"
]);
const ambientLight = new THREE.AmbientLight(0xffffff, 0.4);
scene.add(ambientLight);
visualLayers.addObjectToLayer(ambientLight, ["layer1", "layer2", "layer3"]);
// GEOMETRY
root = new THREE.Object3D();
scene.add(root);
geometry = new THREE.BoxGeometry(100, 100, 100);
material = new THREE.MeshPhongMaterial({
color: greenColor,
transparent: false,
opacity: 1
});
object = new THREE.Mesh(geometry, material);
root.add(object);
visualLayers.addObjectToLayer(object, "layer1");
object.position.y = 80;
object.position.z = -20;
// object.rotation.y = -Math.PI / 5
object2 = new THREE.Mesh(geometry, material);
object.add(object2);
visualLayers.addObjectToLayer(object2, "layer2");
object2.position.y -= 80;
object2.position.z = -20;
object2.rotation.y = -Math.PI / 5;
const object3 = new THREE.Mesh(geometry, material);
object2.add(object3);
visualLayers.addObjectToLayer(object3, "layer3");
object3.position.y -= 80;
object3.position.z = -20;
object3.rotation.y = -Math.PI / 5;
// GUI
const pane = new Tweakpane({
title: "VisualLayers"
});
pane.addInput(options, "useLayers", { label: "use layers" });
pane.addInput(options, "showMiddleBox", { label: "show middle box" });
pane.addInput(options, "rotate");
pane
.addInput(options, "layer2Order", {
label: "layer2 order",
options: {
0: 0,
2: 2,
4: 4
}
})
.on("change", () => visualLayers.defineLayer("layer2", options.layer2Order));
// STATS
// SEE: https://github.com/mrdoob/stats.js
stats = new Stats();
stats.domElement.style.position = "absolute";
stats.domElement.style.left = "0px";
stats.domElement.style.top = "0px";
stats.setMode(0);
document.body.appendChild(stats.domElement);
}
//////////////////////
// RESIZE
//////////////////////
(window.onresize = function (event) {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
})();
//////////////////////
// RAF RENDER LOOP
//////////////////////
function render() {
stats.begin();
if (options.rotate) {
time += clock.getDelta();
object.rotation.y += 0.02;
root.rotation.y = Math.PI / 2 + (Math.PI / 6) * Math.sin(time * 0.001);
}
object2.visible = options.showMiddleBox;
if (options.useLayers) {
scene.updateWorldMatrix(true, true);
renderer.autoClear = false;
renderer.clear();
visualLayers.render(camera, beforeEachLayerRender, afterEachLayerRender);
} else {
renderer.autoClear = true;
renderer.render(scene, camera);
}
stats.end();
}
function renderLoop() {
render();
requestAnimationFrame(renderLoop);
}
function beforeEachLayerRender(layer) {}
function afterEachLayerRender(layer) {
renderer.clearDepth();
}
html,
body,
#container {
margin: 0px;
padding: 0px;
width: 100%;
height: 100%;
}
canvas {
background: transparent;
display: block;
width: 100%;
height: 100%;
position: absolute;
left: 0;
top: 0;
}
<script src="https://cdn.jsdelivr.net/npm/tweakpane#1.5.5/dist/tweakpane.min.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/stats.js/r11/Stats.min.js"></script>
<script src="//unpkg.com/three#0.121.1/build/three.min.js"></script>
<script src="//unpkg.com/postprocessing#6.17.4/build/postprocessing.js"></script>
(example on codepen)