I want to create a draggable / resizable / rotatable component in Ionic 2. Pan and pinch events are working great, but rotate has a strange behaviour: if I touch the component with two fingers, but without doing any kind of rotation, I still get a rotation value of around 15 to 30 degrees, making the component rotate. I don't know if this is a known issue or something to do with the sensitivity of the screen. The code I am using for the component is this:
import { Component, ElementRef, Input, Renderer2 } from '@angular/core';
import { DomController, Gesture } from 'ionic-angular';
const defaultScale: number = 1;
const defaultRotation: number = 0;
@Component({
selector: 'draggable',
template: `
<ng-content></ng-content>
`
})
export class DraggableComponent {
@Input()
private position: {
x: number;
y: number;
};
@Input()
private dimensions: {
width: number;
height: number;
};
@Input()
private transform: {
scale: number;
rotation: number;
};
@Input()
protected container: any;
private gesture: Gesture;
private deltaCenter: {
x: number;
y: number;
} = null;
// when pinching + rotating, we get very quick successive end events on release
private updating: boolean = false;
constructor(
private element: ElementRef,
private renderer: Renderer2,
private domCtrl: DomController
) {}
ngOnDestroy() {
this.gesture.destroy();
}
ngAfterViewInit() {
this.renderer.setStyle(this.element.nativeElement, 'position', 'absolute');
this.renderer.setStyle(this.element.nativeElement, 'transform-origin', 'center');
if (this.dimensions) {
if (this.dimensions.width) {
this.renderer.setStyle(this.element.nativeElement, 'width', this.dimensions.width + 'px');
}
if (this.dimensions.height) {
this.renderer.setStyle(this.element.nativeElement, 'height', this.dimensions.height + 'px');
}
}
if (!this.transform) {
this.transform = {
scale: 1,
rotation: 0
};
}
this.gesture = new Gesture(this.element.nativeElement);
this.gesture.listen();
this.gesture.on('pinch', this.handleGesture.bind(this));
this.gesture.on('rotate', this.handleGesture.bind(this));
this.gesture.on('panmove', this.handleGesture.bind(this));
this.gesture.on('pinchend panend rotateend', this.gestureEnd.bind(this));
this.updateStyles();
}
private handleGesture(event: {center: {y: number, x: number}, scale: number, rotation: number}) {
if (this.updating) {
return;
}
// even without doing any kind of rotation, using 2 fingers will set event.rotation between 15 to 30 degrees
if (!this.deltaCenter) {
this.deltaCenter = {
y: this.position.y - event.center.y,
x: this.position.x - event.center.x
};
}
this.position.y = event.center.y;
this.position.x = event.center.x;
this.updateStyles(event.scale, event.rotation);
}
private gestureEnd(event: {scale: number, rotation: number}) {
if (this.updating) {
return;
}
this.updating = true;
this.position.y += this.deltaCenter.y;
this.position.x += this.deltaCenter.x;
this.transform.scale = this.transform.scale * event.scale;
this.transform.rotation = this.transform.rotation + event.rotation;
this.deltaCenter = null;
this.updateStyles();
setTimeout(() => {
this.updating = false;
}, 100);
}
private get cntEle(): HTMLElement {
if (!this.container) {
return null;
}
else if (this.container instanceof Node) {
return this.container as HTMLElement;
}
else if (this.container.getNativeElement) {
return this.container.getNativeElement();
}
return null;
}
private get containerBoundingClientRect(): ClientRect {
if (this.cntEle) {
return this.cntEle.getBoundingClientRect();
}
else if (this.container && 'top' in this.container) {
return this.container as ClientRect;
}
// bound to whole document
return {
top: 0,
left: 0,
bottom: document.documentElement.clientHeight,
right: document.documentElement.clientWidth,
width: document.documentElement.clientWidth,
height: document.documentElement.clientHeight
};
}
private get x(): number {
let x = this.position.x;
if (this.deltaCenter) {
x += this.deltaCenter.x;
}
if (x < this.containerBoundingClientRect.left) {
return this.containerBoundingClientRect.left;
}
else if (x > (this.containerBoundingClientRect.right - this.dimensions.width)) {
return this.containerBoundingClientRect.right - this.dimensions.width;
}
return x;
}
private get y(): number {
let y = this.position.y;
if (this.deltaCenter) {
y += this.deltaCenter.y;
}
if (y < this.containerBoundingClientRect.top) {
return this.containerBoundingClientRect.top;
}
if (y > (this.containerBoundingClientRect.bottom - this.dimensions.height)) {
return this.containerBoundingClientRect.bottom - this.dimensions.height;
}
return y;
}
private updateStyles(scale: number = 1, rotation: number = 0) {
this.domCtrl.write(() => {
this.renderer.setStyle(this.element.nativeElement, 'top', this.y + 'px');
this.renderer.setStyle(this.element.nativeElement, 'left', this.x + 'px');
let transforms = [];
transforms.push(`scale(${this.transform.scale * scale})`);
transforms.push(`rotateZ(${this.transform.rotation + rotation}deg)`);
this.renderer.setStyle(this.element.nativeElement, 'transform', transforms.join(' '));
});
}
}
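Update: a workaround I am experimenting with (just a sketch, I am not sure it is the proper fix) is to treat the first rotation value reported for a gesture as a baseline and only apply the delta relative to it, the same way deltaCenter is already used for the position. Something like this, with a new baselineRotation field added to the component:

// Sketch only: baselineRotation is a new field, not part of the original component.
private baselineRotation: number = null;

private handleGesture(event: {center: {y: number, x: number}, scale: number, rotation: number}) {
  if (this.updating) {
    return;
  }
  if (this.baselineRotation === null) {
    // remember the rotation reported when the two-finger gesture starts
    this.baselineRotation = event.rotation;
  }
  if (!this.deltaCenter) {
    this.deltaCenter = {
      y: this.position.y - event.center.y,
      x: this.position.x - event.center.x
    };
  }
  this.position.y = event.center.y;
  this.position.x = event.center.x;
  // only apply the rotation done since the gesture started
  this.updateStyles(event.scale, event.rotation - this.baselineRotation);
}

private gestureEnd(event: {scale: number, rotation: number}) {
  if (this.updating) {
    return;
  }
  this.updating = true;
  this.position.y += this.deltaCenter.y;
  this.position.x += this.deltaCenter.x;
  this.transform.scale = this.transform.scale * event.scale;
  this.transform.rotation = this.transform.rotation + (event.rotation - (this.baselineRotation || 0));
  this.deltaCenter = null;
  this.baselineRotation = null; // reset, like deltaCenter
  this.updateStyles();
  setTimeout(() => {
    this.updating = false;
  }, 100);
}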
In my Nuxt.js 3 project, I want to implement single-page navigation. I followed the articles below, but it didn't work. Any suggestions?
https://dev.to/dimer191996/nuxt-js-smooth-scrolling-with-hash-links-94a
https://levelup.gitconnected.com/nuxt-js-how-to-retain-scroll-position-when-returning-to-page-without-navigation-history-7f0250886d27
The correct way to do it in Nuxt.js 3 is to create a "router.scrollBehaviour.js" file in the plugins directory. Its content should be:
import { defineNuxtPlugin } from "#app";
export default defineNuxtPlugin((nuxtApp) => {
nuxtApp.$router.options.scrollBehavior = async (to, from, savedPosition) => {
if (savedPosition) {
return savedPosition;
}
const findEl = async (hash, x = 0) => {
return (
document.querySelector(hash) ||
new Promise((resolve) => {
if (x > 0) {
return resolve(document.querySelector("#app"));
}
setTimeout(() => {
resolve(findEl(hash, 1));
}, 300);
})
);
};
if (to.hash) {
const el = await findEl(to.hash);
if ("scrollBehavior" in document.documentElement.style) {
console.log("hash path hit scroll to");
return window.scrollTo({ top: el.offsetTop, behavior: "smooth" });
} else {
return window.scrollTo(0, el.offsetTop);
}
}
return { left: 0, top: 0, behavior: "smooth" };
};
})
In Nuxt.js 3 you can do this without a plugin. Simply place an "app/router.options.ts" file within the root of your project and add the following code to it:
import type { RouterConfig } from "@nuxt/schema";
export default <RouterConfig>{
scrollBehavior(to, from, savedPosition) {
if (savedPosition) {
return savedPosition;
}
if (to.hash && to.path == from.path) {
// querySelector returns null if the element is not on the page
const el = document.querySelector<HTMLElement>(to.hash);
if (el) {
return { top: el.offsetTop, left: 0, behavior: "smooth" };
}
}
return {
top: 0,
left: 0
};
}
};
I'm trying to get into WebXR programming. I'm making a simple wrapper that supports both a VR headset and a Google Cardboard-style viewer for smartphones. The VR headset is working well so far, but I have two issues when using a smartphone in landscape mode. Portrait mode works fine.
The camera breaks when going above the horizon. ** fixed **
Tilting (rolling) the phone doesn't tilt the view; instead it pans halfway to the left or right.
Codesandbox.io code: https://codesandbox.io/s/webxr-7vw5q6
Codesandbox.io app: https://7vw5q6.csb.app/
Update
I managed to fix the jumping and flipped image in landscape mode by adding the following code:
if(rotType == "YZX")
{
if(orientation_g >= 0) {
screenOrientation = -90;
} else {
screenOrientation = 90;
orientation_a = orientation_a + 180;
}
}
However, I still have the issue when rolling the device left or right.
// Variables
// ---------
let camera, renderer, scene, loop;
let container;
let controls;
let controller1, controller2;
let teleportmarker, raycaster, INTERSECTION;
let baseReferenceSpace;
let tempMatrix = new THREE.Matrix4();
let effect;
let action;
// fakeVR
let fakeVR = false;
let orientation_a, orientation_b, orientation_g;
/* landscape fix test */
let alphaOffset = 0;
let screenOrientation = 0;
/* ====================================================================================================
* Controller
* ==================================================================================================== */
class Controller {
constructor(i) {
this.controllers = this.createController(i);
this.controllerGrip = this.createControllerGrip(i);
if(i == 0) { this.controllers.name = "Right"; }
if(i == 1) { this.controllers.name = "Left"; }
this.axes = new THREE.Vector2();
this.viewDirection = camera.getWorldDirection( new THREE.Vector3() );
const scope = this;
raycaster = new THREE.Raycaster();
this.initInputListenerXR();
this.group = new THREE.Group();
this.teleportmarker = new THREE.Mesh(
new THREE.RingGeometry(0.2, 0.25, 32).rotateX(-Math.PI / 2),
new THREE.MeshBasicMaterial({color: 0xFF00FF})
);
scene.add(this.teleportmarker);
}
createController(i) {
const controllers = renderer.xr.getController(i);
if (0) {
this.group.add(controllers);
controllers.visible = true;
}
return controllers;
}
createControllerGrip(i) {
const controllerModelFactory = new THREE.XRControllerModelFactory();
const controllerGrip = renderer.xr.getControllerGrip(i);
controllerGrip.add(controllerModelFactory.createControllerModel(controllerGrip));
return controllerGrip;
}
initInputListenerXR() {
const listenerFor = name => event => {
const cb = this._eventListeners[name];
if (cb) {
const uuid = event.target.uuid;
const cont = this.controllers;
if (cont && cont.uuid === uuid) cb(idx);
}
};
this._addSelectListener('selectstart', this.onSelectStart);
this._addSelectListener('selectend', this.onSelectEnd);
this._addSelectListener('connected', function(event) {
this.controllers.add(this.buildController(event.data));
this.controllers.children[0].visible = false;
});
this._addSelectListener( 'disconnected', function () {
this.controllers.remove(this.controllers.children[0]);
});
}
_addSelectListener(eventName, listener) {
this.controllers.addEventListener(eventName, listener.bind(this));
}
onSelectStart() {
console.log(this.controllers.name + ' was pressed.');
this.controllers.userData.isSelecting = true;
}
onSelectEnd() {
console.log(this.controllers.name + ' was released.');
this.controllers.userData.isSelecting = false;
if ( INTERSECTION ) {
const offsetPosition = { x: - INTERSECTION.x, y: - INTERSECTION.y, z: - INTERSECTION.z, w: 1 };
const offsetRotation = new THREE.Quaternion();
const transform = new XRRigidTransform( offsetPosition, offsetRotation );
const teleportSpaceOffset = baseReferenceSpace.getOffsetReferenceSpace( transform );
renderer.xr.setReferenceSpace( teleportSpaceOffset );
}
}
update() {
INTERSECTION = undefined;
if ( controller1.controllers.userData.isSelecting === true ) {
tempMatrix.identity().extractRotation( controller1.controllers.matrixWorld );
raycaster.ray.origin.setFromMatrixPosition( controller1.controllers.matrixWorld );
raycaster.ray.direction.set( 0, 0, - 1 ).applyMatrix4( tempMatrix );
const intersects = raycaster.intersectObjects([floor]);
if (intersects.length > 0) {
INTERSECTION = intersects[0].point;
}
}
else if ( controller2.controllers.userData.isSelecting === true ) {
tempMatrix.identity().extractRotation( controller2.controllers.matrixWorld );
raycaster.ray.origin.setFromMatrixPosition( controller2.controllers.matrixWorld );
raycaster.ray.direction.set( 0, 0, - 1 ).applyMatrix4( tempMatrix );
const intersects = raycaster.intersectObjects([floor]);
if (intersects.length > 0) {
INTERSECTION = intersects[0].point;
}
}
if (INTERSECTION) this.teleportmarker.position.copy(INTERSECTION);
this.teleportmarker.visible = INTERSECTION !== undefined;
}
// updateArc() {
// }
buildController(data) {
switch(data.targetRayMode) {
// case 'screen':
// return;
case 'tracked-pointer':
const geometry = new THREE.BufferGeometry();
geometry.setAttribute('position', new THREE.Float32BufferAttribute([0,0,0,0,0,-1],3));
geometry.setAttribute('color', new THREE.Float32BufferAttribute([0.5,0.5,0.5,0,0,0],3));
const material = new THREE.LineBasicMaterial({vertexColors:true,blending:THREE.AdditiveBlending});
return new THREE.Line(geometry,material);
case 'gaze':
const gaze_geometry = new THREE.RingGeometry(0.02,0.04,32).translate(0,0,-1);
const gaze_material = new THREE.MeshBasicMaterial({opacity:0.5,transparent:true});
return new THREE.Mesh(gaze_geometry,gaze_material);
}
}
setAction(button, functionName) {
}
action(button, functionName) {
}
}
/* ====================================================================================================
* Resizer
* ==================================================================================================== */
class Resizer {
constructor(container, camera, renderer) {
this.setSize(container, camera, renderer);
window.addEventListener('resize', () => {
this.setSize(container, camera, renderer);
this.onResize();
});
}
onResize() {
}
setSize(container, camera, renderer) {
camera.aspect = container.clientWidth / container.clientHeight;
camera.updateProjectionMatrix();
renderer.setSize(container.clientWidth, container.clientHeight);
renderer.setPixelRatio(window.devicePixelRatio);
}
}
/* ====================================================================================================
* Camera
* ==================================================================================================== */
class Camera extends THREE.PerspectiveCamera {
constructor() {
super();
this.onCreate();
}
onCreate() {
new THREE.PerspectiveCamera(35, 1, 0.1, 100);
}
}
/* ====================================================================================================
* Model
* ==================================================================================================== */
class Model extends THREE.Group {
constructor(data) {
super();
this.modelUrl = data;
this.onCreate();
}
onCreate() {
const dracoLoader = new THREE.DRACOLoader();
dracoLoader.setDecoderPath( 'vendor/three/examples/js/libs/draco/' );
dracoLoader.setDecoderConfig({ type: 'js' });
new THREE.GLTFLoader().
setDRACOLoader( dracoLoader ).
load(this.modelUrl,
gltf => {
this.updateTransform();
this.add(gltf.scene);
console.log(this);
});
}
updateMaterials(model) {
model.traverse(child => {
child.material = new THREE.MeshNormalMaterial();
});
}
updateTransform() {
}
dispose() {
}
rotate(x, y, z) {
this.rotation.x = THREE.MathUtils.degToRad(x);
this.rotation.y = THREE.MathUtils.degToRad(y);
this.rotation.z = THREE.MathUtils.degToRad(z);
}
scale(x, y, z) {
this.scale.set(x, y, z);
}
}
/* ====================================================================================================
* Loop
* ==================================================================================================== */
const clock = new THREE.Clock();
class Loop {
constructor(camera, scene, renderer) {
this.updatables = [];
}
start() {
renderer.setAnimationLoop(() => {
this.tick();
if(fakeVR) {
effect.render( scene, camera );
} else {
renderer.render(scene, camera);
}
});
}
stop() {
renderer.setAnimationLoop(null);
}
tick() {
const delta = clock.getDelta();
controller1.update();
controller2.update();
}
}
/* ====================================================================================================
* Scene
* ==================================================================================================== */
class Scene extends THREE.Scene {
constructor() {
super();
this.onCreate();
}
onCreate() {
new THREE.Scene();
this.background = new THREE.Color('skyblue');
}
createLights() {
const ambientLight = new THREE.HemisphereLight(
'white',
'darkslategrey',
5
);
const mainLight = new THREE.DirectionalLight('white', 4);
mainLight.position.set(100, 100, 100);
return { ambientLight, mainLight };
}
}
/* ====================================================================================================
* Application
* ==================================================================================================== */
class App {
constructor(i) {
// Setup <body> CSS style
document.getElementsByTagName("body")[0].style.cssText = 'width: 100vw; height: 100vh; margin: 0; padding: 0; overflow:
hidden;';
// Create VR scene <div>
const VRdiv = document.createElement('div');
VRdiv.id = "VRScene";
VRdiv.style.cssText = 'position: absolute; width: 100vw; height: 100vh; display: block;';
document.body.insertAdjacentElement('afterbegin', VRdiv);
// Controls
controls = new Controls();
// Setup Camera
camera = new Camera();
camera.position.set(0, 0, 0);
camera.up.set(0, 1, 0);
renderer = this.createRenderer();
scene = new Scene();
loop = new Loop(camera, scene, renderer);
container = document.querySelector('#VRScene');
container.append(renderer.domElement);
const { ambientLight, mainLight } = scene.createLights();
loop.updatables.push(controls);
scene.add(ambientLight, mainLight);
const resizer = new Resizer(container, camera, renderer);
this.init(i);
this.start();
}
init(i) {
this.setupXR(i);
}
setupXR(i) {
renderer.xr.addEventListener("sessionstart", () => (baseReferenceSpace = renderer.xr.getReferenceSpace()));
document.write(`
<button id='VRIcon' class='toggleVR' style=" position: fixed; bottom: 10px; left: 10px; outline: none; border:
none; background: none; width: 60px; z-index: 10000;" onclick='` + i
+ `.toggleVR()' title='Toggle VR Mode for Mobile Devices Only'>
<svg style="width: 100%; fill: white; stroke: rgba(0,0,0,0.25);" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" x="0px"
y="0px" viewBox="0 0 62.7 52.375" enable-background="new 0 0 62.7
41.9" xml:space="preserve"><path d="M53.4,5.5h-44c-2.1,0-3.7,1.7-3.7,3.7v22.6c0,2.1,1.7,3.7,3.7,3.7h13.4c1.1,0,2.1-0.6,2.5-1.6l3-7.5c1.2-2.6,4.9-2.5,6,0.1
l2.6,7.3c0.4,1,1.4,1.7,2.5,1.7h13.9c2.1,0,3.7-1.7,3.7-3.7V9.3C57.2,7.2,55.5,5.5,53.4,5.5z
M20.4,27c-3.2,0-5.7-2.6-5.7-5.7
s2.6-5.7,5.7-5.7s5.7,2.6,5.7,5.7S23.6,27,20.4,27z
M42.4,27c-3.2,0-5.7-2.6-5.7-5.7s2.6-5.7,5.7-5.7s5.7,2.6,5.7,5.7
S45.6,27,42.4,27z"/></svg>
</button>
<svg id="VROverlay" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="none
meet" width="100vw" height="100vh" viewBox="0, 0, 2000, 1000"
style="position: absolute; top: 0; left: 0; bottom: 0; right: 0;
z-index: 9999; display: none;"><g id="svgg"><path id="path0" d="M 0 0
L 0 1000 L 1000000 1000 L 1000000 0 L 0 0 z M 500.04492 15 C
636.69612 15.006191 768.82704 43.380704 892.76562 99.34375 C 896.20268 100.89576 898.95249 103.64562 900.50391 107.08398 C 1013.1637 356.78574 1013.1657 643.21219 900.50781 892.91602 C 898.9564 896.35438 896.20466 899.10424 892.76758 900.65625 C 768.82901 956.61724 636.69909 984.9898 499.95508 985 C 363.30182 984.99379 231.171 956.61724 107.23242 900.65625 C 103.79536 899.10424 101.04557 896.35438 99.494141 892.91602 C -13.163603 643.21219 -13.163603 356.78574 99.494141 107.08398 C 101.04557 103.64562 103.79536 100.89576 107.23242 99.34375 C 231.171 43.380704 363.3009 15.0062 500.04492 15 z M 1500.0449 15 C 1636.6961 15.006191 1768.827 43.380704 1892.7656 99.34375 C 1896.2026 100.89576 1898.9525 103.64562 1900.5039 107.08398 L 1900.5078 107.08398 C 2013.1656 356.78574 2013.1656 643.21219 1900.5078 892.91602 C 1898.9564 896.35438 1896.2047 899.10424 1892.7676 900.65625 C 1768.8291 956.61724 1636.6991 984.9898 1499.9551 985 C 1363.3019 984.99379 1231.1709 956.61724 1107.2324 900.65625 C 1103.7953 899.10424 1101.0455 896.35438 1099.4941 892.91602 C 986.8364 643.21219 986.8364 356.78574 1099.4941 107.08398 C 1101.0455 103.64562 1103.7953 100.89576 1107.2324 99.34375 C 1231.1709 43.380704 1363.3009 15.0062 1500.0449 15 z " stroke="none" fill="#000000" fill-rule="evenodd"></path></g></svg>
`);
if ('xr' in navigator) {
navigator.xr.isSessionSupported('immersive-vr').then(function(supported)
{
if(supported) {
renderer.xr.enabled = true;
new THREE.VRButton(renderer);
document.body.appendChild(THREE.VRButton.createButton(renderer));
document.getElementById('VRButton').style.display = 'block';
document.getElementById('VRIcon').style.display = 'block';
}
});
}
controller1 = new Controller(0);
controller2 = new Controller(1);
scene.add(controller1.controllers);
scene.add(controller1.controllerGrip);
scene.add(controller2.controllers);
scene.add(controller2.controllerGrip);
controls = new Controls();
action = new Action();
// loop.tick();
}
start() {
loop.start();
}
stop() {
loop.stop();
}
createRenderer() {
const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setPixelRatio( window.devicePixelRatio );
renderer.physicallyCorrectLights = true;
effect = new THREE.StereoEffect( renderer );
effect.setSize( window.innerWidth, window.innerHeight );
return renderer;
}
toggleVR() {
if(DeviceMotionEvent && typeof DeviceMotionEvent.requestPermission === "function") {
DeviceMotionEvent.requestPermission();
}
if (fakeVR) {
fakeVR = false;
document.getElementById('VROverlay').style.display = 'none';
window.removeEventListener("deviceorientation", this.handleOrientation);
} else {
fakeVR = true;
window.addEventListener("deviceorientation", this.handleOrientation);
document.getElementById('VROverlay').style.display = 'block';
}
loop.stop();
loop.start();
}
handleMotion(event) {
}
handleOrientation(event) {
if (window.screen.orientation) {
screenOrientation = window.screen.orientation.angle;
} else if (typeof window.orientation === "number") {
screenOrientation = window.orientation;
} else if (window.screen.mozOrientation) {
screenOrientation = {
"portrait-primary": 0,
"portrait-secondary": 180,
"landscape-primary": 90,
"landscape-secondary": 270,
}[window.screen.mozOrientation];
}
var eyem = new THREE.Quaternion().setFromEuler(new THREE.Euler(-Math.PI / 2, 0, 0));
var d2r = Math.PI / 180;
orientation_a = event.alpha;
orientation_b = event.beta;
orientation_g = event.gamma;
var rotType = (screenOrientation === 0 || screenOrientation === 180) ? "YXZ" : "YZX";
if(rotType == "YZX")
{
if(orientation_g >= 0) {
screenOrientation = -90;
} else {
screenOrientation = 90;
orientation_a = orientation_a + 180;
}
}
var rotm = new THREE.Quaternion().setFromEuler(
new THREE.Euler(orientation_b * d2r, orientation_a * d2r, -orientation_g * d2r, rotType)
);
var devm = new THREE.Quaternion().setFromEuler(
new THREE.Euler(0, -screenOrientation * d2r, 0)
);
rotm.multiply(devm).multiply(eyem); //rot = (rot x dev) x eye
camera.quaternion.copy(rotm);
document.getElementById("Orientation_a1").innerHTML = orientation_a.toFixed(3);
document.getElementById("Orientation_b1").innerHTML = orientation_b.toFixed(3);
document.getElementById("Orientation_g1").innerHTML = orientation_g.toFixed(3);
document.getElementById("Orientation_o1").innerHTML = screenOrientation;
document.getElementById("Orientation_a2").innerHTML = orientation_a.toFixed(3);
document.getElementById("Orientation_b2").innerHTML = orientation_b.toFixed(3);
document.getElementById("Orientation_g2").innerHTML = orientation_g.toFixed(3);
document.getElementById("Orientation_o2").innerHTML = screenOrientation;
}
}
ul {
padding-inline-start: 15px;
}
li {
list-style-type: none;
overflow: hidden;
}
<script src="https://7vw5q6.csb.app/webxr.three.js"></script>
<div style="display: block; position: fixed; top: 10%; left: 10%; right: 60%; background: rgba(255,255,255,.65);">
<ul>
<li>X-axis (β): <span id="Orientation_b1">0</span><span>°</span></li>
<li>Y-axis (γ): <span id="Orientation_g1">0</span><span>°</span></li>
<li>Z-axis (α): <span id="Orientation_a1">0</span><span>°</span></li>
<li>Orientation: <span id="Orientation_o1">0</span><span>°</span></li>
</ul>
</div>
<div style="display: block; position: fixed; top: 10%; left: 60%; right: 10%; background: rgba(255,255,255,.65);">
<ul>
<li>X-axis (β): <span id="Orientation_b2">0</span><span>°</span></li>
<li>Y-axis (γ): <span id="Orientation_g2">0</span><span>°</span></li>
<li>Z-axis (α): <span id="Orientation_a2">0</span><span>°</span></li>
<li>Orientation: <span id="Orientation_o2">0</span><span>°</span></li>
</ul>
</div>
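For reference, I have been comparing this with the way three.js's DeviceOrientationControls builds the camera quaternion: as far as I can tell it always uses the 'YXZ' Euler order and compensates for the screen orientation with a rotation about the world Z axis instead of switching Euler orders. Roughly like this (alpha/beta/gamma and the screen angle in radians):

const zee = new THREE.Vector3(0, 0, 1);
const euler = new THREE.Euler();
const q0 = new THREE.Quaternion();
const q1 = new THREE.Quaternion(-Math.sqrt(0.5), 0, 0, Math.sqrt(0.5)); // -PI/2 around the x-axis

function setObjectQuaternion(quaternion, alpha, beta, gamma, orient) {
  euler.set(beta, alpha, -gamma, 'YXZ');                  // device orientation as an Euler
  quaternion.setFromEuler(euler);                         // orient the device
  quaternion.multiply(q1);                                // camera looks out the back of the device, not the top
  quaternion.multiply(q0.setFromAxisAngle(zee, -orient)); // adjust for screen orientation
}

// usage with the values from handleOrientation (d2r = Math.PI / 180):
// setObjectQuaternion(camera.quaternion, orientation_a * d2r, orientation_b * d2r, orientation_g * d2r, screenOrientation * d2r);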
I am trying to create a scene with react-three/fiber and react-three/drei. I want to use a PerspectiveCamera and be able to pan/zoom/rotate with the mouse, but I am also trying to add some buttons that can update the camera position and target in order to have different views (e.g. top view, bottom view, side view, etc.). I have achieved the latter part, and my buttons seem to be working as I update the target x, y, z and position x, y, z using props.
The only problem is that the camera is not responding to the mouse, so I only get a fixed camera position and target.
I have included all of the scene code below.
import React,{ useRef, useState, useEffect} from 'react'
import * as THREE from 'three';
import PropTypes from 'prop-types';
import { withRouter } from 'react-router-dom';
import { PerspectiveCamera, Icosahedron, OrbitControls } from '@react-three/drei'
import { Canvas, useThree } from "@react-three/fiber";
import _ from 'lodash';
function VisualizationComponent(props) {
const width = window.innerWidth;
const height = window.innerHeight;
const [controls, setControls] = useState(null);
const [threeState, setThreeState] = useState(null);
const [treeStateInitialized, setThreeStateInitialized] = useState(false);
useEffect(()=>{
if(threeState){
_.forOwn(props.objects, (value, key) => {
threeState.scene.current.add(value);
});
}
return () => {
if(controls) controls.dispose();
}
},[])
function usePrevious(value) {
const ref = useRef();
useEffect(() => {
ref.current = value;
});
return ref.current;
}
const { objects } = props
const prevState = usePrevious({objects});
const mainCamera = useRef();
useEffect(() => {
if(!threeState) return;
if (
!treeStateInitialized ||
shouldUpdateObjects(props.objects, prevState.objects)
) {
setThreeStateInitialized(true);
garbageCollectOldObjects();
addDefaultObjects();
_.forOwn(props.objects, (value, key) => {
threeState.scene.add(value);
});
}
})
const addDefaultObjects = () => {
if (threeState) {
var hemiLight = new THREE.HemisphereLight( 0xffffbb, 0x080820, 0.2 );
hemiLight.position.set( 0, 0, 1 );
threeState.scene.add( hemiLight );
}
}
const garbageCollectOldObjects = () => {
while (threeState && threeState.scene.children.length) {
const oldObject = threeState.scene.children[0];
oldObject.traverse((child) => {
if (child.geometry) {
child.geometry?.dispose();
if(child.material && Array.isArray(child.material)){
child.material.forEach(d => d.dispose());
}else{
child.material?.dispose();
}
}
});
threeState.scene.remove(oldObject);
}
}
const shouldUpdateObjects = (currentObjects, nextObjects) => {
let currentDigest = 1;
let nextDigest = 1;
_.forIn(currentObjects, (value, key) => {
currentDigest *= value.id;
});
_.forIn(nextObjects, (value, key) => {
nextDigest *= value.id;
});
return currentDigest !== nextDigest;
}
const hasAncestorWhichDisablesThreeJs = (element) => {
if (!element) return false;
let isEditable = false;
for (let i = 0; i < element.classList.length; i++) {
if (element.classList[i] === 'disable-threejs-controls') {
isEditable = true;
}
}
return isEditable ||
hasAncestorWhichDisablesThreeJs(element.parentElement);
}
const initializeScene = (state) => {
setThreeState(state);
addDefaultObjects();
}
return (
<div
id="threejs-controllers-div"
className='threejs-container'
onMouseOver={ (e) => {
const target = e.target;
if (!target || !controls) return true;
if (hasAncestorWhichDisablesThreeJs(target)) {
controls.enabled = false;
} else {
controls.enabled = true;
}
} }
>
<Canvas
className='threejs'
onCreated={ (state) => {initializeScene(state)}}
shadows={true}
gl={
{
'shadowMap.enabled' : true,
'alpha' : true
}
}
>
<PerspectiveCamera
makeDefault
ref={mainCamera}
position-x={props.cameraX || 0}
position-y={props.cameraY || -20}
position-z={props.cameraZ || 20}
up={[0, 0, 1]}
fov={ 15 }
aspect={ width / height }
near={ 1 }
far={ 10000 }
visible={false}
controls={controls}
/>
<OrbitControls
ref={controls}
camera={mainCamera.current}
domElement={document.getElementById("threejs-controllers-div")}
enabled={true}
enablePan={true}
enableZoom={true}
enableRotate={true}
target-x={props.targetX || 0}
target-y={props.targetY || 0}
target-z={props.targetZ || 0}
/>
</Canvas>
<div className='threejs-react-container'>
{ props.children }
</div>
</div>
)
}
VisualizationComponent.propTypes = {
children: PropTypes.node.isRequired,
objects: PropTypes.object.isRequired,
cameraX: PropTypes.number,
cameraY: PropTypes.number,
cameraZ: PropTypes.number,
targetX: PropTypes.number,
targetY: PropTypes.number,
targetZ: PropTypes.number,
};
export default withRouter(VisualizationComponent);
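For reference, a stripped-down version of what I am trying to achieve, based on my reading of the drei docs (makeDefault on both the camera and the controls, no manual camera/domElement wiring, and a ref plus an effect to push the button-driven target changes into the controls), would look roughly like this. Treat it as a sketch, not my actual component:

import React, { useRef, useEffect } from 'react';
import { Canvas } from '@react-three/fiber';
import { PerspectiveCamera, OrbitControls } from '@react-three/drei';

function MinimalScene({ cameraX = 0, cameraY = -20, cameraZ = 20, targetX = 0, targetY = 0, targetZ = 0 }) {
  const controlsRef = useRef();

  // push the button-driven view changes into the controls, then let the mouse take over again
  useEffect(() => {
    if (controlsRef.current) {
      controlsRef.current.target.set(targetX, targetY, targetZ);
      controlsRef.current.update();
    }
  }, [targetX, targetY, targetZ]);

  return (
    <Canvas>
      <PerspectiveCamera makeDefault position={[cameraX, cameraY, cameraZ]} up={[0, 0, 1]} fov={15} near={1} far={10000} />
      <OrbitControls ref={controlsRef} makeDefault enablePan enableZoom enableRotate />
      {/* scene objects go here */}
    </Canvas>
  );
}

export default MinimalScene;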
I am working on NativeScript, but there is a serious lack of libraries for it. I am not able to find the north direction (compass heading).
I have tried this plugin:
import * as geolocation from "nativescript-geolocation";
import { Accuracy } from "tns-core-modules/ui/enums";
test:function() {
var a = geolocation.getCurrentLocation({ desiredAccuracy: Accuracy.high, maximumAge: 5000, timeout: 20000 });
a.then( return_a => {
console.log("this------------------------------->",return_a);
//distance(return_a,);
var degree = angleFromCoordinate(return_a.latitude,return_a.longitude , 21.4225,39.8262);
console.log(degree);
this.gaugeValue = degree;//return_a.verticalAccuracy
});
}
});
function angleFromCoordinate(lat1, long1, lat2, long2) {
// convert degrees to radians before using the trig functions
var toRad = Math.PI / 180;
var dLon = (long2 - long1) * toRad;
var y = Math.sin(dLon) * Math.cos(lat2 * toRad);
var x = Math.cos(lat1 * toRad) * Math.sin(lat2 * toRad) - Math.sin(lat1 * toRad)
* Math.cos(lat2 * toRad) * Math.cos(dLon);
var brng = Math.atan2(y, x);
brng = brng * 180 / Math.PI;
brng = (brng + 360) % 360;
brng = 360 - brng; // count degrees counter-clockwise - remove to make clockwise
return brng;
}
There is also a direction property available, but it is always -1.
I am using TypeScript, with JavaScript as the template language.
Here is the solution I found. You need to do it manually.
import * as app from "tns-core-modules/application";
import { isAndroid, isIOS } from "tns-core-modules/platform";
declare const android: any;
declare const CLLocationManager: any;
export class MyClass {
private sensorUpdate: any;
private sensorManager: any;
startHeadingUpdates() {
if (this.sensorManager || this.sensorUpdate) {
return;
}
if (isIOS) {
this.sensorManager = CLLocationManager.alloc().init();
if (this.sensorManager.headingAvailable) {
this.sensorManager.startUpdatingHeading();
this.sensorUpdate = setInterval(() => {
console.log(this.sensorManager.heading.trueHeading);
}, 100);
} else {
console.log("Heading not available.");
}
return;
}
if (isAndroid) {
this.sensorManager = app.android.foregroundActivity.getSystemService(
android.content.Context.SENSOR_SERVICE
);
this.sensorUpdate = new android.hardware.SensorEventListener({
onAccuracyChanged: (sensor: any, accuracy: any) => {
// console.log(accuracy)
},
onSensorChanged: (event: any) => {
console.log(event.values[0]);
}
});
const orientationSensor = this.sensorManager.getDefaultSensor(
android.hardware.Sensor.TYPE_ORIENTATION
);
this.sensorManager.registerListener(
this.sensorUpdate,
orientationSensor,
android.hardware.SensorManager.SENSOR_DELAY_UI
);
}
}
stopUpdatingHeading() {
if (!this.sensorManager || !this.sensorUpdate) {
return;
}
if (isIOS) {
this.sensorManager.stopUpdatingHeading();
clearInterval(this.sensorUpdate);
this.sensorManager = null;
return;
}
if (isAndroid) {
this.sensorManager.unregisterListener(this.sensorUpdate);
this.sensorManager = null;
}
}
}
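Usage is straightforward, something like this (the component or page that owns the instance decides when to start and stop; the heading values currently only go to console.log):

const compass = new MyClass();
compass.startHeadingUpdates();   // starts logging the heading in degrees

// later, e.g. when navigating away from the page:
compass.stopUpdatingHeading();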
When I'm taking a photo with my Windows Phone in landscape mode, it's perfect. The problem occurs when I'm taking a photo in portrait mode.
The photo gets rotated 90 degrees. It even occurs in the simulator, as shown below.
This doesn't occur on Android or iOS, so I assume it's because Windows uses the CameraProxy.js from cordova-plugin-camera.
My entire CameraProxy.js (giant file; it does contain 'rotate' code, but the method names only mention video):
cordova.define("cordova-plugin-camera.CameraProxy", function(require, exports, module) {
var Camera = require('./Camera');
var getAppData = function () {
return Windows.Storage.ApplicationData.current;
};
var encodeToBase64String = function (buffer) {
return Windows.Security.Cryptography.CryptographicBuffer.encodeToBase64String(buffer);
};
var OptUnique = Windows.Storage.CreationCollisionOption.generateUniqueName;
var CapMSType = Windows.Media.Capture.MediaStreamType;
var webUIApp = Windows.UI.WebUI.WebUIApplication;
var fileIO = Windows.Storage.FileIO;
var pickerLocId = Windows.Storage.Pickers.PickerLocationId;
module.exports = {
// args will contain :
// ... it is an array, so be careful
// 0 quality:50,
// 1 destinationType:Camera.DestinationType.FILE_URI,
// 2 sourceType:Camera.PictureSourceType.CAMERA,
// 3 targetWidth:-1,
// 4 targetHeight:-1,
// 5 encodingType:Camera.EncodingType.JPEG,
// 6 mediaType:Camera.MediaType.PICTURE,
// 7 allowEdit:false,
// 8 correctOrientation:false,
// 9 saveToPhotoAlbum:false,
// 10 popoverOptions:null
// 11 cameraDirection:0
takePicture: function (successCallback, errorCallback, args) {
var sourceType = args[2];
if (sourceType != Camera.PictureSourceType.CAMERA) {
takePictureFromFile(successCallback, errorCallback, args);
} else {
takePictureFromCamera(successCallback, errorCallback, args);
}
}
};
// https://msdn.microsoft.com/en-us/library/windows/apps/ff462087(v=vs.105).aspx
var windowsVideoContainers = [".avi", ".flv", ".asx", ".asf", ".mov", ".mp4", ".mpg", ".rm", ".srt", ".swf", ".wmv", ".vob"];
var windowsPhoneVideoContainers = [".avi", ".3gp", ".3g2", ".wmv", ".3gp", ".3g2", ".mp4", ".m4v"];
// Default aspect ratio 1.78 (16:9 hd video standard)
var DEFAULT_ASPECT_RATIO = '1.8';
// Highest possible z-index supported across browsers. Anything used above is converted to this value.
var HIGHEST_POSSIBLE_Z_INDEX = 2147483647;
// Resize method
function resizeImage(successCallback, errorCallback, file, targetWidth, targetHeight, encodingType) {
var tempPhotoFileName = "";
var targetContentType = "";
if (encodingType == Camera.EncodingType.PNG) {
tempPhotoFileName = "camera_cordova_temp_return.png";
targetContentType = "image/png";
} else {
tempPhotoFileName = "camera_cordova_temp_return.jpg";
targetContentType = "image/jpeg";
}
var storageFolder = getAppData().localFolder;
file.copyAsync(storageFolder, file.name, Windows.Storage.NameCollisionOption.replaceExisting)
.then(function (storageFile) {
return fileIO.readBufferAsync(storageFile);
})
.then(function(buffer) {
var strBase64 = encodeToBase64String(buffer);
var imageData = "data:" + file.contentType + ";base64," + strBase64;
var image = new Image();
image.src = imageData;
image.onload = function() {
var ratio = Math.min(targetWidth / this.width, targetHeight / this.height);
var imageWidth = ratio * this.width;
var imageHeight = ratio * this.height;
var canvas = document.createElement('canvas');
var storageFileName;
canvas.width = imageWidth;
canvas.height = imageHeight;
canvas.getContext("2d").drawImage(this, 0, 0, imageWidth, imageHeight);
var fileContent = canvas.toDataURL(targetContentType).split(',')[1];
var storageFolder = getAppData().localFolder;
storageFolder.createFileAsync(tempPhotoFileName, OptUnique)
.then(function (storagefile) {
var content = Windows.Security.Cryptography.CryptographicBuffer.decodeFromBase64String(fileContent);
storageFileName = storagefile.name;
return fileIO.writeBufferAsync(storagefile, content);
})
.done(function () {
successCallback("ms-appdata:///local/" + storageFileName);
}, errorCallback);
};
})
.done(null, function(err) {
errorCallback(err);
}
);
}
function takePictureFromFile(successCallback, errorCallback, args) {
// Detect Windows Phone
if (navigator.appVersion.indexOf('Windows Phone 8.1') >= 0) {
takePictureFromFileWP(successCallback, errorCallback, args);
} else {
takePictureFromFileWindows(successCallback, errorCallback, args);
}
}
function takePictureFromFileWP(successCallback, errorCallback, args) {
var mediaType = args[6],
destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5];
var filePickerActivationHandler = function(eventArgs) {
if (eventArgs.kind === Windows.ApplicationModel.Activation.ActivationKind.pickFileContinuation) {
var file = eventArgs.files[0];
if (!file) {
errorCallback("User didn't choose a file.");
webUIApp.removeEventListener("activated", filePickerActivationHandler);
return;
}
if (destinationType == Camera.DestinationType.FILE_URI || destinationType == Camera.DestinationType.NATIVE_URI) {
if (targetHeight > 0 && targetWidth > 0) {
resizeImage(successCallback, errorCallback, file, targetWidth, targetHeight, encodingType);
}
else {
var storageFolder = getAppData().localFolder;
file.copyAsync(storageFolder, file.name, Windows.Storage.NameCollisionOption.replaceExisting).done(function (storageFile) {
if(destinationType == Camera.DestinationType.NATIVE_URI) {
successCallback("ms-appdata:///local/" + storageFile.name);
}
else {
successCallback(URL.createObjectURL(storageFile));
}
}, function () {
errorCallback("Can't access localStorage folder.");
});
}
}
else {
if (targetHeight > 0 && targetWidth > 0) {
resizeImageBase64(successCallback, errorCallback, file, targetWidth, targetHeight);
} else {
fileIO.readBufferAsync(file).done(function (buffer) {
var strBase64 =encodeToBase64String(buffer);
successCallback(strBase64);
}, errorCallback);
}
}
webUIApp.removeEventListener("activated", filePickerActivationHandler);
}
};
var fileOpenPicker = new Windows.Storage.Pickers.FileOpenPicker();
if (mediaType == Camera.MediaType.PICTURE) {
fileOpenPicker.fileTypeFilter.replaceAll([".png", ".jpg", ".jpeg"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.picturesLibrary;
}
else if (mediaType == Camera.MediaType.VIDEO) {
fileOpenPicker.fileTypeFilter.replaceAll(windowsPhoneVideoContainers);
fileOpenPicker.suggestedStartLocation = pickerLocId.videosLibrary;
}
else {
fileOpenPicker.fileTypeFilter.replaceAll(["*"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.documentsLibrary;
}
webUIApp.addEventListener("activated", filePickerActivationHandler);
fileOpenPicker.pickSingleFileAndContinue();
}
function takePictureFromFileWindows(successCallback, errorCallback, args) {
var mediaType = args[6],
destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5];
var fileOpenPicker = new Windows.Storage.Pickers.FileOpenPicker();
if (mediaType == Camera.MediaType.PICTURE) {
fileOpenPicker.fileTypeFilter.replaceAll([".png", ".jpg", ".jpeg"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.picturesLibrary;
}
else if (mediaType == Camera.MediaType.VIDEO) {
fileOpenPicker.fileTypeFilter.replaceAll(windowsVideoContainers);
fileOpenPicker.suggestedStartLocation = pickerLocId.videosLibrary;
}
else {
fileOpenPicker.fileTypeFilter.replaceAll(["*"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.documentsLibrary;
}
fileOpenPicker.pickSingleFileAsync().done(function (file) {
if (!file) {
errorCallback("User didn't choose a file.");
return;
}
if (destinationType == Camera.DestinationType.FILE_URI || destinationType == Camera.DestinationType.NATIVE_URI) {
if (targetHeight > 0 && targetWidth > 0) {
resizeImage(successCallback, errorCallback, file, targetWidth, targetHeight, encodingType);
}
else {
var storageFolder = getAppData().localFolder;
file.copyAsync(storageFolder, file.name, Windows.Storage.NameCollisionOption.replaceExisting).done(function (storageFile) {
if(destinationType == Camera.DestinationType.NATIVE_URI) {
successCallback("ms-appdata:///local/" + storageFile.name);
}
else {
successCallback(URL.createObjectURL(storageFile));
}
}, function () {
errorCallback("Can't access localStorage folder.");
});
}
}
else {
if (targetHeight > 0 && targetWidth > 0) {
resizeImageBase64(successCallback, errorCallback, file, targetWidth, targetHeight);
} else {
fileIO.readBufferAsync(file).done(function (buffer) {
var strBase64 =encodeToBase64String(buffer);
successCallback(strBase64);
}, errorCallback);
}
}
}, function () {
errorCallback("User didn't choose a file.");
});
}
function takePictureFromCamera(successCallback, errorCallback, args) {
// Check if necessary API available
if (!Windows.Media.Capture.CameraCaptureUI) {
takePictureFromCameraWP(successCallback, errorCallback, args);
} else {
takePictureFromCameraWindows(successCallback, errorCallback, args);
}
}
function takePictureFromCameraWP(successCallback, errorCallback, args) {
// We are running on WP8.1 which lacks CameraCaptureUI class
// so we need to use MediaCapture class instead and implement custom UI for camera
var destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5],
saveToPhotoAlbum = args[9],
cameraDirection = args[11],
capturePreview = null,
cameraCaptureButton = null,
cameraCancelButton = null,
capture = null,
captureSettings = null,
CaptureNS = Windows.Media.Capture,
sensor = null;
function continueVideoOnFocus() {
// if preview is defined it would be stuck, play it
if (capturePreview) {
capturePreview.play();
}
}
function startCameraPreview() {
// Search for available camera devices
// This is necessary to detect which camera (front or back) we should use
var DeviceEnum = Windows.Devices.Enumeration;
var expectedPanel = cameraDirection === 1 ? DeviceEnum.Panel.front : DeviceEnum.Panel.back;
// Add focus event handler to capture the event when user suspends the app and comes back while the preview is on
window.addEventListener("focus", continueVideoOnFocus);
DeviceEnum.DeviceInformation.findAllAsync(DeviceEnum.DeviceClass.videoCapture).then(function (devices) {
if (devices.length <= 0) {
destroyCameraPreview();
errorCallback('Camera not found');
return;
}
devices.forEach(function(currDev) {
if (currDev.enclosureLocation.panel && currDev.enclosureLocation.panel == expectedPanel) {
captureSettings.videoDeviceId = currDev.id;
}
});
captureSettings.photoCaptureSource = Windows.Media.Capture.PhotoCaptureSource.photo;
return capture.initializeAsync(captureSettings);
}).then(function () {
// create focus control if available
var VideoDeviceController = capture.videoDeviceController;
var FocusControl = VideoDeviceController.focusControl;
if (FocusControl.supported === true) {
capturePreview.addEventListener('click', function () {
// Make sure function isn't called again before previous focus is completed
if (this.getAttribute('clicked') === '1') {
return false;
} else {
this.setAttribute('clicked', '1');
}
var preset = Windows.Media.Devices.FocusPreset.autoNormal;
var parent = this;
FocusControl.setPresetAsync(preset).done(function () {
// set the clicked attribute back to '0' to allow focus again
parent.setAttribute('clicked', '0');
});
});
}
// msdn.microsoft.com/en-us/library/windows/apps/hh452807.aspx
capturePreview.msZoom = true;
capturePreview.src = URL.createObjectURL(capture);
capturePreview.play();
// Bind events to controls
sensor = Windows.Devices.Sensors.SimpleOrientationSensor.getDefault();
if (sensor !== null) {
sensor.addEventListener("orientationchanged", onOrientationChange);
}
// add click events to capture and cancel buttons
cameraCaptureButton.addEventListener('click', onCameraCaptureButtonClick);
cameraCancelButton.addEventListener('click', onCameraCancelButtonClick);
// Change default orientation
if (sensor) {
setPreviewRotation(sensor.getCurrentOrientation());
} else {
setPreviewRotation(Windows.Graphics.Display.DisplayInformation.getForCurrentView().currentOrientation);
}
// Get available aspect ratios
var aspectRatios = getAspectRatios(capture);
// Couldn't find a good ratio
if (aspectRatios.length === 0) {
destroyCameraPreview();
errorCallback('There\'s not a good aspect ratio available');
return;
}
// add elements to body
document.body.appendChild(capturePreview);
document.body.appendChild(cameraCaptureButton);
document.body.appendChild(cameraCancelButton);
if (aspectRatios.indexOf(DEFAULT_ASPECT_RATIO) > -1) {
return setAspectRatio(capture, DEFAULT_ASPECT_RATIO);
} else {
// Doesn't support 16:9 - pick next best
return setAspectRatio(capture, aspectRatios[0]);
}
}).done(null, function (err) {
destroyCameraPreview();
errorCallback('Camera initialization error ' + err);
});
}
function destroyCameraPreview() {
// If sensor is available, remove event listener
if (sensor !== null) {
sensor.removeEventListener('orientationchanged', onOrientationChange);
}
// Pause and dispose preview element
capturePreview.pause();
capturePreview.src = null;
// Remove event listeners from buttons
cameraCaptureButton.removeEventListener('click', onCameraCaptureButtonClick);
cameraCancelButton.removeEventListener('click', onCameraCancelButtonClick);
// Remove the focus event handler
window.removeEventListener("focus", continueVideoOnFocus);
// Remove elements
[capturePreview, cameraCaptureButton, cameraCancelButton].forEach(function (elem) {
if (elem /* && elem in document.body.childNodes */) {
document.body.removeChild(elem);
}
});
// Stop and dispose media capture manager
if (capture) {
capture.stopRecordAsync();
capture = null;
}
}
function getAspectRatios(capture) {
var videoDeviceController = capture.videoDeviceController;
var photoAspectRatios = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.photo).map(function (element) {
return (element.width / element.height).toFixed(1);
}).filter(function (element, index, array) { return (index === array.indexOf(element)); });
var videoAspectRatios = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoRecord).map(function (element) {
return (element.width / element.height).toFixed(1);
}).filter(function (element, index, array) { return (index === array.indexOf(element)); });
var videoPreviewAspectRatios = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoPreview).map(function (element) {
return (element.width / element.height).toFixed(1);
}).filter(function (element, index, array) { return (index === array.indexOf(element)); });
var allAspectRatios = [].concat(photoAspectRatios, videoAspectRatios, videoPreviewAspectRatios);
var aspectObj = allAspectRatios.reduce(function (map, item) {
if (!map[item]) {
map[item] = 0;
}
map[item]++;
return map;
}, {});
return Object.keys(aspectObj).filter(function (k) {
return aspectObj[k] === 3;
});
}
function setAspectRatio(capture, aspect) {
// Max photo resolution with desired aspect ratio
var videoDeviceController = capture.videoDeviceController;
var photoResolution = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.photo)
.filter(function (elem) {
return ((elem.width / elem.height).toFixed(1) === aspect);
})
.reduce(function (prop1, prop2) {
return (prop1.width * prop1.height) > (prop2.width * prop2.height) ? prop1 : prop2;
});
// Max video resolution with desired aspect ratio
var videoRecordResolution = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoRecord)
.filter(function (elem) {
return ((elem.width / elem.height).toFixed(1) === aspect);
})
.reduce(function (prop1, prop2) {
return (prop1.width * prop1.height) > (prop2.width * prop2.height) ? prop1 : prop2;
});
// Max video preview resolution with desired aspect ratio
var videoPreviewResolution = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoPreview)
.filter(function (elem) {
return ((elem.width / elem.height).toFixed(1) === aspect);
})
.reduce(function (prop1, prop2) {
return (prop1.width * prop1.height) > (prop2.width * prop2.height) ? prop1 : prop2;
});
return videoDeviceController.setMediaStreamPropertiesAsync(CapMSType.photo, photoResolution)
.then(function () {
return videoDeviceController.setMediaStreamPropertiesAsync(CapMSType.videoPreview, videoPreviewResolution);
})
.then(function () {
return videoDeviceController.setMediaStreamPropertiesAsync(CapMSType.videoRecord, videoRecordResolution);
});
}
/**
* When the phone orientation change, get the event and change camera preview rotation
* @param {Object} e - SimpleOrientationSensorOrientationChangedEventArgs
*/
function onOrientationChange(e) {
setPreviewRotation(e.orientation);
}
/**
* Converts SimpleOrientation to a VideoRotation to remove difference between camera sensor orientation
* and video orientation
* @param {number} orientation - Windows.Devices.Sensors.SimpleOrientation
* @return {number} - Windows.Media.Capture.VideoRotation
*/
function orientationToRotation(orientation) {
// VideoRotation enumerable and BitmapRotation enumerable have the same values
// https://msdn.microsoft.com/en-us/library/windows/apps/windows.media.capture.videorotation.aspx
// https://msdn.microsoft.com/en-us/library/windows/apps/windows.graphics.imaging.bitmaprotation.aspx
switch (orientation) {
// portrait
case Windows.Devices.Sensors.SimpleOrientation.notRotated:
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
// landscape
case Windows.Devices.Sensors.SimpleOrientation.rotated90DegreesCounterclockwise:
return Windows.Media.Capture.VideoRotation.none;
// portrait-flipped (not supported by WinPhone Apps)
case Windows.Devices.Sensors.SimpleOrientation.rotated180DegreesCounterclockwise:
// Falling back to portrait default
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
// landscape-flipped
case Windows.Devices.Sensors.SimpleOrientation.rotated270DegreesCounterclockwise:
return Windows.Media.Capture.VideoRotation.clockwise180Degrees;
// faceup & facedown
default:
// Falling back to portrait default
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
}
}
/**
* Rotates the current MediaCapture's video
* @param {number} orientation - Windows.Devices.Sensors.SimpleOrientation
*/
function setPreviewRotation(orientation) {
capture.setPreviewRotation(orientationToRotation(orientation));
}
try {
createCameraUI();
startCameraPreview();
} catch (ex) {
errorCallback(ex);
}
}
function takePictureFromCameraWindows(successCallback, errorCallback, args) {
var destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5],
allowCrop = !!args[7],
saveToPhotoAlbum = args[9],
WMCapture = Windows.Media.Capture,
cameraCaptureUI = new WMCapture.CameraCaptureUI();
cameraCaptureUI.photoSettings.allowCropping = allowCrop;
if (encodingType == Camera.EncodingType.PNG) {
cameraCaptureUI.photoSettings.format = WMCapture.CameraCaptureUIPhotoFormat.png;
} else {
cameraCaptureUI.photoSettings.format = WMCapture.CameraCaptureUIPhotoFormat.jpeg;
}
// decide which max pixels should be supported by targetWidth or targetHeight.
var maxRes = null;
var UIMaxRes = WMCapture.CameraCaptureUIMaxPhotoResolution;
var totalPixels = targetWidth * targetHeight;
if (targetWidth == -1 && targetHeight == -1) {
maxRes = UIMaxRes.highestAvailable;
}
// Temp fix for CB-10539
/*else if (totalPixels <= 320 * 240) {
maxRes = UIMaxRes.verySmallQvga;
}*/
else if (totalPixels <= 640 * 480) {
maxRes = UIMaxRes.smallVga;
} else if (totalPixels <= 1024 * 768) {
maxRes = UIMaxRes.mediumXga;
} else if (totalPixels <= 3 * 1000 * 1000) {
maxRes = UIMaxRes.large3M;
} else if (totalPixels <= 5 * 1000 * 1000) {
maxRes = UIMaxRes.veryLarge5M;
} else {
maxRes = UIMaxRes.highestAvailable;
}
cameraCaptureUI.photoSettings.maxResolution = maxRes;
var cameraPicture;
// define focus handler for windows phone 10.0
var savePhotoOnFocus = function () {
window.removeEventListener("focus", savePhotoOnFocus);
// call only when the app is in focus again
savePhoto(cameraPicture, {
destinationType: destinationType,
targetHeight: targetHeight,
targetWidth: targetWidth,
encodingType: encodingType,
saveToPhotoAlbum: saveToPhotoAlbum
}, successCallback, errorCallback);
};
cameraCaptureUI.captureFileAsync(WMCapture.CameraCaptureUIMode.photo).done(function (picture) {
if (!picture) {
errorCallback("User didn't capture a photo.");
// Remove the focus handler if present
window.removeEventListener("focus", savePhotoOnFocus);
return;
}
cameraPicture = picture;
// If not windows 10, call savePhoto() now. If windows 10, wait for the app to be in focus again
if (navigator.appVersion.indexOf('Windows Phone 10.0') < 0) {
savePhoto(cameraPicture, {
destinationType: destinationType,
targetHeight: targetHeight,
targetWidth: targetWidth,
encodingType: encodingType,
saveToPhotoAlbum: saveToPhotoAlbum
}, successCallback, errorCallback);
}
}, function () {
errorCallback("Fail to capture a photo.");
window.removeEventListener("focus", savePhotoOnFocus);
});
}
require("cordova/exec/proxy").add("Camera",module.exports);
});
Does anyone know how I can keep my image rotation in Windows?
In your CameraProxy.js, make changes in the orientationToRotation function (around line number 569):
case Windows.Devices.Sensors.SimpleOrientation.notRotated:
if (cameraDirection == 0) {
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
}
else {
return Windows.Media.Capture.VideoRotation.clockwise270Degrees;
}
For more info, you can refer to this solution.
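Note: in the code above, orientationToRotation sits inside takePictureFromCameraWP, so cameraDirection (read from args[11] at the top of that function; 0 = back camera, 1 = front camera) is available via the closure. A rough sketch of the modified function, where only the notRotated case changes and the other cases stay as in the original file:

function orientationToRotation(orientation) {
    switch (orientation) {
        // portrait
        case Windows.Devices.Sensors.SimpleOrientation.notRotated:
            if (cameraDirection == 0) { // back camera
                return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
            } else {                    // front camera
                return Windows.Media.Capture.VideoRotation.clockwise270Degrees;
            }
        // landscape
        case Windows.Devices.Sensors.SimpleOrientation.rotated90DegreesCounterclockwise:
            return Windows.Media.Capture.VideoRotation.none;
        // landscape-flipped
        case Windows.Devices.Sensors.SimpleOrientation.rotated270DegreesCounterclockwise:
            return Windows.Media.Capture.VideoRotation.clockwise180Degrees;
        // portrait-flipped, faceup & facedown
        default:
            return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
    }
}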