How to make a compass in NativeScript when I am unable to find the north direction? - nativescript

I am working with NativeScript, but there is a serious lack of libraries for it and I am not able to find the north direction.
I have tried the nativescript-geolocation plugin:
import * as geolocation from "nativescript-geolocation";
import { Accuracy } from "tns-core-modules/ui/enums";

test: function () {
    var a = geolocation.getCurrentLocation({ desiredAccuracy: Accuracy.high, maximumAge: 5000, timeout: 20000 });
    a.then(return_a => {
        console.log("this------------------------------->", return_a);
        // distance(return_a,);
        var degree = angleFromCoordinate(return_a.latitude, return_a.longitude, 21.4225, 39.8262);
        console.log(degree);
        this.gaugeValue = degree; // return_a.verticalAccuracy
    });
}
function angleFromCoordinate(lat1, long1, lat2, long2) {
    // the trig functions expect radians, so convert the degree inputs first
    var toRad = Math.PI / 180;
    var dLon = (long2 - long1) * toRad;
    var y = Math.sin(dLon) * Math.cos(lat2 * toRad);
    var x = Math.cos(lat1 * toRad) * Math.sin(lat2 * toRad)
        - Math.sin(lat1 * toRad) * Math.cos(lat2 * toRad) * Math.cos(dLon);
    var brng = Math.atan2(y, x);
    brng = brng * 180 / Math.PI;
    brng = (brng + 360) % 360;
    brng = 360 - brng; // count degrees counter-clockwise - remove to make clockwise
    return brng;
}
There is also a direction property available, but it is always -1.
I am using TypeScript/JavaScript as the template language.

Here is the solution I found. You need to do it manually, by reading the device's heading/orientation sensors.
import * as app from "tns-core-modules/application";
import { isAndroid, isIOS } from "tns-core-modules/platform";

declare const android: any;
declare const CLLocationManager: any;

export class MyClass {
    private sensorUpdate: any;
    private sensorManager: any;

    startHeadingUpdates() {
        if (this.sensorManager || this.sensorUpdate) {
            return;
        }

        if (isIOS) {
            // iOS: CLLocationManager provides heading updates directly
            this.sensorManager = CLLocationManager.alloc().init();
            if (this.sensorManager.headingAvailable) {
                this.sensorManager.startUpdatingHeading();
                this.sensorUpdate = setInterval(() => {
                    console.log(this.sensorManager.heading.trueHeading);
                }, 100);
            } else {
                console.log("Heading not available.");
            }
            return;
        }

        if (isAndroid) {
            // Android: listen to the orientation sensor via the SensorManager
            this.sensorManager = app.android.foregroundActivity.getSystemService(
                android.content.Context.SENSOR_SERVICE
            );
            this.sensorUpdate = new android.hardware.SensorEventListener({
                onAccuracyChanged: (sensor: any, accuracy: any) => {
                    // console.log(accuracy)
                },
                onSensorChanged: (event: any) => {
                    // values[0] is the azimuth in degrees
                    console.log(event.values[0]);
                }
            });
            const orientationSensor = this.sensorManager.getDefaultSensor(
                android.hardware.Sensor.TYPE_ORIENTATION
            );
            this.sensorManager.registerListener(
                this.sensorUpdate,
                orientationSensor,
                android.hardware.SensorManager.SENSOR_DELAY_UI
            );
        }
    }

    stopUpdatingHeading() {
        if (!this.sensorManager || !this.sensorUpdate) {
            return;
        }

        if (isIOS) {
            this.sensorManager.stopUpdatingHeading();
            clearInterval(this.sensorUpdate);
            this.sensorManager = null;
            this.sensorUpdate = null;
            return;
        }

        if (isAndroid) {
            this.sensorManager.unregisterListener(this.sensorUpdate);
            this.sensorManager = null;
            this.sensorUpdate = null;
        }
    }
}
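To actually drive a compass gauge, the sensor heading has to be combined with the bearing toward the target computed by angleFromCoordinate from the question. The sketch below is an assumption about the wiring: onHeading is a hypothetical callback you would add to MyClass so heading updates reach your component instead of console.log, and it assumes angleFromCoordinate returns a clockwise bearing (i.e. with the final 360 - brng flip removed).

// Sketch only: onHeading is a hypothetical addition, not part of the class above.
const target = { lat: 21.4225, lon: 39.8262 }; // coordinates used in the question

function needleAngle(deviceHeading, myLat, myLon) {
    // bearing from the current position to the target, measured clockwise from north
    const bearing = angleFromCoordinate(myLat, myLon, target.lat, target.lon);
    // rotate the needle by how far the target is from where the phone is pointing
    return (bearing - deviceHeading + 360) % 360;
}

// usage (hypothetical wiring):
// compass.onHeading = heading => { this.gaugeValue = needleAngle(heading, lat, lon); };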

Related

Failed import using amCharts5 and Nuxt 3

I am trying to implement amCharts5 in Nuxt 3 and am getting the following error in the console:
"Uncaught SyntaxError: The requested module '/_nuxt/#fs/Users/[...]/ui/client/node_modules/regression/dist/regression.js?v=7015616a' does not provide an export named 'default' (at RegressionSeries.ts:5:8)".
Snapshot of RegressionSeries.ts file
The RegressionSeries.ts is embedded in the amCharts module (amcharts5 > .internal > charts > stock > drawing > RegressionSeries.js). Here is the file which is the source of the error:
import { SimpleLineSeries } from "./SimpleLineSeries";
import regression from "regression";
export class RegressionSeries extends SimpleLineSeries {
constructor() {
super(...arguments);
Object.defineProperty(this, "_tag", {
enumerable: true,
configurable: true,
writable: true,
value: "regression"
});
}
_updateSegment(index) {
const diP1 = this._di[index]["p1"];
const diP2 = this._di[index]["p2"];
const series = this.get("series");
if (series && diP1 && diP2) {
const xAxis = series.get("xAxis");
let x1 = this._getXValue(diP1.get("valueX"));
let x2 = this._getXValue(diP2.get("valueX"));
const di1 = xAxis.getSeriesItem(series, xAxis.valueToPosition(x1));
const di2 = xAxis.getSeriesItem(series, xAxis.valueToPosition(x2));
const field = this.get("field") + "Y";
if (di1 && di2) {
const dataItems = series.dataItems;
let startIndex = dataItems.indexOf(di1);
let endIndex = dataItems.indexOf(di2);
let inversed = false;
if (startIndex > endIndex) {
inversed = true;
[startIndex, endIndex] = [endIndex, startIndex];
}
const points = [];
let ii = 0;
for (let i = startIndex; i <= endIndex; i++) {
const dataItem = dataItems[i];
points.push([ii, dataItem.get(field)]);
ii++;
}
const result = regression.linear(points);
const resultPoints = result.points;
const len = resultPoints.length;
if (len > 1) {
const p1 = resultPoints[0];
const p2 = resultPoints[resultPoints.length - 1];
if (p1 && p2) {
let y1 = p1[1];
let y2 = p2[1];
if (inversed) {
[y1, y2] = [y2, y1];
}
this._setContext(diP1, "valueY", y1, true);
this._setContext(diP2, "valueY", y2, true);
this._setContext(diP1, "valueX", x1);
this._setContext(diP2, "valueX", x2);
this._positionBullets(diP1);
this._positionBullets(diP2);
}
}
}
}
}
// need to override so that location would not be set
_setXLocation() {
}
}
Object.defineProperty(RegressionSeries, "className", {
enumerable: true,
configurable: true,
writable: true,
value: "RegressionSeries"
});
Object.defineProperty(RegressionSeries, "classNames", {
enumerable: true,
configurable: true,
writable: true,
value: SimpleLineSeries.classNames.concat([RegressionSeries.className])
});
//# sourceMappingURL=RegressionSeries.js.map
I added the amcharts.client.ts plugin like this:
import * as am5 from '@amcharts/amcharts5'
import * as am5xy from '@amcharts/amcharts5/xy'
import * as am5radar from '@amcharts/amcharts5/radar'
import * as am5stock from '@amcharts/amcharts5/stock'
import am5themes_Animated from '@amcharts/amcharts5/themes/Animated'
export default defineNuxtPlugin(() => {
return {
provide: {
am5,
am5xy,
am5radar,
am5stock,
am5themes_Animated,
},
}
})
and other charts have worked, so I am fairly certain that the setup is correct.
For those interested:
I had amCharts5 set up for transpiling in the Nuxt config file. Removing it resolved the error.
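In practical terms, the fix was deleting the amCharts entry from build.transpile in nuxt.config.ts. A sketch of the change, assuming the entry was listed as '@amcharts/amcharts5' (the exact string from the original config is not shown):

// nuxt.config.ts
export default defineNuxtConfig({
  build: {
    // transpile: ['@amcharts/amcharts5'], // removing this entry resolved the error
  },
})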

Drag&Drop element into canvas from HTML

I've got a fairly trivial task: drag & drop an element from a gallery into the Three.js canvas.
Nothing seemed tricky until I hit the problem that, when I drag the item and add it to the scene, I cannot update the item's coordinates until the drag & drop event finishes.
I have already played around with all the events that watch the mouse (mousemove, drag, dragover), but the element is just stuck at the initial coordinates, the ones I applied in the dragenter event.
export const params = {
devicePixelRatio: Math.min(window.devicePixelRatio, 2),
size: getSizeParams(),
grid: {
size: 20,
divisions: 20,
},
}
const itemProtos = ['Box', 'Sphere', 'Cone']
export const canvas = document.querySelector(`#canvas`)
const raycaster = new Raycaster()
const pointer = new Vector2()
const scene = new Scene()
const camera = new PerspectiveCamera(75, params.size.width / params.size.height, 0.1, 100)
camera.position.z = 5
camera.position.y = 2
/**
* Variable for Drag&Drop - just created object that's being moved around
*/
let newObjectType = null
let newObject = null
/**
* Groups
*/
export const itemGroup = new Group()
scene.add(itemGroup)
/**
* Grid
*/
export const gridHelper = new GridHelper(params.grid.size, params.grid.divisions)
scene.add(gridHelper)
/**
* Renderer
*/
const renderer = new WebGLRenderer({
canvas,
antialias: true,
})
renderer.setSize(params.size.width, params.size.height)
renderer.setPixelRatio(params.devicePixelRatio)
/**
* Resizing updates to fit the screen
*/
window.addEventListener('resize', () => {
params.size = getSizeParams()
camera.aspect = params.size.width / params.size.height
camera.updateProjectionMatrix()
renderer.setSize(params.size.width, params.size.height)
renderer.setPixelRatio(params.devicePixelRatio)
})
canvas.addEventListener('mouseenter', () => {
canvas.style.cursor = 'grab'
})
/**
* Controls
*/
const orbitControls = new OrbitControls(camera, canvas)
orbitControls.enableDamping = true
orbitControls.addEventListener('start', () => {
canvas.style.cursor = 'grabbing'
})
orbitControls.addEventListener('end', () => {
canvas.style.cursor = 'grab'
})
const tick = () => {
orbitControls.update()
requestAnimationFrame(tick)
renderer.render(scene, camera)
}
window.onload = tick
/**
* Raycaster functions
*/
const refreshMouseCoords = (event) => {
pointer.x = (event.clientX / params.size.width) * 2 - 1
pointer.y = -(event.clientY / params.size.height) * 2 + 1
}
let currentIntersect = null
let currentPick = null
canvas.addEventListener('mousemove', (event) => {
refreshMouseCoords(event)
raycaster.setFromCamera(pointer, camera)
const intersects = raycaster.intersectObjects(itemGroup.children, false)
if(intersects.length && intersects[0].object instanceof Mesh) {
if(!currentIntersect) {
canvas.style.cursor = 'all-scroll'
intersects[0].object.material.color.set('red')
console.log(`mouse enter`)
}
if(currentIntersect && currentIntersect !== intersects[0].object) {
currentIntersect.material.color.set('blue')
intersects[0].object.material.color.set('red')
}
currentIntersect = intersects[0].object
} else {
if(currentIntersect) {
console.log(`mouse leave`)
currentIntersect.material.color.set('blue')
canvas.style.cursor = 'grab'
}
currentIntersect = null
}
moveItem(currentPick)
})
/**
* Function to move items around GridHelper
*/
const moveItem = (item) => {
const intersectsGround = raycaster.intersectObject(gridHelper, false)
if(item && intersectsGround[0]) {
item.position.z = intersectsGround[0].point.z
item.position.x = intersectsGround[0].point.x
}
}
canvas.addEventListener('mousedown', (event) => {
event.preventDefault()
refreshMouseCoords(event)
raycaster.setFromCamera(pointer, camera)
const intersects = raycaster.intersectObjects(itemGroup.children, false)
if(intersects.length && intersects[0].object instanceof Mesh) {
currentPick = intersects[0].object
}
if(currentIntersect) {
canvas.style.cursor = 'all-scroll'
orbitControls.enabled = false
}
})
canvas.addEventListener('mouseup', () => {
if(currentIntersect) {
canvas.style.cursor = 'all-scroll'
orbitControls.enabled = true
}
if(currentPick) {
currentPick = null
}
})
// *** Drag&Drop *** //
const gallery = document.querySelector(`#gallery`)
setGallery(itemProtos, gallery)
canvas.addEventListener('dragenter', (event) => {
event.preventDefault()
console.log('Drag&Drop: dragenter')
refreshMouseCoords(event)
raycaster.setFromCamera(pointer, camera)
const intersects = raycaster.intersectObject(gridHelper)
if (intersects.length && newObjectType) {
add3DEl(intersects[0].point, newObjectType, itemGroup)
}
})
canvas.addEventListener('dragover', (event) => {
event.preventDefault()
event.stopPropagation()
if(newObject) {
moveItem(newObject)
}
})
function setGallery(itemProtos, gallery) {
for (let i of itemProtos) {
const el = createProto()
gallery.appendChild(el)
el.addEventListener('dragstart', function (event) {
event.dataTransfer.setData('text/plain', i)
newObjectType = i
})
el.addEventListener('dragend', function () {
newObjectType = null
newObject = null
})
}
}
function add3DEl({ x, z }, type = 'Box', scene) {
const geometry = new itemObjects[`${type}Geometry`]()
const material = new MeshBasicMaterial({ color: 0x0000ff }) // Shared material for all items
material.wireframe = true
const el = new Mesh(geometry, material)
el.position.x = x
el.position.y = type == 'Sphere' ? 1 : .5
el.position.z = z
el.userData.name = `${type}_${Date.now()}`
newObject = el
scene.add(el)
}
Here you can find a playground with what I've got so far: Playground
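A likely explanation (an assumption, not something confirmed in the question) is that mousemove does not fire while an HTML5 drag is in progress, so the pointer coordinates and the raycaster are never refreshed during the drag. A minimal sketch of a dragover handler that updates them itself, reusing refreshMouseCoords and moveItem from the code above:

canvas.addEventListener('dragover', (event) => {
  event.preventDefault()
  // dragover events still carry clientX/clientY, so recompute the pointer here
  refreshMouseCoords(event)
  raycaster.setFromCamera(pointer, camera)
  if (newObject) {
    moveItem(newObject)
  }
})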

Camera rotation and OrbitControls in react-three/fiber

I am trying to create a scene with react-three/fiber and react-three/drei. I want to use a PerspectiveCamera and be able to pan/zoom/rotate with the mouse, but I am also trying to add some buttons that can update the camera position and target in order to have different views (e.g. top view, bottom view, side view, etc.). I have achieved the latter part, and my buttons seem to be working as I update the target x, y, z and position x, y, z using props.
The only problem is that the camera is not responding to the mouse, so I only get a fixed camera position and target.
I have included all the scene code below.
import React,{ useRef, useState, useEffect} from 'react'
import * as THREE from 'three';
import PropTypes from 'prop-types';
import { withRouter } from 'react-router-dom';
import { PerspectiveCamera, Icosahedron, OrbitControls } from '@react-three/drei'
import { Canvas, useThree } from "@react-three/fiber";
function VisualizationComponent(props) {
const width = window.innerWidth;
const height = window.innerHeight;
const [controls, setControls] = useState(null);
const [threeState, setThreeState] = useState(null);
const [treeStateInitialized, setThreeStateInitialized] = useState(false);
useEffect(()=>{
if(threeState){
_.forOwn(props.objects, (value, key) => {
threeState.scene.current.add(value);
});
}
return () => {
if(controls) controls.dispose();
}
},[])
function usePrevious(value) {
const ref = useRef();
useEffect(() => {
ref.current = value;
});
return ref.current;
}
const { objects } = props
const prevState = usePrevious({objects});
const mainCamera = useRef();
useEffect(() => {
if(!threeState) return;
if (
!treeStateInitialized ||
shouldUpdateObjects(props.objects, prevState.objects)
) {
setThreeStateInitialized(true);
garbageCollectOldObjects();
addDefaultObjects();
_.forOwn(props.objects, (value, key) => {
threeState.scene.add(value);
});
}
})
const addDefaultObjects = () => {
if (threeState) {
var hemiLight = new THREE.HemisphereLight( 0xffffbb, 0x080820, 0.2 );
hemiLight.position.set( 0, 0, 1 );
threeState.scene.add( hemiLight );
}
}
const garbageCollectOldObjects = () => {
while (threeState && threeState.scene.children.length) {
const oldObject = threeState.scene.children[0];
oldObject.traverse((child) => {
if (child.geometry) {
child.geometry?.dispose();
if(child.material && Array.isArray(child.material)){
child.material.forEach(d => d.dispose());
}else{
child.material?.dispose();
}
}
});
threeState.scene.remove(oldObject);
}
}
const shouldUpdateObjects = (currentObjects,nextObjects) => {
const result = false;
let currentDigest = 1;
let nextDigest = 1;
_.forIn(currentObjects, (value, key) => {
currentDigest *= value.id;
});
_.forIn(nextObjects, (value, key) => {
nextDigest *= value.id;
});
return currentDigest !== nextDigest;
}
const hasAncestorWhichDisablesThreeJs = (element) => {
if (!element) return false;
let isEditable = false;
for (let i = 0; i < element.classList.length; i++) {
if (element.classList[i] === 'disable-threejs-controls') {
isEditable = true;
}
}
return isEditable ||
hasAncestorWhichDisablesThreeJs(element.parentElement);
}
const initializeScene = (state) => {
setThreeState(state);
addDefaultObjects();
}
return (
<div
id="threejs-controllers-div"
className='threejs-container'
onMouseOver={ (e) => {
const target = e.target;
if (!target || !controls) return true;
if (hasAncestorWhichDisablesThreeJs(target)) {
controls.enabled = false;
} else {
controls.enabled = true;
}
} }
>
<Canvas
className='threejs'
onCreated={ (state) => {initializeScene(state)}}
shadows={true}
gl={
{
'shadowMap.enabled' : true,
'alpha' : true
}
}
>
<PerspectiveCamera
makeDefault
ref={mainCamera}
position-x={props.cameraX || 0}
position-y={props.cameraY || -20}
position-z={props.cameraZ || 20}
up={[0, 0, 1]}
fov={ 15 }
aspect={ width / height }
near={ 1 }
far={ 10000 }
visible={false}
controls={controls}
/>
<OrbitControls
ref={controls}
camera={mainCamera.current}
domElement={document.getElementById("threejs-controllers-div")}
enabled={true}
enablePan={true}
enableZoom={true}
enableRotate={true}
target-x={props.targetX || 0}
target-y={props.targetY || 0}
target-z={props.targetZ || 0}
/>
</Canvas>
<div className='threejs-react-container'>
{ props.children }
</div>
</div>
)
}
VisualizationComponent.propTypes = {
children: PropTypes.node.isRequired,
objects: PropTypes.object.isRequired,
cameraX: PropTypes.number,
cameraY: PropTypes.number,
cameraZ: PropTypes.number,
targetX: PropTypes.number,
targetY: PropTypes.number,
targetZ: PropTypes.number,
};
export default withRouter(VisualizationComponent);
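As a point of comparison, with @react-three/drei it is usually enough to mark both the camera and the OrbitControls with makeDefault instead of wiring them together through refs and props; in the component above, ref={controls} points at a piece of state that starts as null, so the controls never receive a real ref. A minimal sketch under those assumptions (not the original component):

import React from 'react'
import { Canvas } from '@react-three/fiber'
import { OrbitControls, PerspectiveCamera } from '@react-three/drei'

function MinimalScene({ cameraX = 0, cameraY = -20, cameraZ = 20, targetX = 0, targetY = 0, targetZ = 0 }) {
  return (
    <Canvas>
      {/* makeDefault registers this camera as the default camera of the fiber store */}
      <PerspectiveCamera makeDefault position={[cameraX, cameraY, cameraZ]} up={[0, 0, 1]} fov={15} near={1} far={10000} />
      {/* makeDefault lets drei attach the controls to the default camera and canvas element */}
      <OrbitControls makeDefault enablePan enableZoom enableRotate target={[targetX, targetY, targetZ]} />
    </Canvas>
  )
}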

Ionic3 Gesture pinch pan and rotate together

I want to create a draggable / resizable / rotatable component in Ionic 2. The pan and pinch events are working great, but rotate has a strange behaviour: if I touch the component with two fingers, without doing any kind of rotation, I still get a rotation value of around 15 to 30 degrees, making the component rotate. I don't know if it is a known issue or something to do with the sensitivity of the screen. The code I am using for the component is this:
import { Component, ElementRef, Input, Renderer2 } from '@angular/core';
import { DomController, Gesture } from 'ionic-angular';
const defaultScale: number = 1;
const defaultRotation: number = 0;
@Component({
selector: 'draggable',
template: `
<ng-content></ng-content>
`
})
export class DraggableComponent {
@Input()
private position: {
x: number;
y: number;
};
@Input()
private dimensions: {
width: number;
height: number;
};
@Input()
private transform: {
scale: number;
rotation: number;
};
@Input()
protected container: any;
private gesture: Gesture;
private deltaCenter: {
x: number;
y: number;
} = null;
// when pinch + rotate, we will have very quick successive event when we release
private updating: boolean = false;
constructor(
private element: ElementRef,
private renderer: Renderer2,
private domCtrl: DomController
) {}
ngOnDestroy() {
this.gesture.destroy();
}
ngAfterViewInit() {
this.renderer.setStyle(this.element.nativeElement, 'position', 'absolute');
this.renderer.setStyle(this.element.nativeElement, 'transform-origin', 'center');
if (this.dimensions) {
if (this.dimensions.width) {
this.renderer.setStyle(this.element.nativeElement, 'width', this.dimensions.width + 'px');
}
if (this.dimensions.height) {
this.renderer.setStyle(this.element.nativeElement, 'height', this.dimensions.height + 'px');
}
}
if (!this.transform) {
this.transform = {
scale: 1,
rotation: 0
};
}
this.gesture = new Gesture(this.element.nativeElement);
this.gesture.listen();
this.gesture.on('pinch', this.handleGesture.bind(this));
this.gesture.on('rotate', this.handleGesture.bind(this));
this.gesture.on('panmove', this.handleGesture.bind(this));
this.gesture.on('pinchend panend rotateend', this.gestureEnd.bind(this));
this.updateStyles();
}
private handleGesture(event: {center: {y: number, x: number}, scale: number, rotation: number}) {
if (this.updating) {
return;
}
// even without doing any kind of rotation, using 2 fingers will set event.rotation between 15 to 30 degrees
if (!this.deltaCenter) {
this.deltaCenter = {
y: this.position.y - event.center.y,
x: this.position.x - event.center.x
};
}
this.position.y = event.center.y;
this.position.x = event.center.x;
this.updateStyles(event.scale, event.rotation);
}
private gestureEnd(event: {scale: number, rotation: number}) {
if (this.updating) {
return;
}
this.updating = true;
this.position.y += this.deltaCenter.y;
this.position.x += this.deltaCenter.x;
this.transform.scale = this.transform.scale * event.scale;
this.transform.rotation = this.transform.rotation + event.rotation;
this.deltaCenter = null;
this.updateStyles();
setTimeout(() => {
this.updating = false;
}, 100);
}
private get cntEle(): HTMLElement {
let cntEle: HTMLElement = null;
if (!this.container) {
return null;
}
else if (this.container instanceof Node) {
return this.container as HTMLElement;
}
else if (this.container.getNativeElement) {
return this.container.getNativeElement();
}
return null;
}
private get containerBoundingClientRect(): ClientRect {
if (this.cntEle) {
return this.cntEle.getBoundingClientRect();
}
else if (this.container && 'top' in this.container) {
return this.container as ClientRect;
}
// bound to whole document
return {
top: 0,
left: 0,
bottom: document.documentElement.clientHeight,
right: document.documentElement.clientWidth,
width: document.documentElement.clientWidth,
height: document.documentElement.clientHeight
};
}
private get x(): number {
let x = this.position.x;
if (this.deltaCenter) {
x += this.deltaCenter.x;
}
if (x < this.containerBoundingClientRect.left) {
return this.containerBoundingClientRect.left;
}
else if (x > (this.containerBoundingClientRect.right - this.dimensions.width)) {
return this.containerBoundingClientRect.right - this.dimensions.width;
}
return x
}
private get y(): number {
let y = this.position.y;
if (this.deltaCenter) {
y += this.deltaCenter.y;
}
if (y < this.containerBoundingClientRect.top) {
return this.containerBoundingClientRect.top;
}
if (y > (this.containerBoundingClientRect.bottom - this.dimensions.height)) {
return this.containerBoundingClientRect.bottom - this.dimensions.height;
}
return y;
}
private updateStyles(scale: number = 1, rotation: number = 0) {
this.domCtrl.write(() => {
this.renderer.setStyle(this.element.nativeElement, 'top', this.y + 'px');
this.renderer.setStyle(this.element.nativeElement, 'left', this.x + 'px');
let transforms = [];
transforms.push(`scale(${this.transform.scale * scale})`);
transforms.push(`rotateZ(${this.transform.rotation + rotation}deg)`);
this.renderer.setStyle(this.element.nativeElement, 'transform', transforms.join(' '));
});
}
}
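One workaround worth trying (an assumption about the gesture library's behaviour, not something stated in the question) is to treat the rotation reported on the first two-finger event as a baseline and subtract it from subsequent values, so a plain two-finger touch starts from zero. A sketch of how handleGesture could normalise the value:

// Sketch: capture the first reported rotation of each gesture as a zero offset.
private rotationOffset: number = null;

private normalizedRotation(eventRotation: number): number {
  if (this.rotationOffset === null) {
    this.rotationOffset = eventRotation; // baseline from the first two-finger event
  }
  return eventRotation - this.rotationOffset;
}

// in handleGesture: this.updateStyles(event.scale, this.normalizedRotation(event.rotation));
// in gestureEnd: use the normalised value and reset this.rotationOffset = null;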

Image gets rotated 90 degrees when taking portrait photo

When I'm taking a photo with my Windows Phone in landscape mode, it's perfect. The problem occurs when I'm taking a photo in portrait mode.
The photo gets rotated 90 degrees. It even occurs in the simulator, as shown below.
This doesn't occur on Android or iOS, so I assume it is because Windows uses the CameraProxy.js from cordova-plugin-camera.
My entire CameraProxy.js (giant file; it does contain 'rotate' stuff, but the method names only mention video):
cordova.define("cordova-plugin-camera.CameraProxy", function(require, exports, module) {
var Camera = require('./Camera');
var getAppData = function () {
return Windows.Storage.ApplicationData.current;
};
var encodeToBase64String = function (buffer) {
return Windows.Security.Cryptography.CryptographicBuffer.encodeToBase64String(buffer);
};
var OptUnique = Windows.Storage.CreationCollisionOption.generateUniqueName;
var CapMSType = Windows.Media.Capture.MediaStreamType;
var webUIApp = Windows.UI.WebUI.WebUIApplication;
var fileIO = Windows.Storage.FileIO;
var pickerLocId = Windows.Storage.Pickers.PickerLocationId;
module.exports = {
// args will contain :
// ... it is an array, so be careful
// 0 quality:50,
// 1 destinationType:Camera.DestinationType.FILE_URI,
// 2 sourceType:Camera.PictureSourceType.CAMERA,
// 3 targetWidth:-1,
// 4 targetHeight:-1,
// 5 encodingType:Camera.EncodingType.JPEG,
// 6 mediaType:Camera.MediaType.PICTURE,
// 7 allowEdit:false,
// 8 correctOrientation:false,
// 9 saveToPhotoAlbum:false,
// 10 popoverOptions:null
// 11 cameraDirection:0
takePicture: function (successCallback, errorCallback, args) {
var sourceType = args[2];
if (sourceType != Camera.PictureSourceType.CAMERA) {
takePictureFromFile(successCallback, errorCallback, args);
} else {
takePictureFromCamera(successCallback, errorCallback, args);
}
}
};
// https://msdn.microsoft.com/en-us/library/windows/apps/ff462087(v=vs.105).aspx
var windowsVideoContainers = [".avi", ".flv", ".asx", ".asf", ".mov", ".mp4", ".mpg", ".rm", ".srt", ".swf", ".wmv", ".vob"];
var windowsPhoneVideoContainers = [".avi", ".3gp", ".3g2", ".wmv", ".3gp", ".3g2", ".mp4", ".m4v"];
// Default aspect ratio 1.78 (16:9 hd video standard)
var DEFAULT_ASPECT_RATIO = '1.8';
// Highest possible z-index supported across browsers. Anything used above is converted to this value.
var HIGHEST_POSSIBLE_Z_INDEX = 2147483647;
// Resize method
function resizeImage(successCallback, errorCallback, file, targetWidth, targetHeight, encodingType) {
var tempPhotoFileName = "";
var targetContentType = "";
if (encodingType == Camera.EncodingType.PNG) {
tempPhotoFileName = "camera_cordova_temp_return.png";
targetContentType = "image/png";
} else {
tempPhotoFileName = "camera_cordova_temp_return.jpg";
targetContentType = "image/jpeg";
}
var storageFolder = getAppData().localFolder;
file.copyAsync(storageFolder, file.name, Windows.Storage.NameCollisionOption.replaceExisting)
.then(function (storageFile) {
return fileIO.readBufferAsync(storageFile);
})
.then(function(buffer) {
var strBase64 = encodeToBase64String(buffer);
var imageData = "data:" + file.contentType + ";base64," + strBase64;
var image = new Image();
image.src = imageData;
image.onload = function() {
var ratio = Math.min(targetWidth / this.width, targetHeight / this.height);
var imageWidth = ratio * this.width;
var imageHeight = ratio * this.height;
var canvas = document.createElement('canvas');
var storageFileName;
canvas.width = imageWidth;
canvas.height = imageHeight;
canvas.getContext("2d").drawImage(this, 0, 0, imageWidth, imageHeight);
var fileContent = canvas.toDataURL(targetContentType).split(',')[1];
var storageFolder = getAppData().localFolder;
storageFolder.createFileAsync(tempPhotoFileName, OptUnique)
.then(function (storagefile) {
var content = Windows.Security.Cryptography.CryptographicBuffer.decodeFromBase64String(fileContent);
storageFileName = storagefile.name;
return fileIO.writeBufferAsync(storagefile, content);
})
.done(function () {
successCallback("ms-appdata:///local/" + storageFileName);
}, errorCallback);
};
})
.done(null, function(err) {
errorCallback(err);
}
);
}
function takePictureFromFile(successCallback, errorCallback, args) {
// Detect Windows Phone
if (navigator.appVersion.indexOf('Windows Phone 8.1') >= 0) {
takePictureFromFileWP(successCallback, errorCallback, args);
} else {
takePictureFromFileWindows(successCallback, errorCallback, args);
}
}
function takePictureFromFileWP(successCallback, errorCallback, args) {
var mediaType = args[6],
destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5];
var filePickerActivationHandler = function(eventArgs) {
if (eventArgs.kind === Windows.ApplicationModel.Activation.ActivationKind.pickFileContinuation) {
var file = eventArgs.files[0];
if (!file) {
errorCallback("User didn't choose a file.");
webUIApp.removeEventListener("activated", filePickerActivationHandler);
return;
}
if (destinationType == Camera.DestinationType.FILE_URI || destinationType == Camera.DestinationType.NATIVE_URI) {
if (targetHeight > 0 && targetWidth > 0) {
resizeImage(successCallback, errorCallback, file, targetWidth, targetHeight, encodingType);
}
else {
var storageFolder = getAppData().localFolder;
file.copyAsync(storageFolder, file.name, Windows.Storage.NameCollisionOption.replaceExisting).done(function (storageFile) {
if(destinationType == Camera.DestinationType.NATIVE_URI) {
successCallback("ms-appdata:///local/" + storageFile.name);
}
else {
successCallback(URL.createObjectURL(storageFile));
}
}, function () {
errorCallback("Can't access localStorage folder.");
});
}
}
else {
if (targetHeight > 0 && targetWidth > 0) {
resizeImageBase64(successCallback, errorCallback, file, targetWidth, targetHeight);
} else {
fileIO.readBufferAsync(file).done(function (buffer) {
var strBase64 =encodeToBase64String(buffer);
successCallback(strBase64);
}, errorCallback);
}
}
webUIApp.removeEventListener("activated", filePickerActivationHandler);
}
};
var fileOpenPicker = new Windows.Storage.Pickers.FileOpenPicker();
if (mediaType == Camera.MediaType.PICTURE) {
fileOpenPicker.fileTypeFilter.replaceAll([".png", ".jpg", ".jpeg"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.picturesLibrary;
}
else if (mediaType == Camera.MediaType.VIDEO) {
fileOpenPicker.fileTypeFilter.replaceAll(windowsPhoneVideoContainers);
fileOpenPicker.suggestedStartLocation = pickerLocId.videosLibrary;
}
else {
fileOpenPicker.fileTypeFilter.replaceAll(["*"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.documentsLibrary;
}
webUIApp.addEventListener("activated", filePickerActivationHandler);
fileOpenPicker.pickSingleFileAndContinue();
}
function takePictureFromFileWindows(successCallback, errorCallback, args) {
var mediaType = args[6],
destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5];
var fileOpenPicker = new Windows.Storage.Pickers.FileOpenPicker();
if (mediaType == Camera.MediaType.PICTURE) {
fileOpenPicker.fileTypeFilter.replaceAll([".png", ".jpg", ".jpeg"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.picturesLibrary;
}
else if (mediaType == Camera.MediaType.VIDEO) {
fileOpenPicker.fileTypeFilter.replaceAll(windowsVideoContainers);
fileOpenPicker.suggestedStartLocation = pickerLocId.videosLibrary;
}
else {
fileOpenPicker.fileTypeFilter.replaceAll(["*"]);
fileOpenPicker.suggestedStartLocation = pickerLocId.documentsLibrary;
}
fileOpenPicker.pickSingleFileAsync().done(function (file) {
if (!file) {
errorCallback("User didn't choose a file.");
return;
}
if (destinationType == Camera.DestinationType.FILE_URI || destinationType == Camera.DestinationType.NATIVE_URI) {
if (targetHeight > 0 && targetWidth > 0) {
resizeImage(successCallback, errorCallback, file, targetWidth, targetHeight, encodingType);
}
else {
var storageFolder = getAppData().localFolder;
file.copyAsync(storageFolder, file.name, Windows.Storage.NameCollisionOption.replaceExisting).done(function (storageFile) {
if(destinationType == Camera.DestinationType.NATIVE_URI) {
successCallback("ms-appdata:///local/" + storageFile.name);
}
else {
successCallback(URL.createObjectURL(storageFile));
}
}, function () {
errorCallback("Can't access localStorage folder.");
});
}
}
else {
if (targetHeight > 0 && targetWidth > 0) {
resizeImageBase64(successCallback, errorCallback, file, targetWidth, targetHeight);
} else {
fileIO.readBufferAsync(file).done(function (buffer) {
var strBase64 =encodeToBase64String(buffer);
successCallback(strBase64);
}, errorCallback);
}
}
}, function () {
errorCallback("User didn't choose a file.");
});
}
function takePictureFromCamera(successCallback, errorCallback, args) {
// Check if necessary API available
if (!Windows.Media.Capture.CameraCaptureUI) {
takePictureFromCameraWP(successCallback, errorCallback, args);
} else {
takePictureFromCameraWindows(successCallback, errorCallback, args);
}
}
function takePictureFromCameraWP(successCallback, errorCallback, args) {
// We are running on WP8.1 which lacks CameraCaptureUI class
// so we need to use MediaCapture class instead and implement custom UI for camera
var destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5],
saveToPhotoAlbum = args[9],
cameraDirection = args[11],
capturePreview = null,
cameraCaptureButton = null,
cameraCancelButton = null,
capture = null,
captureSettings = null,
CaptureNS = Windows.Media.Capture,
sensor = null;
}
function continueVideoOnFocus() {
// if preview is defined it would be stuck, play it
if (capturePreview) {
capturePreview.play();
}
}
function startCameraPreview() {
// Search for available camera devices
// This is necessary to detect which camera (front or back) we should use
var DeviceEnum = Windows.Devices.Enumeration;
var expectedPanel = cameraDirection === 1 ? DeviceEnum.Panel.front : DeviceEnum.Panel.back;
// Add focus event handler to capture the event when user suspends the app and comes back while the preview is on
window.addEventListener("focus", continueVideoOnFocus);
DeviceEnum.DeviceInformation.findAllAsync(DeviceEnum.DeviceClass.videoCapture).then(function (devices) {
if (devices.length <= 0) {
destroyCameraPreview();
errorCallback('Camera not found');
return;
}
devices.forEach(function(currDev) {
if (currDev.enclosureLocation.panel && currDev.enclosureLocation.panel == expectedPanel) {
captureSettings.videoDeviceId = currDev.id;
}
});
captureSettings.photoCaptureSource = Windows.Media.Capture.PhotoCaptureSource.photo;
return capture.initializeAsync(captureSettings);
}).then(function () {
// create focus control if available
var VideoDeviceController = capture.videoDeviceController;
var FocusControl = VideoDeviceController.focusControl;
if (FocusControl.supported === true) {
capturePreview.addEventListener('click', function () {
// Make sure function isn't called again before previous focus is completed
if (this.getAttribute('clicked') === '1') {
return false;
} else {
this.setAttribute('clicked', '1');
}
var preset = Windows.Media.Devices.FocusPreset.autoNormal;
var parent = this;
FocusControl.setPresetAsync(preset).done(function () {
// set the clicked attribute back to '0' to allow focus again
parent.setAttribute('clicked', '0');
});
});
}
// msdn.microsoft.com/en-us/library/windows/apps/hh452807.aspx
capturePreview.msZoom = true;
capturePreview.src = URL.createObjectURL(capture);
capturePreview.play();
// Bind events to controls
sensor = Windows.Devices.Sensors.SimpleOrientationSensor.getDefault();
if (sensor !== null) {
sensor.addEventListener("orientationchanged", onOrientationChange);
}
// add click events to capture and cancel buttons
cameraCaptureButton.addEventListener('click', onCameraCaptureButtonClick);
cameraCancelButton.addEventListener('click', onCameraCancelButtonClick);
// Change default orientation
if (sensor) {
setPreviewRotation(sensor.getCurrentOrientation());
} else {
setPreviewRotation(Windows.Graphics.Display.DisplayInformation.getForCurrentView().currentOrientation);
}
// Get available aspect ratios
var aspectRatios = getAspectRatios(capture);
// Couldn't find a good ratio
if (aspectRatios.length === 0) {
destroyCameraPreview();
errorCallback('There\'s not a good aspect ratio available');
return;
}
// add elements to body
document.body.appendChild(capturePreview);
document.body.appendChild(cameraCaptureButton);
document.body.appendChild(cameraCancelButton);
if (aspectRatios.indexOf(DEFAULT_ASPECT_RATIO) > -1) {
return setAspectRatio(capture, DEFAULT_ASPECT_RATIO);
} else {
// Doesn't support 16:9 - pick next best
return setAspectRatio(capture, aspectRatios[0]);
}
}).done(null, function (err) {
destroyCameraPreview();
errorCallback('Camera intitialization error ' + err);
});
}
function destroyCameraPreview() {
// If sensor is available, remove event listener
if (sensor !== null) {
sensor.removeEventListener('orientationchanged', onOrientationChange);
}
// Pause and dispose preview element
capturePreview.pause();
capturePreview.src = null;
// Remove event listeners from buttons
cameraCaptureButton.removeEventListener('click', onCameraCaptureButtonClick);
cameraCancelButton.removeEventListener('click', onCameraCancelButtonClick);
// Remove the focus event handler
window.removeEventListener("focus", continueVideoOnFocus);
// Remove elements
[capturePreview, cameraCaptureButton, cameraCancelButton].forEach(function (elem) {
if (elem /* && elem in document.body.childNodes */) {
document.body.removeChild(elem);
}
});
// Stop and dispose media capture manager
if (capture) {
capture.stopRecordAsync();
capture = null;
}
}
function getAspectRatios(capture) {
var videoDeviceController = capture.videoDeviceController;
var photoAspectRatios = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.photo).map(function (element) {
return (element.width / element.height).toFixed(1);
}).filter(function (element, index, array) { return (index === array.indexOf(element)); });
var videoAspectRatios = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoRecord).map(function (element) {
return (element.width / element.height).toFixed(1);
}).filter(function (element, index, array) { return (index === array.indexOf(element)); });
var videoPreviewAspectRatios = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoPreview).map(function (element) {
return (element.width / element.height).toFixed(1);
}).filter(function (element, index, array) { return (index === array.indexOf(element)); });
var allAspectRatios = [].concat(photoAspectRatios, videoAspectRatios, videoPreviewAspectRatios);
var aspectObj = allAspectRatios.reduce(function (map, item) {
if (!map[item]) {
map[item] = 0;
}
map[item]++;
return map;
}, {});
return Object.keys(aspectObj).filter(function (k) {
return aspectObj[k] === 3;
});
}
function setAspectRatio(capture, aspect) {
// Max photo resolution with desired aspect ratio
var videoDeviceController = capture.videoDeviceController;
var photoResolution = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.photo)
.filter(function (elem) {
return ((elem.width / elem.height).toFixed(1) === aspect);
})
.reduce(function (prop1, prop2) {
return (prop1.width * prop1.height) > (prop2.width * prop2.height) ? prop1 : prop2;
});
// Max video resolution with desired aspect ratio
var videoRecordResolution = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoRecord)
.filter(function (elem) {
return ((elem.width / elem.height).toFixed(1) === aspect);
})
.reduce(function (prop1, prop2) {
return (prop1.width * prop1.height) > (prop2.width * prop2.height) ? prop1 : prop2;
});
// Max video preview resolution with desired aspect ratio
var videoPreviewResolution = videoDeviceController.getAvailableMediaStreamProperties(CapMSType.videoPreview)
.filter(function (elem) {
return ((elem.width / elem.height).toFixed(1) === aspect);
})
.reduce(function (prop1, prop2) {
return (prop1.width * prop1.height) > (prop2.width * prop2.height) ? prop1 : prop2;
});
return videoDeviceController.setMediaStreamPropertiesAsync(CapMSType.photo, photoResolution)
.then(function () {
return videoDeviceController.setMediaStreamPropertiesAsync(CapMSType.videoPreview, videoPreviewResolution);
})
.then(function () {
return videoDeviceController.setMediaStreamPropertiesAsync(CapMSType.videoRecord, videoRecordResolution);
});
}
/**
* When the phone orientation changes, get the event and change the camera preview rotation
* @param {Object} e - SimpleOrientationSensorOrientationChangedEventArgs
*/
function onOrientationChange(e) {
setPreviewRotation(e.orientation);
}
/**
* Converts SimpleOrientation to a VideoRotation to remove the difference between camera sensor orientation
* and video orientation
* @param {number} orientation - Windows.Devices.Sensors.SimpleOrientation
* @return {number} - Windows.Media.Capture.VideoRotation
*/
function orientationToRotation(orientation) {
// VideoRotation enumerable and BitmapRotation enumerable have the same values
// https://msdn.microsoft.com/en-us/library/windows/apps/windows.media.capture.videorotation.aspx
// https://msdn.microsoft.com/en-us/library/windows/apps/windows.graphics.imaging.bitmaprotation.aspx
switch (orientation) {
// portrait
case Windows.Devices.Sensors.SimpleOrientation.notRotated:
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
// landscape
case Windows.Devices.Sensors.SimpleOrientation.rotated90DegreesCounterclockwise:
return Windows.Media.Capture.VideoRotation.none;
// portrait-flipped (not supported by WinPhone Apps)
case Windows.Devices.Sensors.SimpleOrientation.rotated180DegreesCounterclockwise:
// Falling back to portrait default
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
// landscape-flipped
case Windows.Devices.Sensors.SimpleOrientation.rotated270DegreesCounterclockwise:
return Windows.Media.Capture.VideoRotation.clockwise180Degrees;
// faceup & facedown
default:
// Falling back to portrait default
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
}
}
/**
* Rotates the current MediaCapture's video
* @param {number} orientation - Windows.Devices.Sensors.SimpleOrientation
*/
function setPreviewRotation(orientation) {
capture.setPreviewRotation(orientationToRotation(orientation));
}
try {
createCameraUI();
startCameraPreview();
} catch (ex) {
errorCallback(ex);
}
}
function takePictureFromCameraWindows(successCallback, errorCallback, args) {
var destinationType = args[1],
targetWidth = args[3],
targetHeight = args[4],
encodingType = args[5],
allowCrop = !!args[7],
saveToPhotoAlbum = args[9],
WMCapture = Windows.Media.Capture,
cameraCaptureUI = new WMCapture.CameraCaptureUI();
cameraCaptureUI.photoSettings.allowCropping = allowCrop;
if (encodingType == Camera.EncodingType.PNG) {
cameraCaptureUI.photoSettings.format = WMCapture.CameraCaptureUIPhotoFormat.png;
} else {
cameraCaptureUI.photoSettings.format = WMCapture.CameraCaptureUIPhotoFormat.jpeg;
}
// decide which max pixels should be supported by targetWidth or targetHeight.
var maxRes = null;
var UIMaxRes = WMCapture.CameraCaptureUIMaxPhotoResolution;
var totalPixels = targetWidth * targetHeight;
if (targetWidth == -1 && targetHeight == -1) {
maxRes = UIMaxRes.highestAvailable;
}
// Temp fix for CB-10539
/*else if (totalPixels <= 320 * 240) {
maxRes = UIMaxRes.verySmallQvga;
}*/
else if (totalPixels <= 640 * 480) {
maxRes = UIMaxRes.smallVga;
} else if (totalPixels <= 1024 * 768) {
maxRes = UIMaxRes.mediumXga;
} else if (totalPixels <= 3 * 1000 * 1000) {
maxRes = UIMaxRes.large3M;
} else if (totalPixels <= 5 * 1000 * 1000) {
maxRes = UIMaxRes.veryLarge5M;
} else {
maxRes = UIMaxRes.highestAvailable;
}
cameraCaptureUI.photoSettings.maxResolution = maxRes;
var cameraPicture;
// define focus handler for windows phone 10.0
var savePhotoOnFocus = function () {
window.removeEventListener("focus", savePhotoOnFocus);
// call only when the app is in focus again
savePhoto(cameraPicture, {
destinationType: destinationType,
targetHeight: targetHeight,
targetWidth: targetWidth,
encodingType: encodingType,
saveToPhotoAlbum: saveToPhotoAlbum
}, successCallback, errorCallback);
};
cameraCaptureUI.captureFileAsync(WMCapture.CameraCaptureUIMode.photo).done(function (picture) {
if (!picture) {
errorCallback("User didn't capture a photo.");
// Remove the focus handler if present
window.removeEventListener("focus", savePhotoOnFocus);
return;
}
cameraPicture = picture;
// If not windows 10, call savePhoto() now. If windows 10, wait for the app to be in focus again
if (navigator.appVersion.indexOf('Windows Phone 10.0') < 0) {
savePhoto(cameraPicture, {
destinationType: destinationType,
targetHeight: targetHeight,
targetWidth: targetWidth,
encodingType: encodingType,
saveToPhotoAlbum: saveToPhotoAlbum
}, successCallback, errorCallback);
}
}, function () {
errorCallback("Fail to capture a photo.");
window.removeEventListener("focus", savePhotoOnFocus);
});
}
require("cordova/exec/proxy").add("Camera",module.exports);
});
Does anyone know how I can keep the correct image rotation on Windows?
In your CameraProxy.js, make changes to the orientationToRotation function at line number 569.
case Windows.Devices.Sensors.SimpleOrientation.notRotated:
if (cameraDirection == 0) {
return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
}
else {
return Windows.Media.Capture.VideoRotation.clockwise270Degrees;
}
For more info, you can refer to this Solution.
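For context, here is a sketch of how orientationToRotation looks with that change applied. Note that cameraDirection (args[11]) is not in scope inside this function in the stock proxy, so it has to be passed in or stored somewhere the function can read it; the extra parameter below is an assumption about how you wire that up.

function orientationToRotation(orientation, cameraDirection) {
    switch (orientation) {
        // portrait
        case Windows.Devices.Sensors.SimpleOrientation.notRotated:
            // back camera (cameraDirection 0) keeps the original rotation; the front camera needs the opposite one
            if (cameraDirection == 0) {
                return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
            }
            return Windows.Media.Capture.VideoRotation.clockwise270Degrees;
        // landscape
        case Windows.Devices.Sensors.SimpleOrientation.rotated90DegreesCounterclockwise:
            return Windows.Media.Capture.VideoRotation.none;
        // landscape-flipped
        case Windows.Devices.Sensors.SimpleOrientation.rotated270DegreesCounterclockwise:
            return Windows.Media.Capture.VideoRotation.clockwise180Degrees;
        // portrait-flipped, faceup & facedown fall back to the portrait default
        default:
            return Windows.Media.Capture.VideoRotation.clockwise90Degrees;
    }
}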
