Electron: check if a window is already open and close it before creating another one

Is it possible in Electron to detect whether a window has already been created, and to close it before creating another one?
Here is my sample code:
// video window listener
ipcMain.on("load-video-window", (event, data) => {
  // create the window
  // window.close() if window exist;
  let videoPlayer = new BrowserWindow({
    show: true,
    width: 840,
    height: 622,
    webPreferences: {
      nodeIntegration: true,
      plugins: true,
    },
  });
  if (process.env.WEBPACK_DEV_SERVER_URL) {
    // Load the url of the dev server if in development mode
    videoPlayer.loadURL(
      process.env.WEBPACK_DEV_SERVER_URL + "video_player.html"
    );
    if (!process.env.IS_TEST) videoPlayer.webContents.openDevTools();
  } else {
    videoPlayer.loadURL(`app://./video_player`);
  }
  videoPlayer.on("closed", () => {
    videoPlayer = null;
  });
  // here we can send the data to the new window
  videoPlayer.webContents.on("did-finish-load", () => {
    videoPlayer.webContents.send("data", data);
  });
});

I think this should work:
let playerWindow;

ipcMain.on("load-video-window", (event, data) => {
  if (playerWindow) {
    playerWindow.close();
  }
  playerWindow = new BrowserWindow();
});

Extending Lord Midi's code, we can also check that the window has not been destroyed and is still focusable. You can do that with the following code:
let playerWindow;

const isPlayerWindowOpened = () =>
  !playerWindow?.isDestroyed() && playerWindow?.isFocusable();

ipcMain.on("load-video-window", (event, data) => {
  if (isPlayerWindowOpened()) {
    playerWindow.close();
  }
  playerWindow = new BrowserWindow();
});
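If you would rather reuse the existing window instead of closing and recreating it, here is a minimal sketch built on the handlers above (the early return and the re-send on the "data" channel are assumptions, not part of the answers above):
// playerWindow declared at module scope as in the answers above
ipcMain.on("load-video-window", (event, data) => {
  if (playerWindow && !playerWindow.isDestroyed()) {
    // reuse the existing window: bring it to the front and hand it the new payload
    playerWindow.focus();
    playerWindow.webContents.send("data", data);
    return;
  }
  playerWindow = new BrowserWindow({ show: true, width: 840, height: 622 });
  // ...then load the URL and wire up "did-finish-load" as in the question's handler
});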

Related

Restoring Main Window on Electron JS

I created an app (for macOS) that sends a message even when the main window is hidden. Currently my main.js looks like this:
const { app, shell, BrowserWindow, dialog } = require('electron')
const path = require('path')
const electron = require('electron')

// Enable live reload for all the files inside your project directory
require('electron-reload')(__dirname);

let win = null

function createWindow () {
  win = new BrowserWindow({
    width: 420,
    height: 420,
    resizable: false,
    fullscreenable: false,
    webPreferences: {
      preload: path.join(__dirname, 'preload.js'),
      nodeIntegration: true
    }
  })

  win.on('close', (event) => {
    if (app.quitting) {
      win = null
    } else {
      event.preventDefault()
      win.hide()
    }
  })

  win.loadFile('index.html')

  win.webContents.on('new-window', function(e, url) {
    // make sure local urls stay in electron perimeter
    if ('file://' === url.substr(0, 'file://'.length)) {
      return;
    }
    // and open every other protocols on the browser
    e.preventDefault();
    shell.openExternal(url);
  });
}

app.whenReady().then(() => {
  createWindow()

  app.on('activate', () => {
    if (BrowserWindow.getAllWindows().length === 0) {
      createWindow()
    }
  })
})

app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    app.quit()
  }
})

app.on('activate', () => { win.show() })

app.on('before-quit', () => app.quitting = true)
The idea is that when the user tries to close the app, it just hides. I need to add something so that at certain hours of the day the app launches the main window again.
Is there a way to make the main window reopen, or to run code in the background, so that at a certain time of day the window is unhidden?
Sure, this will reopen your minimized window:
if (win) {
  if (win.isMinimized()) win.restore()
  win.focus()
}
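To unhide it at certain hours of the day, one simple option is a periodic check in the main process (a minimal sketch; the 9 o'clock hour and the one-minute polling interval are arbitrary assumptions):
// somewhere in main.js, after createWindow() has run
setInterval(() => {
  const hour = new Date().getHours();
  if (hour === 9 && win && !win.isVisible()) {
    win.show();  // unhide the hidden window
    win.focus();
  }
}, 60 * 1000);   // check once a minute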

Nativescript angular keep app playing in background

[nativescript] How do I keep a NativeScript-Angular app running in the background on iOS? I have a button that opens the in-app browser, and there I have a webpage that plays music. I'd like the music to carry on playing when the user leaves the application.
Here is my component.ts:
export class BrowseComponent implements OnInit {

  openLink = async () => {
    try {
      const url = ''
      if (await InAppBrowser.isAvailable()) {
        const result = await InAppBrowser.open(url, {
          // iOS Properties
          dismissButtonStyle: 'done',
          preferredBarTintColor: '#3e00be',
          preferredControlTintColor: 'white',
          readerMode: false,
          animated: true,
          modalPresentationStyle: 'fullScreen',
          modalTransitionStyle: 'crossDissolve',
          modalEnabled: true,
          enableBarCollapsing: false,
          // Android Properties
          showTitle: true,
          toolbarColor: '#3e00be',
          secondaryToolbarColor: 'white',
          enableUrlBarHiding: true,
          enableDefaultShare: true,
          forceCloseOnRedirection: false,
          // Specify full animation resource identifier(package:anim/name)
          // or only resource name(in case of animation bundled with app).
          animations: {
            startEnter: 'slide_in_right',
            startExit: 'slide_out_left',
            endEnter: 'slide_in_left',
            endExit: 'slide_out_right'
          },
          headers: {
            'my-custom-header': 'my custom header value'
          }
        })
        // alert({
        //   title: 'Response',
        //   message: JSON.stringify(result),
        //   okButtonText: 'Ok'
        // })
      }
      else {
        MyDelegate.prototype.applicationDidEnterBackground = function (application: UIApplication) {
          console.log('App is running in background');
          openUrl(url);
          AVAudioSession.sharedInstance().setCategoryWithOptionsError(
            AVAudioSessionCategoryPlayAndRecord,
            AVAudioSessionCategoryOptions.DefaultToSpeaker
          );
        };
      }
    }
    catch (error) {
      // alert({
      //   title: 'Error',
      //   message: error.message,
      //   okButtonText: 'Ok'
      // })
    }
  }
}

vue-cli-plugin-electron-builder open child component in a new window

I'm using vue-cli-plugin-electron-builder for my Electron app, and I'm trying to figure out how to open a component in a new window; in practice it is a video player. The scenario is the following:
I have a list of movie dialogues with start and end timestamps. As I click on the start timestamp of a row, a new window should open and the video player should start.
At this point I'm able to open a new window as follows:
import { remote } from "electron";

export default {
  methods: {
    startVideo(id, startTimestamp) {
      // eslint-disable-next-line no-console
      console.log(id);
      // eslint-disable-next-line no-console
      console.log(startTimestamp);
      let videoPlayerWindow = new remote.BrowserWindow({
        show: true,
        width: 1440,
        height: 900,
        webPreferences: { plugins: true }
      });
    }
  }
}
but I don't know how to inject the video-player child component in this case.
I have done something similar, so I should be able to set you on the right path here. You may just need to fill in some gaps.
You will need to create a sub-page using vue-cli.
So in your vue.config.js add:
module.exports = {
  //...
  pages: {
    index: 'src/main.js', // your main window
    video_player: 'src/video_player/main.js' // your video window
  }
}
Then, an example entry file for src/video_player/main.js (this is where you would load your video player component):
import Vue from 'vue'

Vue.config.productionTip = false

const ipc = require('electron').ipcRenderer;

new Vue({
  render: h => h('div', 'This is your video player, use a component here instead')
}).$mount('#app')

// listen for data from the main process if you want to, put this in your component if necessary
ipc.on('data', (event, data) => {
  // use data
})
Now, in your main process (src/background.js), you will need to add an event listener to open the window from the renderer process:
import { app, protocol, BrowserWindow, ipcMain } from 'electron' // import ipcMain
...
// create the listener
ipcMain.on('load-video-window', (event, data) => {
  // create the window
  let video_player = new BrowserWindow({
    show: true,
    width: 1440,
    height: 900,
    webPreferences: {
      nodeIntegration: true,
      plugins: true
    }
  })
  if (process.env.WEBPACK_DEV_SERVER_URL) {
    // Load the url of the dev server if in development mode
    video_player.loadURL(process.env.WEBPACK_DEV_SERVER_URL + 'video_player.html')
    if (!process.env.IS_TEST) video_player.webContents.openDevTools()
  } else {
    video_player.loadURL(`app://./video_player`)
  }
  video_player.on('closed', () => {
    video_player = null
  })
  // here we can send the data to the new window
  video_player.webContents.on('did-finish-load', () => {
    video_player.webContents.send('data', data);
  });
});
Finally, in your renderer process, emit the event to open the window:
import { ipcRenderer } from "electron"

export default {
  methods: {
    startVideo(id, startTimestamp) {
      // eslint-disable-next-line no-console
      console.log(id);
      // eslint-disable-next-line no-console
      console.log(startTimestamp);
      let data = { id, startTimestamp }
      // emit the event
      ipcRenderer.send('load-video-window', data);
    }
  }
}
Hopefully that helps.
Noah Klayman has done a complete example here: https://github.com/nklayman/electron-multipage-example.
You will just need to adapt it.
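As a rough sketch of what the video_player page could do with that payload (the <video> element, the resolveSource() helper, and the assumption that startTimestamp is in seconds are all hypothetical, not part of the plugin or the example repo):
// src/video_player/VideoPlayer.vue (script section)
import { ipcRenderer } from "electron";

export default {
  name: "VideoPlayer",
  mounted() {
    ipcRenderer.on("data", (event, { id, startTimestamp }) => {
      const video = this.$refs.player;      // <video ref="player"> in the template
      video.src = this.resolveSource(id);   // hypothetical helper mapping the dialogue id to a video URL
      video.currentTime = startTimestamp;   // assumes the timestamp is in seconds
      video.play();
    });
  },
  methods: {
    resolveSource(id) {
      // placeholder: look up the actual video file/URL for this dialogue id
      return `videos/${id}.mp4`;
    }
  }
};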

How use MediaFilePicker and PhotoEditor plugins in Nativescript

I am trying to use MediaFilePicker in NativeScript and, at the same time, use the PhotoEditor plugin to crop/edit the photo taken with the camera, but I can't make it work. Here is part of my code:
let options: ImagePickerOptions = {
  android: {
    isCaptureMood: true, // if true then camera will open directly.
    isNeedCamera: true,
    maxNumberFiles: 1,
    isNeedFolderList: true
  },
  ios: {
    isCaptureMood: true, // if true then camera will open directly.
    maxNumberFiles: 1
  }
};

let mediafilepicker = new Mediafilepicker();
mediafilepicker.openImagePicker(options);

mediafilepicker.on("getFiles", function (res) {
  let results = res.object.get('results');
  let result = results[0];
  let source = new imageSourceModule.ImageSource();
  source.fromAsset(result.rawData).then((source) => {
    const photoEditor = new PhotoEditor();
    photoEditor.editPhoto({
      imageSource: source,
      hiddenControls: [],
    }).then((newImage) => {
    }).catch((e) => {
      reject();
    });
  });
});
The result object of the FilePicker comes back like this:
{
  "type": "capturedImage",
  "file": {},
  "rawData": "[Circular]"
}
I believe that if the picture was taken with the camera I should use the rawData field, but I don't know which format it comes in, nor how to hand it to the PhotoEditor plugin to play with it.
Any suggestions?
Thanks!
The issue is at this line: source.fromAsset(result.rawData). Here, result.rawData is not an ImageAsset but a PHAsset. You will have to create an ImageAsset from the PHAsset and pass it on to fromAsset. So it would look like:
import { ImageAsset } from "tns-core-modules/image-asset";
....
....
imgSource.fromAsset(new ImageAsset(img)).then((source) => {
  const photoEditor = new PhotoEditor();
  console.log(source === imgSource);
  photoEditor.editPhoto({
    imageSource: source,
    hiddenControls: [],
  }).then((newImage: ImageSource) => {
    console.log('Get files...');
    // Here you can save newImage, send it to your backend or simply display it in your app
  }).catch((e) => {
    // reject();
  });
});
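Tying that back to the original getFiles handler, a minimal sketch (assuming result.rawData is the PHAsset returned for a camera capture, as described above):
import { ImageAsset } from "tns-core-modules/image-asset";

mediafilepicker.on("getFiles", function (res) {
  const results = res.object.get("results");
  const result = results[0];
  // result.rawData is a PHAsset here, so wrap it in an ImageAsset before calling fromAsset
  const imgSource = new imageSourceModule.ImageSource();
  imgSource.fromAsset(new ImageAsset(result.rawData)).then((source) => {
    const photoEditor = new PhotoEditor();
    photoEditor.editPhoto({
      imageSource: source,
      hiddenControls: [],
    }).then((newImage) => {
      // save newImage, send it to your backend, or display it in the app
    }).catch((e) => {
      console.log(e);
    });
  });
});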

nativescript image-picker not working

I'm using the image-picker plugin for NativeScript and I copied the example code to see how it works and to adapt it to my code. But the code doesn't work: when I tap the button, the gallery screen of my device is supposed to open, but nothing happens.
The code below is how I implement this.
album_list.component.ts
import { Component } from '@angular/core';
import { RouterExtensions } from 'nativescript-angular/router';

// image picker
var imagepicker = require("nativescript-imagepicker");

@Component({
  selector: 'album_list',
  moduleId: module.id,
  templateUrl: "album_list.component.html",
})
export class AlbumListComponent {
  constructor(private routerExt: RouterExtensions) {}

  ngOnInit() {
  }

  onSelectMultipleTap() {
    console.log('Im in');
    function selectImages() {
      var context = imagepicker.create({
        mode: "multiple"
      });
      context
        .authorize()
        .then(function() {
          return context.present();
        })
        .then(function(selection) {
          console.log("Selection done:");
          selection.forEach(function(selected) {
            console.log(" - " + selected.uri);
          });
        }).catch(function (e) {
          console.log(e);
        });
    }
  }
}
album_list.component.html
<StackLayout>
  <Button text="Pick Multiple Images" (tap)="onSelectMultipleTap()"> </Button>
</StackLayout>
As I said, when I tap the button in the HTML, the log from the onSelectMultipleTap function appears, but nothing else.
Thanks!!
You aren't calling selectImages(); you just declare it. Replace it with this:
onSelectMultipleTap() {
  console.log('Im in');
  function selectImages() {
    var context = imagepicker.create({
      mode: "multiple"
    });
    context
      .authorize()
      .then(function() {
        return context.present();
      })
      .then(function(selection) {
        console.log("Selection done:");
        selection.forEach(function(selected) {
          console.log(" - " + selected.uri);
        });
      }).catch(function (e) {
        console.log(e);
      });
  }
  selectImages()
}
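Alternatively, you can skip the nested declaration entirely and run the same picker logic directly in the tap handler (a sketch of the same code, just unwrapped):
onSelectMultipleTap() {
  var context = imagepicker.create({ mode: "multiple" });
  context
    .authorize()
    .then(function() {
      return context.present();
    })
    .then(function(selection) {
      selection.forEach(function(selected) {
        console.log(" - " + selected.uri);
      });
    }).catch(function (e) {
      console.log(e);
    });
}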
I had a slightly different issue that only occurred on iOS. I'm working on a NativeScript project upgraded from 4 to 6, and yes, I know NS 8 is out right now, but some of the libraries being used aren't supported on the latest NS.
My application had a modal list view that popped up to allow the user to select between camera and gallery, and once the user clicked one of the options the list modal would close. At that time the camera or gallery modal should have appeared, but it didn't. What was happening was that the closing of the first modal was somehow blocking the second modal from opening. My fix was to add a conditional async timeout in my method before calling context.present(). See my code below:
public takePicture() {
  // const options = { width: 1280, height: 720, keepAspectRatio: false, saveToGallery: false };
  const self = this;
  camera.requestPermissions()
    .then(async function () {
      // This iOS pause is needed so the camera modal popup will not be stopped by the list option modal closing
      if (isIOS) {
        await new Promise(resolve => setTimeout(() => resolve(), 1000));
      }
    })
    .then(function () {
      camera.takePicture()
        .then((imageAsset) => {
          const imagePost = new TripMessagePostModel();
          ImageSource.fromAsset(imageAsset).then((result) => {
            const time = new Date();
            imagePost.image = result.toBase64String("jpeg", 50);
            imagePost.imageFileName = `${self.userId}-${time.getTime()}.jpeg`;
            self.addPost(imagePost);
          });
        }).catch((err) => {
          console.log("Error -> " + err.message);
        });
    });
}

public selectImage() {
  const context = imagepicker.create({
    mode: "single",
  });
  const imagePost = new TripMessagePostModel();
  context
    .authorize()
    .then(async function () {
      // This iOS pause is needed so the camera modal popup will not be stopped by the list option modal closing
      if (isIOS) {
        await new Promise(resolve => setTimeout(() => resolve(), 1000));
      }
      return context.present();
    })
    .then((selection) => { // arrow function so `this` below still refers to the component
      selection.forEach(async (selected) => {
        ImageSource.fromAsset(selected).then((result) => {
          // console.log(selected.android.toString());
          const time = new Date();
          imagePost.image = result.toBase64String("jpeg", 40);
          imagePost.imageFileName = `${this.userId}-${time.getTime()}.jpeg`;
          this.addPost(imagePost);
        });
      });
    }).catch((e) => {
      console.log(e);
    });
}
