upload multiple files to sftp server using Rxjs

Good day! I would like to implement a convenient method for uploading multiple files to an SFTP server, with a callback fired for each file as it finishes uploading.
I have already implemented some code that works, but I noticed a leak that prevents the connection to the SFTP server from closing successfully after all uploads are done.
Constantly opening and closing the connection is not at all critical for me.
I tweaked the code a little bit from here: how do I send (put) multiple files using nodejs ssh2-sftp-client?
Code:
function sftpPutFiles(config, files, pathToDir, callbackStep, callbackFinish, callbackError) {
    let Client = require('ssh2-sftp-client');
    let PromisePool = require('es6-promise-pool');

    const sendFile = (config, pathFrom, pathTo) => {
        return new Promise(function (resolve, reject) {
            let sftp = new Client();
            console.log(pathFrom, pathTo);
            sftp.on('keyboard-interactive', (name, instructions, instructionsLang, prompts, finish) => { finish([config.password]); });
            sftp.connect(config).then(() => {
                return sftp.put(pathFrom, pathTo);
            }).then(() => {
                console.log('finish ' + pathTo);
                callbackStep(pathTo);
                sftp.end();
                resolve(pathTo);
            }).catch((err) => {
                console.log(err, 'catch error');
                callbackError(err);
            });
        });
    };

    // Create a pool: at most 10 uploads in flight at once.
    let indexFile = 0;
    let pool = new PromisePool(() => {
        while (indexFile < files.length) {
            let file = files[indexFile];
            indexFile++;
            return sendFile(config, file.path, `${pathToDir}/${file.name}`);
        }
        return null;
    }, 10);

    pool.start().then(function () {
        console.log({"message": "OK"}); // res.send('{"message":"OK"}');
        callbackFinish();
    });
}
Usage:
input.addEventListener('change', function (e) {
    e.preventDefault();
    sftpPutFiles(
        {host: '192.168.2.201', username: 'crestron', password: 'ehAdmin'},
        this.files,
        `./Program01/test/`,
        pathTo => {
            let tr = document.createElement('tr');
            let bodyTable = document.querySelector('.body');
            tr.innerHTML = `<td>${bodyTable.children.length + 1}</td><td>${pathTo}</td><td>OK</td>`;
            bodyTable.appendChild(tr);
        }, () => {
            alert('All files have been uploaded');
        },
        err => {
            alert('Error: ' + err);
        }
    );
});
If there is an error uploading a file to the SFTP server, the connection does not close and I cannot reconnect when I open the custom console. I would like to port the code to RxJS for better maintainability, and I think that would let me solve the connection-closing problem and keep the application responsive.

Make sure you're using the latest version of ssh2-sftp-client - there have been a fair number of updates recently, including fixes to handle errors more consistently and to ensure connections are closed correctly (v4.1.0 at the time of writing).
You are using sftp.on('keyboard-interactive', ...). Nothing in the module emits events of this type, so this listener will never fire.
If you just want to upload files, use the fastPut() method; it is much faster. Make sure the destination path includes the remote file name and not just the remote directory.
Have a look at Promise.all(). You could use it instead of the promise pool, and I think it would be a lot cleaner. Something like (untested):
const path = require('path');
const Client = require('ssh2-sftp-client');

let localPath = '/path/to/src-dir';
let remotePath = '/path/to/dst-dir';
let files = ['file1.txt', 'file2.txt', 'file3.txt'];
let client = new Client();

client.connect(config)
    .then(() => {
        let promises = [];
        files.forEach(f => {
            let from = path.join(localPath, f);
            let to = path.join(remotePath, f);
            promises.push(client.fastPut(from, to));
        });
        return Promise.all(promises);
    }).then(res => { // res is an array of resolved promise results
        client.end();
    }).catch(err => {
        // deal with the error
    });
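Since the original goal was an RxJS version: below is a minimal sketch of the same upload pipeline with bounded concurrency. It is untested and assumes RxJS 6+ and ssh2-sftp-client v4+; the function name sftpPutFiles$ and the concurrency parameter are introduced here for illustration.
const { from, defer } = require('rxjs');
const { mergeMap, toArray, finalize } = require('rxjs/operators');
const Client = require('ssh2-sftp-client');

function sftpPutFiles$(config, files, remoteDir, concurrency = 4) {
    const client = new Client();
    return from(client.connect(config)).pipe(
        // once connected, upload each file, at most `concurrency` at a time
        mergeMap(() => from(files).pipe(
            mergeMap(file => defer(() =>
                client.fastPut(file.path, `${remoteDir}/${file.name}`)
            ), concurrency),
            toArray()
        )),
        // close the connection whether the stream completes or errors
        finalize(() => client.end())
    );
}
Because finalize() runs on completion, error, and unsubscribe alike, the connection is closed even when an upload fails - the behaviour missing from the original code.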

Related

IndexedDB breaks in Firefox after trying to save autoIncremented Blob

I am trying to implement Blob storage via IndexedDB for long Media recordings.
My code works fine in Chrome and Edge (not tested in Safari yet) - but won't do anything in Firefox. There are no errors, it just doesn't try to fulfill my requests past the initial DB connection (which is successful). Intuitively, it seems that the processing is blocked by something, but I don't have anything in my code that would be blocking.
A simplified version of the code (without the heavy logging and excessive error checks I added while debugging):
const dbName = 'recording'
const storeValue = 'blobs'
let connection = null
const handler = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB

function connect() {
    return new Promise((resolve, reject) => {
        const request = handler.open(dbName)
        request.onupgradeneeded = (event) => {
            const db = event.target.result
            if (db.objectStoreNames.contains(storeValue)) {
                db.deleteObjectStore(storeValue)
            }
            db.createObjectStore(storeValue, {
                keyPath: 'id',
                autoIncrement: true,
            })
        }
        request.onerror = () => {
            reject()
        }
        request.onsuccess = () => {
            connection = request.result
            connection.onerror = () => {
                connection = null
            }
            connection.onclose = () => {
                connection = null
            }
            resolve()
        }
    })
}

async function saveChunk(chunk) {
    if (!connection) await connect()
    return new Promise((resolve, reject) => {
        const store = connection.transaction(
            storeValue,
            'readwrite'
        ).objectStore(storeValue)
        const req = store.add(chunk)
        req.onsuccess = () => {
            console.warn('DONE!') // Fires in Chrome and Edge - not in Firefox
            resolve(req.result)
        }
        req.onerror = () => {
            reject()
        }
        req.transaction.oncomplete = () => {
            console.warn('DONE!') // Fires in Chrome and Edge - not in Firefox
        }
    })
}

// ... on blob available
await saveChunk(blob)
What I tried so far:
close any other browser windows, anything that could count as an "open connection" that might be blocking execution
refresh the Firefox profile
let my colleague test the code on his own machine => same result
Additional information that might be useful:
Running in a Nuxt 2.15.8 dev environment (localhost:3000). The code is used in the component as a mixin. The project is rather large and uses a bunch of different browser APIs, so there might be some kind of collision?! This is the only place where we use IndexedDB, though, so getting to the bottom of this without any errors being thrown seems almost impossible.
Edit:
When I create a brand new database, there is a brief window in which transactions complete fine, but after some time has passed/something has triggered, it goes back to being queued indefinitely.
I found this out this morning when I had this structure:
...
clearDatabase() {
    // get the store
    const req = store.clear()
    req.transaction.oncomplete = () => console.log('all good!')
}
await this.connect()
await this.clearDatabase()
'all good!' fired, but any subsequent requests were broken, same as before.
On page reload, even the clearDatabase request was broken again.
Something breaks with ongoing usage.
Edit2:
It's clearly connected to saving a Blob instance without an id when the autoIncrement option is set. Not only does it fail silently, it basically corrupts the DB completely. If I manually assign an incrementing id to the Blob record, it works! If I leave out the id field for a regular simple object, it also works! Does anyone know about this? Saving Blobs feels like a common use case, so I'd expect this to have been found already.
I've concluded, unless proven otherwise, that it's a Firefox bug and opened a ticket on Bugzilla.
This happens with Blobs but might also be true for other instances. If you find yourself in the same situation, there is a workaround: don't rely on autoIncrement, and assign IDs manually before saving records to the DB.
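A minimal sketch of that workaround, reusing connection and storeValue from the snippet above; the wrapper object and the nextId counter are illustrative (in real code you would persist or derive the counter):
let nextId = 1 // illustrative; persist or derive this counter in real code

function saveChunkWithManualId(chunk) {
    return new Promise((resolve, reject) => {
        const store = connection
            .transaction(storeValue, 'readwrite')
            .objectStore(storeValue)
        // set the keyPath field explicitly instead of relying on autoIncrement
        const req = store.add({ id: nextId++, blob: chunk })
        req.onsuccess = () => resolve(req.result)
        req.onerror = () => reject(req.error)
    })
}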

Using puppeteer to screenshot local files, but it's still making web requests?

I am using Puppeteer to take screenshots of a web page for my company. I need to test multiple people's accounts, which means visiting the page multiple times (150 times in this case). This results in our firewall kicking me out for making too many requests.
My solution is to fetch the contents of the page once and save them locally. Then I run Puppeteer against that local file, overriding the function used to get data from our servers so that it instead uses data already loaded into Node from a CSV.
All of this works, but it looks like it's still making requests to our servers.
I tried giving it a userDataDir so it could cache any resources. In theory, if it's loading from file://, the resources are cached, and there are no Ajax requests, it shouldn't be making any further requests, right?
I also tried installing a debugging proxy, but since the site is HTTPS I can't see what it's trying to request.
This is how I start it:
puppeteer.launch({
    userDataDir: "temp/"
})
    .then(browser => {
        next(browser, links);
    })
    .catch(error => {
        cb(error, null);
    });
next will iterate through any links it needs to visit.
This part saves the page locally:
if (this._linkCache[baseLink] === undefined) {
    fetch(baseLink)
        .then(resp => resp.text())
        .then(contents => {
            fs.writeFile(fullFileName, contents, 'utf8', err => {
                if (err) {
                    cb(err, null);
                } else {
                    this._linkCache[baseLink] = fileUrl;
                    gotoPage(fileUrl);
                }
            });
        })
        .catch(error => {
            cb(error, null);
        });
}
// Go to the cached version
else {
    gotoPage(this._linkCache[baseLink] + queryParams);
}
And this gets the screenshots:
const gotoPage = async (url) => {
    try {
        const page = await browser.newPage();
        // Override 'fetchAccountData' function
        await page.evaluateOnNewDocument(testData => {
            window["fetchAccountData"] = (cb: (err: any, data: any) => void) => {
                cb(null, testData);
            };
        }, data);
        // Go to page and get screenshot
        await page.goto(url);
        const screenie = `${outputPath}${uuid()}.png`;
        await page.screenshot({ fullPage: true, path: screenie, type: "png" });
        pageHtml.push(`<img src="file://${screenie}" />`);
        next(browser, rest);
    } catch (e) {
        cb(e, null);
    }
};
I was hoping this would only make a few requests at the beginning, while it saves the HTML locally and caches all the resources, but it seems to make a request for every link.
How can I stop it?
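One way to see, and then block, the stray requests is Puppeteer's request-interception API. A minimal sketch; the file:// and data: allow-list is an assumption to adjust for your setup:
const page = await browser.newPage();
await page.setRequestInterception(true);
page.on('request', request => {
    const url = request.url();
    if (url.startsWith('file://') || url.startsWith('data:')) {
        request.continue(); // local resources are fine
    } else {
        console.log('blocked:', url); // reveals what was still being fetched
        request.abort();
    }
});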

Feathers custom service response works in console but getting empty array on API response

I have a custom service that fetches data from multiple Mongoose models and builds an array from that data, which it hands back to the API.
Everything works well: I can log the new object array 'result.rolePermissions'; you can see that in the code below.
My problem is that when I check the data on the frontend side, I am getting a blank array as the response.
I have double-checked whether I missed a variable or used a different API call, but no: I am calling exactly this custom service function, yet for some reason I can't get the data.
Is it because of the usage of await/Promise on 'result.roles' and 'result.permissions'? Did I use some bad practices?
async find(params) {
    let result = {};
    result.roles = [];
    result.rolePermissions = [];
    result.permissionsModules = [];

    const permissionGroupModel = this.app.service('permission-groups').Model;
    const rolesModel = this.app.service('roles').Model;
    const permissionsModel = this.app.service('permissions').Model;

    // all permission-groups come through fine
    result.permissionsModules = await new Promise(function (resolve, reject) {
        permissionGroupModel.find().populate('permissions').exec((err, response) => {
            resolve(response);
        })
    });

    // all roles come through fine
    result.roles = await new Promise(function (resolve, reject) {
        rolesModel.find().then(response => {
            resolve(response);
        })
    });

    // all permissions come through fine
    result.permissions = await new Promise(function (resolve, reject) {
        permissionsModel.find().then(response => {
            resolve(response);
        })
    });

    // here is where I am having trouble..
    result.rolePermissions = await new Promise(function (resolve, reject) {
        let rolePerms = [];
        result.roles.forEach(role => {
            rolePerms[role.name] = {};
            result.permissions.forEach(permission => {
                rolePerms[role.name][permission.name] = (role.permissions.indexOf(permission._id) > -1) ? true : false;
            });
        });
        resolve(rolePerms);
    });

    // I have logged result.rolePermissions and it shows the correct data,
    // but it arrives as an empty array in the API call response
    console.log('result.rolePermissions')
    console.log(result.rolePermissions)

    return {
        status: true,
        test: data.rolePermissions,
        data: result
    }
}
There are a couple of things that can be done to improve this code. First, anything of the form...
x = await new Promise(function (resolve, reject) {
    promiseReturningFunction(params).then(response => {
        resolve(response);
    });
});
...can be rewritten as...
x = await promiseReturningFunction(params);
...because, when you already have a promise-returning function, there's no need to wrap it in another promise (with new Promise).
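Applied to the code above, the three lookups collapse to something like this (a sketch; it assumes the Mongoose queries are awaited directly, which works because they are thenable when no callback is passed):
result.permissionsModules = await permissionGroupModel.find().populate('permissions').exec();
result.roles = await rolesModel.find();
result.permissions = await permissionsModel.find();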
The code where you indicate trouble doesn't appear to need a promise at all...
// here is where I was having trouble..
result.rolePermissions = {};
result.roles.forEach(role => {
    result.rolePermissions[role.name] = {};
    result.permissions.forEach(permission => {
        result.rolePermissions[role.name][permission.name] = role.permissions.indexOf(permission._id) > -1;
    });
});
Finally, I'm puzzled by the use of data in this code...
return {
    status: true,
    test: data.rolePermissions,
    data: result
}
Is it possible that, while your use of promises was non-standard and confusing, the code was pretty much working, and the real problem comes from returning data.rolePermissions rather than the object you just built and logged (result.rolePermissions)?
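If so, the fix would simply be (sketch):
return {
    status: true,
    test: result.rolePermissions,
    data: result
}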
The issue was fixed when I restarted the Feathers service. I am not deleting the question because the other answer may share some useful knowledge.

scope management in RxJs

Here, I'm mixing both RxJS and promise-mysql.
const mysql = require('promise-mysql');
const Rx = require('rxjs/Rx');
I establish a connection with this:
var conn;
var queryString = 'select product_id, set_complete_in from mi_product limit 2';
const obs = Rx.Observable.fromPromise(mysql.createConnection({
    host: 'somehost',
    user: 'someuser',
    password: 'some password',
    database: 'someday'
}));
At this point obs is an observable wrapping a promise that may or may not have resolved yet.
Here I set up a series of queries (basically a fan-out of queries run in parallel):
const responseStream = obs
    .flatMap(connection => {
        conn = connection;
        // submitArray is an array of IDs
        var requestStream = submitArray.map(id => Rx.Observable.fromPromise(connection.query(queryString, [id])));
        // run these in parallel
        return Rx.Observable.forkJoin(requestStream);
    });

// it's an array of arrays, so flatten..
responseStream.subscribe(resp => {
        let returnValue = resp.reduce((accum, arr) => { return accum.concat(arr); }, []);
        console.log('returnValue is ', JSON.stringify(returnValue));
        // call back here to return the data
    },
    err => { console.log('!!!!!!err is ', err); },
    () => { console.log('connection end!'); conn.end(); });
This all works, but my question is how to handle the connection. As you can see, I define the connection at a higher scope so it's available in the subscribe callback. It doesn't seem very functional to call conn.end() in the subscription; it seems like I should be handling it within the responseStream definition. Does this seem correct?
Your code is a little hard to follow, but I think your problem is that you need access to the connection from your promise, and you won't have access to it in a completion callback without mixing up scopes. If I were implementing this feature, I would solve it by ditching the fromPromise usage and creating my own stream; when subscribers are done with the stream, I'd clean up the connection.
Rx.Observable.create(observer => {
    const createConnection = mysql.createConnection({
        host: 'somehost',
        user: 'someuser',
        password: 'some password',
        database: 'someday'
    });
    /* forward the connection and any errors to the observer
       (wrapped in arrow functions so `this` stays bound) */
    createConnection.then(conn => observer.next(conn), err => observer.error(err));
    return function cleanUp() {
        /* end the connection when we are done */
        createConnection.then(connection => connection.end());
    };
});
This ensures that any "connection close" operation is handled by the source stream, a pattern that is common and advantageous when working with observables.
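Used something like this (a sketch; connection$ is a name introduced here), the cleanup function runs automatically when the subscriber unsubscribes or the stream errors:
const connection$ = Rx.Observable.create(/* ...as above... */);

const subscription = connection$
    .flatMap(connection => Rx.Observable.fromPromise(connection.query(queryString)))
    .subscribe(
        rows => console.log('rows:', rows),
        err => console.error(err)
    );

// later: unsubscribing triggers cleanUp() and ends the connection
subscription.unsubscribe();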

Promise resolves before looping calls to provider finish

I'm running into an issue with some code for an Ionic 3 app.
Basically, I have a list of objects that all have a unique id. The unique id for each object must be sent in a GET request so that we get the appropriate data back for each object from the server. This has to be done per object; I can't bundle them into one request because there is no API endpoint for that.
The objects are all stored in an array, so I've been looping through the array and calling the provider for each one. Note that the provider returns an observable.
Since the provider call is asynchronous, the promise resolves before the loop is finished, unless I time out the promise resolution, which defeats the whole point of the promise.
What is the correct way to go about this so that the looping provider calls finish before the promise resolves?
If I have an inner promise that resolves when the looping is done, won't it also resolve prematurely?
I also read that it is bad to have a bunch of observables open. Should I instead convert each observable to a promise using toPromise()?
Here is the code to build the data:
asyncBuildData() {
    var promise = new Promise((resolve, reject) => {
        let completedRequests = 0;
        for (let i = 0; i < 10; i++) {
            this.provider.getStuffById(listOfStuff[i].id).subscribe(val => {
                list.push(val)
                completedRequests++;
            })
        }
        console.log('cp', completedRequests); // completedRequests = 0
        setTimeout(() => {
            console.log('cp', completedRequests); // completedRequests = 10
            let done = true;
            if (done) {
                resolve('Done');
            } else {
                reject('Not done');
            }
        }, 1500)
    })
    return promise;
}
Code from the provider:
getStuffById(stuffId) {
    // baseUrl is assumed to be the endpoint prefix (the original self-referenced `url`)
    const requestUrl = baseUrl + stuffId;
    return this.http.get(requestUrl)
        .map(res => res.json());
}
Even though you can't bundle them into one request, you can still bundle them into one observable, which fires those requests in parallel, using forkJoin():
buildData$() {
    let parallelRequests = listOfStuff.map(stuff => this.provider.getStuffById(stuff.id));
    return Observable.forkJoin(parallelRequests);
}
and then in your component, you can just call:
buildData$().subscribe(val => {
    // val is an array of all the results of this.provider.getStuffById()
    list = val;
})
Note that Observable.forkJoin() will wait for all requests to complete before emitting any values.
If I understand correctly, then the following code should get you on your way. It executes the requests one at a time, one per element in the array.
var ids = [1, 2, 3, 4, 5];
ids.reduce((promise, id) => {
    return promise.then(() => {
        const requestUrl = baseUrl + id; // baseUrl is assumed to be the endpoint prefix
        return this.http.get(requestUrl)
            .map(res => res.json())
            .toPromise(); // convert the observable so the chain stays promise-based
    });
}, Promise.resolve()).then(last => {
    // handle the last result
}, err => {
    // handle errors
});
I tested this with a jQuery post and replaced it with yours from Ionic. If it fails, let me know.
