Upgrading Cloud function from Parse-server 2.0 to 3.0 - parse-platform

I am in the middle of upgrading my Parse-server 2.0 Cloud functions to 3.0.
One of the functions working in 2.0 is:
Parse.Cloud.define("countOfObservations", function(request, response) {
var query = new Parse.Query("GCUR_OBSERVATION");
query.count({
success: function(count) {
// The count request succeeded. Show the count
response.success(count);
},
error: function(error) {
response.error("GCUR_OBSERVATION table lookup failed");
}
});
});
Parse-server 3.0 has removed the response object and relies on native promises or async/await for asynchronous code instead, so I tried to rewrite the function as below.
Parse.Cloud.define("countOfObservations", (request) => {
var query = new Parse.Query("GCUR_OBSERVATION");
var countOfObs = 0;
query.count({ useMasterKey: true }).then( (count) => {
countOfObs = count;
console.log("*** count=" + countOfObs);
return new Promise(function(resolve, reject) {
if (countOfObs >= 0)
return resolve(countOfObs);
else
return reject();
})
});
});
When I called the function from the client, it returned {} instead of the {"result":2882} I expected. However, the console did print *** count=2882.
How can I make the function work again using native promises?

The reason it returns an empty object is that your function never waits for query.count to complete; it returns immediately with undefined.
You can return the promise chain itself (return query.count(...).then(...);) or, even better, convert the whole function to async/await and save a few lines. The Parse team has published a helpful migration guide with examples to make the transition easier for you.
Then this would be all you need:
Parse.Cloud.define("countOfObservations", async (request) => {
const query = new Parse.Query("GCUR_OBSERVATION");
const count = await query.count({ useMasterKey: true });
console.log("*** count=" + count);
return {"count": count};
// or if you really need to reject:
// return count >= 0 ? Promise.resolve(count) : Promise.reject();
});
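If you also want the failure path from the 2.0 version (the response.error("GCUR_OBSERVATION table lookup failed") branch), you can throw from the async function instead; in Parse-server 3.0 a thrown error is reported back to the caller. A minimal sketch of that, using the same class name:
Parse.Cloud.define("countOfObservations", async (request) => {
    const query = new Parse.Query("GCUR_OBSERVATION");
    try {
        return await query.count({ useMasterKey: true });
    } catch (error) {
        // roughly equivalent to response.error(...) in the 2.0 code
        throw new Error("GCUR_OBSERVATION table lookup failed");
    }
});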

Related

NextJS API Route Returns Before Data Received?

I'm not sure what's going on here. I have set up an API route in NextJS that returns before the data has been loaded. Can anyone point out any error here please?
I have this function that calls the data from makeRequest():
export async function getVendors() {
    const vendors = await makeRequest(`Vendor.json`);
    console.log({ vendors });
    return vendors;
}
Then the route: /api/vendors.js
export default async (req, res) => {
    const response = await getVendors();
    return res.json(response);
};
And this is the makeRequest function:
const makeRequest = async (url) => {
    // Get Auth Header
    const axiosConfig = await getHeader();
    // Intercept Rate Limited API Errors & Retry
    api.interceptors.response.use(
        function (response) {
            return response;
        },
        async function (error) {
            await new Promise(function (res) {
                setTimeout(function () {
                    res();
                }, 2000);
            });
            const originalRequest = error.config;
            if (error.response.status === 401 && !originalRequest._retry) {
                token[n] = null;
                originalRequest._retry = true;
                const refreshedHeader = await getHeader();
                api.defaults.headers = refreshedHeader;
                originalRequest.headers = refreshedHeader;
                return Promise.resolve(api(originalRequest));
            }
            return Promise.reject(error);
        }
    );
    // Call paginated API and return number of requests needed.
    const getQueryCount = await api.get(url, axiosConfig).catch((error) => {
        throw error;
    });
    const totalItems = parseInt(getQueryCount.data['#attributes'].count);
    const queriesNeeded = Math.ceil(totalItems / 100);
    // Loop through paginated API and push data to dataToReturn
    const dataToReturn = [];
    for (let i = 0; i < queriesNeeded; i++) {
        setTimeout(async () => {
            try {
                const res = await api.get(`${url}?offset=${i * 100}`, axiosConfig);
                console.log(`adding items ${i * 100} through ${(i + 1) * 100}`);
                const { data } = res;
                const arrayName = Object.keys(data)[1];
                const selectedData = await data[arrayName];
                selectedData.map((item) => {
                    dataToReturn.push(item);
                });
                if (i + 1 === queriesNeeded) {
                    console.log(dataToReturn);
                    return dataToReturn;
                }
            } catch (error) {
                console.error(error);
            }
        }, 3000 * i);
    }
};
The issue that I'm having is that getVendors() is returned before makeRequest() has finished getting the data.
Looks like your issue stems from your use of setTimeout. You're trying to return the data from inside the setTimeout call, and this won't work for a few reasons. So in this answer, I'll go over why I think it's not working as well as a potential solution for you.
setTimeout and the event loop
Take a look at this code snippet, what do you think will happen?
console.log('start')
setTimeout(() => console.log('timeout'), 1000)
console.log('end')
When you use setTimeout, the callback is queued to run later, after the rest of the current synchronous code has finished. That's why end is logged before timeout.
So when you use setTimeout to return the data, the function has already ended before the code inside the timeout even starts.
If you're new to the event loop, here's a really great talk: https://youtu.be/cCOL7MC4Pl0
returning inside setTimeout
However, there's another fundamental problem here: a value returned inside the setTimeout callback is the return value of that callback (which setTimeout simply discards), not the return value of your parent function. Try running this, what do you think will happen?
const foo = () => {
    setTimeout(() => {
        return 'foo timeout'
    }, 1000)
}
const bar = () => {
    setTimeout(() => {
        return 'bar timeout'
    }, 1000)
    return 'bar'
}
console.log(foo())
console.log(bar())
This is a result of a) the event loop mentioned above, and b) inside of the setTimeout, you're creating a new function with a new scope.
The solution
If you really need the setTimeout at the end, use a Promise. With a Promise, you can use the resolve parameter to resolve the outer promise from within the setTimeout.
const foo = () => {
    return new Promise((resolve) => {
        setTimeout(() => resolve('foo'), 1000)
    })
}
const wrapper = async () => {
    const returnedValue = await foo()
    console.log(returnedValue)
}
wrapper()
Quick note
Since you're calling the setTimeout inside of an async function, it's cleanest to keep the setTimeout wrapped in its own promise-returning helper (like foo above) rather than building a new Promise inline. Note that await flattens promises, so awaiting an async function that returns a Promise still gives you the resolved value, not another promise:
// an async function that explicitly returns a promise
const foo = async () => {
    return new Promise((resolve) => resolve(true))
}
// await unwraps it, so result is already true (not a promise)
const result = await foo()
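Putting this back into the question's code: one way to make makeRequest wait for every page is to replace the setTimeout loop with an awaited delay helper, so the loop only moves on once each paginated call has resolved. A rough sketch (untested, reusing the api, url, axiosConfig and queriesNeeded variables from the question):
// promise-based delay so it can be awaited inside the loop
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

const dataToReturn = [];
for (let i = 0; i < queriesNeeded; i++) {
    if (i > 0) await delay(3000); // keep the spacing between paginated calls
    const res = await api.get(`${url}?offset=${i * 100}`, axiosConfig);
    const arrayName = Object.keys(res.data)[1];
    dataToReturn.push(...res.data[arrayName]);
}
return dataToReturn; // makeRequest now resolves only after every page is in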

d3.js v5 - Promise.all replaced d3.queue

I've been using d3.js v4 for some time now, and I've learned that Mike Bostock has replaced d3.queue in the v5 release with the native JavaScript Promise object. I would like to check with you whether the code I have written below properly queues (asynchronously) these URLs:
var makeRequest = function() {
    "use strict";
    var bli = [
        "http://stats.oecd.org/sdmx-json/data/BLI2013/all/all",
        "http://stats.oecd.org/sdmx-json/data/BLI2014/all/all",
        "http://stats.oecd.org/sdmx-json/data/BLI2015/all/all",
        "http://stats.oecd.org/sdmx-json/data/BLI2016/all/all",
        "http://stats.oecd.org/sdmx-json/data/BLI/all/all"
    ];
    var promises = [];
    bli.forEach(function(url) {
        promises.push(
            new Promise(function(resolve, reject) {
                d3
                    .json(url)
                    .then(function(response) {
                        resolve(response);
                    })
                    .catch(function(error) {
                        console.log("Error on: " + url + ". Error: " + error);
                        reject(error);
                    });
            })
        );
    });
    Promise.all(promises).then(function(values) {
        console.log(values);
    });
};
makeRequest();
The code seems to function properly, but is this proper code, or is there a better way (a best-practice approach) for queuing with Promise.all and d3.js? Is the catch error properly implemented?
You can simplify that code a lot: you don't need to use new Promise with d3.json, since d3.json itself already returns a promise.
So, you can just do:
var files = ["data1.json", "data2.json", "data3.json"];
var promises = [];
files.forEach(function(url) {
    promises.push(d3.json(url))
});
Promise.all(promises).then(function(values) {
    console.log(values)
});
Or, if you're into the code golf, even shorter:
var files = ["data1.json", "data2.json", "data3.json"];
Promise.all(files.map(url => d3.json(url))).then(function(values) {
    console.log(values)
});
Since I cannot use JSON files in the S.O. snippet, check the console in this bl.ocks: https://bl.ocks.org/GerardoFurtado/f08993c9c729b0b3452ef1803ad9dcbf/c4b45c5acce6033085a667cbb7d34203d15de0f0
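Regarding the second part of your question (whether the catch is properly implemented): you don't need the wrapping promise for that either. A catch can be attached directly to each d3.json call; a small sketch, keeping your original per-URL logging:
var files = ["data1.json", "data2.json", "data3.json"];
var promises = files.map(function(url) {
    return d3.json(url).catch(function(error) {
        console.log("Error on: " + url + ". Error: " + error);
        throw error; // re-throw so Promise.all still rejects on failure
    });
});
Promise.all(promises).then(function(values) {
    console.log(values);
});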
Here's an approach with ES6 async functions and ES6 array destructuring:
async function chart() {
    const [first, second] = await Promise.all([
        d3.json('data1.json'),
        d3.json('data2.json'),
    ])
    console.log('data2.json :', second)
}
chart()
You can also pass a formatting function if you want to clean up your data to your preference.
.then() will have your data in a nice array which you can use later.
const myData = d3.csv("data.csv", formatterFunction)
.then(data => /* do whatever*/ )
function formatterFunction(row){
// do formatting
return // formatted data
}
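For instance, a row formatter for a CSV with (hypothetical) name and value columns might coerce the numeric field before the data reaches .then():
function formatterFunction(row) {
    // hypothetical columns: keep the name, convert value from string to number
    return {
        name: row.name,
        value: +row.value
    };
}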

fetch returning promise rather than value

Hopefully the code below communicates the problem clearly. The issue is that in the module which uses the get method of fetchData, the value being returned is the actual Promise, rather than the JSON as desired. Any thoughts on this?
// fetchData.js module
var _ = require('lodash');
function get() {
    var endpoint1 = `/endpoint1`;
    var endpoint2 = `/endpoint2`;
    return fetch(endpoint1)
        .then((endpoint1Response) => {
            return endpoint1Response.json()
                .then((endpoint1JSON) => {
                    return fetch(endpoint2)
                        .then((endpoint2Response) => {
                            return endpoint2Response.json()
                                .then((endpoint2JSON) => {
                                    var data = _.merge({}, {json1: endpoint1JSON}, {json2: endpoint2JSON});
                                    console.log('data in fetch', data); // this logs the json
                                    return data;
                                });
                        });
                });
        });
}
exports.get = get;
// module which uses get method of fetchData get
var fetchData = require('fetchData');
var data = fetchData.get();
console.log('returned from fetchData', data); // this logs a Promise
Yes, that's exactly what's supposed to happen. The whole point of promises is that their result value is not immediately available and that doesn't change just because you're obtaining one from a separate module.
You can access the value like this:
var fetchData = require('fetchData');
fetchData.get().then(data => {
    console.log('returned from fetchData', data);
});
Also note that you are using promises in a non-idiomatic way and creating a "tower of doom." This is much easier on the eyes and accomplishes the same thing:
function fetchJson(endpoint) {
    return fetch(endpoint)
        .then(endpointResponse => endpointResponse.json());
}
function get() {
    var endpoint1 = `/endpoint1`;
    var endpoint2 = `/endpoint2`;
    return Promise.all([fetchJson(endpoint1), fetchJson(endpoint2)])
        .then(responses => {
            var data = { json1: responses[0], json2: responses[1] };
            console.log('data in fetch', data); // this logs the json
            return data;
        });
}
Edit: I haven't used async/await in JavaScript, but to answer your question, I presume this would work:
async function fetchJson(endpoint) {
    var res = await fetch(endpoint);
    return res.json();
}
async function get() {
    var endpoint1 = `/endpoint1`;
    var endpoint2 = `/endpoint2`;
    var data = {
        json1: await fetchJson(endpoint1),
        json2: await fetchJson(endpoint2)
    };
    console.log('data in fetch', data); // this logs the json
    return data;
}
// module which uses the get method of fetchData
async function main() {
    var fetchData = require('fetchData');
    var data = await fetchData.get();
    console.log('returned from fetchData', data);
}
main();
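One caveat with the async/await version of get above: awaiting the two endpoints one after the other makes the requests sequential. To keep them in parallel like the Promise.all version, you could await them together; a sketch:
async function get() {
    var endpoint1 = `/endpoint1`;
    var endpoint2 = `/endpoint2`;
    // both fetches start immediately; await Promise.all waits for both
    var [json1, json2] = await Promise.all([fetchJson(endpoint1), fetchJson(endpoint2)]);
    return { json1: json1, json2: json2 };
}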

Parse Cloud: Query not running in exported function from save() callback

I'm using Parse to represent the state of a beer keg (among other things). I'd like to check the user's notifications, stored in a "Notifications" table, to see if they'd like to receive a notification when the keg is filled.
I have all of the logic for setting the user's notification settings as well as sending notifications in cloud/notifications.js. All of the logic for updating the keg is in cloud/beer.js. I created an exported function called "sendKegRefillNotification" which performs a query.find() on the Notifications table and gets called from beer.js.
The problem is that it doesn't seem to execute query.find() when I call the function from beer.js; however, when I call the same function from a job within notifications.js, it works just fine.
main.js:
require("cloud/beer.js");
require("cloud/notifications.js");
beer.js:
var notify = require("cloud/notifications.js");
var Keg = Parse.Object.extend("Keg");
var fillKeg = function(beerName) {
    var promise = new Parse.Promise();
    var keg = new Keg();
    keg.set("beerName", beerName)
    keg.set("kickedReports", []);
    keg.save(null, { useMasterKey: true }).then(function(keg) {
        console.log("Keg updated to " + beerName + ".");
        promise.resolve(keg);
        notify.sendKegRefillNotification(keg);
    },
    function(keg, error) {
        promise.reject(error);
    });
    return promise;
}
Parse.Cloud.define("beerFillKeg", function(request, response) {
var beerName = request.params.name;
if (!beerName) {
response.error("No beer was specified.");
return;
}
if (!util.isUserAdmin(request.user)) {
response.error("User does not have permission to update the keg.");
return;
}
fillKeg(beerName).then(function(keg) {
kegResponse(keg).then(function(result) {
response.success(result);
});
},
function(error) {
response.error(error);
});
});
function kegResponse(keg) {
    var promise = new Parse.Promise();
    var result = {
        id: keg.id,
        beer: {
            name: keg.get("beerName")
        },
        filled: keg.createdAt,
        kickedReports: []
    };
    var kickedReports = keg.get("kickedReports");
    if (!kickedReports || kickedReports.length == 0) {
        promise.resolve(result);
    } else {
        util.findUsers(kickedReports).then(function(users) {
            result.kickedReports = util.infoForUsers(users);
            promise.resolve(result);
        }, function(users, error) {
            console.log(error);
            promise.resolve(result);
        });
    }
    return promise;
}
notifications.js:
var Keg = Parse.Object.extend("Keg");
var Notification = Parse.Object.extend("Notifications");
exports.sendKegRefillNotification = function(keg) {
    var beerName = keg.get("beerName");
    console.log("Sending notifications that keg is refilled to '" + beerName + "'.");
    var promise = new Parse.Promise();
    var query = new Parse.Query(Notification);
    query.include("user");
    query.equalTo("keg_filled", true);
    query.find({ useMasterKey: true }).then(function(notifications) {
        console.log("Found notifications!");
        promise.resolve("Found notifications!");
    },
    function(notifications, error) {
        console.error("No notifications");
        console.error(error);
        promise.reject(error);
    });
    return promise;
}
Parse.Cloud.job("beerSendRefillNotification", function(request, status) {
var query = new Parse.Query(Keg);
query.descending("createdAt");
query.first().then(function(keg) {
if (!keg) {
status.error("No keg");
return;
}
exports.sendKegRefillNotification(keg);
},
function(keg, error) {
response.error(error);
});
});
When I run the job "beerSendRefillNotification" from the Parse dashboard, I can tell that query.find() is getting called because it prints "Found notifications!":
E2015-02-23T06:59:49.006Z]v1564 Ran job beerSendRefillNotification with:
Input: {}
Result: success/error was not called
I2015-02-23T06:59:49.055Z]false
I2015-02-23T06:59:49.190Z]Sending notifications that keg is refilled to 'test'.
I2015-02-23T06:59:49.243Z]Found notifications!
However, when I call the cloud function "beerFillKeg", it isn't because it's not printing "Found notifications!" or "No notifications":
I2015-02-23T07:00:17.414Z]v1564 Ran cloud function beerFillKeg for user HKePOEWZvC with:
Input: {"name":"Duff"}
Result: {"beer":{"name":"Duff"},"filled":{"__type":"Date","iso":"2015-02-23T07:00:17.485Z"},"id":"olLXh0F54E","kickedReports":[]}
I2015-02-23T07:00:17.438Z]false
I2015-02-23T07:00:17.523Z]Keg updated to Duff.
I2015-02-23T07:00:17.525Z]Sending notifications that keg is refilled to 'Duff'.
I finally understand it. In sendKegRefillNotification, you're kicking off query.find({...}) and then returning, but nothing ever waits for that find to finish. I think you need to return the find call itself (its .then chain) and make the caller wait on it, rather than a separately constructed promise.
In other words, you're running along, leaving some async code running behind you.
Edit: I understand what you tried to do, and it sort of makes sense. You defined a promise and expected the caller to wait for it, but fillKeg never waits on the promise that sendKegRefillNotification returns; it resolves its own promise as soon as the save succeeds, so the cloud function calls response.success and the request can finish before the notification query has run.
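A sketch of what that chaining could look like (simplified and untested; it leans on the promises that save() and find() already return instead of constructing new Parse.Promise objects): sendKegRefillNotification returns the query's promise, and fillKeg only resolves after both the save and the notification query have finished.
// notifications.js: return the find's promise chain directly
exports.sendKegRefillNotification = function(keg) {
    var beerName = keg.get("beerName");
    console.log("Sending notifications that keg is refilled to '" + beerName + "'.");
    var query = new Parse.Query(Notification);
    query.include("user");
    query.equalTo("keg_filled", true);
    return query.find({ useMasterKey: true }).then(function(notifications) {
        console.log("Found notifications!");
        return notifications;
    });
}

// beer.js: chain the notification step before resolving
var fillKeg = function(beerName) {
    var keg = new Keg();
    keg.set("beerName", beerName);
    keg.set("kickedReports", []);
    return keg.save(null, { useMasterKey: true }).then(function(savedKeg) {
        console.log("Keg updated to " + beerName + ".");
        return notify.sendKegRefillNotification(savedKeg).then(function() {
            return savedKeg;
        });
    });
}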
It looks like Parse doesn't allow you to run a query from inside a callback from save(). When I moved "notify.sendKegRefillNotification(keg);" to outside of the callback, it worked.
var fillKeg = function(beerName) {
    var promise = new Parse.Promise();
    var keg = new Keg();
    keg.set("beerName", beerName)
    keg.set("kickedReports", []);
    keg.save(null, { useMasterKey: true }).then(function(keg) {
        console.log("Keg updated to " + beerName + ".");
        console.log("Send notifications.");
        promise.resolve(keg);
    },
    function(keg, error) {
        promise.reject(error);
    });
    notify.sendKegRefillNotification(keg); // Now this works
    return promise;
}
Can anyone shed some more light on why this worked?

AngularJS: Using $q to fire ajax calls synchronously

Is it possible to use $q to fire ajax requests synchronously in AngularJS?
I have a long list of vehicles, each vehicle has events associated with them and I need to retrieve the eventdetails of each event when the user expands the listing.
Right now, if the user expands the listing, I am firing up to 15 calls asynchronously, and it seems to be causing issues with the API I'm consuming, so I'd like to see if performance improves when I wait for each request to finish before firing the next.
I'm attempting to implement $q to delay the next request until the previous is finished, however I can't seem to wrap my head around using the service, here is what I currently have:
// On click on the event detail expander
$scope.grabEventDetails = function(dataReady, index) {
    if (dataReady == false) {
        retrieveEventDetails($scope.vehicles[index].events);
    }
}
var retrieveEventDetails = function(events) {
    // events is array
    var deferred = $q.defer();
    var promise = deferred.promise;
    var retrieveData = function(data) {
        return $http({
            url: '/api/eventdetails',
            method: 'POST',
            data: {
                event_number: data.number
            },
            isArray: true
        });
    }
    _.each(events, function(single_event) {
        promise.then(retrieveData(single_event).success(function(data) {
            console.log(data);
        }));
    });
}
This is still firing asynchronously. Where am I going wrong with this?
I understand firing the requests synchronously isn't the best idea, at the moment I just want to see if performance is improved with the API at all.
You don't need $q to create a promise here, since $http already returns one.
_.each fires all of its callbacks immediately; it does not wait for any promise.
So all your code does is call retrieveData for every event right away (which is why everything still fires at once) and pass the results to .then on a deferred promise that is never resolved, so the sequencing you intended never happens.
You could do a recursive call like this:
var retrieveEventDetails = function(events) {
    var evt = events.shift();
    if (!evt) return; // stop once every event has been processed
    $http({
        url: '/api/eventdetails',
        method: 'POST',
        data: {
            event_number: evt.number
        },
        isArray: true
    }).then(function(response){
        console.log(response.data);
        retrieveEventDetails(events);
    });
}
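If the caller also needs to know when the whole sequence has finished, the same recursion can return the promise chain; a sketch, assuming $q is injected as in the question:
var retrieveEventDetails = function(events) {
    var evt = events.shift();
    if (!evt) return $q.when(); // nothing left: a resolved promise ends the chain
    return $http({
        url: '/api/eventdetails',
        method: 'POST',
        data: { event_number: evt.number },
        isArray: true
    }).then(function(response) {
        console.log(response.data);
        return retrieveEventDetails(events); // chain the next request
    });
}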
I do think you should use $q as some other part of your application might need to get a promise.
A good example would be $routeProvider resolve option.
I made a little demo in plunker.
Solution:
The retrieveData function should return a function (which in turn returns a promise) instead of just a promise.
That way we can create a promise chain: promise.then(fn).then(fn).then(fn).then(null, errorFn)
We must resolve the first promise to kick off the chain.
var retrieveEventDetails = function(events) {
    // events is array
    var deferred = $q.defer();
    var promise = deferred.promise;
    var retrieveData = function(data) {
        return function(){
            return $http({
                url: '/api/eventdetails',
                method: 'POST',
                data: {
                    event_number: data.number
                },
                isArray: true
            })
        }
    }
    deferred.resolve();
    return events.reduce(function(promise, single_event){
        return promise.then(retrieveData(single_event));
    }, promise);
}
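Since the function now returns the end of the chain, the caller can react once every request has finished; for example (a sketch, with purely illustrative logging):
$scope.grabEventDetails = function(dataReady, index) {
    if (dataReady == false) {
        retrieveEventDetails($scope.vehicles[index].events).then(function() {
            console.log('all event details retrieved');
        });
    }
}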
I'm not sure you even need $q here. In this example, each piece of data is registered in the controller as soon as it comes back from the call.
Live demo (click).
var app = angular.module('myApp', []);
app.controller('myCtrl', function($scope, myService) {
    $scope.datas = myService.get();
});
app.factory('myService', function($http) {
    var myService = {
        get: function() {
            var datas = {};
            var i=0;
            var length = 4;
            makeCall(i, length, datas);
            return datas;
        }
    }
    function makeCall(i, length, datas) {
        if (i < length) {
            $http.get('test.text').then(function(resp) {
                datas[i] = resp.data+i;
                ++i;
                makeCall(i, length, datas);
            });
        }
    }
    return myService;
});
Here's a way using $q.all() that you can wait for all of the data to come through before passing it to the controller: Live demo (click).
var app = angular.module('myApp', []);
app.controller('myCtrl', function($scope, myService) {
    myService.get().then(function(datas) {
        $scope.datas = datas;
    })
});
app.factory('myService', function($q, $http) {
    var myService = {
        get: function() {
            var deferred = $q.defer();
            var defs = [];
            var promises = [];
            var i=0;
            var length = 4;
            for(var j=0; j<length; ++j) {
                defs[j] = $q.defer();
                promises[j] = defs[j].promise;
            }
            makeCall(i, length, defs);
            $q.all(promises).then(function(datas) {
                deferred.resolve(datas);
            });
            return deferred.promise;
        }
    }
    function makeCall(i, length, defs) {
        if (i < length) {
            $http.get('test.text').then(function(resp) {
                defs[i].resolve(resp.data+i);
                ++i;
                makeCall(i, length, defs);
            })
        }
    }
    return myService;
});
