I have written a Cloud Code function called approveDish that works within the timeout limits set by Parse.com when I call the function directly once from a client button.
However, I need to migrate some old database records through it, and I'm wondering why, when I run the background job with more than, say, 3 results returned by the query, I get timeouts in that Cloud Code function. Shouldn't a background job avoid timing out entirely, since we're calling things serially?
var _ = require('underscore'); // underscore is available in Parse Cloud Code via require

Parse.Cloud.job("migrateDishesToChains", function(request, status) {
  Parse.Cloud.useMasterKey();
  var Dishes = Parse.Object.extend("Dishes");
  var query = new Parse.Query(Dishes);
  query.notEqualTo("approved", true);
  //dishes.equalTo("user", "JQd58QhOCO");
  query.limit(1);
  query.find().then(function(results) {
    // Create a trivial resolved promise as a base case.
    var promise = Parse.Promise.as();
    _.each(results, function(result) {
      // For each item, extend the promise with a function that approves it.
      promise = promise.then(function() {
        // Return a promise that will be resolved when the approval is finished.
        return Parse.Cloud.run("approveDish", {dishID: result.id});
      });
    });
    return promise;
  }).then(function() {
    status.success();
  });
});
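For comparison, here is a minimal sketch of the same serial chain with an explicit error handler and periodic progress reporting, assuming the legacy Parse.com job status API (including status.message for progress updates) and the same approveDish function and Dishes class as above. This is only a sketch, not a diagnosis of the timeout: one thing worth guarding against is that a single rejected approveDish call leaves the chain unresolved, since the final .then() above has no error handler.

var _ = require('underscore');

Parse.Cloud.job("migrateDishesToChains", function(request, status) {
  Parse.Cloud.useMasterKey();
  var query = new Parse.Query("Dishes");
  query.notEqualTo("approved", true);
  query.limit(100);
  var processed = 0;
  query.find().then(function(results) {
    var promise = Parse.Promise.as();
    _.each(results, function(result) {
      promise = promise.then(function() {
        return Parse.Cloud.run("approveDish", {dishID: result.id}).then(function() {
          processed++;
          // Report progress so the job status shows the chain is still advancing.
          status.message(processed + " dishes approved");
        });
      });
    });
    return promise;
  }).then(function() {
    status.success("Approved " + processed + " dishes.");
  }, function(error) {
    // Without this handler a rejected approveDish call would leave the job hanging.
    status.error("Migration failed: " + error.message);
  });
});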
I don't know why I'm not getting notification events from my Sails models via Model.publish().
In pre-1.x Sails.js, similar client-side code worked and I would get an event whenever records were created, updated, or deleted, so I must be misunderstanding something.
How do I subscribe to all events for any records affected by CRUD operations?
On the server side, I have Job.js and JobController.js.
In the Job.js model, this test function just creates a new record every 10 seconds:
test: async function(dataset) {
  let count = 0;
  setInterval(async function() {
    count++;
    let newjob = {
      dataset: dataset,
      state: 'delayed',
      name: "job name " + count
    };
    let job = await Job.create(newjob).fetch();
    sails.log.info('created test job: ', JSON.stringify(job));
    Job.publish([job.id], job);
  }, 10000);
}
In JobController.js, this action is called by the client and starts the test rolling:
submittest: async function(req, res) {
  let dataset = await Dataset.Get({});
  await Job.test(dataset[0].id);
  return res.ok({status: 'ok'});
}
In the client test.html, io.socket.get operations are successful, but I never see an event:
...
<script>
  io.socket.get('/job', function(body, JWR) {
    console.log('and with status code: ', JWR.statusCode);
    setTimeout(function() {
      io.socket.get('/job/submittest', function(body, JWR) {
        io.socket.on('job', function(msg) {
          console.log("job event", msg); // not getting here. why?
        });
      });
    }, 2000);
  });
</script>
This all runs fine, but the problem is that no events are seen on the client side. Why? Am I not subscribed to events by the initial io.socket.get('/job')?
Essentially, what is happening here is that you are shouting into an empty box about a new record in your model, but no one is listening in that box.
In other words, you need to subscribe the socket connection to the model updates.
See: https://sailsjs.com/documentation/reference/web-sockets/resourceful-pub-sub/subscribe
Also, check out the answer to this question for a quick how-to.
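For illustration, here is a minimal sketch of a hypothetical watch action (the action name and route are assumptions, not part of the question) that subscribes the requesting socket to existing Job records via the resourceful pub-sub subscribe method:

// JobController.js -- a sketch only; assumes a route such as 'GET /job/watch'
// and that the client calls it over the socket before the test starts:
// io.socket.get('/job/watch', function(body, JWR) { ... });
watch: async function(req, res) {
  if (!req.isSocket) {
    return res.badRequest('watch must be called from a socket request.');
  }
  // Subscribe this socket to every existing Job record, so that later
  // Job.publish([job.id], payload) calls arrive as 'job' events on the client.
  let jobs = await Job.find({});
  Job.subscribe(req, jobs.map(job => job.id));
  return res.ok({subscribed: jobs.length});
}

Note that Job.publish([job.id], ...) only reaches sockets already subscribed to that specific record id, so records created later (as in the test loop above) are not delivered this way; for brand-new records you would either subscribe again after creation or fall back to a custom room, joining it with sails.sockets.join(req, 'jobs') and broadcasting with sails.sockets.broadcast('jobs', 'job', payload) where the record is created.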
For some reason my publisher initializes twice when I create a new session. However, the second one isn't in the div where it's supposed to be. Also, if you connect to the session you'll see the same thing, so it only shows for yourself.
I'm trying to find out why it's appearing. Here's some snippets:
var getApiAndToken, initializeSession;

getApiAndToken = function() {
  var apiKey, customer_id, sessionId, token;
  if (gon) {
    apiKey = gon.api_key;
  }
  if (gon) {
    sessionId = gon.session_id;
  }
  if (gon) {
    token = gon.token;
  }
  if (gon) {
    customer_id = gon.customer_id;
  }
  initializeSession();
};

initializeSession = function() {
  var publishStream, session;
  session = OT.initSession(apiKey, sessionId);
  session.connect(token, function(error) {
    if (!error) {
      session.publish(publishStream(true));
      layout();
    } else {
      console.log('There was an error connecting to the session', error.code, error.message);
    }
  });
  $('#audioInputDevices').change(function() {
    publishStream(false);
  });
  $('#videoInputDevices').change(function() {
    publishStream(false);
  });
  return publishStream = function(loadDevices) {
    var publisherOptions;
    publisherOptions = {
      audioSource: $('#audioInputDevices').val() || 0,
      videoSource: $('#videoInputDevices').val() || 0
    };
    OT.initPublisher('publisherContainer', publisherOptions, function(error) {
      if (error) {
        console.log(error);
      } else {
        if (loadDevices) {
          OT.getDevices(function(error, devices) {
            var audioInputDevices, videoInputDevices;
            audioInputDevices = devices.filter(function(element) {
              return element.kind === 'audioInput';
            });
            videoInputDevices = devices.filter(function(element) {
              return element.kind === 'videoInput';
            });
            $.each(audioInputDevices, function() {
              $('#audioInputDevices').append($('<option></option>').val(this['deviceId']).html(this['label']));
            });
            $.each(videoInputDevices, function() {
              $('#videoInputDevices').append($('<option></option>').val(this['deviceId']).html(this['label']));
            });
          });
        }
      }
    });
  };
};
It also asks me for device access twice.
I see two general problems in the code you provided:
The variables apiKey, sessionId, and token inside the getApiAndToken() function are scoped to only that function, and are therefore not visible inside initializeSession(), where you try to use them.
The goal of the publishStream() function is not clear and its use is not consistent. Each time you invoke it (once when the session connects and each time a dropdown value changes) the function creates a new Publisher. It also does not return anything, so in the expression session.publish(publishStream(true)) you are effectively just calling session.publish(), which results in a new Publisher being appended to the end of the page because no element ID is specified. That last part is the reason why you said it's not in the <div> where it's supposed to be.
It sounds like what you want is a Publisher with dropdowns to select which devices it's using. I created an example of this for you: https://jsbin.com/sujufog/11/edit?html,js,output.
Briefly, here is how it works. It first initializes a dummy publisher so that the browser can prompt the user for permission to use the camera and microphone; this is necessary for reading the available devices. Note that if the page is served over HTTPS, browsers such as Chrome will remember the permissions you allowed on that domain earlier and will not prompt the user again, so on Chrome the dummy publisher doesn't cause any prompt to be shown for a user who has already run the application. Next, the dummy publisher is thrown away, and OT.getDevices() is called to read the available devices and populate the dropdown menus. While this is happening, the session will also have connected, and on every change of the selection in either dropdown, the publish() function is called. In that function, if a previous publisher existed, it is first removed, and then a new publisher is created with the currently selected devices. That new publisher is then passed into session.publish().
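For reference, a condensed sketch of that flow (not the exact code from the jsbin, just an approximation against the OpenTok JS API, reusing the element IDs and the gon-provided apiKey, sessionId, and token from the question):

var session = OT.initSession(apiKey, sessionId);
var publisher = null;

function publish() {
  // Tear down the previous publisher before creating one with the newly
  // selected devices, so only a single publisher ever sits in the container.
  if (publisher) {
    publisher.destroy();
  }
  var options = {};
  var audioId = $('#audioInputDevices').val();
  var videoId = $('#videoInputDevices').val();
  if (audioId) { options.audioSource = audioId; }
  if (videoId) { options.videoSource = videoId; }
  publisher = OT.initPublisher('publisherContainer', options);
  session.publish(publisher);
}

// Dummy publisher: triggers the camera/microphone permission prompt so that
// OT.getDevices() can report device labels. It is destroyed right afterwards.
var dummy = OT.initPublisher(document.createElement('div'), {}, function(error) {
  if (error) { return console.log(error); }
  OT.getDevices(function(err, devices) {
    dummy.destroy();
    if (err) { return console.log(err); }
    devices.forEach(function(device) {
      var option = $('<option></option>').val(device.deviceId).html(device.label);
      if (device.kind === 'audioInput') { $('#audioInputDevices').append(option); }
      if (device.kind === 'videoInput') { $('#videoInputDevices').append(option); }
    });
    $('#audioInputDevices, #videoInputDevices').change(publish);
  });
});

session.connect(token, function(error) {
  if (error) { return console.log('Error connecting to the session', error.code, error.message); }
  publish();
});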
I have a set of AJAX calls that should happen asynchronously, and after all the AJAX calls are made, some function should be executed. I am using $q.all().then() for this.
The problem with $q.all() is that even if one of the AJAX calls fails, the function won't get executed. My requirement is to execute the function regardless of the success or failure of the individual AJAX calls. How can I go about it?
Using $q.allSettled instead of $q.all solves the problem.
Please refer to the following link for how to decorate $q with an allSettled function:
http://www.codeducky.org/q-allsettled/
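A minimal sketch of what such a decorator can look like (the module name 'app' is a placeholder; the article's version is more complete):

// Wrap every promise so it always resolves, reporting its outcome; $q.all
// on the wrapped promises then waits for all of them even if some fail.
angular.module('app').config(['$provide', function($provide) {
  $provide.decorator('$q', ['$delegate', function($delegate) {
    $delegate.allSettled = function(promises) {
      var wrapped = promises.map(function(promise) {
        return promise.then(
          function(value) { return {state: 'fulfilled', value: value}; },
          function(reason) { return {state: 'rejected', reason: reason}; }
        );
      });
      return $delegate.all(wrapped);
    };
    return $delegate;
  }]);
}]);

With this in place, $q.allSettled(ajaxCalls).then(function(results) { ... }) always runs, and each entry in results tells you whether that call succeeded.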
You can return a resolved promise from the error callback, which converts the rejection into a fulfilled result and makes it possible for you to do something like this:
var urls = [...];
var promises = urls.map(function (url) {
  return $http.get(url).then(null,
    // error callback returns a new, resolved promise
    function (response) { return $q.resolve(response); });
});
$q.all(promises).then(...);
Fiddle: http://jsfiddle.net/j4b7pxmt/
You can use this code to implement this logic.
Definition of the GroupPromise plugin:
//create a group promise function.
var GroupPromise = function() {
  this.allPromise = [];
  this.successPromises = [];
  this.failurePromises = [];
};

//use this function to add promises which you want to resolve on success or error.
GroupPromise.prototype.add = function(promiseToAdd) {
  var promise = $q.defer();
  var self = this;
  promiseToAdd.then(function() {
    // copy the arguments into a real array so we can push onto it
    var args = Array.prototype.slice.call(arguments);
    args.push({state: 'success'});
    self.successPromises.push(promise);
    promise.resolve.apply(promise, args);
  }, function() {
    var args = Array.prototype.slice.call(arguments);
    args.push({state: 'failure'});
    self.failurePromises.push(promise);
    promise.resolve.apply(promise, args);
  });
  // push the underlying promise, not the deferred, so $q.all waits for it
  this.allPromise.push(promise.promise);
};

//use this to resolve all promises.
GroupPromise.prototype.resolveAll = function(successCallback, errorCallback) {
  var self = this;
  $q.all(this.allPromise).then(function() {
    //all API calls processed.
    if (self.failurePromises.length === 0) {
      //no API call failed
      successCallback.call(self);
    } else {
      //some API calls failed
      errorCallback.call(self);
    }
  });
};
Usage of the GroupPromise plugin:
//create an object of GroupPromise.
var myGroupPromise = new GroupPromise();

//add API call promises to the queue.
angular.forEach([1, 2, 3, 4], function() {
  myGroupPromise.add($http.get(url));
});

//resolve all promises, passing the success and error callbacks.
myGroupPromise.resolveAll(successCallback, errorCallback);
You have to use individual deferreds, such as:
var myDefer = $q.defer();
myDefer.promise.then(function(result) {
}, function(rejected) {
});
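A sketch of that idea in context (urls is assumed to be the same array of endpoints as in the earlier answer): each request gets its own deferred, which is resolved on success and on failure, so $q.all over these deferred promises always reaches its then handler.

var deferreds = urls.map(function(url) {
  var deferred = $q.defer();
  $http.get(url).then(
    function(response) { deferred.resolve({ok: true, response: response}); },
    function(response) { deferred.resolve({ok: false, response: response}); }
  );
  return deferred.promise;
});

$q.all(deferreds).then(function(results) {
  // Runs once every call has settled, regardless of individual failures.
});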
I am new to Angular and set up a simple example with a REST API config in CodeIgniter that returns a JSON (default) thread list. No problems!
Until I add an update to the database. If I clear and then call getThreads again, I receive the same list of items. A page refresh solves this. I can see in Firebug that it's only calling the URL api/example/threadlist/id/'x' once per page load.
function ThreadsCtrl($scope, $http, $templateCache) {
  $scope.getThreads = function(id) {
    if (!id) { id = 'reset'; }
    $http({method: 'GET', url: 'api/example/threadlist/id/' + id, cache: $templateCache}).
      success(function(data) {
        $scope.threadslist = data; //set view model
      }).
      error(function(data) {
        $scope.threadslist = data || "Request failed";
      });
  };
}
How would I make it so that it always fetches a fresh list of data rather than reusing the old one?
Thanks!
If I understood your question correctly, your AJAX call is being cached, so you have to remove cache: $templateCache from your code.
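For example, a sketch of the same controller with the cache option removed (everything else left as in the question), so every getThreads call issues a fresh request:

function ThreadsCtrl($scope, $http) {
  $scope.getThreads = function(id) {
    if (!id) { id = 'reset'; }
    // No `cache` option: each call goes back to the server.
    $http({method: 'GET', url: 'api/example/threadlist/id/' + id}).
      success(function(data) {
        $scope.threadslist = data; //set view model
      }).
      error(function(data) {
        $scope.threadslist = data || "Request failed";
      });
  };
}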
In the function getweather() I fetch some weather data from a JSON object and store that data in the variable data. Within that function I can handle the JSON data, but how do I access the data from outside of getweather()?
It doesn't matter whether I return the variable location or data; the variable place just doesn't hold the JSON data.
How do I handle the place variable in order to make it work as it does within getweather()?
var ajax = require('ajax');
var place;

place = getweather();

function getweather() {
  // Make the request
  ajax(
    {
      url: 'http://api.openweathermap.org/data/2.5/weather?q=paris.fr', type: 'json'
    },
    function(data) {
      // Success!
      console.log("Successfully fetched weather data!");
      // Extract data
      var location = data.name;
      return location;
    },
    function(error) {
      // Failure!
      console.log('Failed fetching weather data: ' + error);
    }
  );
}
The A in AJAX stands for asynchronous. What's happening in your code is that you are trying to assign a value to place before the asynchronous call to api.openweathermap.org has returned.
You can check this by placing a statement like console.log(place); directly after place = getweather();. You will notice in the console that undefined is logged before you see Successfully fetched weather data!
This issue has been described in detail in this post. It recommends restructuring your code around your callbacks.
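A minimal sketch of that restructuring, keeping the same ajax module and URL as in the question: getweather() accepts callbacks and hands the result to them, so the place value is only used once the request has actually completed.

var ajax = require('ajax');

function getweather(onSuccess, onError) {
  ajax(
    {url: 'http://api.openweathermap.org/data/2.5/weather?q=paris.fr', type: 'json'},
    function(data) {
      // Hand the extracted value to the caller instead of trying to return it.
      onSuccess(data.name);
    },
    function(error) {
      onError(error);
    }
  );
}

getweather(
  function(place) {
    // This only runs after the response has arrived, so place is usable here.
    console.log('Weather location: ' + place);
  },
  function(error) {
    console.log('Failed fetching weather data: ' + error);
  }
);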