Parse Cloud "beforeSave" not saving data - parse-platform

I'm using the Parse beforeSave method to save an order. Here is the code:
// Before saving an order: if status is "Finish", set Priority to 0
Parse.Cloud.beforeSave("UserToOrders", function(request, response) {
    Parse.Cloud.useMasterKey();
    var preStatus = request.object.get("OrderStatus");
    if (preStatus == "Finish") {
        request.object.set("Priority", 0);
        console.log("beforeSave(\"UserToOrders\")\t Order (" + request.object.id + ") Status is 'Finish' So Priority set to '0'");
    } else {
        console.log("beforeSave(\"UserToOrders\")\t Order (" + request.object.id + ") Status Changed to: " + preStatus);
        request.object.set("OrderStatus", preStatus);
    }
    response.success();
});
Here is the log:
I2016-03-09T20:56:05.779Z]v136 before_save triggered for UserToOrders for user pSi0iCGJJe:
Input: {"original":{"ACL":{"*":{"read":true},"vxgEWFQ7eu":{"read":true,"write":true}},"OrderStatus":"Ready","OrderStatusActivity":"Active","ResturantID":"g1bzMQEXoj","TimeToBeReady":{"__type":"Date","iso":"2016-03-08T23:35:23.916Z"},"UserETA":{"__type":"Date","iso":"2016-03-08T23:35:23.916Z"},"UserID":"vxgEWFQ7eu","createdAt":"2016-03-08T21:06:06.605Z","objectId":"t3NoxcSp5z","updatedAt":"2016-03-08T21:40:59.538Z"},"update":{"OrderStatus":"Finish","objectId":"t3NoxcSp5z"}}
Result: Update changed to {"OrderStatus":"Finish","Priority":0}
I2016-03-09T20:56:05.975Z]beforeSave("UserToOrders") Order (t3NoxcSp5z) Status is 'Finish' So Priority set to '0'
but nothing is being changed in the DB.
What am I missing?
Thanks.

The variable preStatus already holds the very value you are trying to save:
var preStatus = request.object.get("OrderStatus");
Since you are "saving" the same value back, you can simply delete this line:
request.object.set("OrderStatus", preStatus);
If that is not what you want, please provide the log from a save where OrderStatus = "Finish".

I've figured it out: it was an ACL permissions issue.
The order was created by one client, while the change was made by another.
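For anyone hitting the same thing, here is a minimal sketch of one way to avoid it (my assumptions, not the poster's code: both clients should be allowed to edit the order, and creatorUser and secondUser are hypothetical Parse.User objects):
// Grant write access to both users when the order is first created,
// so a later update coming from the second client passes the ACL check.
var acl = new Parse.ACL();
acl.setPublicReadAccess(true);
acl.setWriteAccess(creatorUser, true); // hypothetical: the client that creates the order
acl.setWriteAccess(secondUser, true);  // hypothetical: the client that updates it later
order.setACL(acl);
order.save();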

@returns throwing error when running locally on docker

I'm trying to return an array of assets from a transaction. As per the syntax, I have added @returns and @commit(false) in the .cto file, but it throws this error:
✖ Installing business network. This may take a minute...
ParseException: Expected ")", "-", "false", "true", comment, end of line, number, string or whitespace but "P" found. File models/org.zcon.healthcare.cto line 370 column 10
Command failed
When I remove the @returns annotation, it doesn't throw any error. It also doesn't throw any error when I remove the parameter Patient[] from the @returns annotation, but that's against the syntax, right?
I'm running the application locally using Docker Swarm.
My Composer version is v0.19.12.
What's wrong? Is this a bug?
In case you want to see the transaction definition in the .cto file:
@commit(false)
@returns(Patient[])
transaction SearchPatient {
  o String firstName optional
  o String lastName optional
}
And in the logic file:
/**
 * Sample transaction
 * @param {org.zcon.healthcare.SearchPatient} tx
 * @returns {org.zcon.healthcare.Patient[]}
 * @transaction
 */
async function SearchPatient(tx) {
    let queryString = `SELECT org.zcon.healthcare.Patient WHERE (`;
    let conditions = [];
    if (tx.hasOwnProperty('firstName')) {
        var firstName = tx.firstName;
        conditions.push(`(firstName == "${firstName}")`);
    }
    if (tx.hasOwnProperty('lastName')) {
        var lastName = tx.lastName;
        conditions.push(`(lastName == "${lastName}")`);
    }
    queryString += conditions.join(' AND ') + ')';
    let finalQuery = buildQuery(queryString);
    const searchPatient = await query(finalQuery);
    if (searchPatient.length == 0) {
        throw "No Patient Records found!!";
    } else {
        return searchPatient;
    }
}
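(An aside, unrelated to the parse error: if neither firstName nor lastName is supplied, queryString ends up as SELECT ... WHERE (), which is not a valid query. A minimal guard, placed just before the join, might look like this:)
if (conditions.length === 0) {
    // hypothetical guard, not in the original code
    throw new Error('Provide at least one of firstName or lastName');
}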
I've not seen this error with composer network install (deploying to a running Fabric); I did the network install just fine with your model and code. I suggest that your error may lie elsewhere in your business network. Can you add the complete sequence of steps that led to the error? How did you build your .bna file?
I also tried your code (the ending shown here):
const searchPatient = await query(finalQuery);
console.log("results are " + searchPatient);
console.log("element 1 of array is " + searchPatient[0]);
if (searchPatient.length == 0) {
    throw "No Patient Records found!!";
} else {
    return searchPatient;
}
}
and I can see the returned results fine in the console.log output (just using a different network name, obviously).

How to properly initialize Elasticsearch for queries?

I wrote a program that inserts an element, then it searches all elements in the store. Thus, it finds one more element every time the program runs. I’d expect to be able to comment out the insert and still run the program, just finding the stuff that was already inserted. Whenever I do though, I get an exception “Failed to execute phase [query_fetch], all shards failed”. Any ideas?
Hypothesis: inserting the element does some sort of implicit initialization on my node. However, I’m looking through the ES source, and I can’t figure out what that would be.
try (Node node = NodeBuilder.nodeBuilder().clusterName("tesssst").build().start()) {
    try (Client client = node.client()) {
        // insert an entry; if this part is removed, the program crashes
        client.prepareIndex("movies", "movie", UUID.randomUUID().toString()).setSource(
                "{\"title\": \"Lawrence of Arabia\",\"director\": \"David Lean\",\"year\": 1962,\"genres\":"
                + " [\"Adventure\", \"Biography\", \"Drama\"]}").execute().actionGet();
        // search all entries
        System.out.println("***************");
        SearchResponse response = client.prepareSearch("movies")
                .setTypes("movie")
                .setSearchType(SearchType.QUERY_AND_FETCH)
                .setFrom(0).setSize(60).setExplain(true)
                .execute()
                .actionGet();
        SearchHit[] results = response.getHits().getHits();
        System.out.println("Current results: " + results.length);
        for (SearchHit hit : results) {
            System.out.println("------------------------------");
            Map<String, Object> result = hit.getSource();
            System.out.println(result);
        }
        System.out.println("***************");
        client.close(); // redundant: try-with-resources already closes the client
    }
    node.close(); // redundant: try-with-resources already closes the node
}
The problem was that Elasticsearch didn't have enough time to start up, and the initial insert gave it just enough time. Simply adding an appropriate wait fixes it:
final ClusterHealthRequest clusterHealthRequest = new ClusterHealthRequest("movies")
        .timeout(TimeValue.timeValueSeconds(60)).waitForGreenStatus();
final ClusterHealthResponse clusterHealth = client.admin().cluster()
        .health(clusterHealthRequest).actionGet();
if (clusterHealth.isTimedOut()) {
    System.out.println("ElasticSearch cluster health timed out");
} else {
    System.out.println("ElasticSearch cluster health: Status "
            + clusterHealth.getStatus().name() + "; " + clusterHealth.getNumberOfNodes()
            + " nodes; " + clusterHealth.getActiveShards() + " active shards.");
}
(If your standards are lower, you can save time by using waitForYellowStatus instead.)

How do I add an iCal event using JS automation in OS X Yosemite?

I'm really having a hard time understanding how to interact with iCal using the new support for JavaScript automation in Yosemite. I'm not too experienced with OOP, so that might be where my problem lies.
Could someone help me out with the code for adding a new event to a specified calendar? I can probably reverse engineer it from there to understand how to work with it.
Thanks.
UPDATE
Here is the code I am trying:
// -- perform adding to iCal
// -- select "Test" calendar
theCalendar = iCal.calendars.Test;
events = theCalendar.events;
// -- load in the events
stringifiedData = Safari.doJavaScript("localStorage.getItem('theEvents');", {
    in: Safari.windows[0].tabs[0]});
// -- parse data back to a JS object
theEvents = JSON.parse(stringifiedData);
// -- loop through events
for (var key in theEvents) {
    var obj = theEvents[key];
    // -- set vars
    theDate = obj['date'];
    theDescription = obj['description'];
    theSummary = obj['summary'];
    theLocation = obj['location'];
    theStartTime = theDate + " " + obj['startTime'];
    theEndTime = theDate + " " + obj['endTime'];
    // -- create event object
    theEvent = iCal.Event({
        description: theDescription,
        summary: theSummary,
        location: theLocation,
        startDate: theStartTime,
        endDate: theEndTime
    });
    // -- get the last event index
    last = events.length;
    // -- insert it into iCal
    events.push(theEvent);
}
The error I'm receiving:
Error on line 68: TypeError: undefined is not an object (evaluating 'events.push(theEvent)')
UPDATE
I figured out the issue: I was formatting the dates incorrectly; they were just strings. Once I turned them into Date objects, I was able to insert successfully using
events[last] = theEvent;
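Putting the pieces together, a minimal sketch of the working pattern (my assumptions: a calendar named "Test" exists, and the event values and dates are placeholders):
var iCal = Application("Calendar");
var theCalendar = iCal.calendars.byName("Test");
var events = theCalendar.events;
var theEvent = iCal.Event({
    summary: "Dinner",
    location: "Somewhere",
    startDate: new Date("2014-11-01T19:00:00"), // real Date objects, not strings
    endDate: new Date("2014-11-01T21:00:00")
});
events[events.length] = theEvent; // assign at the last index, as above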

Why does fetching data from SQLite block Node.js?

I want to fetch a huge amount of archive data (5–12 million rows) from an SQLite database and export it to a CSV file. While doing this, the whole server is blocked: no other connection can be handled (for example, I couldn't open the website in another browser tab).
Node.js server part:
function exportArchiveData(response, query) {
    response.setHeader('Content-type', 'text/csv');
    response.setHeader('Content-disposition', 'attachment; filename=archive.csv');
    db.fetchAllArchiveData(
        query.ID,
        function(error, data) {
            if (!error)
                response.write(data.A + ';' + data.B + ';' + data.C + '\n');
        },
        function(error, retrievedRows) {
            response.end();
        });
}
SQLite DB module:
module.exports.SS.prototype.fetchAllArchiveData = function (a, callback, complete) {
    var self = this;
    // self.sensorSqliteDb.all(
    self.sensorSqliteDb.each(
        'SELECT A, B, C ' +
        'FROM AD WHERE ' +
        ' A="' + a + '"' +
        ' ORDER BY C ASC' +
        ';',
        callback,
        complete
    );
};
I also created an index on AD (CREATE INDEX IAD ON AD(A, C)), and EXPLAIN QUERY PLAN shows that this index is used by the SQLite engine.
Still, when I call exportArchiveData, the server sends the data properly but no other action can be performed in the meantime. With this amount of data (5–12 million rows), it takes about 3 minutes.
How can I prevent this from blocking the whole server?
I thought that using each with callbacks would keep the server responsive. Memory usage is also huge (about 3 GB and even more). Can I prevent this somehow?
In answer to comments, I would like to add some clarifications:
I use node-sqlite3 from Developmentseed. It should be asynchronous and non-blocking, and it is: while the statement is being prepared I can still request the main page. But once the server starts serving data, Node.js is blocked. I guess that's because the home page is a single callback request, while the archive data generates millions of each callbacks.
If I use the sqlite3 tool from the Linux command line, I do not get rows immediately, but that is not the problem as long as node-sqlite3 is non-blocking.
Yes, I'm hitting max CPU. What is worse, when I request twice as much data, all memory is consumed and then the server freezes forever.
OK, I handled the problem this way:
Instead of using Database#each, I use Database#prepare with multiple Statement#get calls.
Moreover, I found that running out of memory was caused by the response buffer filling up. So now I request the next row only once the previous one has been written and the response buffer has room for new data. It works perfectly, and the server is no longer blocked (except while preparing the statement).
SQLite module:
module.exports.SS.prototype.fetchAllArchiveData = function (a) {
    var self = this;
    var statement = self.Db.prepare(
        'SELECT A, B, C ' +
        'FROM AD WHERE ' +
        ' A="' + a + '"' +
        ' ORDER BY C ASC' +
        ';',
        function(error) {
            if (error != null) {
                console.log(error);
            }
        }
    );
    return statement;
};
Server side:
function exportArchiveData(response, query) {
    var i = 0;
    var statement = db.fetchAllArchiveData(query.ID);
    var getcallback = function(err, row) {
        if (err != null) {
            console.log(err);
            return;
        }
        if (typeof row != 'undefined') {
            // write() returns false when the response buffer is full
            var respRet = response.write(row.A + ';' + row.B + ';' + row.C + '\n');
            console.log(i++ + ' ' + respRet);
            if (respRet) {
                statement.get(getcallback);
            } else {
                // wait for the buffer to drain before fetching the next row;
                // once() avoids piling up 'drain' listeners on every stall
                console.log('should wait on drain');
                response.once('drain', function() {
                    statement.get(getcallback);
                });
            }
        } else {
            response.end();
        }
    };
    statement.get(function(err, row) {
        response.setHeader('Content-type', 'text/csv');
        response.setHeader('Content-disposition', 'attachment; filename=archive.csv');
        getcallback(err, row);
    });
}
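As an editorial aside (not from the original poster): on newer Node versions the same backpressure logic can be expressed with a Readable stream, whose read() is called only when the consumer is ready for more data. A minimal sketch, assuming the same prepared-statement module as above:
const { Readable } = require('stream');

function exportArchiveDataStream(response, query) {
    response.setHeader('Content-type', 'text/csv');
    response.setHeader('Content-disposition', 'attachment; filename=archive.csv');
    var statement = db.fetchAllArchiveData(query.ID);
    var rows = new Readable({
        read() {
            // called only when the consumer wants more, so pipe()
            // handles the drain/backpressure bookkeeping for us
            statement.get((err, row) => {
                if (err) return this.destroy(err);
                if (row === undefined) return this.push(null); // end of results
                this.push(row.A + ';' + row.B + ';' + row.C + '\n');
            });
        }
    });
    rows.pipe(response);
}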

Django Session Persistent but Losing Data

I have been working for hours trying to understand the following problem: a user sends an Ajax request to dynamically add a form, and I record that the number of forms to read on submission has increased. Toward this end I use request.session['editing_foos'] = { 'prefix_of_form_elements' : pkey } so that I can associate the forms with the database for saving and loading (-1 is for new forms that haven't been saved yet).
However, when I use the following code (see bottom) I get the following bizarre output:
1st Click:
{} foousername
next_key 1
1
{u'1-foo': -1}
2nd Click:
{} foousername
next_key 1
1
{u'1-foo': -1}
3rd Request:
{} foousername
next_key 1
1
{u'1-foo': -1}
What the heck is going on?
id_fetcher = re.compile(r'\d')

@login_required
def ajax_add_foo(request):
    def id_from_prefix(key):
        return int(id_fetcher.search(key).group(0))
    if 'editing_foos' not in request.session:
        print "reinitializing"
        request.session['editing_foos'] = {}
    print request.session['editing_foos'], request.user
    keys = request.session['editing_foos'].keys()
    if len(keys) == 0:
        next_key = 1
    else:
        print [id_from_prefix(key) for key in keys]
        next_key = max([id_from_prefix(key) for key in keys]) + 1
    print "next_key", next_key
    fooform = FooForm(prefix=next_key)
    print next_key
    request.session['editing_foos'].update({create_prefix(FooForm, next_key): -1})  # This quote is new and has no pkey
    print request.session['editing_foos']
    return render_to_response('bar/foo_fragment.html',
                              {'fooform': fooform},
                              context_instance=RequestContext(request))
Thank you all very much!
Note: this is a follow-up to a previous question concerning the same source code.
I don't think I completely understand the question, but you may want to take a look at which session engine you're using.
If you're using the cache session engine, you need to make sure caching is properly set up (for instance, the dummy cache would simply throw away your session data).
Another possibility is that your session isn't being saved because you're not changing the session itself: you're mutating a dict that is stored inside it, which the session machinery can't detect. You can either reassign the key (request.session['editing_foos'] = foos) or force the session to save by adding this somewhere in your view:
request.session.modified = True
