Pass jQuery event to default on event handler

I've written a function that calls the default jQuery.fn.on handler after a given number of fired events. Now I'm stuck, because the original event is not passed to the handler. Any ideas how to improve this?
;(function ($) {
    var oldOn = $.fn.on,
        i = 0;
    $.fn.on = function () {
        var args = arguments,
            j = args.length;
        for (var last in args);
        while (j--) {
            if ($.isFunction(args[j]) && !isNaN(args[last])) {
                var oldFn = args[j],
                    after = args[last];
                args[j] = function () {
                    i++;
                    if (i === after) {
                        oldFn.call();
                        i = 0;
                    }
                };
            }
        }
        if (!isNaN(args[last])) delete args[last];
        return oldOn.apply(this, arguments);
    };
})(jQuery);

// call the plugin and fire the `fn` after each 20 mousemoves
$(document).on('mousemove', function (e) {
    console.log(e); // undefined
}, 20);
As you can see, the following works without problems:
var oldOn = $.fn.on;
$.fn.on = function () {
    return oldOn.apply(this, arguments);
};
$(document).on('click', function (e) {
    console.log(e); // jQuery.Event
});
Where's the mistake, and how can I get this to work?
Update
I've solved it in a much simpler way now: https://github.com/yckart/jquery.unevent.js

You're not passing the arguments from your callback wrapper function to the original callback function.
args[j] = function (/* HERE */) {
    i++;
    if (i === after) {
        oldFn.call(/* TO HERE */);
        i = 0;
    }
};
Try replacing oldFn.call(); with oldFn.apply(this, [].slice.call(arguments)); to carry them over (and to keep jQuery's this intact).
Edit: http://jsfiddle.net/QFyhX/
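For illustration, a minimal sketch of the wrapper with the arguments carried over (same names as in the plugin above):

args[j] = function () {
    i++;
    if (i === after) {
        // forward the element's `this` and all handler arguments,
        // including the jQuery.Event, to the original callback
        oldFn.apply(this, [].slice.call(arguments));
        i = 0;
    }
};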

I think
for (var last in args);
should be
var last = args[args.length-1];

Parallel asynchronous Ajax calls from the client

I have 20 data packets on the client and I am pushing them one by one to the server via Ajax POST. Each call takes approximately one minute to yield a response. Is there any way to make a few of these requests run in parallel?
I have used jQuery promises; however, each request still waits for the prior one to complete.
var dataPackets = ["Data1", "Data2", "Data3", "Data4", "Data5",
                   "Data6", "Data7", "Data8", "Data9", "Data10",
                   "Data11", "Data12", "Data13", "Data14", "Data15", "Data16",
                   "Data17", "Data18", "Data19", "Data20"];

$(dataPackets).each(function (indx, request) {
    var req = JSON.stringify(request);
    setTimeout(function () {
        $.ajax({
            url: "sample/sampleaction",
            data: req,
            success: function (data) {
                UpdateSuccessResponse(data);
            }
        });
    }, 500);
});
The $.when ... done construct in jQuery runs operations in parallel:
$.when(request1(), request2(), request3(), ...)
    .done(function (data1, data2, data3) {});
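Applied to the packets above, a minimal sketch (reusing UpdateSuccessResponse from the question) could fire all the POSTs at once and wait for the whole batch:

var requests = $.map(dataPackets, function (packet) {
    // $.ajax is asynchronous by default, so these requests run in parallel
    return $.ajax({
        url: "sample/sampleaction",
        type: "POST",
        data: JSON.stringify(packet),
        success: function (data) {
            UpdateSuccessResponse(data);
        }
    });
});

// resolves once every request has completed
$.when.apply($, requests).done(function () {
    console.log('all packets done');
});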
Here's an example:
http://flummox-engineering.blogspot.com/2015/12/making-your-jquery-ajax-calls-parallel.html
$.when.apply($, functionArray) lets you pass an array of deferreds to run in parallel. This array can be built dynamically. In fact, I'm doing this to export a web page to PDF based on the items checked in a radio button list.
Here I create an empty array, var functionArray = [];, then, based on the selected items, I push a deferred onto the array with f = createPDF(checkedItems[i].value).
$(document).ready(function () {
});

function sleep(milliseconds) {
    var start = new Date().getTime();
    for (var i = 0; i < 1e7; i++) {
        if ((new Date().getTime() - start) > milliseconds) {
            break;
        }
    }
}
function exportPDFCollection() {
    var f = null;
    var x = 0;
    var checkedItems = $("input:checked");
    var count = checkedItems.length;
    var reportList = $(checkedItems).map(
        function () {
            return $(this).next("label").text();
        })
        .get().join(",");
    var functionArray = [];
    var pdf = null;
    for (var i = 0; i < count; i++) {
        f = createPDF(checkedItems[i].value)
            .done(function () {
                pdf = checkedItems[x++].value;
                alert('PDF => ' + pdf + ' created.');
            })
            .fail(function (jqxhr, errorText, errorThrown) {
                alert('ajax call failed');
            });
        functionArray.push(f);
    }
    $.when.apply($, functionArray)
        .done(function () {
            $.get("http://yourserver/ExportPage.aspx", { reports: reportList })
                .done(function () {
                    alert('PDF merge complete.');
                })
                .fail(function (jqxhr, errorText, errorThrown) {
                    alert('PDF merge failed. Please try again.');
                });
            return true;
        });
}

function createPDF(webPage) {
    return $.get(webPage);
}

nightwatch assert ALL elements, not ANY

I'm trying to test that, when the Submit button is clicked on an empty form, all the "please fill in this field" labels are displayed.
I'm doing so with this:
page.click('#btn_submit');
page.expect.element('#validation_label_required').to.be.visible;
where #validation_label_required is represented by the CSS selector:
input[required] ~ p.error-message-required
However, this test passes if ANY of the validation labels are visible. The test should only pass if they ALL are.
How can I achieve this?
You will need to create a custom assertion for that, where you locate all elements via Selenium commands and then loop over them to verify the condition. It should look something like this:
var util = require('util');

exports.assertion = function (elementSelector, expectedValue, msg) {
    this.message = msg || util.format('Testing if elements located by "%s" are visible', elementSelector);
    this.expected = expectedValue;
    this.pass = function (value) {
        return value === this.expected;
    };
    this.value = function (result) {
        return result;
    };
    this.command = function (callback) {
        var that = this.api;
        this.api.elements('css selector', elementSelector, function (elements) {
            elements.value.forEach(function (element) {
                that.elementIdDisplayed(element.ELEMENT, function (result) {
                    if (!result.value) {
                        callback(false);
                    }
                });
            });
            callback(true);
        });
        return this;
    };
};
I've just ended up with another custom assertion that checks how many elements are visible for a given CSS selector.
/**
 * Check how many elements are visible by a given css selector.
 */
var util = require('util');

exports.assertion = function (elementSelector, expectedCount, msg) {
    this.message = msg || util.format('Asserting %s elements located by css selector "%s" are visible', expectedCount, elementSelector);
    this.expected = expectedCount;
    this.count = 0;
    this.pass = function (value) {
        return value === this.expected;
    };
    this.value = function (result) {
        return this.count;
    };
    this.command = function (callback) {
        var me = this, elcount = 0;
        this.count = 0;
        this.api.elements('css selector', elementSelector, function (elements) {
            if (elements.value && elements.value.length > 0) {
                elcount = elements.value.length;
            } else {
                return callback(false);
            }
            elements.value.forEach(function (element) {
                me.api.elementIdDisplayed(element.ELEMENT, function (result) {
                    if (result.value) {
                        me.count++;
                    }
                    elcount--;
                    if (elcount === 0) {
                        callback(me.count);
                    }
                });
            });
        });
    };
};
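Assuming the assertion above is saved as, say, visibleCount.js (a hypothetical file name) in a folder listed under custom_assertions_path in your Nightwatch config, it can then be called like a built-in assertion. The count of 5 below is an assumption about how many required fields the form has:

module.exports = {
    'shows all required-field messages': function (browser) {
        browser
            .url('http://localhost/your-form') // placeholder URL
            .click('#btn_submit')
            // passes only if all 5 matched labels are visible
            .assert.visibleCount('input[required] ~ p.error-message-required', 5)
            .end();
    }
};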

Double callback on error

When testing Stylus, if the assertion throws, Stylus calls the callback a second time, this time with the assertion error:
var expect = require('chai').expect,
    stylus = require('stylus'),
    i = 0;

describe('test stylus', function () {
    it('calls back', function (done) {
        stylus('p\n\tcolor white').render(function (err, css) {
            i++;
            console.log('callback', i) // logs twice
            expect(css).equal('p\n\t{ color: bad;\n}')
            done()
        })
    })
})
I'm using this as a workaround:
describe('test stylus', function () {
    it('calls back', function (done) {
        stylus('p\n\tcolor white').render(function (err, css) {
            try {
                expect(css).equal('p\n\t{ color: bad;\n}')
                done()
            } catch (e) {
                done(e)
            }
        })
    })
})
I'm thinking it's a Stylus bug to re-call the callback. Or am I missing something here?
Your hypothesis is correct. Here is the code in stylus:
Renderer.prototype.render = function (fn) {
    // ...
    try {
        // ...
        var listeners = this.listeners('end');
        if (fn) listeners.push(fn);
        for (var i = 0, len = listeners.length; i < len; i++) {
            var ret = listeners[i](null, css); // Called here once.
            if (ret) css = ret;
        }
        if (!fn) return css;
    } catch (err) {
        var options = {};
        options.input = err.input || this.str;
        options.filename = err.filename || this.options.filename;
        options.lineno = err.lineno || parser.lexer.lineno;
        if (!fn) throw utils.formatException(err, options);
        // Called here a second time if there is an exception.
        fn(utils.formatException(err, options));
    }
};
fn is the callback. It is added to listeners and will be called once as part of the loop that calls all listeners. If the callback raises an exception there, then it is called again as part of the exception handling.
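A general defensive pattern (a sketch, not part of Stylus) is to wrap the callback so it can only run once; the second, error-carrying invocation is then ignored:

// Wrap a callback so repeated invocations are no-ops.
function once(fn) {
    var called = false;
    return function () {
        if (called) return;
        called = true;
        return fn.apply(this, arguments);
    };
}

stylus('p\n\tcolor white').render(once(function (err, css) {
    // assertions here run at most once
}));

Note that this silences the second call rather than surfacing it, so the try/catch workaround above remains the better fix for the test itself.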

AngularJS: Using $q to fire ajax calls synchronously

Is it possible to use $q to fire Ajax requests sequentially, one at a time, in AngularJS?
I have a long list of vehicles; each vehicle has events associated with it, and I need to retrieve the event details for each event when the user expands the listing.
Right now, when the user expands the listing, I fire up to 15 calls asynchronously, and that seems to cause issues with the API I'm consuming, so I'd like to see whether performance improves if I wait for each request to finish before firing the next.
I'm attempting to use $q to delay the next request until the previous one has finished, but I can't wrap my head around the service. Here is what I currently have:
// On click on the event detail expander
$scope.grabEventDetails = function (dataReady, index) {
    if (dataReady == false) {
        retrieveEventDetails($scope.vehicles[index].events);
    }
}

var retrieveEventDetails = function (events) {
    // events is an array
    var deferred = $q.defer();
    var promise = deferred.promise;

    var retrieveData = function (data) {
        return $http({
            url: '/api/eventdetails',
            method: 'POST',
            data: {
                event_number: data.number
            },
            isArray: true
        });
    }

    _.each(events, function (single_event) {
        promise.then(retrieveData(single_event).success(function (data) {
            console.log(data);
        }));
    });
}
This still fires the requests asynchronously. Where am I going wrong?
I understand that firing the requests one at a time isn't the best idea; at the moment I just want to see whether the API's performance improves at all.
You don't need $q to create a promise here, as $http already returns one.
_.each fires all the callbacks immediately, without waiting for the promise.
All you do is tell the promise to call retrieveData for every event once it resolves, and since you never resolve it, the chain shouldn't even be running.
You could use a recursive call like this:
var retrieveEventDetails = function (events) {
    var evt = events.shift();
    if (!evt) return; // guard added: stop once all events are processed
    $http({
        url: '/api/eventdetails',
        method: 'POST',
        data: {
            event_number: evt.number
        },
        isArray: true
    }).then(function (response) {
        console.log(response.data);
        retrieveEventDetails(events); // fire the next request only after this one finishes
    });
}
I do think you should use $q as some other part of your application might need to get a promise.
A good example would be $routeProvider resolve option.
I made a little demo in plunker.
Solution:
The retrieveData function should return a function (which returns a promise) instead of just a promise.
That way we can create a promise chain: promise.then(fn).then(fn).then(fn).then(null, errorFn)
We must resolve the first promise to kick off the chain.
var retrieveEventDetails = function (events) {
    // events is an array
    var deferred = $q.defer();
    var promise = deferred.promise;

    var retrieveData = function (data) {
        return function () {
            return $http({
                url: '/api/eventdetails',
                method: 'POST',
                data: {
                    event_number: data.number
                },
                isArray: true
            });
        };
    }

    deferred.resolve();
    return events.reduce(function (promise, single_event) {
        return promise.then(retrieveData(single_event));
    }, promise);
}
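Since the reduce chain returns the final promise, the caller can react once every event detail has loaded; a sketch (whether dataReady lives on the vehicle object is an assumption, the flag itself comes from the question's click handler):

retrieveEventDetails($scope.vehicles[index].events).then(function () {
    $scope.vehicles[index].dataReady = true; // assumption: mark the listing as loaded
});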
I'm not sure you even need $q here. In this example, each piece of data is registered in the controller as soon as it comes back from the call.
Live demo (click).
var app = angular.module('myApp', []);

app.controller('myCtrl', function ($scope, myService) {
    $scope.datas = myService.get();
});

app.factory('myService', function ($http) {
    var myService = {
        get: function () {
            var datas = {};
            var i = 0;
            var length = 4;
            makeCall(i, length, datas);
            return datas;
        }
    }

    function makeCall(i, length, datas) {
        if (i < length) {
            $http.get('test.text').then(function (resp) {
                datas[i] = resp.data + i;
                ++i;
                makeCall(i, length, datas);
            });
        }
    }
    return myService;
});
Here's a way using $q.all() that you can wait for all of the data to come through before passing it to the controller: Live demo (click).
var app = angular.module('myApp', []);

app.controller('myCtrl', function ($scope, myService) {
    myService.get().then(function (datas) {
        $scope.datas = datas;
    })
});

app.factory('myService', function ($q, $http) {
    var myService = {
        get: function () {
            var deferred = $q.defer();
            var defs = [];
            var promises = [];
            var i = 0;
            var length = 4;
            for (var j = 0; j < length; ++j) {
                defs[j] = $q.defer();
                promises[j] = defs[j].promise;
            }
            makeCall(i, length, defs);
            $q.all(promises).then(function (datas) {
                deferred.resolve(datas);
            });
            return deferred.promise;
        }
    }

    function makeCall(i, length, defs) {
        if (i < length) {
            $http.get('test.text').then(function (resp) {
                defs[i].resolve(resp.data + i);
                ++i;
                makeCall(i, length, defs);
            })
        }
    }
    return myService;
});
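As an aside (a sketch, not from the original answers): if the calls don't need to run one after another, $q.all can consume the $http promises directly, with no intermediate deferreds:

app.factory('myService', function ($q, $http) {
    return {
        get: function () {
            var promises = [];
            for (var i = 0; i < 4; i++) {
                promises.push($http.get('test.text')); // fires immediately, in parallel
            }
            return $q.all(promises); // resolves with an array of all four responses
        }
    };
});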

prototype ajax, undefined json after second setTimeout

I'm having a problem with Prototype Ajax and setTimeout. Here is my code, shortened:
// new Ajax request
// ...
onComplete: function (transport) { // json as this -> array[$i].something
    var json = transport.responseJSON;
    var $i = 0;

    // display every json[$i] with a custom delay
    window.setTimeout(function () {
        SLOW();
    }, 500);

    function SLOW() {
        $i++;
        if (json[$i].something !== null) {
            // insert into the proper div id in the html document
            window.setTimeout(function () {
                $('document_div' + json[$i].something).innerHTML = json[$i].something_to_display;
            }, 500);
            window.setTimeout(function () {
                $('document_div' + json[$i].something).innerHTML = json[$i].something_to_display;
            }, 1000);
            // ...
            window.setTimeout(function () {
                SLOW();
            }, 500);
        } else {
            // stop and continue
        }
    }
}
I'm getting this error: json[$i] is undefined.
EDIT: it looks like I get this error on the second timeout; the first one changes the div correctly.
Solved.
The solution was to copy the json values into new variables before using them in setTimeout:
var json_something = json[$i].something; // and so on...
var json_something_to_display = json[$i].something_to_display;

window.setTimeout(function () {
    $('document_div' + json_something).innerHTML = json_something_to_display;
}, 500);
Can somebody explain why this is needed? Why isn't var-ing json once enough, and why does it seem to disappear after the first window.setTimeout call?
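For illustration, this is general JavaScript closure behavior rather than anything Prototype-specific: the timeout callbacks do not copy json[$i] when they are created, they capture the variables json and $i themselves, and SLOW keeps incrementing $i before the callbacks run, so by the time they fire $i can already point past the end of the array. Copying the values into fresh variables per step, as in the fix above, freezes them. A minimal standalone sketch:

var items = ['a', 'b', 'c'];

for (var i = 0; i < items.length; i++) {
    setTimeout(function () {
        console.log(items[i]); // logs undefined three times: `i` is already 3
    }, 100);
}

for (var j = 0; j < items.length; j++) {
    (function (item) { // copy the value per iteration, like the re-var fix above
        setTimeout(function () {
            console.log(item); // logs 'a', 'b', 'c'
        }, 100);
    })(items[j]);
}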
