PhantomJS: how to tell whether the proxy or the site is not responding

Script test.js:
// PhantomJS script: open the URL given as the first CLI argument and
// print the load status ('success' or 'fail').
var page = require('webpage').create();
// CLI arguments come from PhantomJS's 'system' module; args[0] is the
// script name, args[1] the first user-supplied argument (the URL).
// A bare `args` identifier is undefined and would crash the script.
var args = require('system').args;
var url = args[1];
page.open(url, function (status) {
    console.log(status);
    phantom.exit();
});
Run script:
phantomjs --proxy=1.1.1.1:22 test.js 'http://nonexistent_site.com'
1.1.1.1:22 - nonexistent server
http://nonexistent_site.com - nonexistent site
How can I determine in PhantomJS which one is not responding - a proxy or a site?

You can catch network timeouts with page.onResourceTimeout callback:
// Invoked by PhantomJS when a resource exceeds page.settings.resourceTimeout.
page.onResourceTimeout = function(request) {
// Dump the whole request object so the timed-out URL and request id are visible.
console.log('Response (#' + request.id + '): ' + JSON.stringify(request));
};
You can also set your own timeout:
page.settings.resourceTimeout = 3000; // ms
To intercept network errors you can register page.onResourceError callback:
// Invoked by PhantomJS whenever a resource fails to load (DNS failure,
// connection refused, etc.); errorCode/errorString identify the cause.
page.onResourceError = function(resourceError) {
console.log('Unable to load resource #' + resourceError.id + ' URL:' + resourceError.url);
console.log('Error code: ' + resourceError.errorCode + '. Description: ' + resourceError.errorString);
};
With this in place, a non-existent host will trigger a 'Host not found' error.
But if you use a non-working proxy, you will always get the 'Network timeout on resource' error first, even if the target host does not exist.
So if you want to check proxies, I'd suggest opening (with page.open) hosts that are guaranteed to be working — for example, set up a simple static web page on the very server that you are operating from.
Also there is a node.js module: proxy-checker

Related

Possible to call http gets with Alexa hosted skill?

I have been trying without success to use http module in my Node.js endpoint to do a simple http get.
I have followed the various tutorials to execute the get within my intent, but it keeps failing with getaddrinfo ENOTFOUND in the cloudwatch log.
It seems like I am preparing the URL correctly; if I just cut and paste the URL output into the browser I get the expected response, and it's just a plain HTTP GET over port 80.
I suspect that maybe the Alexa hosted lambda doesn't have permission necessary to make remote calls to non-amazon web services, but I don't know this for sure.
Can anybody shed any light? FYI this is the code in my lambda:
var http = require('http');
/**
 * Query the Zillow GetSearchResults web service for a property.
 * @param {string} address  - street address (URL-encoded before sending)
 * @param {string} zip      - city/state/zip value for the query
 * @param {string} zillowid - Zillow ZWS API key
 * @returns {Promise<Object>} resolves with the parsed JSON response body
 */
function httpGet(address, zip, zillowid) {
  // The http module needs the hostname and the path+query as SEPARATE
  // options. Passing the whole URL as 'host' makes DNS try to resolve
  // the full string, which fails with getaddrinfo ENOTFOUND.
  const pathval = '/webservice/GetSearchResults.htm' +
    `?zws-id=${zillowid}` +
    `&address=${encodeURIComponent(address)}&citystatezip=${zip}`;
  console.log("pathval =" + pathval);
  return new Promise((resolve, reject) => {
    const options = {
      host: 'www.zillow.com',
      path: pathval,
      port: 80,
      method: 'GET',
    };
    const request = http.request(options, (response) => {
      response.setEncoding('utf8');
      let returnData = '';
      response.on('data', (chunk) => {
        returnData += chunk;
      });
      response.on('end', () => {
        // Guard the parse: a non-JSON body (e.g. an HTML error page)
        // would otherwise throw outside the promise chain.
        try {
          resolve(JSON.parse(returnData));
        } catch (parseError) {
          reject(parseError);
        }
      });
    });
    // Connection-level failures (DNS, refused, timeout) are emitted on
    // the REQUEST object, not the response, so listen there.
    request.on('error', (error) => {
      console.log("I see there was an error, which is " + error);
      reject(error);
    });
    request.end();
  });
}
host: pathval is incorrect usage of the Node.js http module. You need to provide the hostname and the path + query string as two different options.
An example of correct usage:
host: 'example.com',
path: '/webservice/GetSearchResults.htm?zws-id=...',
(Of course, these can be variables, they don't need to be literals as shown here for clarity.)
The error occurs because you're treating the whole URL as a hostname, and as such it doesn't exist.
I suspect that maybe the Alexa hosted lambda doesn't have permission necessary to make remote calls to non-amazon web services
There is no restriction on what services you can contact from a within a Lambda function (other than filters that protect against sending spam email directly to random mail servers).

Geoserver - GetFeature SHAPE-ZIP request - 413 error

I am using Geoserver with an app written with OpenLayers 3. The app can download zipped shapefiles using a WFS service, which works unless I make a large (long URL) request. In that case I get a 413 error in Chrome.
Is there a way I can change this setting so that I can make a longer request to GeoServer (or is the problem something else)?
Here is the request:
// Download the selected features as a zipped shapefile via a WFS
// GetFeature request, triggered through a synthetic <a> click.
$('#btnDownloadSHP').click(function (e) {
    var tostring = '(' + ids.toString() + ')';
    var data = {
        service: 'WFS',
        version: '1.1.0',
        request: 'GetFeature',
        typename: 'download_layer',
        format_options: "filename:" + shapefileName,
        srsname: 'EPSG:3857',
        outputFormat: 'SHAPE-ZIP',
        CQL_FILTER: "id IN " + tostring
    };
    // Encode each value so spaces/parentheses in the CQL filter survive
    // the trip. NOTE: a very long id list can still exceed the server's
    // URL length limit (HTTP 413) — switching to POST is the robust fix.
    var parameters = Object.keys(data).map(function (key) {
        return key + '=' + encodeURIComponent(data[key]);
    }).join('&');
    var url = "http://" + servername + "/geoserver/wfs?" + parameters;
    // Make a dummy link and trigger the shapefile download.
    var link = document.createElement("a");
    link.download = 'Features';
    link.href = url;
    link.click();
});
That response would be generated by the server that GeoServer is running on rather than GeoServer itself. So depending on which httpd and/or servlet engine you are using you may be able to fix it there.
But the easy answer is to switch from GET to POST.

Accessing ProcessMaker BPM framework's APIs from an external application

I have followed the tutorial from the PM docs (http://wiki.processmaker.com/3.1/OAuth_2.0) and have not success accessing the access token.
Currently I am using the trial version of PM and I would like to access the APIs in my java application js file, but the browser returns the following error "XMLHttpRequest cannot load 'myPMServerAddress' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource. Origin 'http://localhost:8100' is therefore not allowed access.".
Any help??
I registered my apps server (http://localhost:8100) within the Website box of the (User Applications -> +New) form and my code looks as followed:
// Request an OAuth 2.0 access token from a ProcessMaker trial server
// using the password grant, then store it in cookies.
var restServer = 'https://trial.processmaker.com/';
// Workspace path segment: 'sys' + workspace name, per ProcessMaker URL scheme.
var workspace = 'sysmyWorkspace/';
var jqxhr = $.ajax({
type: "POST",
url: restServer + workspace + 'oauth2/token',
data: {
grant_type : 'password',
scope : '*',
client_id : 'myClientId',
client_secret: 'myClientSecret',
username : 'admin',
password : 'myPassword'
}
})
.done( function(data) {
// A 200 response may still carry an OAuth error payload; check both cases.
if (data.error) {
alert("Error in login!\nError: " + data.error + "\nDescription: " + data.error_description);
}
else if (data.access_token) {
alert("data access token received!");
// Access tokens expire after one hour; persist both tokens as cookies.
var d = new Date();
d.setTime(d.getTime() + 60*60*1000);
document.cookie = "access_token=" + data.access_token + "; expires=" + d.toUTCString();
document.cookie = "refresh_token=" + data.refresh_token; //refresh token doesn't expire
}
else {
alert(JSON.stringify(data, null, 4));
}
})
.fail(function(data, statusText, xhr) {
// NOTE(review): jQuery's fail signature is (jqXHR, textStatus, errorThrown);
// the jqXHR is the FIRST argument, so xhr.status here may be undefined — verify.
alert("Failed to connect.\nHTTP status code: " + xhr.status + ' ' + statusText);
});
});
You need to disable CORS in client side
for Ubuntu:
google-chrome --disable-web-security --user-data-dir
for Ms Windows:
Go into the command prompt and go into the folder where Chrome.exe is and type
chrome.exe --disable-web-security
After disabling web security this way, I can test the requests with no CORS errors.

node.js server running but not loading

I've just installed node.js on my computer running Win7(64bit).
The problem is that when I run a simple hello-world application it is running (as confirmed by console.log() and me pushing the code to OpenShift where it works just fine) but when I try to load the page in localhost:1337 it just keeps on loading (eventually times out).
I've no idea what to check, since firewall is not blocking node and I'm not running anything that would block the port.
Here's the server code.
#!/bin/env node
// Minimal HTTP server: replies "Hello HTTP!" to every request.
// Uses the OpenShift-provided IP/port when present, otherwise 127.0.0.1:1337.
var http = require("http");
var ipaddr = process.env.OPENSHIFT_NODEJS_IP || "127.0.0.1";
var port = process.env.OPENSHIFT_NODEJS_PORT || 1337;
http.createServer(function (request, response) {
    // Respond immediately. Waiting for the request's "end" event without
    // consuming the body leaves the request stream paused, so the event
    // never fires and no response is ever sent — the browser just hangs.
    response.writeHead(200, {
        'Content-Type': 'text/plain'
    });
    response.end('Hello HTTP!');
}).listen(port, ipaddr);
console.log('It works');
console.log('IP : ' + ipaddr + '\nPort : ' + port);
Any help is appreciated, thank you.
edit
Here's a screenshot of commandline output.
http://i.stack.imgur.com/GGaLD.png
The node server is hanging as you need to always call response.end.
I believe that listening to the end event on the request is causing the timeout. If you remove it will work.

prototype javascript ajax request runs back end perl script but continues to return 500

Newbie to this — this code works, in that the call to the script does what it is supposed to, but it returns status 500 and I cannot see why. I am looking for any suggestions or changes that I should make to get this working.
Thanks to all who respond.
// Fetch an updated odometer value for a vehicle via a synchronous
// Prototype Ajax.Request, then copy the server's response (placed in
// #rcontainer by successFunc) into the #odometer input.
// NOTE(review): the url value is a Template Toolkit / Catalyst macro
// expanded server-side before this script reaches the browser.
function get_update_odometer(vehicle_key,odometer_value){
var url = "[%Catalyst.uri_for('/invoice/dispatch_util/get_update_odometer')%]";
new Ajax.Request(url, {
method: 'get',
parameters: {
key: vehicle_key,
ovalue: odometer_value
},
// Synchronous on purpose: the lines below read #rcontainer immediately
// and depend on onSuccess having already updated it.
asynchronous:false,
onSuccess: successFunc,
onFailure: failureFunc
});
var return_v = $('rcontainer').innerHTML;
document.getElementById('odometer').value = return_v;
return true;
}
// Ajax success handler: copy the response body into #rcontainer,
// but only for a genuine HTTP 200 response.
function successFunc(response) {
    if (response.status !== 200) {
        return;
    }
    $('rcontainer').update(response.responseText);
}
// Ajax failure handler: surface the HTTP status code to the user.
function failureFunc(response) {
    var statusCode = response.status;
    alert("Call has failed " + statusCode);
}
Error code is coming from server side, and you provided the client part.
So have a look if your server script get_update_odometer is working, is callable by your web server and etc ...

Resources