I have the following Node.js code, per this and this:
const request = require('request');
const WebSocket = require('ws');

const ws = new WebSocket('wss://ws.tradier.com/v1/markets/events');

request({
  method: 'post',
  url: 'https://api.tradier.com/v1/markets/events/session',
  form: {},
  headers: {
    'Authorization': 'Bearer MY_API_KEY_NOT_SHOWN',
    'Accept': 'application/json'
  }
}, (error, response, body) => {
  console.log(response.statusCode);
  console.log(body);
  let data = JSON.parse(body);
  let sessionId = data.stream.sessionid;
  streamPrice(sessionId);
});
function streamPrice(sessionId) {
  console.log(sessionId);

  ws.on('open', function open() {
    console.log('Connected, sending subscription commands...');
    ws.send(`{"symbols": ["TSLA"], "sessionid": "${sessionId}", "linebreak": true}`);
  });

  ws.on('message', function incoming(data) {
    console.log(data);
  });

  ws.on('error', function error(data) {
    console.log(data);
  });
}
I get a 200 OK back from the API request that creates the WebSocket session, and I have a valid session ID:
200
{"stream":{"url":"https:\/\/stream.tradier.com\/v1\/markets\/events","sessionid":"6ba4158d-8ff8-46c3-b005-***********"}}
6ba4158d-8ff8-46c3-b005-***********
However, the ws.on() events never fire. I am not getting any errors. The session does close after a period of time, presumably due to inactivity. But it's not inactivity on my code's part...
Is there something wrong in my code / something I'm missing to make this work?
I was able to identify the issue myself. The problem is that I was opening the WebSocket too early.
I moved the following line into the scope of streamPrice, instead of the global scope, to resolve the issue.
const ws = new WebSocket('wss://ws.tradier.com/v1/markets/events');
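For reference, a minimal sketch of how the corrected flow can look, assuming the same ws package and session request shown above; the point is simply that the socket is created inside streamPrice, so the 'open' handler is attached before the connection completes:

function streamPrice(sessionId) {
  // Create the socket only once a session ID is available.
  const ws = new WebSocket('wss://ws.tradier.com/v1/markets/events');

  ws.on('open', function open() {
    // Subscribe after the socket is actually open.
    ws.send(JSON.stringify({ symbols: ['TSLA'], sessionid: sessionId, linebreak: true }));
  });

  ws.on('message', (data) => console.log(data));
  ws.on('error', (err) => console.error(err));
}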
I am trying to set up an E2E Cypress test, and as part of it I am trying (but failing) to get the events from a mocked SSE connection; I am emitting the push events before setting up the SSE connection.
Can you please help me sort this out? I might be doing the whole thing wrong or missing something.
Note: as per this PR, Cypress supports SSE - https://github.com/cypress-io/cypress/pull/2054
I am not able to find any reference to SSE support in the Cypress docs - https://docs.cypress.io/api/commands/route.html
import { EventEmitter } from 'events';

const EEmitter = new EventEmitter();

cy.route({
  method: 'GET',
  url: `**/documentprocessing/startprocess`,
  status: 200,
  response: {
    "uniqueId": "abcd12345677",
  },
})
  .as(`startprocess`)
  .route({
    method: 'GET',
    status: 200,
    url: '**/documentprocessing/getSSEStatus/**',
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      Connection: 'keep-alive',
    },
    onResponse: (response) => {
      // Intent: write SSE-formatted events into the mocked response
      EEmitter.on('push', function (event, data) {
        response.write(
          'event: ' + String(event) + '\n' +
          'data: ' + String(data) + '\n\n'
        );
      });
    },
  })
  .as(`sseStatus`);
In the code below, after the 1st API call (#startprocess), I am emitting push events,
and then trying to get the push events in the response (in the #sseStatus call above).
cy.route(`#startprocess`);
setTimeout(function() {
EEmitter.emit('push','message', { 'uniqueId':'abcd12345677' ,'uploadStatus':'Started'});
}, 1000);
setTimeout(function() {
EEmitter.emit('push','message', { 'uniqueId':'abcd12345677' ,'uploadStatus':'Complete'});
}, 3000);
cy.wait(3000)
cy.wait(`#sseStatus`);
I had a similar issue where I needed to mock an API call with text/event-stream as the content type.
I have mocked API calls using JSON fixtures in the past; that is easy, you just do something like
cy.intercept('GET', '/endpoint?*', { fixture: 'folder/my-json.json' }).as('my-api-call');
but with text/event-stream I had to do it a little differently (keep in mind this is what I did; it doesn't mean it is the best way to do it, since I couldn't find anything in the official documentation):
import json from 'path/to/myjson.json';

cy.intercept('GET', '/endpoint*', (req) =>
  req.reply(`data: ${JSON.stringify(json)}\n\n`, {
    'content-type': 'text/event-stream'
  })
).as('my-api-call');
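If several events are needed (as with the Started/Complete sequence in the question), the reply body can simply carry multiple SSE blocks separated by blank lines. This is only a hedged sketch along the same lines, with the field values and URL pattern borrowed from the question:

const started = { uniqueId: 'abcd12345677', uploadStatus: 'Started' };
const complete = { uniqueId: 'abcd12345677', uploadStatus: 'Complete' };

cy.intercept('GET', '**/documentprocessing/getSSEStatus/**', (req) =>
  req.reply(
    // One mocked response carrying two SSE events.
    `data: ${JSON.stringify(started)}\n\n` +
    `data: ${JSON.stringify(complete)}\n\n`,
    { 'content-type': 'text/event-stream' }
  )
).as('sseStatus');

cy.wait('@sseStatus');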
I created a Web API in ASP.NET Core, and I need to consume it using React. The Web API works normally: if I use curl or Postman, among others, it works fine. The problem starts when I use React; when I try to make any request to my API from JavaScript, it fails.
To complicate matters further, when I make requests to other APIs it works normally. This led me to believe the problem was in my API, but as I said, the API works with other clients; it is only with React that it does not. I've tried it in many ways.
The API is running on IIS on my local network.
What I have tried
Using Ajax
$.ajax({
  method: "POST",
  url: 'http://192.168.0.19:5200/api/token',
  beforeSend: function (xhr) {
    xhr.setRequestHeader("Content-type", "application/json");
  },
  data: {
    name: 'name',
    password: 'password'
  },
  success: function (message) {
    console.log(message);
  },
  error: function (error) {
    /* if (error.responseJSON.modelState)
         showValidationMessages(error.responseJSON.modelState); */
    console.log(error);
  }
});
Using Fetch
const headers = new Headers();
headers.append('Content-Type', 'application/json');

const options = {
  method: 'POST',
  headers,
  body: JSON.stringify(login),
  mode: 'cors' // I tried with cors and no-cors
};

const request = new Request('http://192.168.0.19:5200/api/token', options);
const response = await fetch(request);
const status = response.status;
console.log(response);

// POST adds a random id to the object sent
fetch('http://192.168.0.19:5200/api/token', {
  method: 'POST',
  body: JSON.stringify({
    name: 'name',
    password: 'password'
  }),
  headers: {
    "Content-type": "application/json; charset=UTF-8"
  },
  credentials: 'same-origin'
})
  .then(response => response.json())
  .then(json => console.log(json));
Using XMLHttpRequest
var request = new XMLHttpRequest();
request.open('POST', 'http://192.168.0.19:5200/api/token', true);
request.setRequestHeader('Content-Type', 'application/json; charset=UTF-8');
request.send(login);
ERRORS (console and Network tab screenshots not shown)
When I make the request without changing the content type to JSON, it works, in the sense that the API responds saying that it is not a valid type.
Apart from allowing CORS in your .NET configuration, you also need to return 200 OK for all OPTIONS requests.
Not sure how it's done in .NET, but just create a middleware that detects the METHOD of the request, and if it's OPTIONS, finish the request right there with a 200 status.
Well I had the same issue and it seems that you need to add the action to the HttpPost attribute in the controller.
Here is an example.
[HttpPost("[action]")]
public IActionResult SubmitTransaction([FromBody] SubmitTransactionIn request)
{
    return Ok();
}
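To connect this to the React side: with [HttpPost("[action]")] the action name becomes part of the route, so the front-end call would target a URL like the one below. This is only a hedged sketch; the controller name, route prefix, and request fields are assumptions, not taken from the question.

// Hypothetical front-end call; controller and route names are assumptions.
fetch('http://192.168.0.19:5200/api/transactions/SubmitTransaction', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ /* fields of SubmitTransactionIn */ })
})
  .then(response => console.log(response.status));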
Try it like this:
public void ConfigureServices(IServiceCollection services)
{
    services.AddCors();
}

public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
    app.UseCors(option => option.AllowAnyOrigin().AllowAnyHeader().AllowAnyMethod().AllowCredentials());
    app.UseAuthentication();
    app.UseMvc();
}
So I have been working on a serverless configuration that calls a Lambda function through AJAX. I enabled CORS through API Gateway, and I have double-checked the domain I specified. This domain works when calling other Lambda functions within the same API.
Now for the weird stuff.
I send a POST request to my API (I am trying to upload a file through AJAX, Lambda, and S3). If I configure the Access-Control-Allow-Origin header so that it points to the domain WITHOUT the http:// in front of it, e.g. example.com, then when I try to call it I get:
Failed to load https://m562ogkc1l.execute-api.us-east-1.amazonaws.com/test/upload: Response to preflight request doesn't pass access control check: The 'Access-Control-Allow-Origin' header has a value 'example.com' that is not equal to the supplied origin. Origin 'http://example.com' is therefore not allowed access.
OK, fine, that is expected, since that's not the proper origin. So when I add the http:// (http://example.com) to the API's CORS configuration, I get:
Failed to load https://m562ogkc1l.execute-api.us-east-1.amazonaws.com/test/upload: No 'Access-Control-Allow-Origin' header is present on the requested resource. Origin 'http://example.com' is therefore not allowed access. The response had HTTP status code 400.
This makes it seem like there is an issue elsewhere, except I don't know where the issue lies.
I have made sure the data I pass as parameters of the AJAX call is stringified (JSON.stringify()), and I am NOT using a Lambda proxy integration, which means I shouldn't have to configure responses on the Lambda side of things.
This all really confuses me and I wish AWS had better documentation and examples since they really want to push these serverless services.
Further code is here:
Ajax:
$('#submitButton').on('click', function () {
  //console.log(document.getElementById('fileUpload').value.substring(12)); //C:\fakepath\ in front of filename (size = 12)
  $.ajax({
    type: 'POST',
    url: 'https://m562ogkc1l.execute-api.us-east-1.amazonaws.com/test/upload',
    data: JSON.stringify({"id": id, "name": document.getElementById('fileUpload').value.substring(12), "body": document.getElementById('fileUpload').files[0]}),
    contentType: "application/json",
    success: function (data) {
      console.log(data);
      //location.reload();
    }
  });
  return false;
});
Lambda:
const AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event) => {
  let encodedImage = JSON.parse(event.body);
  let decodedImage = Buffer.from(encodedImage, 'base64');
  var filePath = event.id + '/' + event.name;

  var params = {
    "Body": decodedImage,
    "Bucket": "repository.example.com",
    "Key": filePath
  };

  return await new Promise((resolve, reject) => {
    s3.upload(params, function (err, data) {
      if (err) {
        let response = {
          "statusCode": 200,
          "headers": {
            "Access-Control-Allow-Origin": "http://example.com"
          },
          "body": JSON.stringify(err),
          "isBase64Encoded": false
        };
        resolve(response);
      } else {
        let response = {
          "statusCode": 200,
          "headers": {
            "Access-Control-Allow-Origin": "http://example.com"
          },
          "body": JSON.stringify(data),
          "isBase64Encoded": false
        };
        resolve(response);
      }
    });
  });
};
(Yes I threw in some response configuration for the function, I just wanted to see if it would work)
So we get a file (an image file) in the front-end like so:
//html
<input type="file" ng-change="onFileChange">
//javascript
$scope.onFileChange = function (e) {
  e.preventDefault();
  let file = e.target.files[0];
  // I presume this is just a binary file
  // I want to HTTP POST this file to a server
  // without using form-data
};
What I want to know is: is there a way to POST this file to a server without including the file as form-data? The problem is that the server I am sending the HTTP POST request to doesn't really know how to store form-data when it receives a request.
I believe this is the right way to do it, but I am not sure.
fetch('www.example.net', { // Your POST endpoint
  method: 'POST',
  headers: {
    "Content-Type": "image/jpeg"
  },
  body: e.target.files[0] // the file
})
  .then(
    response => response.json() // if the response is a JSON object
  )
You can directly attach the file to the request body. Artifactory doesn't support form uploads (and it doesn't look like they plan to).
You'll still need to proxy the request somehow to avoid CORS issues, and if you're using user credentials, you should be cautious about how you treat them. Also, you could use a library like http-proxy-middleware to avoid having to write/test/maintain the proxy logic yourself (see the sketch after the snippet below).
<input id="file-upload" type="file" />
<script>
  function upload(data) {
    var file = document.getElementById('file-upload').files[0];
    var xhr = new XMLHttpRequest();
    xhr.open('PUT', 'https://example.com/artifactory-proxy-avoiding-cors');
    xhr.send(file);
  }
</script>
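For the http-proxy-middleware route, a minimal sketch with Express follows. The target host, repository path, and credentials handling are assumptions and would need to match your Artifactory setup; the exact import also varies with the http-proxy-middleware version.

// Hypothetical proxy setup; host, path, and credentials are placeholders.
const express = require('express');
const { createProxyMiddleware } = require('http-proxy-middleware');

const app = express();

app.use(
  '/artifactory-proxy-avoiding-cors',
  createProxyMiddleware({
    target: 'https://artifactory.example.com', // hypothetical Artifactory host
    changeOrigin: true,
    pathRewrite: { '^/artifactory-proxy-avoiding-cors': '/artifactory/my-repo' }, // hypothetical repo path
    headers: {
      Authorization: 'Basic ' + Buffer.from(process.env.ARTIFACTORY_CREDS || '').toString('base64'),
    },
  })
);

app.listen(3000);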
Our front-end could not HTTP POST directly to the JFrog/Artifactory server. So we ended up using a Node.js server as a proxy, which is not very ideal.
Front-end:
// in an AngularJS controller:
$scope.onAcqImageFileChange = function (e) {
  e.preventDefault();
  let file = e.target.files[0];
  $scope.acqImageFile = file;
};

// in an AngularJS service
createNewAcqImage: function (options) {
  let file = options.file;
  return $http({
    method: 'POST',
    url: '/proxy/image',
    data: file,
    headers: {
      'Content-Type': 'image/jpeg'
    }
  });
},
Back-end:
const express = require('express');
const http = require('http');
const uuid = require('uuid');

const router = express.Router();

router.post('/image', function (req, res, next) {
  const filename = uuid.v4();
  const proxy = http.request({
    method: 'PUT',
    hostname: 'engci-maven.nabisco.com',
    path: `/artifactory/cdt-repo/folder/${filename}`,
    headers: {
      'Authorization': 'Basic ' + Buffer.from('cdt-deployer:foobar').toString('base64'),
    }
  }, function (resp) {
    resp.pipe(res).once('error', next);
  });

  req.pipe(proxy).once('error', next);
});

module.exports = router;
Note that we had to use a PUT request to send an image to Artifactory, not a POST; that's something to do with Artifactory (the engci-maven.nabisco.com server is an Artifactory server). As I recall, I got CORS issues when trying to POST directly from our front-end to the other server, so we had to use our server as a proxy, which is something I'd rather avoid, but oh well for now.
I have the following Node.js server set up, listening on port 9001:
var https = require('https');
var fs = require('fs');
var qs = require('querystring');

var options = {
  key: fs.readFileSync('privatekey.pem'),
  cert: fs.readFileSync('certificate.pem')
};

https.createServer(options, function (req, res) {
  res.writeHead(200);
  console.log('Request Received!');
  console.log(req.method);

  if (true || req.method == 'POST') {
    var body = '';
    req.on('data', function (data) {
      body += data;
    });
    req.on('end', function () {
      console.log(body);
      var POST = qs.parse(body);
      console.log(POST);
    });
  }

  res.end("hello, world\n");
}).listen(9001);
and I am trying to get this server to respond to an AJAX call
function form_save() {
  console.log("submitted!");

  var data_obj = {
    data1: "item1",
    data2: "item2"
  };

  $.ajax({
    url: 'https://adam.testserver.com:9001/',
    type: "POST",
    dataType: "json",
    data: data_obj,
    success: function () {
      console.log("success!");
    },
    complete: function () {
      console.log("complete!");
    }
  });
}
There are two problems occurring with my arrangement. The first is that if I start the server and then click the button that triggers my form_save(), the Node server does not respond and I get the following error:
submitted!
OPTIONS https://adam.testserver.com:9001/ Resource failed to load
jQuery.extend.ajaxjquery.js:3633
$.ajaxjquery.validate.js:1087
form_savew_worksheet.php:64
confirm_deletew_worksheet.php:95
jQuery.event.handlejquery.js:2693
jQuery.event.add.handlejquery.js:2468
w_worksheet.php:73
complete!
At this point, if I access that URL directly (https://adam.testserver.com:9001/), I get the expected "hello, world" output as well as the console message "Request Received!
GET". From this point on, if I click the button to trigger my AJAX call, I get a new error.
submitted!
XMLHttpRequest cannot load https://adam.testserver.com:9001/. Origin
https://adam.testserver.com is not allowed by Access-Control-Allow-Origin.
w_worksheet.php:73
complete!
I don't understand why I get this message, as both my form and Node server reside on the same server. Thanks for taking the time to read; I appreciate any help I can get on this. I've been stuck for a while now!
You've run into the Cross-Origin Resource Sharing (CORS) specification.
Note the OPTIONS in your output. The OPTIONS HTTP Verb is used by the browser to query the web server about the URL, not to GET its contents or POST data to it.
Your server doesn't respond with the correct header data on a CORS request, so your browser assumes it has no rights to access the data, and refuses to GET or POST to the URL.
If you truly want to let any website in the world run that AJAX request, you can do something similar to the following:
function handleOptions(request, response) {
  response.writeHead(200, {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "POST, GET, OPTIONS",
    "Access-Control-Allow-Headers": request.headers["access-control-request-headers"]
  });
  response.end();
}

function server(request, response) {
  if (request.method == "POST") {
    handlePost(request, response);
  } else if (request.method == "OPTIONS") {
    handleOptions(request, response);
  } else {
    handleOther(response);
  }
}

https.createServer(sslObj, server).listen(9001);
You can fill in the details, such as whether you should handle GET separately, and so on (handleOther should return an appropriate error code for each request method you don't support). A rough sketch of those remaining handlers follows.
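This is only a hypothetical illustration, not part of the original answer; it reuses the qs = require('querystring') helper from the question's server. Note that the response to the actual POST also needs the Access-Control-Allow-Origin header, or the browser will still refuse to hand the result to the calling script.

function handlePost(request, response) {
  var body = '';
  request.on('data', function (chunk) { body += chunk; });
  request.on('end', function () {
    // Echo the parsed form body back with the CORS header set.
    response.writeHead(200, {
      "Access-Control-Allow-Origin": "*",
      "Content-Type": "application/json"
    });
    response.end(JSON.stringify({ received: qs.parse(body) }));
  });
}

function handleOther(response) {
  // Reject unsupported methods with 405 and advertise what is allowed.
  response.writeHead(405, { "Allow": "POST, GET, OPTIONS" });
  response.end();
}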