Think I must be missing something, but I don't have the debug capability to check. Web app passes data to node.js server through socket.io, but doesn't emit anything back.
server.js
// Load required modules
var http = require("http"); // http server core module
var express = require("express"); // web framework external module
var io = require("socket.io"); // web socket external module
var easyrtc = require("easyrtc"); // EasyRTC external module
// Setup and configure Express http server. Expect a subfolder called "static" to be the web root.
var httpApp = express();
httpApp.use(express.static(__dirname + "/static/"));
// Start Express http server on port 8080
var webServer = http.createServer(httpApp).listen(8080);
// Start Socket.io so it attaches itself to Express server
var socketServer = io.listen(webServer, {"log level":1});
// Start EasyRTC server.
// BUG FIX: EasyRTC never emits a per-msgType event such as 'get_candy', so the
// original `easyrtc.events.on('get_candy', ...)` handler could not fire. A
// client call to easyrtc.sendServerMessage() arrives on the server as the
// "easyrtcMsg" event; replying through socketCallback resolves the client's
// success callback.
var rtc = easyrtc.listen(httpApp, socketServer, null, function(err, rtcRef) {
    if (err) { throw err; }
    rtcRef.events.on("easyrtcMsg", function(connectionObj, msg, socketCallback, next) {
        if (msg.msgType === "get_candy") {
            // Ack goes back to the client's sendServerMessage success callback.
            socketCallback({ msgType: "get_candy", msgData: { barCount: 1 } });
            return next(null);
        }
        // Let EasyRTC handle every other message type as usual.
        rtcRef.events.defaultListeners.easyrtcMsg(connectionObj, msg, socketCallback, next);
    });
});
file.js
// file.js — client side: ask the server for candy and listen for pushes.

// Success handler: the server's ack to our 'get_candy' request.
var onCandyReply = function(msgType, msgData ) {
    console.log("got candy count of " + msgData.barCount);
};

// Failure handler for the request.
var onCandyError = function(errorCode, errorText) {
    console.log("error was " + errorText);
};

function joined_room() {
    easyrtc.sendServerMessage('get_candy', {candy_name:'mars'}, onCandyReply, onCandyError);
}

// Catch-all for unsolicited messages pushed from the server.
easyrtc.setServerListener( function(msgType, msgData, targeting) {
    console.log("The Server sent the following message " + JSON.stringify(msgData));
}, 'get_candy');
I got same problem few months ago and now my solution is reuse websocket on server:
In client side:
// Open one plain Socket.io connection to the page's own host and port,
// then hand that same connection to EasyRTC so signaling shares it.
var socket = io.connect('http://' + document.domain + ':' + location.port);
easyrtc.useThisSocketConnection(socket);
In server side:
// Load required modules.
// FIX: the original snippet called express() and http.createServer() without
// ever requiring "express" or "http", which throws a ReferenceError at startup.
var http = require("http"); // http server core module
var express = require("express"); // web framework external module
var io = require("socket.io"); // web socket external module
var easyrtc = require("easyrtc"); // EasyRTC external module
var httpApp = express();
httpApp.use(express.static(__dirname + "/static/"));
// Start Express http server on port 8080
var webServer = http.createServer(httpApp).listen(8080);
// Start Socket.io so it attaches itself to Express server
var socketServer = io.listen(webServer);
// easyrtc.listen(httpApp, socketServer, options, callback): pass the Socket.io
// *instance* (socketServer), not the socket.io module object, per the EasyRTC docs.
var rtc = easyrtc.listen(httpApp, socketServer, null, function(err, rtcRef) {
    rtcRef.events.on('roomCreate', function(appObj, creatorConnectionObj, roomName, roomOptions, callback) {
        console.log('roomCreate fired!');
    });
});
This snippet lets you reuse your existing websocket for EasyRTC: the RTC signaling messages and your other traffic can share the same websocket connection.
Related
Trying to listen for a websocket event on frontend using the following code
// Connect back to the serving host over TLS websockets.
var socket = new WebSocket(`wss://${window.location.host}`);
// NOTE(review): inside a regular function assigned to onmessage, `this` is the
// WebSocket itself, so `this.step2Text` sets a property on the socket object —
// presumably a component field was intended; an arrow function would keep the
// enclosing `this`. Confirm against the surrounding component code.
socket.onmessage = function (event) {
this.step2Text = event.data
}
and firing from backend using
// HTTP server with a 'ws' WebSocket server attached to it.
let app = express()
let server = http.createServer(app)
const WebSocket = require('ws')
let wss = new WebSocket.Server({ server });
wss.on('connection', function (ws, req) {
// NOTE(review): this raw send shares the socket with the JSON stream created
// below; whatever consumes that stream (it logs "Ignoring unrecognized
// message") presumably expects JSON-framed data only — confirm, or use a
// separate WebSocket server/path for the plain messages.
ws.send('some-data')
var stream = new WebSocketJSONStream(ws);
})
the registered onmessage handler doesn't get called; instead the console says
"Ignoring unrecognized message 'some-data'"
I'm learning ZeroMQ and just went through the tutorial and a few examples. I'm using Node.js as my main environment ( with Python eventually used to replace my workers ).
Trying to sort out how I can create a fully asynchronous messaging system that will allow my API to push tasks ( via a REQ socket ) to a router, have a dealer pass the message to a worker, process the message and send its results back up to my client ( which is an Express route ).
I believe the pattern for this would work something like this ( haven't tested or properly implemented code yet, so please take it as a conceptual outline ):
router.js
// router.js — ROUTER (clients) <-> DEALER (workers) forwarding proxy.
const zmq = require('zmq'); // FIX: removed stray second semicolon
const frontend = zmq.socket('router');
const backend = zmq.socket('dealer');
// Client -> worker: forward every frame untouched so the identity envelope
// the ROUTER prepends survives the round trip back to the right client.
frontend.on('message', function() {
    var args = Array.apply(null, arguments);
    backend.send(args);
});
// Worker -> client: same relay in the opposite direction.
backend.on('message', function() {
    var args = Array.apply(null, arguments);
    frontend.send(args);
});
frontend.bindSync('tcp://*:5559');
backend.bindSync('tcp://*:5560');
client.js
// client.js — Express front end; each POST opens a short-lived REQ socket.
// FIX: the original `var zmq = require('zmq'),` followed by `var express = ...`
// was a SyntaxError (comma-continued declaration followed by a new `var`).
var zmq = require('zmq');
var express = require('express');
var app = express();
// FIX: Express route paths need a leading slash — 'send' never matched.
app.post('/send', function(req, res) {
    var client = zmq.socket('req');
    // listen for responses from the server
    client.on('message', function(data) {
        console.log(data);
        client.close();
        // Answer the pending HTTP request (the original left it hanging forever).
        res.send(data.toString());
    });
    // connect to the server port
    client.connect('tcp://0.0.0.0:5454');
    // FIX: process.pid is the real property; process.id is undefined.
    client.send('Request from ' + process.pid);
});
app.listen('80');
worker.js
// worker.js — REP socket: answers each request with this process's pid.
var zmq = require('zmq');
var server = zmq.socket('rep');
server.on('message', function(d){
    // FIX: process.pid is the real property; process.id is undefined.
    server.send('Response from ' + process.pid);
});
// bind to port 5454
server.bind('tcp://0.0.0.0:5454', function(err){
    if (err){
        console.error("something bad happened");
        console.error( err.msg );
        console.error( err.stack );
        process.exit(0);
    }
});
What I'm not fully understanding is if the ROUTER/DEALER will handle sending the response worker to the correct client. Also in this case the Dealer handles the Fair Queueing as I want my work distributed amongst the workers evenly.
My client could be distributed amongst many different boxes ( load balancer API server ), my router will be on its own server and the workers would be distributed amongst multiple boxes as well.
Forget REQ/REP in any production-grade app; it can fall into a mutual deadlock.
You might find this subject in many other posts on high-risk mutual FSM-FSM deadlocking in REQ/REP Formal Scalable Communication Pattern.
Be sure, XREQ/XREP == DEALER/ROUTER ( already since 2011 )
source code removes all hidden magics behind this, XREQ == DEALER and XREP == ROUTER
+++b/include/zmq.h
...
-#define ZMQ_XREQ 5
-#define ZMQ_XREP 6
+#define ZMQ_DEALER 5
+#define ZMQ_ROUTER 6
...
+#define ZMQ_XREQ ZMQ_DEALER /* Old alias, remove in 3.x */
+#define ZMQ_XREP ZMQ_ROUTER /* Old alias, remove in 3.x */
For anyone reading this in the future, in my further research I stumbled on the Majordomo Protocol/pattern. It's precisely what I'm trying to implement. Documentation on the implementation, benefits and disadvantages can be read here: https://rfc.zeromq.org/spec:18/MDP/. Here's the broker implementation: https://github.com/zeromq/majordomo
Seems like I was using DEALER/ROUTER when I should have been using XREQ and XREP.
broker.js
// broker.js — XREP (ROUTER) facing clients, XREQ (DEALER) facing workers;
// relays every multipart message in both directions unchanged.
var zmq = require('zmq');

var frontPort = 'tcp://127.0.0.1:5559';
var backPort = 'tcp://127.0.0.1:5560';

var frontSocket = zmq.socket('xrep');
frontSocket.identity = 'xrep_' + process.pid;

var backSocket = zmq.socket('xreq');
backSocket.identity = 'xreq_' + process.pid;

// Client-facing side.
frontSocket.bind(frontPort, function (err) {
    console.log('bound', frontPort);
});
frontSocket.on('message', function() {
    //pass to back
    var frames = Array.prototype.slice.call(arguments);
    console.log('router: sending to server', frames[0].toString(), frames[2].toString());
    backSocket.send(frames);
});

// Worker-facing side.
backSocket.bind(backPort, function (err) {
    console.log('bound', backPort);
});
backSocket.on('message', function() {
    //pass to front
    var frames = Array.prototype.slice.call(arguments);
    console.log('dealer: sending to client', frames[0].toString(), frames[2].toString());
    frontSocket.send(frames);
});

console.log('Broker started...');
worker.js
// worker.js — REP worker: doubles each request value and sends it back.
var zmq = require('zmq');
var socket = zmq.socket('rep');
socket.identity = 'worker_' + process.pid;
socket.on('message', function(data) {
    console.log(socket.identity + ': received ' + data.toString());
    socket.send(data * 2);
});
// FIX: in the zmq package connect() is synchronous and takes no callback —
// the original callback was silently ignored, so 'server connected!' never
// printed. Connect, then log.
socket.connect('tcp://127.0.0.1:5560');
console.log('server connected!');
console.log('Worker started...');
client.js
// client.js — REQ client: fires a random value at the broker every 100 ms.
var zmq = require('zmq');
var socket = zmq.socket('req');
socket.identity = 'client_' + process.pid;
// Replies arrive asynchronously here.
socket.on('message', function(data) {
console.log(socket.identity + ': answer data ' + data);
});
socket.connect('tcp://127.0.0.1:5559');
// NOTE(review): a REQ socket must strictly alternate send/receive; sending on
// a fixed timer without waiting for each reply can violate that state machine
// under load — confirm, or switch this client to a DEALER socket.
setInterval(function() {
var value = Math.floor(Math.random()*100);
console.log(socket.identity + ': asking ' + value);
socket.send(value);
}, 100);
console.log('Client started...');
I'm still not sure if it's safe opening a connection on EVERY API inbound request.
Is it possible to open a websocket between two computers that are either on the same WAN network or connected via an ethernet cable (LAN)? If it is, can someone provide with some readings I can do to learn about this?
EDIT: added my server code
'use strict';
var express = require('express'); //web server
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io').listen(server); //web socket server

// ===============================Serial port setup========================
var serialport = require('serialport');
var SerialPort = serialport.SerialPort;
var parsers = serialport.parsers;
var openSocket = false;
// Most recently connected client; serial data is forwarded to it.
var socket = null;
var port = new SerialPort("/dev/ttyACM0", {
    baudrate: 9600,
    parser: parsers.readline('\r\n')
});

// ===============================Server Setup===========================
const netPort = 8080;
server.listen(netPort); //start the webserver on port 8080
// tell the server that ./public/ contains the static webpages
// (FIX: the original comment spilled onto a second uncommented line — SyntaxError)
app.use(express.static('public'));
console.log("listening on port: localhost:" + netPort);

// ==============================Sending data======================
io.sockets.on('connection', function (connectedSocket) {
    // FIX: remember the socket so port.on('data') below can reach it — in the
    // original, `socket` was only in scope inside this callback.
    socket = connectedSocket;
    socket.emit('pi', { status: 'opened' });
    console.log('Socket is open'); // log to console, once serial connection is established
    openSocket = true;
});
port.on('data', function(data) {
    if (openSocket && socket) {
        socket.emit('pi', {value: data}); //send data
    }
});
In my app, I send a post request to the server with data containing a CSV file:
// POST the CSV payload to the home route and log on success.
$.ajax({
    type:"POST",
    contentType: "application/json",
    url:"/",
    data: JSON.stringify({fileData:My_CSV_FILE}),
    success: function(csvJson) {
        console.log('in the done block!');
        //can use csvJson in this handler
    } // FIX: was `});` — the stray `)` here made the object literal a SyntaxError
});
Note: I'm posting to the home route, and I am able to get a response with the data converted from the server. The problem is that whether I run on localhost or Heroku, I am only able to trigger the POST request once, then I have to restart the server (even if I refresh the page). So I know the issue is with my route somewhere:
UPDATED TO INCLUDE FULL SERVER FILE:
'use strict';
const express = require('express');
const csvtojson = require('csvtojson');
const PORT = process.env.PORT || 3000;
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;

let app = express();
app.use(bodyParser.json({limit: '300kb'}));
app.use(express.static(__dirname +'/public'));

app.post('/',function(req,res) {
    var csvFile = (req.body.fileData);
    // FIX: a Converter is single-use. Sharing one module-level instance made
    // the second request re-fire the first request's callback and try to
    // respond again ("Error: Can't set headers after they are sent").
    // Creating one per request fixes the works-only-once behavior.
    var converter = new Converter({});
    converter.fromString(csvFile, function(err, result) {
        if(!err) {
            console.log(result);
            res.json(result);
        }else {
            res.json({error: 'Could not convert'});
        }
    })
});

app.listen(PORT, () => {
    console.log(`app listening on port ${PORT}`);
});
I'm using Express 4. Again, everything works, but only once. When I run Heroku logs, or check the console on localhost I get:
Error: Can't set headers after they are sent.
But I don't understand how I'm re-setting them.
If wanting to run on localhost, here is a link to the projects github: https://github.com/qctimes/calendar_export
You should move the converter instantiation to be done inside the app.post callback method. This way it will instantiate a new object at every request.
This is is how your code should be:
'use strict';

// Dependencies.
const express = require('express');
const csvtojson = require('csvtojson');
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;

// Honour the environment's port (e.g. Heroku), fall back to 3000 locally.
const PORT = process.env.PORT || 3000;

let app = express();
app.use(bodyParser.json({limit: '300kb'}));
app.use(express.static(__dirname +'/public'));

// POST /: convert the uploaded CSV text to JSON and echo it back.
app.post('/', (req, res) => {
    const csvFile = req.body.fileData;
    const converter = new Converter({}); // instantiation is done here
    converter.fromString(csvFile, (err, result) => {
        if (err) {
            res.send({error: 'Could not convert'});
        } else {
            console.log(result);
            res.send(result);
        }
    });
});

app.listen(PORT, () => {
    console.log(`app listening on port ${PORT}`);
});
So I am trying to get Sessions to work inside my socket.on('connection', ...)
I am trying to get this working using recent versions: Socket.io - 0.9.13, Express - 3.1.0 and latest versions of other modules.
Anyway I have tried using both modules 'connect-redis' and 'session.socket.io' and they both have similar problems.
In my code I have 2 redis stores (socketio.RedisStore and require('connect-redis')(express)), now this program all runs fine, but because express and socket.io need to share session data, I was wondering if this setup will use sessions correctly? do the session stores need to be the same object for express/socketio? A bit of a gray area to me, because the 2 RedisStore's will use the same db in the background?
I have tried using either the socket.io redisStore or the connect-redis redisStore in both places, but socket.io doesnt like the connect-redis redisStore and express doesnt like the socketio.redisStore.
If I use the connect-redis RedisStore then socket.io/lib/manager.js complains:
this.store.subscribe(...
TypeError: Object #&lt;Object&gt; has no method 'subscribe'
If I use socketio.RedisStore then express/node_modules/connect/lib/middleware/session.js complains:
TypeError: Object #&lt;Object&gt; has no method 'get'
*Note I would rather get the session.socket.io plugin working, but when I do the same setup with that plugin, express (also) complains:
TypeError: Object #&lt;Object&gt; has no method 'get'
So is it ok that I use 2 different RedisStores for sessions, or do I need to somehow get one or the other working for both, and if so any ideas on how to fix?
My current code looks like this:
// Shared configuration.
var CONST = {
    port: 80,
    sessionKey: 'your secret sauce'
};

// Modules.
var redis = require('redis');
var express = require('express');
var socketio = require('socket.io');
var RedisStore = require('connect-redis')(express);

// One store for Express sessions, one for Socket.io's internal state.
var redisStore = new RedisStore();
var socketStore = new socketio.RedisStore();

// HTTP + websocket servers.
var app = express();
var server = require('http').createServer(app);
var io = socketio.listen(server);

// Express: signed cookies, Redis-backed sessions, static files, index route.
app.configure(function(){
    app.use(express.cookieParser( CONST.sessionKey ));
    app.use(express.session({ secret: CONST.sessionKey, store: redisStore }));
    app.use(express.static(__dirname + '/test'));
    app.get('/', function (req, res) {res.sendfile(__dirname + '/test/' + 'index.htm');});
});

// Socket.io configuration.
io.configure(function(){
    io.set('log level', 1);
    io.enable('browser client minification');
    io.enable('browser client etag');
    io.enable('browser client gzip');
    io.set('store', socketStore);
});

// Greet each new websocket client.
io.sockets.on('connection', function(socket){
    socket.emit('message', 'Test 1 from server')
});

server.listen( CONST.port );
console.log('running...');
inside the io.configure, you have to link the socket with the http session.
Here's a piece of code that extracts the cookie (This is using socket.io with xhr-polling, I don't know if this would work for websocket, although I suspect it would work).
var cookie = require('cookie');
var connect = require('connect');

// Session store shared with Express (same Redis client).
var sessionStore = new RedisStore({
    client: redis // the redis client
});

// Handshake hook: accept only handshakes that carry a valid session cookie.
socketio.set('authorization', function(data, cb) {
    var rawCookie = data.headers.cookie;
    if (!rawCookie) {
        cb('No cookie', false);
        return;
    }
    // Unsign connect.sid to recover the session id, then load the session.
    var sessionCookie = cookie.parse(rawCookie);
    var sessionID = connect.utils.parseSignedCookie(sessionCookie['connect.sid'], secret);
    sessionStore.get(sessionID, function(err, session) {
        if (err || !session) {
            cb('Error', false);
        } else {
            // Attach the session to the handshake for later handlers.
            data.session = session;
            data.sessionID = sessionID;
            cb(null, true);
        }
    });
});
Then you can access the session using:
// Illustrative fragment: inside a socket handler, the authorized handshake
// (and the session attached to it above) is reachable via this.handshake.
socket.on("selector", function(data, reply) {
var session = this.handshake.session;
...
}
This also has the added benefit that it checks there is a valid session, so only your logged in users can use sockets. You can use a different logic, though.
Looking at your last note (won't be able to share its state over multiple processes using redis) I had the same problem and found a solution:
var express = require("express.io");
var swig = require('swig');
var redis = require('redis');
var RedisStore = require('connect-redis')(express);

// Worker entry point: one full express.io app per forked process.
// FIX: `workers`, `cluster` and `numCPUs` were assigned without `var`,
// creating implicit globals (a ReferenceError under 'use strict').
var workers = function() {
    var app = express().http().io();
    app.use(express.cookieParser());
    // Express sessions live in Redis so every worker sees the same state.
    app.use(express.session({
        secret: 'very cool secretcode',
        store: new RedisStore({ client: redis.createClient() })
    }));
    // Socket.io also needs a Redis store so events reach sockets owned by
    // other worker processes.
    app.io.set('store', new express.io.RedisStore({
        redisPub: redis.createClient(),
        redisSub: redis.createClient(),
        redisClient: redis.createClient()
    }));
    app.get('/', function(req, res) {
        res.render('index');
    });
    app.listen(3000);
    app.io.route('ready', function(req){
        //setup session stuff, use session stuff, etc. Or make new routes
    });
};

var cluster = require('cluster');
var numCPUs = require('os').cpus().length;

// Master forks one worker per CPU; each child runs the app.
if (cluster.isMaster)
{
    for (var i = 0; i < numCPUs; i++)
    {
        cluster.fork();
    }
}
else
{
    workers();
}