How can I get sessions to work using Redis, Express & Socket.IO?

So I am trying to get sessions to work inside my socket.on('connection', ...) handler.
I am trying to get this working using recent versions: Socket.IO 0.9.13, Express 3.1.0, and the latest versions of the other modules.
Anyway, I have tried using both the 'connect-redis' and 'session.socket.io' modules, and they both have similar problems.
In my code I have two Redis stores (socketio.RedisStore and require('connect-redis')(express)). The program runs fine, but because Express and Socket.IO need to share session data, I was wondering whether this setup will use sessions correctly. Do the session stores need to be the same object for Express/Socket.IO? This is a bit of a grey area to me, because the two RedisStores will use the same database in the background.
I have tried using either the Socket.IO RedisStore or the connect-redis RedisStore in both places, but Socket.IO doesn't like the connect-redis RedisStore and Express doesn't like the socketio.RedisStore.
If I use the connect-redis RedisStore, then socket.io/lib/manager.js complains at this.store.subscribe(...):
TypeError: Object # has no method 'subscribe'
If I use socketio.RedisStore, then express/node_modules/connect/lib/middleware/session.js complains:
TypeError: Object # has no method 'get'
Note: I would rather get the session.socket.io plugin working, but when I do the same setup with that plugin, Express also complains:
TypeError: Object # has no method 'get'
So is it OK to use two different RedisStores for sessions, or do I need to somehow get one or the other working for both? If so, any ideas on how to fix this?
My current code looks like this:
var
  CONST = {
    port: 80,
    sessionKey: 'your secret sauce'
  };
var
  redis = require('redis');
var
  express = require('express'),
  socketio = require('socket.io'),
  RedisStore = require('connect-redis')(express);
var
  redisStore = new RedisStore(),
  socketStore = new socketio.RedisStore();
var
  app = express(),
  server = require('http').createServer(app),
  io = socketio.listen(server);

app.configure(function(){
  app.use(express.cookieParser( CONST.sessionKey ));
  app.use(express.session({ secret: CONST.sessionKey, store: redisStore }));
  app.use(express.static(__dirname + '/test'));
  app.get('/', function (req, res) { res.sendfile(__dirname + '/test/' + 'index.htm'); });
});

io.configure(function(){
  io.set('log level', 1);
  io.enable('browser client minification');
  io.enable('browser client etag');
  io.enable('browser client gzip');
  io.set('store', socketStore);
});

io.sockets.on('connection', function(socket){
  socket.emit('message', 'Test 1 from server');
});

server.listen( CONST.port );
console.log('running...');

Inside io.configure, you have to link the socket with the HTTP session.
Here's a piece of code that extracts the cookie (this is using Socket.IO with xhr-polling; I don't know whether it would also work for websocket, although I suspect it would):
var cookie = require('cookie');
var connect = require('connect');

var sessionStore = new RedisStore({
  client: redis // the redis client
});

socketio.set('authorization', function(data, cb) {
  if (data.headers.cookie) {
    var sessionCookie = cookie.parse(data.headers.cookie);
    var sessionID = connect.utils.parseSignedCookie(sessionCookie['connect.sid'], secret);
    sessionStore.get(sessionID, function(err, session) {
      if (err || !session) {
        cb('Error', false);
      } else {
        data.session = session;
        data.sessionID = sessionID;
        cb(null, true);
      }
    });
  } else {
    cb('No cookie', false);
  }
});
Then you can access the session using:
socket.on("selector", function(data, reply) {
var session = this.handshake.session;
...
}
This also has the added benefit of checking that there is a valid session, so only your logged-in users can use sockets. You can use different logic, though.
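For example, here is a minimal sketch of such alternative logic (my own variation, reusing the cookie, connect, secret and sessionStore variables from the snippet above; the isAnonymous flag is just an illustrative name) that accepts visitors without a session and simply marks the handshake:

socketio.set('authorization', function(data, cb) {
  if (!data.headers.cookie) {
    data.isAnonymous = true;                 // no cookie: still allow the socket, but flag it
    return cb(null, true);
  }
  var sessionCookie = cookie.parse(data.headers.cookie);
  var sessionID = connect.utils.parseSignedCookie(sessionCookie['connect.sid'], secret);
  sessionStore.get(sessionID, function(err, session) {
    data.isAnonymous = !!(err || !session);  // treat lookup failures as anonymous
    if (!data.isAnonymous) {
      data.session = session;
      data.sessionID = sessionID;
    }
    cb(null, true);                          // accept either way
  });
});

Your connection handler can then check socket.handshake.isAnonymous before touching socket.handshake.session.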

Looking at your last note ("won't be able to share its state over multiple processes using redis"), I had the same problem and found a solution:
var express = require("express.io");
var swig = require('swig');
var redis = require('redis');
var RedisStore = require('connect-redis')(express);
workers = function() {
var app = express().http().io();
app.use(express.cookieParser());
app.use(express.session({
secret: 'very cool secretcode',
store: new RedisStore({ client: redis.createClient() })
}));
app.io.set('store', new express.io.RedisStore({
redisPub: redis.createClient(),
redisSub: redis.createClient(),
redisClient: redis.createClient()
}));
app.get('/', function(req, res) {
res.render('index');
});
app.listen(3000);
app.io.route('ready', function(req){
//setup session stuff, use session stuff, etc. Or make new routes
});
};
cluster = require('cluster');
numCPUs = require('os').cpus().length;
if (cluster.isMaster)
{
for (var i = 0; i < numCPUs; i++)
{
cluster.fork();
}
}
else
{
workers();
}

Related

REQ/REP & DEALER/ROUTER for two-way asynchronous worker processing

I'm learning ZeroMQ and just went through the tutorial and a few examples. I'm using Node.js as my main environment (with Python eventually used to replace my workers).
I'm trying to sort out how I can create a fully asynchronous messaging system that will allow my API to push tasks (via a REQ socket) to a router, have a dealer pass the message to a worker, process the message, and send its results back up to my client (which is an Express route).
I believe the pattern for this would work something like this (I haven't tested or properly implemented the code yet, so please take it as a conceptual outline):
router.js
const zmq = require('zmq');
const frontend = zmq.socket('router');
const backend = zmq.socket('dealer');

frontend.on('message', function() {
  var args = Array.apply(null, arguments);
  backend.send(args);
});

backend.on('message', function() {
  var args = Array.apply(null, arguments);
  frontend.send(args);
});

frontend.bindSync('tcp://*:5559');
backend.bindSync('tcp://*:5560');
client.js
var zmq = require('zmq');
var express = require('express');

var app = express();

app.post('/send', function(req, res) {
  var client = zmq.socket('req');

  // listen for responses from the server
  client.on('message', function(data) {
    console.log(data);
    client.close();
  });

  // connect to the server port
  client.connect('tcp://0.0.0.0:5454');
  client.send('Request from ' + process.pid);
});

app.listen('80');
app.listen('80');
worker.js
var zmq = require('zmq');
var server = zmq.socket('rep');

server.on('message', function(d){
  server.send('Response from ' + process.pid);
});

// bind to port 5454
server.bind('tcp://0.0.0.0:5454', function(err){
  if (err){
    console.error("something bad happened");
    console.error( err.message );
    console.error( err.stack );
    process.exit(0);
  }
});
What I'm not fully understanding is whether ROUTER/DEALER will handle sending the response from the worker back to the correct client. Also, in this case the DEALER handles the fair queueing, as I want my work distributed among the workers evenly.
My clients could be distributed among many different boxes (a load-balanced API server), my router will be on its own server, and the workers would be distributed among multiple boxes as well.
Forget REQ/REP in any production-grade app; it can fall into a mutual deadlock.
You might find this subject in many other posts on the high risk of mutual FSM-FSM deadlocking in the REQ/REP Formal Scalable Communication Pattern.
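To make that risk concrete, here is a small illustration of my own (not from the original answer) of how the strict send/receive alternation of a REQ socket can wedge a process when its REP peer dies mid-cycle:

var zmq = require('zmq');

var req = zmq.socket('req');
req.connect('tcp://127.0.0.1:5454');

req.send('job 1');              // the REQ socket is now locked into "waiting for a reply"

req.on('message', function() {
  req.send('job 2');            // only after a reply arrives may the next send() happen
});

// If the REP worker crashes after reading 'job 1', no reply ever comes back: the
// callback never fires, 'job 2' is never sent, and trying to send() out of turn is
// rejected by the REQ state machine (EFSM). Without timeouts and a socket teardown,
// both sides just sit there, which is the mutual deadlock the answer warns about.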
Be sure: XREQ/XREP == DEALER/ROUTER (already since 2011).
The source code removes all the hidden magic behind this; XREQ == DEALER and XREP == ROUTER:
+++b/include/zmq.h
...
-#define ZMQ_XREQ 5
-#define ZMQ_XREP 6
+#define ZMQ_DEALER 5
+#define ZMQ_ROUTER 6
...
+#define ZMQ_XREQ ZMQ_DEALER /* Old alias, remove in 3.x */
+#define ZMQ_XREP ZMQ_ROUTER /* Old alias, remove in 3.x */
For anyone reading this in the future, in my further research I stumbled on the Majordomo Protocol/pattern. It's precisely what I'm trying to implement. Documentation on the implementation, benefits and disadvantages can be read here: https://rfc.zeromq.org/spec:18/MDP/. Here's the broker implementation: https://github.com/zeromq/majordomo
Seems like I was using DEALER/ROUTER when I should have been using XREQ and XREP.
broker.js
var zmq = require('zmq');
var frontPort = 'tcp://127.0.0.1:5559';
var backPort = 'tcp://127.0.0.1:5560';

var frontSocket = zmq.socket('xrep');
var backSocket = zmq.socket('xreq');

frontSocket.identity = 'xrep_' + process.pid;
backSocket.identity = 'xreq_' + process.pid;

frontSocket.bind(frontPort, function (err) {
  console.log('bound', frontPort);
});

frontSocket.on('message', function() {
  // pass to back
  console.log('router: sending to server', arguments[0].toString(), arguments[2].toString());
  backSocket.send(Array.prototype.slice.call(arguments));
});

backSocket.bind(backPort, function (err) {
  console.log('bound', backPort);
});

backSocket.on('message', function() {
  // pass to front
  console.log('dealer: sending to client', arguments[0].toString(), arguments[2].toString());
  frontSocket.send(Array.prototype.slice.call(arguments));
});

console.log('Broker started...');
worker.js
var zmq = require('zmq');
var socket = zmq.socket('rep');

socket.identity = 'worker_' + process.pid;

socket.on('message', function(data) {
  console.log(socket.identity + ': received ' + data.toString());
  socket.send(data * 2);
});

socket.connect('tcp://127.0.0.1:5560', function(err) {
  if (err) throw err;
  console.log('server connected!');
});

console.log('Worker started...');
client.js
var zmq = require('zmq');
var socket = zmq.socket('req');

socket.identity = 'client_' + process.pid;

socket.on('message', function(data) {
  console.log(socket.identity + ': answer data ' + data);
});

socket.connect('tcp://127.0.0.1:5559');

setInterval(function() {
  var value = Math.floor(Math.random() * 100);
  console.log(socket.identity + ': asking ' + value);
  socket.send(value);
}, 100);

console.log('Client started...');
I'm still not sure whether it's safe to open a connection on EVERY inbound API request.
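One way around that concern, sketched under my own assumptions (it presumes the broker and worker pass the correlation-id frame back unchanged, and the envelope handling is simplified), is to keep a single long-lived DEALER socket per API process and match replies to pending Express responses by a request id, instead of opening a fresh REQ socket per request:

var zmq = require('zmq');
var express = require('express');

var app = express();
var dealer = zmq.socket('dealer');              // one long-lived socket for the whole process
dealer.identity = 'api_' + process.pid;
dealer.connect('tcp://127.0.0.1:5559');

var pending = {};                               // request id -> waiting Express response
var nextId = 0;

dealer.on('message', function() {
  var frames = Array.prototype.slice.call(arguments);
  var payload = frames.pop();                   // last frame: reply body from the worker
  var id = frames.pop().toString();             // next-to-last frame: echoed request id
  if (pending[id]) {
    pending[id].send(payload.toString());
    delete pending[id];
  }
});

app.post('/send', function(req, res) {
  var id = String(nextId++);
  pending[id] = res;
  // empty delimiter frame + correlation id + body; the worker must echo the id back
  dealer.send(['', id, 'Request from ' + process.pid]);
});

app.listen(80);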

Can't use Express to send data back to client more than once

In my app, I send a post request to the server with data containing a CSV file:
$.ajax({
  type: "POST",
  contentType: "application/json",
  url: "/",
  data: JSON.stringify({fileData: My_CSV_FILE}),
  success: function(csvJson) {
    console.log('in the done block!');
    // can use csvJson in this handler
  }
});
Note: I'm posting to the home route, and I am able to get a response with the converted data from the server. The problem is that, whether I run on localhost or Heroku, I am only able to trigger the POST request once; then I have to restart the server (even if I refresh the page). So I know the issue is with my route somewhere:
UPDATED TO INCLUDE FULL SERVER FILE:
'use strict';

const express = require('express');
const csvtojson = require('csvtojson');
const PORT = process.env.PORT || 3000;
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;
var converter = new Converter({});

let app = express();

app.use(bodyParser.json({limit: '300kb'}));
app.use(express.static(__dirname + '/public'));

app.post('/', function(req, res) {
  var csvFile = (req.body.fileData);
  converter.fromString(csvFile, function(err, result) {
    if (!err) {
      console.log(result);
      res.json(result);
    } else {
      res.json({error: 'Could not convert'});
    }
  });
});

app.listen(PORT, () => {
  console.log(`app listening on port ${PORT}`);
});
I'm using Express 4. Again, everything works, but only once. When I check the Heroku logs, or the console on localhost, I get:
Error: Can't set headers after they are sent.
But I don't understand how I'm re-setting them.
If you want to run it on localhost, here is a link to the project's GitHub repo: https://github.com/qctimes/calendar_export
You should move the converter instantiation inside the app.post callback. This way a new object is instantiated on every request.
This is how your code should be:
'use strict';

const express = require('express');
const csvtojson = require('csvtojson');
const PORT = process.env.PORT || 3000;
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;

let app = express();

app.use(bodyParser.json({limit: '300kb'}));
app.use(express.static(__dirname + '/public'));

app.post('/', function(req, res) {
  var csvFile = (req.body.fileData);
  var converter = new Converter({}); // instantiation is done here
  converter.fromString(csvFile, function(err, result) {
    if (!err) {
      console.log(result);
      res.send(result);
    } else {
      res.send({error: 'Could not convert'});
    }
  });
});

app.listen(PORT, () => {
  console.log(`app listening on port ${PORT}`);
});

Node.js (Express JS 3.x with Socket.io together)

I need to share the session between sockets and Express.
I've tried to make this example work, without success: http://www.danielbaulig.de/socket-ioexpress/. The browser returns
GET localhost:8000/socket.io/1/?t=1354005884872 500 (Internal Server Error)
on session start, and I get a server-side error:
warn - handshake error Error
Server side script:
var express = require('express')
  , util = require('util')
  , connect = require('express/node_modules/connect')
  , parseCookie = connect.utils.parseCookie
  , MemoryStore = connect.middleware.session.MemoryStore
  , store;

var app = express()
  , http = require('http')
  , server = http.createServer(app)
  , io = require('socket.io').listen(server);

var connect = require('express/node_modules/connect')
  , util = require('util')
  , cookie = require('cookie')
  , parseCookie = cookie.parse
  , MemoryStore = connect.middleware.session.MemoryStore
  , store;

app.configure(function () {
  app.use(express.cookieParser());
  app.use(express.session({
    secret: 'secret'
    , key: 'express.sid'
    , store: store = new MemoryStore()
  }));
  app.use(function (req, res) {
    res.send('Hello, your session id is ' + req.sessionID);
  });
});

io.set('authorization', function (data, accept) {
  if (!data.headers.cookie)
    return accept('No cookie transmitted.', false);
  data.cookie = parseCookie(data.headers.cookie);
  data.sessionID = data.cookie['express.sid'];
  store.load(data.sessionID, function (err, session) {
    if (err || !session) return accept('Error', false);
    data.session = session;
    return accept(null, true);
  });
}).sockets.on('connection', function (socket) {
  var sess = socket.handshake.session;
  socket.log.info(
    'a socket with sessionID'
    , socket.handshake.sessionID
    , 'connected'
  );
  socket.on('set value', function (val) {
    sess.reload(function () {
      sess.value = val;
      sess.touch().save();
    });
  });
});

app.use(count);

// custom middleware
function count(req, res) {
  req.session.count = req.session.count || 0;
  var n = req.session.count++;
  res.send('viewed ' + n + ' times\n');
}

server.listen(8000);
Client side script:
<%= javascript_include_tag "http://localhost:8000/socket.io/socket.io.js" %>
var socket = io.connect('http://localhost:8000');
socket.io v0.9.11, express.js v3.0.3, connect v2.7.0, cookie v0.0.5
I hope you can help me.
I just came across a project that might help simplify everything for you. Express.io combines Express and Socket.IO and includes a bunch of easy-to-get-working examples:
https://github.com/techpines/express.io
I've only toyed with it a little myself, but it looked promising. Hope this helps.
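As a rough illustration of the style it offers (a minimal sketch of my own; treat the exact API as an assumption for whichever express.io version you end up on), realtime routes see the same req.session as the plain HTTP routes:

var express = require('express.io');
var app = express().http().io();

app.use(express.cookieParser());
app.use(express.session({ secret: 'secret', key: 'express.sid' }));

// Realtime route: shares req.session with the normal HTTP route below.
app.io.route('set value', function(req) {
  req.session.value = req.data;
  req.session.save(function() {
    req.io.emit('value saved', req.session.value);
  });
});

app.get('/', function(req, res) {
  res.send('Your session id is ' + req.sessionID);
});

app.listen(8000);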

NodeJS + Express + Mongo Session storage

I am currently having a hell of a time trying to store sessions in MongoDB.
I've tried express-session-mongo and connect-mongodb, and both give me the same "500 Internal Server Error" when I try to load the login page, which leads me to think maybe there is a conflict with mongoose-auth somewhere.
Anyway, here is my setup:
app.js:
var MongoStore = require('connect-mongodb');
var MongoDb = require('mongodb').Db;
var Server = require('mongodb').Server;

var db = new MongoDb('myDb', new Server('localhost', 27017, {auto_reconnect: true, native_parser: false}));

app.configure(function() {
  app.use(express.logger({format: 'dev'}));
  app.set('views', __dirname + '/../views');
  app.set('view engine', 'jade');
  app.set('view options', { layout: false });
  app.use(express.bodyParser());
  app.use(express.cookieParser());
  app.use(mongooseAuth.middleware());
  app.use(require('./mysite').middleware());
  app.use(express.methodOverride());
});

app.configure('production', function(){
  var oneWeek = 657450000;
  app.use(express.static(__dirname + '/../public', { maxAge: oneWeek }));
  app.use(express.session({ store: new MongoStore({db: db}), secret: 'super secret' }));
  app.use(express.errorHandler());
});

// Routes
require('./routing.js');

mongooseAuth.helpExpress(app);

app.listen(3000);
console.log('Express server listening on port %d in %s mode', app.address().port, app.settings.env);
userModel.js
var Schema = mongoose.Schema;
var mongooseTypes = require("mongoose-types");
var mongooseAuth = require('mongoose-auth');
mongooseTypes.loadTypes(mongoose);

var everyauth = require('everyauth');
everyauth.debug = true;

var UserSchema = new Schema({any: {}});

UserSchema.plugin(mongooseAuth, {
  everymodule: {
    everyauth: {
      User: function () {
        return User;
      }
    }
  }
  , password: {
    loginWith: 'email'
    , extraParams: {
      phone: String
      , username: String
    }
    , everyauth: {
      getLoginPath: '/login'
      , postLoginPath: '/login'
      , loginView: 'account/login.jade'
      , getRegisterPath: '/register'
      , postRegisterPath: '/register'
      , registerView: 'account/register.jade'
      , loginSuccessRedirect: '/login/success'
      , registerSuccessRedirect: '/register/success'
    }
  }
});

mongoose.model('User', UserSchema);
User = mongoose.model('User');
At this moment I'm really just trying to use MongoDB as the session store, but again I get a 500 error, with no information whatsoever in the Node.js console, when I try to load the login page.
Any help would be greatly appreciated.
Thanks.
It ended up being a problem with the various modules: connect-session-mongo / express-session-mongo / connect-mongo use connect 2.0.1, while Express uses connect 1.8.5.
Apparently this dependency clash prevented the session store modules from accessing the 'req.secret' property.
To make it work I ended up using the module connect-mongodb, which still uses connect 1.8.5, just like Express.
The reason I couldn't make connect-mongodb work before, though, was user error: I tried too hard to copy/paste from online examples instead of using my head.
Here is the configuration code that ended up working for me with connect-mongodb:
var Session = require('connect-mongodb');

app.configure('production', function(){
  var oneWeek = 657450000;
  app.use(express.static(__dirname + '/../public', { maxAge: oneWeek }));

  var session = express.session({
    store: new Session({
      url: 'mongodb://localhost:27017/test',
      maxAge: 300000
    }),
    secret: 'superTopSecret'
  });

  app.use(session);
  app.use(mongooseAuth.middleware());
  app.use(require('./mySite').middleware());
  app.use(express.methodOverride());
  app.use(express.errorHandler());
});
Hope this helps anyone else who runs into this issue. If you have any suggestions or improvements on this solution, I'd be glad to hear them. :)
const express = require('express');
const app = express();
const cookieParser = require('cookie-parser');
const session = require('express-session');
const mongoose = require('mongoose');
const MongoStore = require('connect-mongo')(session);

mongoose.connect('mongodb://localhost/my-database', {
  useMongoClient: true
});
mongoose.Promise = global.Promise;
const db = mongoose.connection;

app.use(cookieParser());
app.use(session({
  secret: 'my-secret',
  resave: false,
  saveUninitialized: true,
  store: new MongoStore({ mongooseConnection: db })
}));
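Once that middleware is mounted, a quick way to confirm the store is working is a route that bumps a counter on the session (the /visits path and visits field are just illustrative names of mine):

app.get('/visits', (req, res) => {
  // connect-mongo persists any change to req.session in MongoDB when the response ends
  req.session.visits = (req.session.visits || 0) + 1;
  res.send('You have visited this page ' + req.session.visits + ' times');
});

app.listen(3000);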

Modifying the session data from inside the socket.io callback

I am currently using this stack: Express, Socket.IO, sessionstore. I followed the article here: http://www.danielbaulig.de/socket-ioexpress/.
The problem is that I cannot modify the session values in the Socket.IO callback.
Access from the Express side works well; the item gets increased after each refresh.
app.get('/mysession', function(req, res) {
  req.session.item++;
  console.log(req.session);
  res.render('session.jade', {
    title: 'Sample title'
  });
});
Used on the Socket.IO side it does not work, and here is the problem; maybe I am setting the wrong object.
var io = io.listen(app);

io.sockets.on('connection', function(socket) {
  var handshake = socket.handshake;
  onlineCount++;
  console.log('Well done id %s', handshake.sessionID);
  handshake.session.item++;
  console.log(handshake.session);
});
Here is bridge code.
io.set('authorization', function(data, accept) {
  if (data.headers.cookie) {
    data.cookie = parseCookie(data.headers.cookie);
    data.sessionID = data.cookie['express.sid'];
    sessionStore.get(data.sessionID, function(err, session) {
      if (err || !session) {
        accept('Error', false);
      } else {
        data.session = session;
        accept(null, true);
      }
    });
  } else {
    return accept('No cookie transmitted', false);
  }
});
The only way I found to make this work is to grab the cookie from the request object on the connect event, parse it with your favourite cookie parser (I use connect.utils.parseCookie), and set it on that socket so that I can access it in future events:
socket.on('connection', function(client) {
  var cookie = client.request.headers.cookie;
  var pcookie = connect.utils.parseCookie(cookie);
  var session_id = pcookie["connect.sid"];
  if (session_id) {
    sessionStore.get(session_id, function(err, sess) {
      // do whatever you want with sess here
      // ...
      // if you want to "save" the session for future events
      client.set('session_id', session_id);
    });
  }
});
The sessionStore API changed a little bit; it's now sessionStore.load(sessionId, cb) instead of .get.
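For reference, a minimal sketch of the same lookup against the newer API (reusing the sessionStore and session_id variables from the snippet above; the lastSocketEvent field is just an illustrative name):

sessionStore.load(session_id, function(err, sess) {
  if (err || !sess) return;
  // unlike .get, .load hands back a full Session object, so its methods are available
  sess.lastSocketEvent = Date.now();
  sess.save();
});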
