Socket.io - Refresh tokens before reconnect

I am creating an Electron app with Socket.io. When the user's computer goes into sleep mode, the server disconnects from the client with a "transport close" error. When the user tries to reconnect, I check whether the tokens are still valid; if they are not, I refresh them and try to send the new ones to the Socket.IO server.
The problem I have is that on "reconnect_attempt" Socket.IO doesn't wait for me to refresh the tokens before trying to reconnect: it retries right away with the old tokens, which the server rejects, and that rejection also seems to terminate the connection, preventing future reconnect attempts.
This is part of my code to connect to the server
module.exports.connect = async (JWT) => {
  return new Promise( async resolve => {
    console.log("connecting to the server")
    const connectionOptions = {
      secure: true,
      query: {token: JWT},
      reconnectionDelay: 4000
    }
    let socket = await socketIo.connect(`${process.env.SERVER_URL}:${process.env.SERVER_PORT}`, connectionOptions);
    resolve(socket)
  })
}
This is my code for reconnect_attempt
socket.on('reconnect_attempt', async () => {
  const getCurrentJWT = require("../../main").getCurrentJWT;
  let JWT = await getCurrentJWT(); //By the time this line returns, socket.io has already tried to reconnect
  if(JWT.success) { //if refreshed successfully
    console.log("Trying to submit new token......", JWT);
    socket.query.token = JWT.JWT;
  } else {
    console.log("Token not refreshed.")
  }
});
And this is part of what I have on the server
io.use(async (socket, next) => {
  let token = socket.handshake.query.token;
  //and the instruction from here https://docs.aws.amazon.com/cognito/latest/developerguide/amazon-cognito-user-pools-using-tokens-verifying-a-jwt.html
  let tokenIsValid = await checkTokenValidity(token);
  if( tokenIsValid ) {
    next();
  } else {
    next(new Error('invalidToken'));
    console.log("Not valid token")
  }
})

In short, you can use auth for this.
While connecting:
auth: {
  token: token
}
At the time of reconnection:
socket.auth.token = "NEW_TOKEN";
socket.connect();
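A further option, as my own addition rather than part of this answer, assuming Socket.IO v3 or later: the client also accepts auth as a callback, which is re-evaluated on every connection attempt, so a freshly refreshed token can be supplied without wiring up the reconnection events at all. A minimal sketch reusing the getCurrentJWT helper from the question (note that the server then reads socket.handshake.auth.token instead of the query string):
const socket = socketIo.connect(`${process.env.SERVER_URL}:${process.env.SERVER_PORT}`, {
  secure: true,
  reconnectionDelay: 4000,
  // evaluated before every (re)connection attempt, so the handshake waits for the token
  auth: (cb) => {
    getCurrentJWT().then((result) => {
      cb({ token: result.success ? result.JWT : undefined });
    });
  }
});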
I can share a Socket.IO implementation for this, and you can modify it as needed.
For the client side:
let unauthorized = false;
let socket = io.connect('ws://localhost:8080', {
  transports: ["websocket"],
  auth: {
    token: GET_YOUR_TOKEN()
  }
});
socket.on("connect", () => {
  unauthorized = false;
});
socket.on('UNAUTHORIZED', () => {
  unauthorized = true;
});
socket.on("disconnect", (reason) => {
  if (reason === "io server disconnect") {
    if(unauthorized) {
      socket.auth.token = token;
    }
    socket.connect();
  }
});
socket.on('PING', ()=>{
  socket.emit('PONG', token);
});
For the server side:
io.on("connection", (socket) => {
socket.on('PONG', function (token) {
if (isValidToken(token) == false) {
socket.emit("UNAUTHORIZED");
socket.disconnect();
}
});
setInterval(() => {
socket.emit('PING');
}, <YOUR-TIME>);
});
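One small addition of my own (not part of the answer above): the setInterval keeps firing after the client goes away unless it is cleared, so in practice you may want to tie the timer to the socket's lifetime. A variant, with PING_INTERVAL_MS standing in for your chosen period:
io.on("connection", (socket) => {
  socket.on('PONG', function (token) {
    if (isValidToken(token) == false) {
      socket.emit("UNAUTHORIZED");
      socket.disconnect();
    }
  });
  // stop pinging once this socket disconnects
  const pingTimer = setInterval(() => {
    socket.emit('PING');
  }, PING_INTERVAL_MS);
  socket.on('disconnect', () => clearInterval(pingTimer));
});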

Have the following in your server:
io.use( async function(socket, next) {
  let address = socket.handshake.address;
  // Validate Token
  const token = socket.handshake.auth.token;
  if(token !== undefined){
    try{
      await tokenVerify(token).then((payload) => {
        const serverTimestamp = Math.floor(Date.now() / 1000);
        const clientTimestamp = payload.exp; // the token's "exp" claim, in seconds
        if(clientTimestamp > serverTimestamp){
          console.log("Connection from: " + address + " was accepted");
          console.log("Token [" + token + "] from: " + address + " was accepted");
          next();
        }else{
          console.log("Connection from: " + address + " was rejected");
          console.log("Token [" + token + "] from: " + address + " was rejected");
          next(new Error("unauthorized"));
        }
      });
    }catch (e) {
      console.log(e);
      next(new Error("unauthorized")); // verification failed
    }
  } else {
    next(new Error("unauthorized")); // no token supplied at all
  }
})
With the code above, the server will respond with "unauthorized" if the token isn't valid.
So, on the client side, we can catch that message as shown below.
socket_io.on("connect_error", (err) => {
if(err?.message === 'unauthorized'){
var timeout = (socket_reconnection_attempts === 0 ? 5000 : 60000)
console.log("Trying to reconnect in the next " + (timeout / 1000) + ' seconds')
setTimeout(function (){
console.log('Trying to reconnect manually')
socket_reconnection_attempts++;
loadAuthToken().then(function (token) {
socket_io.auth.token = token;
socket_io.connect();
})
}, timeout)
}
});
With the code above, the client side will try to reconnect and refresh the token only if the error message is "unauthorized".
The variable "socket_reconnection_attempts" is there to avoid sending a massive number of reconnection attempts in a short period of time.

Related

socket.io buffered messages loss after reconnection

This is on the client side:
socket.on('connect', () => {
  console.log('client connect', socket.id);
  const token = getToken();
  socket.emit('token', token);
});
socket.on('message', data => {
  ....
  //handle message
});
This is on the server side:
io.on('connection', (client) => {
  client.on('token', token => {
    verifyToken(token)
      .then(({ _id: clientId }) => {
        if (!clientId) return;
        if (!connections[clientId]) {
          connections[clientId] = new Map();
        }
        connections[clientId].set(client, 1);
        client.on('disconnect', () => {
          connections[clientId].delete(client);
        });
      });
  });
});
}

async sendMessageToClients (workspaceId, message) {
  const workspace = await getWorkspaceQuery(workspaceId);
  if (!workspace) return;
  const workspaceMembers = workspace.members.map(({ user }) => user);
  for (const memberId of workspaceMembers) {
    if (connections[memberId]) {
      for (const clientConnection of connections[memberId].keys()) {
        console.log('send to client', memberId, message.content, clientConnection.connected, clientConnection.id);
        clientConnection.emit('message', message);
      }
    }
  }
};
}
I purposely take a client offline by disconnecting the Wi-Fi connection (putting it into offline mode). What happens is:
a. if the disconnection is short, the socket.id stays the same and I do get the buffered messages sent by the other client when it comes back online;
b. but if the disconnection is longer, the socket.id changes, and I don't get the buffered messages sent by the other client when it comes back online.
How should I address that?
According to here, the messages should ideally be buffered and delivered after reconnection.

AWS API gateway websocket receives messages inconsistently

I have a websocket in API Gateway connected to a Lambda that looks like this:
const AWS = require('aws-sdk');
const amqp = require('amqplib');

const api = new AWS.ApiGatewayManagementApi({
  endpoint: 'MY_ENDPOINT',
});

async function sendMsgToApp(response, connectionId) {
  console.log('=========== posting reply');
  const params = {
    ConnectionId: connectionId,
    Data: Buffer.from(response),
  };
  return api.postToConnection(params).promise();
}

let rmqServerUrl = 'MY_RMQ_SERVER_URL';
let rmqServerConn = null;

exports.handler = async event => {
  console.log('websocket event:', event);
  const { routeKey: route, connectionId } = event.requestContext;
  switch (route) {
    case '$connect':
      console.log('user connected');
      const creds = event.queryStringParameters.x;
      console.log('============ x.length:', creds.length);
      const decodedCreds = Buffer.from(creds, 'base64').toString('utf-8');
      try {
        const conn = await amqp.connect(
          `amqps://${decodedCreds}@${rmqServerUrl}`
        );
        const channel = await conn.createChannel();
        console.log('============ created channel successfully:');
        rmqServerConn = conn;
        const [userId] = decodedCreds.split(':');
        const { queue } = await channel.assertQueue(userId, {
          durable: true,
          autoDelete: false,
        });
        console.log('============ userId:', userId, 'queue:', queue);
        channel.consume(queue, msg => {
          console.log('========== msg:', msg);
          const { content } = msg;
          const msgString = content.toString('utf-8');
          console.log('========== msgString:', msgString);
          sendMsgToApp(msgString, connectionId)
            .then(res => {
              console.log('================= sent queued message to the app, will ack, outcome:', res);
              try {
                channel.ack(msg);
              } catch (e) {
                console.log('================= error acking message:', e);
              }
            })
            .catch(e => {
              console.log('================= error sending queued message to the app, will not ack, error:', e);
            });
        });
      } catch (e) {
        console.log('=========== error initializing amqp connection', e);
        if (rmqServerConn) {
          await rmqServerConn.close();
        }
        const response = {
          statusCode: 401,
          body: JSON.stringify('failed auth!'),
        };
        return response;
      }
      break;
    case '$disconnect':
      console.log('user disconnected');
      if (rmqServerConn) {
        await rmqServerConn.close();
      }
      break;
    case 'message':
      console.log('message route');
      await sendMsgToApp('test', connectionId);
      break;
    default:
      console.log('unknown route', route);
      break;
  }
  const response = {
    statusCode: 200,
    body: JSON.stringify('Hello from websocket Lambda!'),
  };
  return response;
};
The AMQP connection is to a RabbitMQ server provisioned by Amazon MQ. The problem I have is that messages published to the queue either do not show up at all in the .consume callback, or they only show up after the websocket is disconnected and reconnected. Essentially they're missing until a much later point, at which they show up unexpectedly. That's within the websocket. Even when they do show up, they don't get sent to the client (the app in this case) that's connected to the websocket. What could be the problem here?
The problem here is that I had the wrong idea about how API Gateway's websockets work. API Gateway maintains the websocket connection, but not the Lambda itself. I had put my .consume subscription logic inside the Lambda, which doesn't work because the Lambda runs and terminates instead of being kept alive. A better method would be to make the queue an event source for the Lambda. However, that also didn't work for me, because it requires you to know your queues when setting up the Lambda, and my queues are created dynamically, which violates that requirement. I ended up standing up an RMQ server on a VPS.
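To make that last step more concrete, here is a rough sketch (my own, not from the answer) of what a long-lived consumer outside Lambda might look like, for example running on that VPS: it keeps the AMQP channel open and forwards each message to the websocket client via postToConnection. The getConnectionIdForUser helper is hypothetical; you would back it with wherever your $connect route stores connection ids:
const AWS = require('aws-sdk');
const amqp = require('amqplib');

const api = new AWS.ApiGatewayManagementApi({ endpoint: 'MY_ENDPOINT' });

// hypothetical lookup: read the user's current connectionId from wherever
// the $connect/$disconnect routes persist it (e.g. a DynamoDB table)
async function getConnectionIdForUser(userId) {
  // ...
}

async function run(userId) {
  const conn = await amqp.connect('amqps://USER:PASS@MY_RMQ_SERVER_URL');
  const channel = await conn.createChannel();
  await channel.assertQueue(userId, { durable: true, autoDelete: false });

  channel.consume(userId, async (msg) => {
    try {
      const connectionId = await getConnectionIdForUser(userId);
      await api.postToConnection({ ConnectionId: connectionId, Data: msg.content }).promise();
      channel.ack(msg); // only ack once the message reached the app
    } catch (e) {
      console.log('failed to deliver, leaving message unacked', e);
    }
  });
}

run('some-user-id').catch(console.error);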

calling ApiGatewayManagementApi.postToConnection() gives me 500 internal server error?

So I am trying to develop an AWS websocket function using Lambda. But it seems that whenever I try to call "postToConnection" it just gives me a 500 Internal Server Error.
CloudWatch also doesn't log the error that I am receiving.
What I'm receiving on the terminal once I send the message is this:
"{"message": "Internal server error", "connectionId":"xxx", "requestId":"xxx"}"
(which doesn't give me any information at all)
This is my whole code for the Lambda function:
var AWS = require('aws-sdk');
AWS.config.update({ region: "us-west-2" });
var DDB = new AWS.DynamoDB({ apiVersion: "2012-10-08" });
require('aws-sdk/clients/apigatewaymanagementapi');

exports.handler = function (event, context, callback) {
  var url_handler = event.requestContext.domainName + "/" + event.requestContext.stage;
  // var params = event.requestContext;
  // console.log(params);
  var scanParams = {
    TableName: "tbl-web-socket-connection",
    ProjectionExpression: "id"
  };
  DDB.scan(scanParams, function (err, data) {
    // callback(null, {
    //   statusCode: 200,
    //   body: "Data send to"
    // });
    if (err) {
      callback(null, {
        statusCode: 500,
        body: JSON.stringify(err)
      });
    } else {
      var apigwManagementApi = new AWS.ApiGatewayManagementApi({
        apiVersion: "2018-11-29",
        endpoint: event.requestContext.domainName + "/" + event.requestContext.stage
      });
      var postParams = {
        Data: JSON.parse(event.body).data
      };
      var count = 0;
      data.Items.forEach(function (element) {
        postParams.ConnectionId = element.id.S;
        console.log(postParams);
        apigwManagementApi.postToConnection(postParams, function (err, data) {
          if (err) {
            // API Gateway returns a status of 410 GONE when the connection is no
            // longer available. If this happens, we simply delete the identifier
            // from our DynamoDB table.
            if (err.statusCode === 410) {
              console.log("Found stale connection, deleting " + postParams.ConnectionId);
              DDB.deleteItem({ TableName: process.env.TABLE_NAME,
                Key: { connectionId: { S: postParams.ConnectionId } } });
            } else {
              console.log("Failed to post. Error: " + JSON.stringify(err));
            }
          } else {
            count++;
          }
        });
      });
      callback(null, {
        statusCode: 200,
        body: "Data send to " + count + " connection" + (count === 1 ? "" : "s")
      });
    }
  });
};
The aws-sdk is also up to date; I declared it in a Lambda layer and that's what I'm using.
Any idea what's causing this?
This is due to a timeout: the DynamoDB scan loops through all of the records, which causes the Lambda to time out.
It turns out CloudWatch really was logging the error, but I was too focused on the terminal output, which only gives me the 500 Internal Server Error.
To fix this, just go to the Lambda function's configuration and increase the time limit.
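Separate from the time-limit fix, and purely as my own sketch (not part of the accepted answer): because the callback-style forEach above never waits for postToConnection, the final callback also runs before any post has finished and always reports a count of 0. An async variant that awaits every post with the SDK's .promise() form could look roughly like this; it reuses AWS, DDB and the table name from the question, and assumes the table's key attribute is "id":
exports.handler = async (event) => {
  const data = await DDB.scan({ TableName: "tbl-web-socket-connection", ProjectionExpression: "id" }).promise();
  const apigwManagementApi = new AWS.ApiGatewayManagementApi({
    apiVersion: "2018-11-29",
    endpoint: event.requestContext.domainName + "/" + event.requestContext.stage
  });
  const results = await Promise.all(
    data.Items.map((element) =>
      apigwManagementApi
        .postToConnection({ ConnectionId: element.id.S, Data: JSON.parse(event.body).data })
        .promise()
        .then(() => 1)
        .catch((err) => {
          if (err.statusCode === 410) {
            // stale connection: remove it from the table (key name assumed from the scan above)
            return DDB.deleteItem({
              TableName: "tbl-web-socket-connection",
              Key: { id: { S: element.id.S } }
            }).promise().then(() => 0);
          }
          console.log("Failed to post. Error: " + JSON.stringify(err));
          return 0;
        })
    )
  );
  const count = results.reduce((a, b) => a + b, 0);
  return { statusCode: 200, body: "Data send to " + count + " connection" + (count === 1 ? "" : "s") };
};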

react-native fetch with authorization header sometime return 401

I'm facing an issue whereby I sometimes get status code 401 (Unauthorized) from my phone. I'm trying to access an API on my computer's localhost (192.168.0.7).
I have a screen where clicking a button navigates to a page that requests data through the API. When I go back and navigate to the same page again, it sometimes returns code 401.
So if I repeat the same steps (navigate and go back), let's say 10 times, I get Unauthorized about 5-7 times.
Below is my code:
export function getMyCarpool(param, token) {
  return dispatch => {
    var requestUrl = _api + 'GetMyProduct?' + param;
    fetch(requestUrl, {
      method: "get",
      headers: new Headers({
        'Accept': 'application/json',
        'Content-Type': 'application/x-www-form-urlencoded',
        'Authorization': 'Bearer ' + token
      })
    })
    .then((request) => {
      console.log(request);
      if(request.status == 200)
        return request.json();
      else if(request.status == 401) {
        //dispatch(logout());
        throw new Error('Unauthorized access.');
      }
      else
        throw new Error('Failed to request, please try again.');
    })
    .then((response) => {
      var message = response.message;
      if(response.success == 'true')
        dispatch({ message, type: GET_MY_PRODUCT_SUCCESS });
      else
        dispatch({ message, type: GET_MY_PRODUCT_FAILED });
    })
    .catch(error => {
      var message = error.message;
      dispatch({ message, type: GET_MY_PRODUCT_FAILED });
    });
  };
}
I've checked the token on my phone and also tried making many requests using Postman, so I don't think it's a server-side problem.
I'm using Laravel, with Laravel Passport for API authentication. I'm not sure why this happens when I keep accessing it many times; any help is greatly appreciated.
UPDATE: I tried capturing whether the HTTP request actually carries the token, following this link, and I don't get the problem anymore.
It's a healthy mechanism for token expiry. Maybe your token (access_token) is valid for 5 minutes; once it expires, you should use the refresh_token to obtain a new access_token.
For a code explanation:
async function fetchService(url) {
  const reqSetting = {
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${Auth.access_token}`,
    },
  };
  const prevRequest = { url, reqSetting };
  const resp = await fetch(url, reqSetting);
  if (!resp.ok) {
    const error = new Error(resp.statusText || 'Request Failed!');
    if (resp.status === 401 || resp.status === 400) {
      const responseClone = resp.clone();
      const errorInfo = await resp.json();
      if (errorInfo.error == 'invalid_token') {
        // console.log('Token Expired', errorInfo);
        try {
          await refreshToken();
          const response = await fetchService(prevRequest.url);
          return response;
        } catch (err) {
          // handle why not refresh a new token
        }
      }
      return responseClone;
    }
    error.errorUrl = url;
    error.code = resp.status;
    throw error;
  }
  return resp;
}
Where the refresh token function is:
async function refreshToken() {
  const url = 'https://example.com/oauth/token';
  const body = {
    grant_type: 'refresh_token',
    refresh_token: Auth.refresh_token,
  };
  try {
    // adjust the headers/body format to whatever your token endpoint expects
    const res = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(body),
    });
    const data = await res.json();
    Auth.access_token = data.access_token;
    Auth.refresh_token = data.refresh_token;
    return true;
  } catch (error) {
    throw error;
  }
}
This fetchService will automatically obtain a new token if the old one has expired, and then replay the original request.
PS.
If you have multiple requests in flight at the same time, fetchService will need a little optimization. You'd better choose another token-renewal strategy, for example one based on a saga.
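One simple way to do that optimization without pulling in a saga (again my own sketch, not from the answer) is to share a single in-flight refresh promise, so several 401s at the same time trigger only one call to the token endpoint:
let refreshPromise = null;

function refreshTokenOnce() {
  if (!refreshPromise) {
    refreshPromise = refreshToken().finally(() => {
      // allow the next expiry to start a fresh refresh
      refreshPromise = null;
    });
  }
  return refreshPromise;
}

// inside fetchService, call "await refreshTokenOnce();" instead of "await refreshToken();"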

Freeze on connect-redis session destroy?

I'm implementing an authentication system in Node.js with Express, backed by a Redis database for users and connect-redis for persistent, scalable session stores.
Here is the heart of my app, the server:
// Module Dependencies
var express = require('express');
var redis = require('redis');
var client = redis.createClient();
var RedisStore = require('connect-redis')(express);
var crypto = require('crypto');
var app = module.exports = express.createServer();
// Configuration
app.configure(function(){
  app.set('views', __dirname + '/views');
  app.set('view engine', 'jade');
  app.use(express.bodyParser());
  app.use(express.methodOverride());
  app.use(express.cookieParser());
  app.use(express.session({ secret: 'obqc487yusyfcbjgahkwfet73asdlkfyuga9r3a4', store: new RedisStore }));
  app.use(require('stylus').middleware({ src: __dirname + '/public' }));
  app.use(app.router);
  app.use(express.static(__dirname + '/public'));
});
app.configure('development', function(){
  app.use(express.errorHandler({ dumpExceptions: true, showStack: true }));
});
app.configure('production', function(){
  app.use(express.errorHandler());
});
// Message Helper
app.dynamicHelpers({
  // Index Alerts
  indexMessage: function(req){
    var msg = req.sessionStore.indexMessage;
    if (msg) return '<p class="message">' + msg + '</p>';
  },
  // Login Alerts
  loginMessage: function(req){
    var err = req.sessionStore.loginError;
    var msg = req.sessionStore.loginSuccess;
    delete req.sessionStore.loginError;
    delete req.sessionStore.loginSuccess;
    if (err) return '<p class="error">' + err + '</p>';
    if (msg) return '<p class="success">' + msg + '</p>';
  },
  // Register Alerts
  registerMessage: function(req){
    var err = req.sessionStore.registerError;
    var msg = req.sessionStore.registerSuccess;
    delete req.sessionStore.registerError;
    delete req.sessionStore.registerSuccess;
    if (err) return '<p class="error">' + err + '</p>';
    if (msg) return '<p class="success">' + msg + '</p>';
  },
  // Session Access
  sessionStore: function(req, res){
    return req.sessionStore;
  }
});
// Salt Generator
function generateSalt(){
  var text = "";
  var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*";
  for(var i = 0; i < 40; i++)
    text += possible.charAt(Math.floor(Math.random() * possible.length));
  return text;
}
// Generate Hash
function hash(msg, key){
  return crypto.createHmac('sha256', key).update(msg).digest('hex');
}
// Authenticate
function authenticate(username, pass, fn){
  client.get('username:' + username + ':uid', function(err, uid){
    if (uid !== null){
      client.hgetall('uid:' + uid, function(err, user){
        if (user.pass == hash(pass, user.salt)){
          return fn(null, user);
        }
        else{
          fn(new Error('invalid password'));
        }
      });
    }
    else{
      return fn(new Error('cannot find user'));
    }
  });
}
function restrict(req, res, next){
  if (req.sessionStore.user) {
    next();
  } else {
    req.sessionStore.loginError = 'Access denied!';
    res.redirect('/login');
  }
}
function accessLogger(req, res, next) {
  console.log('/restricted accessed by %s', req.sessionStore.user.username);
  next();
}
// Routes
app.get('/', function(req, res){
  res.render('index', {
    title: 'TileTabs'
  });
});
app.get('/restricted', restrict, accessLogger, function(req, res){
  res.render('restricted', {
    title: 'Restricted Section'
  });
});
app.get('/logout', function(req, res){
  req.sessionStore.destroy(function(err){
    if (err){
      console.log('Error destroying session...');
    }
    else{
      console.log(req.sessionStore.user.username + ' has logged out.');
      res.redirect('home');
    }
  });
});
app.get('/login', function(req, res){
  res.render('login', {
    title: 'TileTabs Login'
  });
});
app.post('/login', function(req, res){
  var usernameLength = req.body.username.length;
  var passwordLength = req.body.password.length;
  if (usernameLength == 0 && passwordLength == 0){
    req.sessionStore.loginError = 'Authentication failed, please enter a username and password!';
    res.redirect('back');
  }
  else{
    authenticate(req.body.username, req.body.password, function(err, user){
      if (user) {
        req.session.regenerate(function(){
          req.sessionStore.user = user;
          req.sessionStore.indexMessage = 'Authenticated as ' + req.sessionStore.user.name + '. Click to logout. ' + ' You may now access the restricted section.';
          console.log(req.sessionStore.user.username + ' logged in!');
          res.redirect('home');
        });
      } else {
        req.sessionStore.loginError = 'Authentication failed, please check your username and password.';
        res.redirect('back');
      }
    });
  }
});
app.get('/register', function(req, res){
  res.render('register', {
    title: 'TileTabs Register'
  });
});
app.post('/register', function(req, res){
  var name = req.body.name;
  var username = req.body.username;
  var password = req.body.password;
  var salt = generateSalt();
  if (name.length == 0 && username.length == 0 && password.length == 0){
    req.sessionStore.registerError = 'Registration failed, please enter a name, username and password!';
    res.redirect('back');
  }
  else{
    client.get('username:' + username + ':uid', function(err, uid){
      if (uid !== null){
        req.sessionStore.registerError = 'Registration failed, ' + username + ' already taken.';
        res.redirect('back');
      }
      else{
        client.incr('global:nextUserId', function(err, uid){
          client.set('username:' + username + ':uid', uid);
          client.hmset('uid:' + uid, {
            name: name,
            username: username,
            salt: salt,
            pass: hash(password, salt)
          }, function(){
            req.sessionStore.loginSuccess = 'Thanks for registering! Try logging in!';
            console.log(username + ' has registered!');
            res.redirect('/login');
          });
        });
      }
    });
  }
});
// Only listen on $ node app.js
if (!module.parent) {
  app.listen(80);
  console.log("Express server listening on port %d", app.address().port);
}
Registration and login authentication work great, but for some reason I'm getting a hang when a connected user attempts to log out.
As you can see from my /logout route,
app.get('/logout', function(req, res){
  req.sessionStore.destroy(function(err){
    if (err){
      console.log('Error destroying session...');
    }
    else{
      console.log(req.sessionStore.user.username + ' has logged out.');
      res.redirect('home');
    }
  });
});
I have two console.log calls to try to determine where the freeze occurs. Interestingly, nothing gets logged.
So, for whatever reason, destroy() isn't being called properly.
I'm not sure whether I'm just goofing on syntax or what, but according to the connect documentation it appears that I'm setting this up correctly.
You must use req.session instead of req.sessionStore. req.sessionStore is a single RedisStore instance and is not set per request by connect; this means your code works for one user only, and all your users would share the same session data this way.
req.sessionStore has a destroy method too, and that's why you are not getting any errors. Your callback is never called because in that method the callback is the second parameter.
Just replace req.sessionStore with req.session throughout your code. E.g.:
req.session.destroy(function(err) { ... });
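Applied to the /logout route from the question, the fix looks roughly like this (the username is read before destroy, because req.session is gone once the callback runs):
app.get('/logout', function(req, res){
  var username = req.session.user.username;
  req.session.destroy(function(err){
    if (err){
      console.log('Error destroying session...');
    }
    else{
      console.log(username + ' has logged out.');
      res.redirect('home');
    }
  });
});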
