redux-saga, websockets and actions queue

I have the following problem: the server sends messages to the client through a websocket. On the client, I need to display these messages to the user. But the problem is that sometimes messages come too fast, and I need to organize some sort of queue and show the messages one after another.
my saga:
import { eventChannel, effects, takeEvery } from 'redux-saga';
import { types, actionCreators } from './actions';

const { call, put, take, race } = effects;

function watchMessages(socket) {
  return eventChannel((emitter) => {
    socket.onopen = (e) => (emitter(actionCreators.socketOpen(e)));
    socket.onclose = (e) => (emitter(actionCreators.socketClose(e)));
    socket.onerror = (e) => (emitter(actionCreators.socketError(e)));
    socket.onmessage = (e) => (emitter(actionCreators.socketMessage(e)));
    return () => {
      socket.close();
    };
  });
}

function* internalListener(socket) {
  while (true) {
    const data = yield take(types.SOCKET_SEND);
    socket.send(data.payload);
  }
}

function* externalListener(socketChannel) {
  while (true) {
    const action = yield take(socketChannel);
    yield put(action);
  }
}

function* wsHandling(action) {
  const socket = action.payload.socket;
  while (true) {
    const socketChannel = yield call(watchMessages, socket);
    const { cancel } = yield race({
      task: [call(externalListener, socketChannel), call(internalListener, socket)],
      cancel: take(types.SOCKET_CLOSE),
    });
    if (cancel) {
      socketChannel.close();
    }
  }
}

export default function* rootSaga(action) {
  yield takeEvery(types.SOCKET_CONNECT, wsHandling);
}
my reducer:
function dataReducer(state = initialStateData, action) {
  switch (action.type) {
    case types.SOCKET_MESSAGE:
      if (action.payload.channel === 'channel1') {
        return state
          .set('apichannel1', action.payload);
      } else if (action.payload.channel === 'channel2') {
        return state
          .set('apichannel2', action.payload);
      } else if (action.payload.channel === 'channel3') {
        return state
          .set('apichannel3', action.payload);
      }
      return state;
    default:
      return state;
  }
}
So now, when a new message arrives, I change the state and just display it on the screen.
Any ideas how I can turn this into the following: put arriving messages into some sort of queue, and show them one by one on screen, each for some custom amount of time?

You can buffer (queue) actions in redux-saga using the actionChannel effect.
https://redux-saga.js.org/docs/advanced/Channels.html#using-the-actionchannel-effect
Then you can read from the channel's buffer at your own pace.
I created a simplified example that listens for messages and displays at most 3 at a time, each for 5 seconds. See:
https://codesandbox.io/s/wt8uu?file=/src/index.js
At the bottom there is a console panel; use it to call addMsg('Msg: ' + Math.random()) to simulate a new socket.io message.
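For reference, a minimal sketch of the actionChannel approach (using redux-saga 1.x style imports; the SHOW_MESSAGE/HIDE_MESSAGE actions are hypothetical placeholders for whatever your UI renders):
import { buffers } from 'redux-saga';
import { actionChannel, take, put, delay } from 'redux-saga/effects';
import { types } from './actions';

// Queue every incoming SOCKET_MESSAGE action and display the messages
// one at a time, each for 5 seconds. SHOW_MESSAGE/HIDE_MESSAGE are assumptions.
function* displayQueue() {
  const channel = yield actionChannel(types.SOCKET_MESSAGE, buffers.expanding());
  while (true) {
    const action = yield take(channel);   // next queued message
    yield put({ type: 'SHOW_MESSAGE', payload: action.payload });
    yield delay(5000);                    // keep it on screen for 5 seconds
    yield put({ type: 'HIDE_MESSAGE' });
  }
}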

So I did it this way; here is the code, maybe it will be useful for someone:
// Imports added for completeness, assuming the same redux-saga version as in
// the question (delay was exported from the 'redux-saga' package root).
import _ from 'lodash';
import { eventChannel, effects, takeEvery, delay } from 'redux-saga';
import { types, actionCreators } from './actions';

const { call, put, take, race, fork } = effects;

let pendingTasks = [];
let activeTasks = [];

function watchMessages(socket) {
  return eventChannel((emitter) => {
    socket.onopen = (e) => (emitter(actionCreators.socketOpen(e)));
    socket.onclose = (e) => (emitter(actionCreators.socketClose(e)));
    socket.onerror = (e) => (emitter(actionCreators.socketError(e)));
    socket.onmessage = (e) => (emitter(actionCreators.socketMessage(e)));
    return () => {
      socket.close();
    };
  });
}

function* internalListener(socket) {
  while (true) {
    const data = yield take(types.SOCKET_SEND);
    socket.send(data.payload);
  }
}

// Incoming socket actions are no longer dispatched immediately;
// they are appended to the pending queue instead.
function* externalListener(socketChannel) {
  while (true) {
    const action = yield take(socketChannel);
    pendingTasks = [...pendingTasks, action];
  }
}

function* wsHandling(action) {
  const socket = action.payload.socket;
  while (true) {
    const socketChannel = yield call(watchMessages, socket);
    const { cancel } = yield race({
      task: [call(externalListener, socketChannel), call(internalListener, socket)],
      cancel: take(types.SOCKET_CLOSE),
    });
    if (cancel) {
      socketChannel.close();
    }
  }
}

// Polls the pending queue and displays one task at a time.
function* tasksScheduler() {
  while (true) {
    const canDisplayTask = activeTasks.length < 1 && pendingTasks.length > 0;
    if (canDisplayTask) {
      const [firstTask, ...remainingTasks] = pendingTasks;
      pendingTasks = remainingTasks;
      yield fork(displayTask, firstTask);
      yield call(delay, 300);
    } else {
      yield call(delay, 50);
    }
  }
}

// Dispatches the task and keeps it "active" for 3 seconds.
function* displayTask(task) {
  activeTasks = [...activeTasks, task];
  yield put(task);
  yield call(delay, 3000);
  activeTasks = _.without(activeTasks, task);
}

export default function* rootSaga(action) {
  yield [
    takeEvery(types.SOCKET_CONNECT, wsHandling),
    takeEvery(types.SOCKET_CONNECT, tasksScheduler),
  ];
}

Related

Providing two combined Reducers for my redux saga store prevents my websocket channel message from triggering, but only one does not?

I configured my store this way with Redux Toolkit:
const rootReducer = combineReducers({
  someReducer,
  systemsConfigs
});

const store = configureStore({
  devTools: true,
  reducer: rootReducer,
  // middleware: [middleware, logger],
  middleware: (getDefaultMiddleware) => getDefaultMiddleware({ thunk: false }).concat(middleware),
});

middleware.run(sagaRoot)
And this is the channel I am connecting to:
export function createSocketChannel(
  productId: ProductId,
  pair: string,
  createSocket = () => new WebSocket('wss://somewebsocket')
) {
  return eventChannel<SocketEvent>((emitter) => {
    const socket_OrderBook = createSocket();

    socket_OrderBook.addEventListener('open', () => {
      emitter({
        type: 'connection-established',
        payload: true,
      });
      socket_OrderBook.send(
        `subscribe-asdqwe`
      );
    });

    socket_OrderBook.addEventListener('message', (event) => {
      if (event.data?.includes('bids')) {
        emitter({
          type: 'message',
          payload: JSON.parse(event.data),
        });
        //
      }
    });

    socket_OrderBook.addEventListener('close', (event: any) => {
      emitter(new SocketClosedByServer());
    });

    return () => {
      if (socket_OrderBook.readyState === WebSocket.OPEN) {
        socket_OrderBook.send(
          `unsubscribe-order-book-${pair}`
        );
      }
      if (socket_OrderBook.readyState === WebSocket.OPEN || socket_OrderBook.readyState === WebSocket.CONNECTING) {
        socket_OrderBook.close();
      }
    };
  }, buffers.expanding<SocketEvent>());
}
And here's what my saga connection handlers look like:
export function* handleConnectingSocket(ctx: SagaContext) {
  try {
    const productId = yield select((state: State) => state.productId);
    const requested_pair = yield select((state: State) => state.requested_pair);

    if (ctx.socketChannel === null) {
      ctx.socketChannel = yield call(createSocketChannel, productId, requested_pair);
    }
    //
    const message: SocketEvent = yield take(ctx.socketChannel!);
    if (message.type !== 'connection-established') {
      throw new SocketUnexpectedResponseError();
    }
    yield put(connectedSocket());
  } catch (error: any) {
    reportError(error);
    yield put(
      disconnectedSocket({
        reason: SocketStateReasons.BAD_CONNECTION,
      })
    );
  }
}

export function* handleConnectedSocket(ctx: SagaContext) {
  try {
    while (true) {
      if (ctx.socketChannel === null) {
        break;
      }

      const events = yield flush(ctx.socketChannel);
      const startedExecutingAt = performance.now();

      if (Array.isArray(events)) {
        const deltas = events.reduce(
          (patch, event) => {
            if (event.type === 'message') {
              patch.bids.push(...event.payload.data?.bids);
              patch.asks.push(...event.payload.data?.asks);
              //
            }
            //
            return patch;
          },
          { bids: [], asks: [] } as SocketMessage
        );

        if (deltas.bids.length || deltas.asks.length) {
          yield putResolve(receivedDeltas(deltas));
        }
      }
      yield call(delayNextDispatch, startedExecutingAt);
    }
  } catch (error: any) {
    reportError(error);
    yield put(
      disconnectedSocket({
        reason: SocketStateReasons.UNKNOWN,
      })
    );
  }
}
After debugging I found the following:
When I provide a single reducer to my store, the channel works well and data is fetched, whereas with the combined reducers I get
an established connection from my handleConnectingSocket generator function
and an empty events array [] from
const events = yield flush(ctx.socketChannel) written in handleConnectedSocket.
I tried to clarify as much as possible.
OK, so I started refactoring my TypeScript by changing the types, then saw all the places that broke; there was a problem in my sagas.tsx.
Ping me if you face such an issue in the future.

Firebase Function Returns Before All Callback functions complete execution

I'm using the Google Cloud Storage Node.js client library to list GCS bucket paths.
Here's the code of the Firebase Function:
import * as functions from 'firebase-functions';
import { Storage } from '@google-cloud/storage';
import { globVars } from '../admin/admin';

const projectId = process.env.GCLOUD_PROJECT;

// shared global variables setup
const { keyFilename } = globVars;

// Storage set up
const storage = new Storage({
  projectId,
  keyFilename,
});

export const gcsListPath = functions
  .region('europe-west2')
  .runWith({ timeoutSeconds: 540, memory: '256MB' })
  .https.onCall(async (data, context) => {
    if (context.auth?.token.email_verified) {
      const { bucketName, prefix, pathList = false, fileList = false } = data;
      let list;
      const options = {
        autoPaginate: false,
        delimiter: '',
        prefix,
      };

      if (pathList) {
        options.delimiter = '/';
        let test: any[] = [];
        const callback = (_err: any, _files: any, nextQuery: any, apiResponse: any) => {
          test = test.concat(apiResponse.prefixes);
          console.log('test : ', test);
          console.log('nextQuery : ', nextQuery);
          if (nextQuery) {
            storage.bucket(bucketName).getFiles(nextQuery, callback);
          } else {
            // prefixes = The finished array of prefixes.
            list = test;
          }
        };
        storage.bucket(bucketName).getFiles(options, callback);
      }

      if (fileList) {
        const [files] = await storage
          .bucket(bucketName)
          .getFiles(options);
        list = files.map((file) => file.name);
      }

      return { list }; // returns null because it executes before the callbacks finish
    } else {
      return {
        error: { message: 'Bad Request', status: 'INVALID_ARGUMENT' },
      };
    }
  });
My problem is that my Firebase Function returns the list (null) before all the callback functions finish execution.
Could someone spot and point out what needs to be changed/added to make the function wait for all the callback functions to finish? I've tried adding async/await but can't seem to get it right.
The reason for your error is that you use a callback, and it's not awaited in the code. I would recommend turning the callback code into a promise, something like this:
import * as functions from "firebase-functions";
import { Storage } from "@google-cloud/storage";
import { globVars } from "../admin/admin";

const projectId = process.env.GCLOUD_PROJECT;

// shared global variables setup
const { keyFilename } = globVars;

// Storage set up
const storage = new Storage({
  projectId,
  keyFilename,
});

const getList = (bucketName, options) => {
  return new Promise((resolve, reject) => {
    let list;
    let test: any[] = [];
    const callback = (
      _err: any,
      _files: any,
      nextQuery: any,
      apiResponse: any
    ) => {
      test = test.concat(apiResponse.prefixes);
      console.log("test : ", test);
      console.log("nextQuery : ", nextQuery);
      if (nextQuery) {
        storage.bucket(bucketName).getFiles(nextQuery, callback);
      } else {
        // prefixes = The finished array of prefixes.
        list = test;
      }
      resolve(list);
    };
    try {
      storage.bucket(bucketName).getFiles(options, callback);
    } catch (error) {
      reject(error);
    }
  });
};

export const gcsListPath = functions
  .region("europe-west2")
  .runWith({ timeoutSeconds: 540, memory: "256MB" })
  .https.onCall(async (data, context) => {
    if (context.auth?.token.email_verified) {
      const { bucketName, prefix, pathList = false, fileList = false } = data;
      let list;
      const options = {
        autoPaginate: false,
        delimiter: "",
        prefix,
      };

      if (pathList) {
        options.delimiter = "/";
        list = await getList(bucketName, options);
      }

      if (fileList) {
        const [files] = await storage.bucket(bucketName).getFiles(options);
        list = files.map((file) => file.name);
      }

      return { list };
    } else {
      return {
        error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
      };
    }
  });
I'm not sure if the part with fileList will work as expected. It looks like the API doesn't support await but only callbacks.
Here is a revised version, with the resolve call moved inside the else branch (so the promise only resolves once all pages have been collected) and with typed parameters:
import * as functions from "firebase-functions";
import { GetFilesOptions, Storage } from "@google-cloud/storage";
import { globVars } from "../admin/admin";

const projectId = process.env.GCLOUD_PROJECT;

// shared global variables setup
const { keyFilename } = globVars;

// Storage set up
const storage = new Storage({
  projectId,
  keyFilename,
});

const getList = (bucketName: string, options: GetFilesOptions) => {
  return new Promise((resolve, reject) => {
    let list: any[] = [];
    const callback = (
      _err: any,
      _files: any,
      nextQuery: any,
      apiResponse: any
    ) => {
      list = list.concat(apiResponse.prefixes);
      console.log("list : ", list);
      console.log("nextQuery : ", nextQuery);
      if (nextQuery) {
        storage.bucket(bucketName).getFiles(nextQuery, callback);
      } else {
        // prefixes = The finished array of prefixes.
        resolve(list);
      }
    };
    try {
      storage.bucket(bucketName).getFiles(options, callback);
    } catch (error) {
      reject(error);
    }
  });
};

export const gcsListPath = functions
  .region("europe-west2")
  .runWith({ timeoutSeconds: 540, memory: "256MB" })
  .https.onCall(async (data, context) => {
    if (context.auth?.token.email_verified) {
      const { bucketName, prefix, pathList = false, fileList = false } = data;
      let list;
      const options = {
        autoPaginate: false,
        delimiter: "",
        prefix,
      };

      if (pathList) {
        options.delimiter = "/";
        list = await getList(bucketName, options);
      }

      if (fileList) {
        const [files] = await storage.bucket(bucketName).getFiles(options);
        list = files.map((file) => file.name);
      }

      return { list };
    } else {
      return {
        error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
      };
    }
  });
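For completeness, a hypothetical client-side call to the gcsListPath callable above, assuming the Firebase JS SDK v9 modular API and an already-initialized app (the bucket name and prefix are placeholders):
import { getApp } from 'firebase/app';
import { getFunctions, httpsCallable } from 'firebase/functions';

// Call the deployed callable in the same region it was deployed to.
const gcsListPath = httpsCallable(getFunctions(getApp(), 'europe-west2'), 'gcsListPath');

async function listPaths() {
  const { data } = await gcsListPath({
    bucketName: 'my-bucket', // placeholder: replace with a real bucket name
    prefix: 'images/',
    pathList: true,
  });
  console.log(data.list);
}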

Rendering logger output to component on Tesseract.js (with React) slows down

I would like to add a progress indicator to Tesseract.js logging.
The example in the docs works just fine, until I set a state hook in the logger:
const worker = createWorker({
  logger: (m) => {
    setProgress(m) // new
  }});
...
const [ocr, setOcr] = useState('Recognizing...');
const [progress, setProgress] = useState(null); // new
...
return (
  <div className="App">
    <p>
      <LogComponent progress={progress}/> {/* new */}
    </p>
  </div>);
This causes the browser to slow down significantly (probably due to React re-rendering on each state update). Is there a way to get around this? Using React.memo perhaps?
You could design it as a hook so your entire component doesn't re-render. Here is a useTesseract hook I created that you can use: https://gist.github.com/KevinDanikowski/25cdcdda2ef4750bcf443f2027cc375a
Copied and pasted:
import { useState, useEffect } from 'react'
import { createWorker } from 'tesseract.js'

export default function useTesseract({ tesseractLanguage = 'eng', log = false }) {
  const [tesseractWorker, setTesseractWorker] = useState(null)
  const [loadingModel, setLoadingModel] = useState(true)
  const [modelError, setModelError] = useState(false)
  const [imgResults, setImgResults] = useState({})
  const [processing, setProcessing] = useState(false)
  const [progress, setProgress] = useState(0)

  const extractTextFromImage = (imageUrl) => {
    const recognize = async () => {
      const {
        data: {
          hocr: htmlOutput,
          text,
          // tsv, box, unlv
        },
      } = await tesseractWorker.recognize(imageUrl)
      setProcessing(false)
      setImgResults({ html: htmlOutput, text })
    }
    if (loadingModel) {
      try {
        setTimeout(recognize, 400)
      } catch (e) {
        console.error('Timeout Error:', e.message)
        setImgResults({ error: true })
      }
    } else {
      try {
        setProcessing(true)
        recognize()
      } catch (e) {
        console.error('Tesseract Error:', e.message)
        setProcessing(false)
        setImgResults({ error: true })
      }
    }
  }

  const logger = (m) => {
    setProgress(m.progress)
    if (log) {
      console.info(m)
    }
  }

  useEffect(() => {
    const loadTesseract = async () => {
      if (tesseractWorker) {
        await tesseractWorker.loadLanguage(tesseractLanguage)
        await tesseractWorker.initialize(tesseractLanguage)
        console.info(`INFO: loaded ${tesseractLanguage} tesseract model`)
      } else {
        const tesseractWorker = createWorker({
          logger,
          // specify paths because sometimes the free CDN goes down
          // corePath: '/static/tesseract-core.wasm.2.2.0.js',
          // workerPath: '/static/tesseract-worker.v2.1.4.min.js',
        })
        setTesseractWorker(tesseractWorker)
        await tesseractWorker.load()
        await tesseractWorker.loadLanguage(tesseractLanguage)
        await tesseractWorker.initialize(tesseractLanguage)
        console.info(`INFO: loaded ${tesseractLanguage} tesseract model`)
        setLoadingModel(false)
      }
    }
    loadTesseract().catch((e) => {
      console.error(`ERROR: Failed to load tesseract model`, e.message)
      setModelError(true)
      setLoadingModel(false)
    })
    // TODO: Have to add a ref to reference the latest tesseractWorker in order to terminate
    // return () => tesseractWorker.terminate()
  }, [tesseractLanguage])

  return {
    imgResults,
    loadingModel,
    processing,
    modelError,
    progress,
    extractTextFromImage,
  }
}
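A hypothetical usage sketch of the hook above; only the component that consumes the hook re-renders as the progress value updates:
import React from 'react'
import useTesseract from './useTesseract' // assumption: the hook file from above

function OcrPanel({ imageUrl }) {
  const { progress, processing, imgResults, extractTextFromImage } = useTesseract({})
  return (
    <div>
      <button onClick={() => extractTextFromImage(imageUrl)} disabled={processing}>
        Run OCR
      </button>
      <p>Progress: {Math.round(progress * 100)}%</p>
      <p>{imgResults.text}</p>
    </div>
  )
}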
I managed to implement and render the progress of the Tesseract worker by putting my application render inside a class component and using the setState method:
class App extends React.Component {
  constructor(props) {
    super(props)
    this.state = {
      file: null
    }
    this.handleChange = this.handleChange.bind(this)
  }

  setProgress(m) {
    if (m.progress !== 0 && m.progress !== 0.5 && m.progress !== 1) {
      var prog = "Progress: " + Math.round(m.progress * 100) + "%"
      this.setState({progress: prog})
    }
  }

  worker = createWorker({
    logger: m => this.setProgress(m),
  });

  doOCR = async () => {
    await this.worker.load();
    await this.worker.loadLanguage('eng');
    await this.worker.initialize('eng');
    const { data: { text } } = await this.worker.recognize(this.state.file);
    this.setState({text: extractTotal(text), progress: ""});
  };

  handleChange(event) {
    this.setState({text: placeholder});
    this.setState({
      file: URL.createObjectURL(event.target.files[0]),
    })
    this.doOCR()
  }

  setText(input) {
    if (!input) {
      return "Please select a receipt"
    } else {
      return input
    }
  }

  render() {
    console.log("Text: " + this.state.text)
    return (
      <div className="container">
        <p>{this.setText(this.state.text)}</p>
        <p>{this.state.progress}</p>
        <input type="file" onChange={this.handleChange}/>
        <img src={this.state.file} className='logo' alt=""/>
      </div>
    );
  }
}

reconnect web socket if it is closed

My WebSocket connection code:
public connect(): Subject<MessageEvent> {
  if (!this.subject) {
    this.subject = this.create(this.url);
  }
  this.ws.onerror = () => {
    this.close();
    let refresh = setInterval(() => {
      this.subject = null;
      this.connect();
      this.ws.onopen = () => {
        clearInterval(refresh)
      }
    }, 5000);
  }
  return this.subject;
}

private create(url: string) {
  this.ws = new WebSocket(url);
  const observable = Observable.create((obs: Subject<MessageEvent>) => {
    this.ws.onmessage = obs.next.bind(obs);
    this.ws.onerror = obs.error.bind(obs);
    this.ws.onclose = obs.complete.bind(obs);
    this.ws.onclose = function () {
      console.log("trying to reconnect");
      this.connect();
    }
    return this.ws.close.bind(this.ws);
  });
  const observer = {
    next: (data: any) => {
      if (this.ws.readyState === WebSocket.OPEN) {
        this.ws.send(JSON.stringify(data));
      }
    }
  };
  return Subject.create(observer, observable);
}
I want to reconnect the WebSocket if the connection closes. At the moment the handler gets triggered when I stop the WebSocket, but it is not connecting again; I see the error "this.connect is not a function". How do I work with recursive functions in Angular?
Don't use the function keyword to create your callback when using this inside of it unless you are aware of how it changes the this reference depending on the execution context; use an arrow function instead.
To make it reconnect, change this:
this.ws.onclose = function () {
  console.log("trying to reconnect");
  this.connect();
}
To this:
this.ws.onclose = () => {
  console.log("trying to reconnect");
  this.subject = null;
  this.connect();
}
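Applied to the create() method from the question, the relevant part would look roughly like this (a sketch that merges the two onclose assignments and keeps the rest of the original logic unchanged):
private create(url: string) {
  this.ws = new WebSocket(url);
  const observable = Observable.create((obs: Subject<MessageEvent>) => {
    this.ws.onmessage = obs.next.bind(obs);
    this.ws.onerror = obs.error.bind(obs);
    // An arrow function keeps `this` bound to the service instance,
    // so this.connect() exists when the socket closes.
    this.ws.onclose = () => {
      console.log("trying to reconnect");
      obs.complete();
      this.subject = null;
      this.connect();
    };
    return this.ws.close.bind(this.ws);
  });
  // observer and Subject.create(observer, observable) stay as in the question
  ...
}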

Rx.Observable.webSocket() immediately complete after reconnect?

Having a bit of trouble working with the Subject exposed by Rx.Observable.webSocket. While the WebSocket does become reconnected after complete, subsequent subscriptions to the Subject are immediately completed as well, instead of pushing the next messages that come over the socket.
I think I'm missing something fundamental about how this is supposed to work.
Here's a requirebin/paste that I hope illustrates a bit better what I mean, and the behavior I was expecting. Thinking it'll be something super simple I overlooked.
Requirebin
var Rx = require('rxjs')

var subject = Rx.Observable.webSocket('wss://echo.websocket.org')

subject.next(JSON.stringify('one'))

subject.subscribe(
  function (msg) {
    console.log('a', msg)
  },
  null,
  function () {
    console.log('a complete')
  }
)

setTimeout(function () {
  subject.complete()
}, 1000)

setTimeout(function () {
  subject.next(JSON.stringify('two'))
}, 3000)

setTimeout(function () {
  subject.next(JSON.stringify('three'))
  subject.subscribe(
    function (msg) {
      // Was hoping to get 'two' and 'three'
      console.log('b', msg)
    },
    null,
    function () {
      // Instead, we immediately get here.
      console.log('b complete')
    }
  )
}, 5000)
Another neat solution would be to use a wrapper over WebSocketSubject.
class RxWebsocketSubject<T> extends Subject<T> {
  private reconnectionObservable: Observable<number>;
  private wsSubjectConfig: WebSocketSubjectConfig;
  private socket: WebSocketSubject<any>;
  private connectionObserver: Observer<boolean>;
  public connectionStatus: Observable<boolean>;

  defaultResultSelector = (e: MessageEvent) => {
    return JSON.parse(e.data);
  }

  defaultSerializer = (data: any): string => {
    return JSON.stringify(data);
  }

  constructor(
    private url: string,
    private reconnectInterval: number = 5000,
    private reconnectAttempts: number = 10,
    private resultSelector?: (e: MessageEvent) => any,
    private serializer?: (data: any) => string,
  ) {
    super();
    this.connectionStatus = new Observable((observer) => {
      this.connectionObserver = observer;
    }).share().distinctUntilChanged();

    if (!resultSelector) {
      this.resultSelector = this.defaultResultSelector;
    }
    if (!this.serializer) {
      this.serializer = this.defaultSerializer;
    }

    this.wsSubjectConfig = {
      url: url,
      closeObserver: {
        next: (e: CloseEvent) => {
          this.socket = null;
          this.connectionObserver.next(false);
        }
      },
      openObserver: {
        next: (e: Event) => {
          this.connectionObserver.next(true);
        }
      }
    };

    this.connect();
    this.connectionStatus.subscribe((isConnected) => {
      if (!this.reconnectionObservable && typeof(isConnected) == "boolean" && !isConnected) {
        this.reconnect();
      }
    });
  }

  connect(): void {
    this.socket = new WebSocketSubject(this.wsSubjectConfig);
    this.socket.subscribe(
      (m) => {
        this.next(m);
      },
      (error: Event) => {
        if (!this.socket) {
          this.reconnect();
        }
      });
  }

  reconnect(): void {
    this.reconnectionObservable = Observable.interval(this.reconnectInterval)
      .takeWhile((v, index) => {
        return index < this.reconnectAttempts && !this.socket
      });
    this.reconnectionObservable.subscribe(
      () => {
        this.connect();
      },
      null,
      () => {
        this.reconnectionObservable = null;
        if (!this.socket) {
          this.complete();
          this.connectionObserver.complete();
        }
      });
  }

  send(data: any): void {
    this.socket.next(this.serializer(data));
  }
}
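A hypothetical usage sketch of the wrapper above (RxJS 5 style, matching the operators used in the class):
const socket = new RxWebsocketSubject<any>('wss://echo.websocket.org');

// Track connection state (true on open, false on close).
socket.connectionStatus.subscribe((connected: boolean) => {
  console.log('connected:', connected);
});

// Messages pushed by the socket are re-emitted through the Subject itself.
socket.subscribe(
  (message) => console.log('message:', message),
  (error) => console.error('socket error:', error)
);

socket.send({ type: 'ping' }); // serialized with JSON.stringify by default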
For more information, refer to the following article and source code:
Auto WebSocket reconnection with RxJS
GitHub - Full working rxjs websocket example
I ended up not using Rx.Observable.webSocket, instead opting for observable-socket and a bit of code to make reconnections once sockets are closed:
requirebin
const observableSocket = require('observable-socket')
const Rx = require('rxjs')
const EventEmitter = require('events')

function makeObservableLoop (socketEmitter, send, receive) {
  socketEmitter.once('open', function onSocketEmit (wSocket) {
    const oSocket = observableSocket(wSocket)
    const sendSubscription = send.subscribe(msg => oSocket.next(msg))
    oSocket.subscribe(
      function onNext (msg) {
        receive.next(msg)
      },
      function onError (err) {
        console.error(err)
        sendSubscription.unsubscribe()
        makeObservableLoop(socketEmitter, send, receive)
      },
      function onComplete () {
        sendSubscription.unsubscribe()
        makeObservableLoop(socketEmitter, send, receive)
      }
    )
  })
}

function makeSocketLoop (emitter) {
  const websocket = new WebSocket('wss://echo.websocket.org')

  function onOpen () {
    emitter.emit('open', websocket)
    setTimeout(function () {
      websocket.close()
    }, 5000)
  }

  function onClose () {
    makeSocketLoop(emitter)
  }

  websocket.onopen = onOpen
  websocket.onclose = onClose
}

function init (socketEmitter) {
  const _send = new Rx.Subject()
  const _receive = new Rx.Subject()
  makeObservableLoop(socketEmitter, _send, _receive)

  const send = msg => _send.next(JSON.stringify(msg))
  const receive = _receive.asObservable()

  return {
    send: send,
    read: receive,
  }
}

const emitter = new EventEmitter()
makeSocketLoop(emitter)
const theSubjectz = init(emitter)

setInterval(function () {
  theSubjectz.send('echo, you there?')
}, 1000)

theSubjectz.read.subscribe(function (el) {
  console.log(el)
})
