Given some arrays:
const recordings = {
foo: [{delay: 5, data: 'a'}, {delay: 2, data: 'b'}],
bar: [{delay: 3, data: 'x'}, {delay: 7, data: 'y'}, {delay: 4, data: 'z'}]
};
Is there a way that I can pass them to a playback method that will do something like the following?:
beginPlayback(timeline, fromIndex = 0) {
  // Track progress on the handle itself; a local variable copied into the
  // handle by value would never update after subscription.
  const handle = { postedIndex: fromIndex - 1, subscription: null };
  handle.subscription = from(timeline.slice(fromIndex)).pipe(
    concatMap((instant, i) => of({ i, instant }).pipe(delay(instant.delay))),
    tap(async ({ i, instant }) => {
      try {
        await post(instant.data);
        handle.postedIndex = fromIndex + i; // absolute index into the timeline
      } catch (err) {
        console.error(err);
        // Continue playback
      }
    })
  ).subscribe();
  return handle;
}
...and then allow me to manage concurrent playback with an external/public interface like this?:
const handles = {};
const paused = {};
onAddNewRecordingRequest(id, timeline) {
recordings[id] = timeline;
}
onBeginPlaybackRequest(id) {
handles[id] = beginPlayback(recordings[id]);
}
onPauseRequested(id) {
const handle = handles[id];
paused[id] = handle.postedIndex;
handle.subscription.unsubscribe(); // dispose of observable
}
onResumeRequested(id) {
  const alreadyThroughIndex = paused[id];
  // Resume with the next instant; alreadyThroughIndex was already posted.
  handles[id] = beginPlayback(recordings[id], alreadyThroughIndex + 1);
  delete paused[id];
}
onStopRequested(id) {
const handle = handles[id];
handle.subscription.unsubscribe(); // dispose of observable
}
Is there also a way to automatically remove the observable and the handle when the observable is completed?
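One way to get that cleanup automatically (a sketch only, assuming RxJS's finalize operator from 'rxjs/operators'; the onDone callback is a hypothetical addition, not something in the code above): let beginPlayback accept a completion callback, append finalize to the pipe, and have the caller drop the handle there.
beginPlayback(timeline, fromIndex = 0, onDone = () => {}) {
  const handle = { postedIndex: fromIndex - 1, subscription: null };
  handle.subscription = from(timeline.slice(fromIndex)).pipe(
    concatMap((instant, i) => of({ i, instant }).pipe(delay(instant.delay))),
    tap(async ({ i, instant }) => {
      try {
        await post(instant.data);
        handle.postedIndex = fromIndex + i;
      } catch (err) {
        console.error(err); // keep playing on errors
      }
    }),
    finalize(onDone) // fires on complete, on error, and on unsubscribe
  ).subscribe();
  return handle;
}
onBeginPlaybackRequest(id) {
  handles[id] = beginPlayback(recordings[id], 0, () => {
    delete handles[id]; // the handle removes itself once playback is over
  });
}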
I am trying to set up a WebSocket server with oak (not the native Deno one).
The following code is how I build the server:
import { Application, Router, Context, send } from "https://deno.land/x/oak@v10.6.0/mod.ts";
const runWS = async (ctx: Context, next: () => Promise<unknown>) => {
try{
const ws = await ctx.upgrade();
ws.onopen = () => {
chatConnection(ws);
};
ws.onclose = () => { console.log('Disconnected from the client!');};
}catch{await next();}
}
let sockets = new Map<string, WebSocket>();
const chatConnection = async (ws: WebSocket) => {
console.log('new websocket, ws: ',ws);
const uid = globalThis.crypto.randomUUID();
sockets.set(uid,ws);
console.log('socket: ',sockets);
for await (const ev of ws){
console.log('ev: ', ev);
}
}
export const wsRoutes = new Router()
.get('/ws', runWS);
But in the for await loop at the end, ws gives the error: Type 'WebSocket' must have a '[Symbol.asyncIterator]()' method that returns an async iterator. What's the deal with this and how do I fix it?
The error message is providing you with useful information: the WebSocket is not AsyncIterable, which means that it cannot be used with a for await...of loop.
Here is the type documentation for WebSocket in Deno. It is (for the most part) the same as the WHATWG standard WebSocket that is documented on MDN.
If your intention is to respond to incoming message events, you'll need to attach an event listener:
webSocket.addEventListener("message", (messageEvent) => {
// Do something in response to each message event
});
Additional:
Here's an observation based on the code you've shown, but not in response to your question:
It's probably more ergonomic to store the sockets as the keys of your map and the associated state data in the values (the inverse of what you've shown). Here's an example of why:
import {
Router,
type RouterMiddleware,
} from "https://deno.land/x/oak#v10.6.0/mod.ts";
// You seem to want to log data to the console.
// This function will help you easily log only certain properties of objects:
/**
* Functional implementation of the type utility
* [`Pick<Type, Keys>`](https://www.typescriptlang.org/docs/handbook/utility-types.html#picktype-keys)
*/
function pick<T, K extends keyof T>(
obj: T,
keys: readonly K[],
): Pick<T, K> {
const result = {} as Pick<T, K>;
for (const key of keys) result[key] = obj[key];
return result;
}
type SocketData = { id: string };
const socketMap = new Map<WebSocket, SocketData>();
// Do something when a connection is opened
function handleOpen(ev: Event, ws: WebSocket) {
const socketData: SocketData = { id: window.crypto.randomUUID() };
socketMap.set(ws, socketData);
console.log({
event: pick(ev, ["type"]),
socketData,
});
}
// Do something when an error occurs
function handleError(ev: Event, ws: WebSocket) {
const socketData = socketMap.get(ws);
console.log({
event: pick(ev, ["type"]),
socketData,
});
socketMap.delete(ws);
}
// Do something when a connection is closed
function handleClose(ev: CloseEvent, ws: WebSocket) {
ev.code; // number
ev.reason; // string
ev.wasClean; // boolean
const socketData = socketMap.get(ws);
console.log({
event: pick(ev, ["type", "code", "reason", "wasClean"]),
socketData,
});
socketMap.delete(ws);
}
// Do something when a message is received
// Change `unknown` to the type of message payloads used in your application.
// (for example, JSON messages are `string`)
function handleMessage(ev: MessageEvent<unknown>, ws: WebSocket) {
ev.data; // unknown
ev.lastEventId; // string
ev.ports; // readonly MessagePort[]
const socketData = socketMap.get(ws);
if (socketData) {
socketData.id; // string
}
console.log({
event: pick(ev, ["type", "data", "lastEventId", "ports"]),
socketData,
});
}
const webSocketMiddleware: RouterMiddleware<"/ws"> = async (ctx, next) => {
const ws = ctx.upgrade();
ws.addEventListener("open", (ev) => handleOpen(ev, ws));
ws.addEventListener("error", (ev) => handleError(ev, ws));
ws.addEventListener("close", (ev) => handleClose(ev, ws));
ws.addEventListener("message", (ev) => handleMessage(ev, ws));
await next();
};
export const router = new Router();
router.get("/ws", webSocketMiddleware);
This is my updated code. It avoids the problem entirely.
import { Application, Router, Context, send } from "https://deno.land/x/oak@v10.6.0/mod.ts";
interface BroadcastObj{
name: string,
mssg: string
}
const runWS = async (ctx: Context, next: () => Promise<unknown>) => {
if(!ctx.isUpgradable){
ctx.throw(501);
}
const uid = globalThis.crypto.randomUUID();
try{
const ws = await ctx.upgrade();
ws.onopen = () => {
  chatConnection(ws, uid);
};
ws.onmessage = (m) => {
  if (typeof m.data === 'string') {
    chatMessage(JSON.parse(m.data));
  }
};
ws.onerror = (e) => {console.log('error occured: ', e);};
ws.onclose = () => { chatDisconnect(uid);};
}catch{await next();}
}
let sockets = new Map<string, WebSocket>();
const chatConnection = (ws: WebSocket, uid: string) => {
  sockets.set(uid, ws);
}
const chatMessage = (msg: BroadcastObj) => {
  sockets.forEach((ws: WebSocket) => {
    ws.send(JSON.stringify(msg));
  });
}
const chatDisconnect = (uid: string) => {
  sockets.delete(uid);
}
export const wsRoutes = new Router()
.get('/ws', runWS);
I'm using the Google Cloud Storage Node.js client library to list GCS bucket paths.
Here's the code of the Firebase Function:
import * as functions from 'firebase-functions';
import { Storage } from '@google-cloud/storage';
import { globVars } from '../admin/admin';
const projectId = process.env.GCLOUD_PROJECT;
// shared global variables setup
const { keyFilename } = globVars;
// Storage set up
const storage = new Storage({
projectId,
keyFilename,
});
export const gcsListPath = functions
.region('europe-west2')
.runWith({ timeoutSeconds: 540, memory: '256MB' })
.https.onCall(async (data, context) => {
if (context.auth?.token.email_verified) {
const { bucketName, prefix, pathList = false, fileList = false } = data;
let list;
const options = {
autoPaginate: false,
delimiter: '',
prefix,
};
if (pathList) {
options.delimiter = '/';
let test: any[] = [];
const callback = (_err: any, _files: any, nextQuery: any, apiResponse: any) => {
test = test.concat(apiResponse.prefixes);
console.log('test : ', test);
console.log('nextQuery : ', nextQuery);
if (nextQuery) {
storage.bucket(bucketName).getFiles(nextQuery, callback);
} else {
// prefixes = The finished array of prefixes.
list = test;
}
}
storage.bucket(bucketName).getFiles(options, callback);
}
if (fileList) {
const [files] = await storage
.bucket(bucketName)
.getFiles(options);
list = files.map((file) => file.name);
}
return { list }; // returns null because this runs before the callback functions finish
} else {
return {
error: { message: 'Bad Request', status: 'INVALID_ARGUMENT' },
};
}
});
My problem is that my Firebase Function returns the list (null) before all the callback functions finish executing.
Could someone point out what needs to be changed or added so the function waits for all the callbacks to finish? I've tried adding async/await but can't seem to get it right.
The reason for your issue is that you use a callback, which is never awaited in the code. I would recommend turning the callback code into a promise, something like this:
import * as functions from "firebase-functions";
import { Storage } from "#google-cloud/storage";
import { globVars } from "../admin/admin";
const projectId = process.env.GCLOUD_PROJECT;
// shared global variables setup
const { keyFilename } = globVars;
// Storage set up
const storage = new Storage({
projectId,
keyFilename,
});
const getList = (bucketName, options) => {
return new Promise((resolve, reject) => {
let list;
let test: any[] = [];
const callback = (
_err: any,
_files: any,
nextQuery: any,
apiResponse: any
) => {
test = test.concat(apiResponse.prefixes);
console.log("test : ", test);
console.log("nextQuery : ", nextQuery);
if (nextQuery) {
  storage.bucket(bucketName).getFiles(nextQuery, callback);
} else {
  // prefixes = the finished array of prefixes
  list = test;
  resolve(list);
}
};
try {
storage.bucket(bucketName).getFiles(options, callback);
} catch (error) {
reject(error);
}
});
};
export const gcsListPath = functions
.region("europe-west2")
.runWith({ timeoutSeconds: 540, memory: "256MB" })
.https.onCall(async (data, context) => {
if (context.auth?.token.email_verified) {
const { bucketName, prefix, pathList = false, fileList = false } = data;
let list;
const options = {
autoPaginate: false,
delimiter: "",
prefix,
};
if (pathList) {
options.delimiter = "/";
list = await getList(bucketName, options);
}
if (fileList) {
const [files] = await storage.bucket(bucketName).getFiles(options);
list = files.map((file) => file.name);
}
return { list };
} else {
return {
error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
};
}
});
I'm not sure if the part with fileList will work as expected. It looks like the API doesn't support await but only callbacks. Here is the same approach again, with explicit typings:
import * as functions from "firebase-functions";
import { GetFilesOptions, Storage } from "@google-cloud/storage";
import { globVars } from "../admin/admin";
const projectId = process.env.GCLOUD_PROJECT;
// shared global variables setup
const { keyFilename } = globVars;
// Storage set up
const storage = new Storage({
projectId,
keyFilename,
});
const getList = (bucketName: string, options: GetFilesOptions) => {
return new Promise((resolve, reject) => {
// let test: any[] = [];
let list: any[] = [];
const callback = (
_err: any,
_files: any,
nextQuery: any,
apiResponse: any
) => {
list = list.concat(apiResponse.prefixes);
console.log("list : ", list);
console.log("nextQuery : ", nextQuery);
if (nextQuery) {
storage.bucket(bucketName).getFiles(nextQuery, callback);
} else {
// prefixes = The finished array of prefixes.
resolve(list);
}
};
try {
storage.bucket(bucketName).getFiles(options, callback);
} catch (error) {
reject(error);
}
});
};
export const gcsListPath = functions
.region("europe-west2")
.runWith({ timeoutSeconds: 540, memory: "256MB" })
.https.onCall(async (data, context) => {
if (context.auth?.token.email_verified) {
const { bucketName, prefix, pathList = false, fileList = false } = data;
let list;
const options = {
autoPaginate: false,
delimiter: "",
prefix,
};
if (pathList) {
options.delimiter = "/";
list = await getList(bucketName, options);
}
if (fileList) {
const [files] = await storage.bucket(bucketName).getFiles(options);
list = files.map((file) => file.name);
}
return { list };
} else {
return {
error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
};
}
});
I know this is probably a long shot, but I was hoping someone could point me in the right direction here.
I've made a simple peer-to-peer video connection and wrapped it inside a function so I can call it on a button click, but it's not working.
When it's not wrapped inside the "activateVideoStream" function, but just runs on load, it works fine. I have a feeling the issue is around the async function, but I can't wrap my head around it.
Here is the code:
let isAlreadyCalling = false;
const remoteVideo = document.getElementById("remote-video");
const {
RTCPeerConnection,
RTCSessionDescription
} = window;
let peerConnection;
function activateVideoStream() {
const configuration = {
"iceServers": [{
"urls": "stun:stun.l.google.com:19302"
}]
};
console.log("Activate Video Stream");
peerConnection = new RTCPeerConnection(configuration);
navigator.getUserMedia({
video: true,
audio: true
},
stream => {
const localVideo = document.getElementById("local-video");
if (localVideo) {
localVideo.srcObject = stream;
}
stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));
},
error => {
console.warn(error.message);
}
);
peerConnection.ontrack = function ({
streams: [stream]
}) {
if (remoteVideo) {
remoteVideo.srcObject = stream;
}
};
}
async function callUser(socketId) {
console.log("Call User");
remoteVideo.style.display = "block";
const offer = await peerConnection.createOffer();
await peerConnection.setLocalDescription(new RTCSessionDescription(offer));
socket.emit("callUser", {
offer,
to: socketId
});
}
socket.on("callMade", async data => {
console.log("Call made");
await peerConnection.setRemoteDescription(
new RTCSessionDescription(data.offer)
);
const answer = await peerConnection.createAnswer();
await peerConnection.setLocalDescription(new RTCSessionDescription(answer));
remoteVideo.style.display = "block";
socket.emit("makeAnswer", {
answer,
to: data.socket
});
});
socket.on("answerMade", async data => {
console.log("Answer made");
await peerConnection.setRemoteDescription(
new RTCSessionDescription(data.answer)
);
if (!isAlreadyCalling) {
callUser(data.socket);
isAlreadyCalling = true;
}
});
I've noticed that "peerConnection.connectionState" inside "callUser" is being set to "new", but without the function wrapped around, it's set to "complete", so that's probaly the issue.
I define an Observable like this:
const obs$ = Observable.create(...)
.publishReplay(1)
.refCount();
So that it puts a ReplaySubject(1) between my source Observable and all observers.
Since ReplaySubject has in its state the number of observers (via its observers array property), how is it possible to access the ReplaySubject from obs$?
I actually only need to know if obs$ has any observers or not. RxJS4 had a hasObservers() method on Subject, but it got removed in RxJS5. How can I achieve this with RxJS5?
Not sure about your usage, but for my needs I created a custom operator that lets me transparently perform side effects (similar to tap) based on the state of the refCount. It does a pass-through subscription and duck-punches the subscribe/unsubscribe. The callback receives the current and previous refCount, so you can tell both the state and the direction of the change. I like using an operator for this since I can insert it at any point in my stream. If you simply want a binary output for whether there are any subscriptions or not, it could easily be modified for that.
const { Observable, interval } = rxjs;
const { publishReplay, refCount } = rxjs.operators;
const tapRefCount = (onChange) => (source) => {
let refCount = 0;
// mute the operator if it has nothing to do
if (typeof onChange !== 'function') {
return source;
}
// mute errors from side-effects
const safeOnChange = (refCount, prevRefCount) => {
try {
onChange(refCount, prevRefCount);
} catch (e) {
}
};
// spy on subscribe
return Observable.create((observer) => {
const subscription = source.subscribe(observer);
const prevRefCount = refCount;
refCount++;
safeOnChange(refCount, prevRefCount);
// spy on unsubscribe
return () => {
subscription.unsubscribe();
const prevRefCount = refCount;
refCount--;
safeOnChange(refCount, prevRefCount);
};
});
};
const source = interval(1000).pipe(
publishReplay(1),
refCount(),
tapRefCount((refCount, prevRefCount) => { console.log('refCount', refCount, prevRefCount > refCount ? 'down': 'up'); })
);
const firstSub = source.subscribe((x) => { console.log('first', x); });
let secondSub;
setTimeout(() => {
secondSub = source.subscribe((x) => { console.log('second', x); });
}, 1500);
setTimeout(() => {
firstSub.unsubscribe();
}, 4500);
setTimeout(() => {
secondSub.unsubscribe();
}, 5500);
<script src="https://unpkg.com/rxjs#rc/bundles/rxjs.umd.min.js"></script>
The typescript version:
import { Observable } from 'rxjs/Observable';
import { Observer } from 'rxjs/Observer';
export const tapRefCount = (
onChange: (refCount: number, prevRefCount: number) => void
) => <T>(source: Observable<T>): Observable<T> => {
let refCount = 0;
// mute the operator if it has nothing to do
if (typeof onChange !== 'function') {
return source;
}
// mute errors from side-effects
const safeOnChange = (refCount: number, prevRefCount: number) => {
try {
onChange(refCount, prevRefCount);
} catch (e) {
}
};
// spy on subscribe
return Observable.create((observer: Observer<T>) => {
const subscription = source.subscribe(observer);
const prevRefCount = refCount;
refCount++;
safeOnChange(refCount, prevRefCount);
// spy on unsubscribe
return () => {
subscription.unsubscribe();
const prevRefCount = refCount;
refCount--;
safeOnChange(refCount, prevRefCount);
};
}) as Observable<T>;
};
The Subject class has a public property called observers (see https://github.com/ReactiveX/rxjs/blob/5.5.10/src/Subject.ts#L28), so you can just use:
const s = new Subject();
...
if (s.observers.length > 0) {
// whatever
}
Be aware that refCount returns an Observable so you won't be able to do what I mentioned above. However, you can provide your own Subject instance to publishReplay as the third argument and use s.observers on that, see http://reactivex.io/rxjs/class/es6/Observable.js~Observable.html#instance-method-publishReplay
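For completeness, a sketch of that idea using multicast with an explicit ReplaySubject instead of publishReplay (a swapped-in alternative on my part, with RxJS 6-style imports; multicast accepts a Subject directly), which keeps the subject, and with it its observers array, in your hands:
import { ReplaySubject, interval } from 'rxjs';
import { multicast, refCount } from 'rxjs/operators';

// The subject does the replay(1) buffering and stays in scope for inspection.
const subject = new ReplaySubject<number>(1);

const obs$ = interval(1000).pipe(
  multicast(subject),
  refCount(),
);

// Later: check whether anything is currently subscribed to obs$.
const hasObservers = subject.observers.length > 0;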
I have written integration tests for graphql-js subscriptions, which are showing weird behavior.
My graphql-js subscription works perfectly in GraphiQL, but when the same subscription is called from the unit test, it fails.
The graphql-js object, with resolve function and subscribe function:
return {
type: outputType,
args: {
input: {type: new GraphQLNonNull(inputType)},
},
resolve(payload, args, context, info) {
const clientSubscriptionId = (payload) ? payload.subscriptionId : null;
const object = (payload) ? payload.object : null;
var where = null;
var type = null;
var target = null;
if (object) {
where = (payload) ? payload.object.where : null;
type = (payload) ? payload.object.type : null;
target = (payload) ? payload.object.target : null;
}
return Promise.resolve(subscribeAndGetPayload(payload, args, context, info))
.then(payload => ({
clientSubscriptionId, where, type, target, object: payload.data,
}));
},
subscribe: withFilter(
() => pubSub.asyncIterator(modelName),
(payload, variables, context, info) => {
const subscriptionPayload = {
clientSubscriptionId: variables.input.clientSubscriptionId,
remove: variables.input.remove,
create: variables.input.create,
update: variables.input.update,
opts: variables.input.options,
};
subscriptionPayload.model = model;
try {
pubSub.subscribe(info.fieldName, null, subscriptionPayload);
} catch (ex) {
console.log(ex);
}
return true;
}
),
};
Subscription query
subscription {
Customer(input: {create: true, clientSubscriptionId: 112}) {
customer {
id
name
age
}
}
}
Mutation query
mutation {
Customer {
CustomerCreate (input:{data:{name:"Atif 50", age:50}}) {
obj {
id
name
}
}
}
}
Integration Test
'use strict';
const ws = require('ws');
const { SubscriptionClient } = require('subscriptions-transport-ws');
const { ApolloClient } = require('apollo-client');
const { HttpLink } = require('apollo-link-http');
const { InMemoryCache } = require('apollo-cache-inmemory');
const Promise = require('bluebird');
const expect = require('chai').expect;
const chai = require('chai').use(require('chai-http'));
const server = require('../server/server');
const gql = require('graphql-tag');
let apollo;
let networkInterface;
const GRAPHQL_ENDPOINT = 'ws://localhost:5000/subscriptions';
describe('Subscription', () => {
before(async () => {
networkInterface = new SubscriptionClient(
GRAPHQL_ENDPOINT, { reconnect: true }, ws);
apollo = new ApolloClient({
networkInterface ,
link: new HttpLink({ uri: 'http://localhost:3000/graphql' }),
cache: new InMemoryCache()
});
});
after(done => {
networkInterface.close() ;
});
it('subscription', async () => {
const client = () => apollo;
// SUBSCRIBE and make a promise
const subscriptionPromise = new Promise((resolve, reject) => {
client().subscribe({
query: gql`
subscription {
Customer(input: {create: true,
clientSubscriptionId: 112,
options: {where: {age: 50}}}) {
customer {
name
}
}
}
`
}).subscribe({
next: resolve,
error: reject
});
});
let execGraphQL; // helper that executes a GraphQL query against the server (definition omitted here)
// MUTATE
await execGraphQL(
`mutation {
Customer {
CustomerCreate (input:{data:{name:"Atif 21", age:50}}) {
obj {
id
name
}
}
}
}`
);
// ASSERT SUBSCRIPTION RECEIVED EVENT
expect(await subscriptionPromise).to.deep.equal({});
});
});
Issue Here
When the test is run, the payload in the resolve function contains global data, whereas it should contain the subscription payload, so the code breaks.