When I uncomment the axios call, the ret variable returns {}, but when I comment out the axios call, it returns a filled object with the certificate information. I just don't know why the axios call interferes with the https get.
import { HttpService } from '@nestjs/axios';
import { Injectable } from '@nestjs/common';
import * as https from 'https';
import { DetailedPeerCertificate, TLSSocket } from 'tls';

@Injectable()
export class CertificateValidationService {
  constructor(private readonly httpService: HttpService) {}

  certificateInfoCallback(domain: string): Promise<DetailedPeerCertificate> {
    return new Promise((resolve, reject) => {
      const options = {
        hostname: domain,
        port: 443,
        path: '/',
        method: 'GET',
      };
      const req = https.get(options, (res) => {
        resolve((res.socket as TLSSocket).getPeerCertificate(true));
      });
      // Reject the promise if the request itself fails, so the caller's
      // try/catch can report "domain without certificate".
      req.on('error', reject);
    });
  }

  async getValidationDateFromCertificate(domain: string) {
    try {
      //const test = await this.httpService.axiosRef.get(`https://${domain}`);
      const ret = await this.certificateInfoCallback(domain);
      console.log(ret);
      const validFrom = (typeof ret === 'object' && 'valid_from' in ret) ? ret['valid_from'] : '';
      const validTo = (typeof ret === 'object' && 'valid_to' in ret) ? ret['valid_to'] : '';
      return { validFrom, validTo, certificate: true };
    } catch (error) {
      return { info: 'domain without certificate', certificate: false };
    }
  }
}
The return with the axios call: [screenshot omitted]
The return without the axios call: [screenshot omitted]
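One possible lead to investigate (a hedged guess, not something confirmed in the post): Node's default https global agent caches TLS sessions, and getPeerCertificate() can come back empty when a cached session is resumed, so the preceding axios request to the same domain may be what leaves the later https.get with {}. A minimal sketch of the callback using a dedicated agent with session caching disabled, to rule that out:

certificateInfoCallback(domain: string): Promise<DetailedPeerCertificate> {
  return new Promise((resolve, reject) => {
    // Dedicated agent so this probe never reuses a TLS session cached by an
    // earlier axios request to the same host (maxCachedSessions: 0 disables
    // the agent's TLS session cache).
    const agent = new https.Agent({ keepAlive: false, maxCachedSessions: 0 });
    const req = https.get(
      { hostname: domain, port: 443, path: '/', method: 'GET', agent },
      (res) => resolve((res.socket as TLSSocket).getPeerCertificate(true)),
    );
    req.on('error', reject);
  });
}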
I'm pretty new to GraphQL and Apollo, and I'm working on a project where I need to be able to use context to get a variable for my query. The issue I'm having is that my context.user is coming back null because my req.headers.authorization is undefined. I'm unsure why this is happening, since in my frontend:
const authLink = setContext(async (_, { headers }) => {
const token = await AsyncStorage.getItem('token');
try {
if (token !== null) {
return {
headers: {
...headers,
authorization: `Bearer ${token}` || null,
}
}
}
}
catch (error) {
throw error;
}
});
My token is not null, and when I tested const auth = `Bearer ${token}` || null and ran console.log(auth) after if (token !== null) {, it came back with Bearer plus my token value. Does anyone know why this is happening? I would really appreciate any help or advice. Thank you!
rest of frontend client.js
import { ApolloClient, split, createHttpLink, HttpLink, InMemoryCache } from '@apollo/client';
import { getMainDefinition } from '@apollo/client/utilities';
import { setContext } from "@apollo/client/link/context";
import AsyncStorage from '@react-native-async-storage/async-storage';
import { GraphQLWsLink } from "@apollo/client/link/subscriptions";
import { createClient } from "graphql-ws";
const wslink = new GraphQLWsLink(
createClient({
url: "ws://localhost:4000/subscriptions",
/* connectionParams: {
authToken: user.authToken,
},*/
}),
);
const httpLink = new HttpLink({
uri: 'http://localhost:4000/graphql',
});
const authLink = setContext(async (_, { headers }) => {
const token = await AsyncStorage.getItem('token');
try {
if (token !== null) {
return {
headers: {
...headers,
authorization: `Bearer ${token}` || null,
}
}
}
}
catch (error) {
throw error;
}
});
const splitLink = split(
({ query }) => {
const definition = getMainDefinition(query);
return (
definition.kind === 'OperationDefinition' &&
definition.operation === 'subscription'
);
},
wslink,
authLink.concat(httpLink)
);
export const client = new ApolloClient({
link: splitLink,
cache: new InMemoryCache(),
});
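One detail worth noting in authLink (an observation, not part of the original question): `Bearer ${token}` || null can never fall back to null, because a non-empty template literal is always truthy, and when the token is null the callback returns undefined instead of a headers object. A minimal sketch of a variant that always returns the existing headers and only attaches authorization when a token exists:

const authLink = setContext(async (_, { headers }) => {
  // Same AsyncStorage key as in the original code.
  const token = await AsyncStorage.getItem('token');
  return {
    headers: {
      ...headers,
      // Only attach the header when a token actually exists.
      ...(token ? { authorization: `Bearer ${token}` } : {}),
    },
  };
});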
backend index.js
import express from 'express';
import mongoose from 'mongoose';
import WebSocket from 'ws';
import { createServer } from 'http';
import { ApolloServer } from 'apollo-server-express';
import { makeExecutableSchema } from '@graphql-tools/schema';
import {
ApolloServerPluginDrainHttpServer,
ApolloServerPluginLandingPageLocalDefault,
} from "apollo-server-core";
import { useServer } from 'graphql-ws/lib/use/ws';
import constants from './config/constants.js';
import typeDefs from './graphql/schema.js';
import resolvers from './graphql/resolvers/index.js';
import { decodeToken } from './services/auth.js';
const app = express();
const httpServer = createServer(app);
const schema = makeExecutableSchema({ typeDefs, resolvers });
const server = new ApolloServer({
schema,
context: ({ req }) => ({
user: req.user
}),
csrfPrevention: true,
cache: "bounded",
plugins: [
ApolloServerPluginDrainHttpServer({ httpServer }),
{
async serverWillStart() {
return {
async drainServer() {
await serverCleanup.dispose();
},
};
},
},
ApolloServerPluginLandingPageLocalDefault({ embed: true }),
],
});
const wsServer = new WebSocket.Server({
server: httpServer,
path: '/graphql',
});
const serverCleanup = useServer({ schema }, wsServer);
async function auth(req, res, next) {
try {
const token = req.headers.authorization;
//token is undefined Why???
if (token != null) {
const user = await decodeToken(token);
req.user = user; // eslint-disable-line
}
else {
req.user = null; // eslint-disable-line
}
next();
} catch (error) {
throw error;
}
}
app.use(auth);
await server.start();
server.applyMiddleware({ app });
mongoose
.connect(process.env.MONGODB_URL || 'mongodb://localhost/AMO', {
useNewUrlParser: true,
useUnifiedTopology: true
})
.then(() => {
console.log("MongoDB Connected");
return httpServer.listen(4000, () => {
console.log(`Server ready at http://localhost:4000/graphql`);
});
})
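A small debugging step that might help narrow this down (a suggestion, not part of the original post): log the raw headers inside the auth middleware to see whether the authorization header reaches Express at all. It is also worth remembering that in the splitLink above, subscription operations are routed to wslink, which is not concatenated with authLink, so they never carry this header.

async function auth(req, res, next) {
  // Debugging sketch: dump the incoming headers once to confirm whether
  // "authorization" ever arrives at the server.
  console.log('incoming headers:', JSON.stringify(req.headers));
  const token = req.headers.authorization;
  req.user = token ? await decodeToken(token) : null;
  next();
}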
I've been working on a way to set up authentication and authorization for my Next.js app. So far it has been pretty easy, but I've hit a wall.
I have a value that lives in and is watched on a context, and I have a HOC that my Next.js app needs in order to use hooks with GraphQL. The issue is that I don't think I can call the context and use its value from inside the HOC, since that is simply not allowed.
Is there a way I can dynamically change the value on the HOC so that when the user logs in, I can then update the HOC to have the proper access token?
Some context: the user is anonymous at first; whenever they log in, I get an auth state change from Firebase from which I can extract the access token and add it to any future requests. But the point of the HOC is to provide Next.js with full GraphQL capabilities, and the problem is that I need that HOC to listen for changes on a context state.
This is the Connection Builder:
import {
  ApolloClient,
  InMemoryCache,
  HttpLink,
  NormalizedCacheObject,
} from "@apollo/client";
import { WebSocketLink } from "@apollo/client/link/ws";
import { SubscriptionClient } from "subscriptions-transport-ws";
const connectionString = process.env.HASURA_GRAPHQL_API_URL || "";
const createHttpLink = (authState: string, authToken: string) => {
const isIn = authState === "in";
const httpLink = new HttpLink({
uri: `https${connectionString}`,
headers: {
// "X-hasura-admin-secret": `https${connectionString}`,
lang: "en",
"content-type": "application/json",
Authorization: isIn && `Bearer ${authToken}`,
},
});
return httpLink;
};
const createWSLink = (authState: string, authToken: string) => {
const isIn = authState === "in";
return new WebSocketLink(
new SubscriptionClient(`wss${connectionString}`, {
lazy: true,
reconnect: true,
connectionParams: async () => {
return {
headers: {
// "X-hasura-admin-secret": process.env.HASURA_GRAPHQL_ADMIN_SECRET,
lang: "en",
"content-type": "application/json",
Authorization: isIn && `Bearer ${authToken}`,
},
};
},
})
);
};
export default function createApolloClient(
initialState: NormalizedCacheObject,
authState: string,
authToken: string
) {
const ssrMode = typeof window === "undefined";
let link;
if (ssrMode) {
link = createHttpLink(authState, authToken);
} else {
link = createWSLink(authState, authToken);
}
return new ApolloClient({
ssrMode,
link,
cache: new InMemoryCache().restore(initialState),
});
}
This is the context:
import { useState, useEffect, createContext, useContext } from "react";
import { getDatabase, ref, set, onValue } from "firebase/database";
import { useFirebase } from "./use-firebase";
import { useGetUser } from "../hooks/use-get-user";
import { getUser_Users_by_pk } from "../types/generated/getUser";
import { getApp } from "firebase/app";
const FirebaseAuthContext = createContext<FirebaseAuthContextProps>({
authUser: null,
authState: "",
authToken: null,
currentUser: undefined,
loading: true,
login: () => Promise.resolve(undefined),
registerUser: () => Promise.resolve(undefined),
loginWithGoogle: () => Promise.resolve(undefined),
loginWithMicrosoft: () => Promise.resolve(undefined),
});
export const FirebaseAuthContextProvider: React.FC = ({ children }) => {
const [loading, setLoading] = useState<boolean>(true);
const [authUser, setAuthUser] = useState<User | null>(null);
const { data } = useGetUser(authUser?.uid || "");
const [authState, setAuthState] = useState("loading");
const [authToken, setAuthToken] = useState<string | null>(null);
const currentUser = data?.Users_by_pk;
// ...
const authStateChanged = async (user: User | null) => {
if (!user) {
setAuthUser(null);
setLoading(false);
setAuthState("out");
return;
}
const token = await user.getIdToken();
const idTokenResult = await user.getIdTokenResult();
const hasuraClaim = idTokenResult.claims["https://hasura.io/jwt/claims"];
if (hasuraClaim) {
setAuthState("in");
setAuthToken(token);
setAuthUser(user);
} else {
// Check if refresh is required.
const metadataRef = ref(
getDatabase(getApp()),
"metadata/" + user.uid + "/refreshTime"
);
onValue(metadataRef, async (data) => {
if (!data.exists()) return;
const token = await user.getIdToken(true);
setAuthState("in");
setAuthUser(user);
setAuthToken(token);
});
}
};
useEffect(() => {
const unsubscribe = getAuth().onAuthStateChanged(authStateChanged);
return () => unsubscribe();
}, []);
const contextValue: FirebaseAuthContextProps = {
authUser,
authState,
authToken,
currentUser,
loading,
login,
registerUser,
loginWithGoogle,
loginWithMicrosoft,
};
return (
<FirebaseAuthContext.Provider value={contextValue}>
{children}
</FirebaseAuthContext.Provider>
);
};
export const useFirebaseAuth = () =>
useContext<FirebaseAuthContextProps>(FirebaseAuthContext);
This is the HOC:
export const withApollo =
({ ssr = true } = {}) =>
(PageComponent: NextComponentType<NextPageContext, any, {}>) => {
const WithApollo = ({
apolloClient,
apolloState,
...pageProps
}: {
apolloClient: ApolloClient<NormalizedCacheObject>;
apolloState: NormalizedCacheObject;
}) => {
let client;
if (apolloClient) {
// Happens on: getDataFromTree & next.js ssr
client = apolloClient;
} else {
// Happens on: next.js csr
// client = initApolloClient(apolloState, undefined);
client = initApolloClient(apolloState);
}
return (
<ApolloProvider client={client}>
<PageComponent {...pageProps} />
</ApolloProvider>
);
};
const initApolloClient = (initialState: NormalizedCacheObject) => {
// Make sure to create a new client for every server-side request so that data
// isn't shared between connections (which would be bad)
if (typeof window === "undefined") {
return createApolloClient(initialState, "", "");
}
// Reuse client on the client-side
if (!globalApolloClient) {
globalApolloClient = createApolloClient(initialState, "", "");
}
return globalApolloClient;
};
I fixed it by using this whenever I have an update on the token:
import { setContext } from "#apollo/client/link/context";
const authStateChanged = async (user: User | null) => {
if (!user) {
setAuthUser(null);
setLoading(false);
setAuthState("out");
return;
}
setAuthUser(user);
const token = await user.getIdToken();
const idTokenResult = await user.getIdTokenResult();
const hasuraClaim = idTokenResult.claims["hasura"];
if (hasuraClaim) {
setAuthState("in");
setAuthToken(token);
// THIS IS THE FIX
setContext(() => ({
headers: { Authorization: `Bearer ${token}` },
}));
} else {
// Check if refresh is required.
const metadataRef = ref(
getDatabase(getApp()),
"metadata/" + user.uid + "/refreshTime"
);
onValue(metadataRef, async (data) => {
if (!data.exists()) return;
const token = await user.getIdToken(true);
setAuthState("in");
setAuthToken(token);
// THIS IS THE FIX
setContext(() => ({
headers: { Authorization: `Bearer ${token}` },
}));
});
}
};
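One caveat about the fix above (an observation, not part of the original answer): setContext from @apollo/client/link/context returns an ApolloLink, so calling it on its own only creates that link; it normally has to be composed into the client's link chain for the header to actually be applied. A minimal sketch of how such an auth link could be wired into the HTTP link from the connection builder, assuming the token is kept in a module-level variable that the Firebase authStateChanged handler would update (in the original post it lives in FirebaseAuthContext state):

import { setContext } from "@apollo/client/link/context";
import { ApolloClient, HttpLink, InMemoryCache, from } from "@apollo/client";

// Hedged sketch: module-level token updated from the auth state handler.
let currentToken: string | null = null;
export const setCurrentToken = (token: string | null) => { currentToken = token; };

const httpLink = new HttpLink({
  uri: `https${process.env.HASURA_GRAPHQL_API_URL || ""}`,
});

const authLink = setContext((_, { headers }) => ({
  headers: {
    ...headers,
    ...(currentToken ? { Authorization: `Bearer ${currentToken}` } : {}),
  },
}));

// setContext only has an effect once its link is part of the client's chain.
export const client = new ApolloClient({
  link: from([authLink, httpLink]),
  cache: new InMemoryCache(),
});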
I have built a test app using NestJS + Sequelize ORM + a Docker database (local as of now). As per the documentation, I am using the umzug library and an AWS Lambda SAM template, and I trigger the Lambda handler. Below is the code for it. Connection pooling is implemented to reuse the existing Sequelize connection. Below is the lambdaEntry.ts file where I trigger the umzug.up() function. It is triggering, but it is not migrating the files.
When run from the command prompt with node migrate up, it works correctly. I am testing it using the sam invoke command.
require('ts-node/register');
import { Server } from 'http';
import { NestFactory } from '@nestjs/core';
import { Context } from 'aws-lambda';
import * as serverlessExpress from 'aws-serverless-express';
import * as express from 'express';
import { ExpressAdapter } from '@nestjs/platform-express';
import { eventContext } from 'aws-serverless-express/middleware';
import { AppModule } from './app.module';
import sharedBootstrap from './sharedBootstrap';
const { Sequelize } = require('sequelize');
const { Umzug, SequelizeStorage } = require('umzug');
import configuration from '.././config/config';
const fs = require('fs');
let lambdaProxy: Server;
let sequelize = null;
async function bootstrap() {
const expressServer = express();
const nestApp = await NestFactory.create(
AppModule,
new ExpressAdapter(expressServer),
);
nestApp.use(eventContext());
sharedBootstrap(nestApp);
await nestApp.init();
return serverlessExpress.createServer(expressServer);
}
export const handler = (event: any, context: Context) => {
if (!lambdaProxy) {
bootstrap().then((server) => {
lambdaProxy = server;
serverlessExpress.proxy(lambdaProxy, event, context);
(async () => {
if (!sequelize) {
console.log('New connection::');
sequelize = await loadSequelize();
} else {
sequelize.connectionManager.initPools();
if (sequelize.connectionManager.hasOwnProperty('getConnection')) {
delete sequelize.connectionManager.getConnection;
}
}
try {
console.log('MIGRATOR::');
const umzug = new Umzug({
migrations: { glob: 'src/migrations/*.ts' },
context: sequelize.getQueryInterface(),
storage: new SequelizeStorage({ sequelize }),
logger: console,
});
await umzug
.pending()
.then((migrations: any) => {
console.log('pending ? : ', JSON.stringify(migrations));
//test for file exists.
for (const migration of migrations) {
try {
if (fs.existsSync(migration.path)) {
console.log('file exists');
}
} catch (err) {
console.log('file does not exists');
console.error(err);
}
}
async () => {
//BELOW FUNCTION IS TRIGGERING BUT NOT GETTING MIGRATION LOADED.
await umzug.up();
};
})
.catch((e: any) => console.log('error2 ? ', e));
} finally {
await sequelize.connectionManager.close();
}
})();
});
} else {
serverlessExpress.proxy(lambdaProxy, event, context);
}
};
async function loadSequelize() {
const sequelize = new Sequelize(
configuration.database,
configuration.username,
configuration.password,
{
dialect: 'mysql',
host: configuration.host,
port: Number(configuration.port),
pool: {
max: 2,
min: 0,
idle: 0,
acquire: 3000,
evict: 600,
},
},
);
await sequelize.authenticate();
return sequelize;
}
I was able to solve the issue after a lot of tries. I separated out the Sequelize connection code, called it from the app side, and triggered it from lambdaEntry.
lambdaEntry.js file:
async function bootstrap(uuid = null) {
console.log('Calling bootstrap');
const expressServer = express();
const nestApp = await NestFactory.create(
AppModule,
new ExpressAdapter(expressServer),
);
nestApp.use(eventContext());
sharedBootstrap(nestApp);
await nestApp.init();
try {
// Write a function in a service (e.g. PurchaseListService) and trigger umzug up from there.
const migrateResult1 = await nestApp.get(PurchaseListService).migrate('down');
console.log(migrateResult1);
const migrateResult2 = await nestApp.get(PurchaseListService).migrate('up');
console.log(migrateResult2);
} catch (err) {
throw err;
}
return serverlessExpress.createServer(expressServer);
}
export const handler = (event: any, context: Context) => {
if (!lambdaProxy) {
bootstrap(uuid).then((server) => {
lambdaProxy = server;
serverlessExpress.proxy(lambdaProxy, event, context);
});
} else {
serverlessExpress.proxy(lambdaProxy, event, context);
}
};
/code/src/purchaselist/purchaselist.service.ts
async migrate(id: string): Promise<any> {
console.log('migrate script triggered', id);
const sequelize = PurchaseListItem.sequelize;
const umzug = new Umzug({
migrations: { glob: 'src/migrations/*.{ts,js}' },
context: sequelize.getQueryInterface(),
storage: new SequelizeStorage({ sequelize }),
logger: console,
});
let consoleDisplay = 'Umzug LOGS:::<br/>';
switch (id) {
default:
case 'up':
await umzug.up().then(function (migrations) {
console.log('Umzug Migrations UP::<br/>', migrations);
consoleDisplay +=
'Umzug Migrations UP::<br/>' + JSON.stringify(migrations);
});
break;
}
return consoleDisplay;
}
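One caveat about the switch above (an observation, not part of the posted answer): only the 'up' path is implemented, so the migrate('down') call in bootstrap also falls into the default branch and runs umzug.up(). A hedged sketch of an explicit 'down' case, assuming umzug v3's down() API:

switch (id) {
  case 'down':
    // Reverts the most recently executed migration.
    await umzug.down().then(function (migrations) {
      console.log('Umzug Migrations DOWN::<br/>', migrations);
      consoleDisplay +=
        'Umzug Migrations DOWN::<br/>' + JSON.stringify(migrations);
    });
    break;
  default:
  case 'up':
    await umzug.up().then(function (migrations) {
      console.log('Umzug Migrations UP::<br/>', migrations);
      consoleDisplay +=
        'Umzug Migrations UP::<br/>' + JSON.stringify(migrations);
    });
    break;
}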
I am struggling to upload a .csv file to a NestJS-GraphQL-Fastify server. I tried the following code:
@Mutation(() => Boolean)
async createUsers(
  @Args({ name: 'file', type: () => GraphQLUpload })
  { createReadStream, filename }: FileUpload,
): Promise<boolean> {
try {
// backend logic . . .
} catch {
return false;
}
return true;
}
but all I get when testing with postman is this response:
{
"statusCode": 415,
"code": "FST_ERR_CTP_INVALID_MEDIA_TYPE",
"error": "Unsupported Media Type",
"message": "Unsupported Media Type: multipart/form-data; boundary=--------------------------511769018912715357993837"
}
I am developing with the code-first approach.
Update: I tried to use fastify-multipart, but the issue remains. What has changed is the response in Postman:
POST body missing, invalid Content-Type, or JSON object has no keys.
I found an answer on the NestJS Discord channel.
You have to make the following changes:
main.ts
// Imports added for completeness; processRequest is assumed to come from graphql-upload.
import { NestFactory } from '@nestjs/core';
import { FastifyAdapter, NestFastifyApplication } from '@nestjs/platform-fastify';
import { processRequest } from 'graphql-upload';
import { AppModule } from './app.module';

async function bootstrap() {
  const adapter = new FastifyAdapter();
  const fastify = adapter.getInstance();

  // Flag multipart requests so the preValidation hook below can handle them.
  fastify.addContentTypeParser('multipart', (request, done) => {
    request.isMultipart = true;
    done();
  });

  // Let graphql-upload parse the multipart body before validation runs.
  fastify.addHook('preValidation', async function (request: any, reply) {
    if (!request.raw.isMultipart) {
      return;
    }
    request.body = await processRequest(request.raw, reply.raw);
  });

  const app = await NestFactory.create<NestFastifyApplication>(
    AppModule,
    adapter,
  );
  await app.listen(apiServerPort, apiServerHost);
}
bootstrap();
upload.scalar.ts
import { Scalar } from '@nestjs/graphql';
import { GraphQLUpload } from 'graphql-upload';

@Scalar('Upload')
export class UploadGraphQLScalar {
protected parseValue(value) {
return GraphQLUpload.parseValue(value);
}
protected serialize(value) {
return GraphQLUpload.serialize(value);
}
protected parseLiteral(ast) {
return GraphQLUpload.parseLiteral(ast, ast.value);
}
}
users.resolver.ts
@Mutation(() => CreateUsersOutput, { name: 'createUsers' })
async createUsers(
  @Args('input', new ValidationPipe()) input: CreateUsersInput,
  @ReqUser() reqUser: RequestUser,
): Promise<CreateUsersOutput> {
return this.usersService.createUsers(input, reqUser);
}
create-shared.input.ts
@InputType()
export class DataObject {
  @Field(() => UploadGraphQLScalar)
  @Exclude()
  public readonly csv?: Promise<FileUpload>;
}

@InputType()
@ArgsType()
export class CreateUsersInput {
  @Field(() => DataObject)
  public readonly data: DataObject;
}
Also, I want to mention that you should not use global validation pipes (in my case they made the files unreadable):
// app.useGlobalPipes(new ValidationPipe({ transform: true }));
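For completeness, here is a minimal sketch (not from the original answer) of how a service might consume the uploaded CSV once the FileUpload promise from the DataObject input resolves; the helper name and the "small file, read into memory" approach are assumptions:

import { FileUpload } from 'graphql-upload';

// Hypothetical helper: drains the upload stream into memory and splits it
// into rows. Fine for small CSVs; large files should be piped to a parser.
async function readCsvUpload(upload: Promise<FileUpload>): Promise<string[]> {
  const { createReadStream, filename } = await upload;
  console.log(`processing upload: ${filename}`);

  const chunks: Buffer[] = [];
  for await (const chunk of createReadStream()) {
    chunks.push(Buffer.from(chunk));
  }
  return Buffer.concat(chunks).toString('utf8').split(/\r?\n/).filter(Boolean);
}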
You could use graphql-python/gql to try to upload a file:
from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport
transport = AIOHTTPTransport(url='YOUR_URL')
client = Client(transport=transport)
query = gql('''
mutation($file: Upload!) {
createUsers(file: $file)
}
''')
with open("YOUR_FILE_PATH", "rb") as f:
params = {"file": f}
result = client.execute(
query, variable_values=params, upload_files=True
)
print(result)
If you activate logging, you can see the messages exchanged between the client and the backend.
I have an Angular 5 app with an rxjs WebSocketSubject sending JSON-RPC messages.
This is my sendRequest function:
sendRequest(request: Request): Promise<Response> {
console.log(request);
this.socket.next(JSON.stringify(request));
return this.onResponse().filter((response: Response) => {
return response.id === request.id;
}).first().toPromise().then((response) => {
console.log(response);
if (response.error) {
console.log('error');
throw new RpcError(response.error);
}
return response;
});
}
I am using the first() operator to complete this filter subscription. But onResponse() comes directly from my WebSocketSubject, and that will then be completed as well.
Are there any methods for decoupling from the original subject?
Or should I create a new Observable.create(...)?
And what happens to the .filter function I wrote: does it linger anywhere, or do I have to remove it somewhere to prevent everlasting filter calls?
Edit 1
Using the following does not help either.
sendRequest(request: Request): Promise<Response> {
console.log(request);
this.socket.next(JSON.stringify(request));
return new Promise<Response>((resolve, reject) => {
const responseSubscription = this.onResponse().filter((response: Response) => {
console.log('filter');
return response.id === request.id;
}).subscribe((response: Response) => {
// responseSubscription.unsubscribe();
resolve(response);
});
});
}
If I execute the unsubscribe, the whole WebSocketSubject is closed. Not doing so logs 'filter' one more time per request!
Edit 2
Here is the whole WebsocketService I have written:
import {Injectable} from "#angular/core";
import {WebSocketSubject, WebSocketSubjectConfig} from "rxjs/observable/dom/WebSocketSubject";
import {MessageFactory, Notification, Request, Response, RpcError} from "../misc/jsonrpc";
import {ReplaySubject} from "rxjs/ReplaySubject";
import {Observable} from "rxjs/Observable";
import 'rxjs/add/operator/toPromise';
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/first';
import 'rxjs/add/observable/from';
export enum ConnectionState {
CONNECTED = "Connected",
CONNECTING = "Connecting",
CLOSING = "Closing",
DISCONNECTED = "Disconnected"
}
@Injectable()
export class WebsocketService {
private connectionState = new ReplaySubject<ConnectionState>(1);
private socket: WebSocketSubject<ArrayBuffer | Object>;
private config: WebSocketSubjectConfig;
constructor() {
console.log('ctor');
const protocol = location.protocol === 'https:' ? 'wss' : 'ws'; // location.protocol includes the trailing ':'
const host = location.hostname;
const port = 3000; // location.port;
this.config = {
binaryType: "arraybuffer",
url: `${protocol}://${host}:${port}`,
openObserver: {
next: () => this.connectionState.next(ConnectionState.CONNECTED)
},
closingObserver: {
next: () => this.connectionState.next(ConnectionState.CLOSING)
},
closeObserver: {
next: () => this.connectionState.next(ConnectionState.DISCONNECTED)
},
resultSelector: (e: MessageEvent) => {
try {
if (e.data instanceof ArrayBuffer) {
return e.data;
} else {
return JSON.parse(e.data);
}
} catch (e) {
console.error(e);
return null;
}
}
};
this.connectionState.next(ConnectionState.CONNECTING);
this.socket = new WebSocketSubject(this.config);
this.connectionState.subscribe((state) => {
console.log(`WS state ${state}`);
});
}
onBinaryData(): Observable<ArrayBuffer> {
return this.socket.filter((message: any) => {
return message instanceof ArrayBuffer;
});
}
onMessageData(): Observable<Object> {
return this.socket.filter((message: any) => {
return !(message instanceof ArrayBuffer);
});
}
onResponse(): Observable<Response> {
return this.onMessageData().filter((message) => {
return MessageFactory.from(message).isResponse();
}).map((message): Response => {
return MessageFactory.from(message).toResponse();
});
}
sendRequest(request: Request): Promise<Response> {
console.log(request);
this.socket.next(JSON.stringify(request));
return new Promise<Response>((resolve, reject) => {
const responseSubscription = this.onResponse().filter((response: Response) => {
console.log('filter');
return response.id === request.id;
}).subscribe((response: Response) => {
responseSubscription.unsubscribe();
resolve(response);
});
});
}
sendNotification(notification: Notification): void {
this.socket.next(JSON.stringify(notification));
}
}
And the result in my log
Using Angular 5.0.2
websocket.service.ts:27 ctor
websocket.service.ts:69 WS state Connecting
core.js:3565 Angular is running in the development mode. Call enableProdMode() to enable the production mode.
websocket.service.ts:96 Request {jsonrpc: "2.0", id: "b042005c-5fbf-5ffc-fbd1-df68fae5882e", method: "appointment_list_get", params: undefined}
websocket.service.ts:69 WS state Connected
websocket.service.ts:103 filter
websocket.service.ts:69 WS state Disconnected
I need to find a way of decoupling my filter from the original stream somehow.
This is working.
The key was to decouple the message handling from the underlying WebSocketSubject.
import {Injectable} from "#angular/core";
import {WebSocketSubject, WebSocketSubjectConfig} from "rxjs/observable/dom/WebSocketSubject";
import {MessageFactory, Notification, Request, Response, RpcError} from "../misc/jsonrpc";
import {ReplaySubject} from "rxjs/ReplaySubject";
import {Observable} from "rxjs/Observable";
import 'rxjs/add/operator/toPromise';
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/first';
import 'rxjs/add/observable/from';
import {Subject} from "rxjs/Subject";
export enum ConnectionState {
CONNECTED = "Connected",
CONNECTING = "Connecting",
CLOSING = "Closing",
DISCONNECTED = "Disconnected"
}
@Injectable()
export class WebsocketService {
private connectionState = new ReplaySubject<ConnectionState>(1);
private socket: WebSocketSubject<ArrayBuffer | Object>;
private config: WebSocketSubjectConfig;
private messageObserver = new Subject<MessageFactory>();
private binaryObserver = new Subject<ArrayBuffer>();
constructor() {
const protocol = location.protocol === 'https:' ? 'wss' : 'ws'; // location.protocol includes the trailing ':'
const host = location.hostname;
const port = 3000; // location.port;
this.config = {
binaryType: "arraybuffer",
url: `${protocol}://${host}:${port}`,
openObserver: {
next: () => this.connectionState.next(ConnectionState.CONNECTED)
},
closingObserver: {
next: () => this.connectionState.next(ConnectionState.CLOSING)
},
closeObserver: {
next: () => this.connectionState.next(ConnectionState.DISCONNECTED)
},
resultSelector: (e: MessageEvent) => {
try {
if (e.data instanceof ArrayBuffer) {
return e.data;
} else {
return JSON.parse(e.data);
}
} catch (e) {
console.error(e);
return null;
}
}
};
this.connectionState.next(ConnectionState.CONNECTING);
this.socket = new WebSocketSubject(this.config);
this.socket.filter((message: any) => {
return message instanceof ArrayBuffer;
}).subscribe((message: ArrayBuffer) => {
this.binaryObserver.next(message);
});
this.socket.filter((message: any) => {
return !(message instanceof ArrayBuffer);
}).subscribe((message: Object) => {
this.messageObserver.next(MessageFactory.from(message));
});
this.connectionState.subscribe((state) => {
console.log(`WS state ${state}`);
});
}
onResponse(): Observable<Response> {
return this.messageObserver.filter((message: MessageFactory) => {
return message.isResponse();
}).map((message: MessageFactory): Response => {
return message.toResponse();
});
}
sendRequest(request: Request): Promise<Response> {
console.log(request);
this.socket.next(JSON.stringify(request));
return this.onResponse().filter((response: Response) => {
return request.id === response.id;
}).first().toPromise().then((response) => {
console.log(response);
if (response.error) {
console.log('error');
throw new RpcError(response.error);
}
return response;
});
}
sendNotification(notification: Notification): void {
this.socket.next(JSON.stringify(notification));
}
}
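For reference, a short usage sketch (hypothetical caller; the Request and Response shapes from ../misc/jsonrpc are assumed and mirror the object visible in the question's console output) showing how a consumer of this service would issue a request and handle the RPC error path:

// Hedged usage sketch, e.g. inside a component that injected WebsocketService.
const request: Request = {
  jsonrpc: '2.0',
  id: 'b042005c-5fbf-5ffc-fbd1-df68fae5882e',
  method: 'appointment_list_get',
  params: undefined,
};

this.websocketService
  .sendRequest(request)
  .then((response: Response) => console.log('result', response))
  .catch((error) => console.error('rpc failed', error)); // RpcError or transport failure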