Convert WebRTC track stream to a URL (RTSP/UDP/RTP/HTTP) in video tag - ffmpeg

I am new to WebRTC. I have a working client/server connection: on the client I choose a webcam and publish the stream to the server as a track, and on the server side I receive that track and assign its stream to a video element's source. Everything works fine so far. The problem is that I now want to add AI (artificial intelligence) object detection, and for that I need to expose my track stream as a URL, maybe UDP/RTSP/RTP etc., so the AI can consume that URL. I don't know how to convert a track stream to a URL.
There are a couple of tools such as https://ffmpeg.org/ and RTP-to-WebRTC etc. I am using Node.js, Socket.io and WebRTC; below you can check my client and server side code for publishing and receiving the stream. I am following this GitHub code: https://github.com/Basscord/webrtc-video-broadcast.
My main concern is to expose the track as a URL for a video tag. Is it possible or not? Any suggestions would be appreciated.
Server.js
This is the Node.js server code:
const express = require("express");
const app = express();
let broadcaster;
const port = 4000;

const http = require("http");
const server = http.createServer(app);
const io = require("socket.io")(server);

app.use(express.static(__dirname + "/public"));

io.sockets.on("error", e => console.log(e));
io.sockets.on("connection", socket => {
  socket.on("broadcaster", () => {
    broadcaster = socket.id;
    socket.broadcast.emit("broadcaster");
  });
  socket.on("watcher", () => {
    socket.to(broadcaster).emit("watcher", socket.id);
  });
  socket.on("offer", (id, message) => {
    socket.to(id).emit("offer", socket.id, message);
  });
  socket.on("answer", (id, message) => {
    socket.to(id).emit("answer", socket.id, message);
  });
  socket.on("candidate", (id, message) => {
    socket.to(id).emit("candidate", socket.id, message);
  });
  socket.on("disconnect", () => {
    socket.to(broadcaster).emit("disconnectPeer", socket.id);
  });
});

server.listen(port, () => console.log(`Server is running on port ${port}`));
Broadcast.js
This is the code that emits the stream (track):
const peerConnections = {};
const config = {
  iceServers: [
    {
      urls: ["stun:stun.l.google.com:19302"]
    }
  ]
};

const socket = io.connect(window.location.origin);

socket.on("answer", (id, description) => {
  peerConnections[id].setRemoteDescription(description);
});

socket.on("watcher", id => {
  const peerConnection = new RTCPeerConnection(config);
  peerConnections[id] = peerConnection;

  let stream = videoElement.srcObject;
  stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));

  peerConnection.onicecandidate = event => {
    if (event.candidate) {
      socket.emit("candidate", id, event.candidate);
    }
  };

  peerConnection
    .createOffer()
    .then(sdp => peerConnection.setLocalDescription(sdp))
    .then(() => {
      socket.emit("offer", id, peerConnection.localDescription);
    });
});

socket.on("candidate", (id, candidate) => {
  peerConnections[id].addIceCandidate(new RTCIceCandidate(candidate));
});

socket.on("disconnectPeer", id => {
  peerConnections[id].close();
  delete peerConnections[id];
});

window.onunload = window.onbeforeunload = () => {
  socket.close();
};

// Get camera and microphone
const videoElement = document.querySelector("video");
const audioSelect = document.querySelector("select#audioSource");
const videoSelect = document.querySelector("select#videoSource");

audioSelect.onchange = getStream;
videoSelect.onchange = getStream;

getStream()
  .then(getDevices)
  .then(gotDevices);

function getDevices() {
  return navigator.mediaDevices.enumerateDevices();
}

function gotDevices(deviceInfos) {
  window.deviceInfos = deviceInfos;
  for (const deviceInfo of deviceInfos) {
    const option = document.createElement("option");
    option.value = deviceInfo.deviceId;
    if (deviceInfo.kind === "audioinput") {
      option.text = deviceInfo.label || `Microphone ${audioSelect.length + 1}`;
      audioSelect.appendChild(option);
    } else if (deviceInfo.kind === "videoinput") {
      option.text = deviceInfo.label || `Camera ${videoSelect.length + 1}`;
      videoSelect.appendChild(option);
    }
  }
}

function getStream() {
  if (window.stream) {
    window.stream.getTracks().forEach(track => {
      track.stop();
    });
  }
  const audioSource = audioSelect.value;
  const videoSource = videoSelect.value;
  const constraints = {
    audio: { deviceId: audioSource ? { exact: audioSource } : undefined },
    video: { deviceId: videoSource ? { exact: videoSource } : undefined }
  };
  return navigator.mediaDevices
    .getUserMedia(constraints)
    .then(gotStream)
    .catch(handleError);
}

function gotStream(stream) {
  window.stream = stream;
  audioSelect.selectedIndex = [...audioSelect.options].findIndex(
    option => option.text === stream.getAudioTracks()[0].label
  );
  videoSelect.selectedIndex = [...videoSelect.options].findIndex(
    option => option.text === stream.getVideoTracks()[0].label
  );
  videoElement.srcObject = stream;
  socket.emit("broadcaster");
}

function handleError(error) {
  console.error("Error: ", error);
}
RemoteServer.js
This code receives the track and assigns it to the video tag:
let peerConnection;
const config = {
  iceServers: [
    {
      urls: ["stun:stun.l.google.com:19302"]
    }
  ]
};

const socket = io.connect(window.location.origin);
const video = document.querySelector("video");

socket.on("offer", (id, description) => {
  peerConnection = new RTCPeerConnection(config);
  peerConnection
    .setRemoteDescription(description)
    .then(() => peerConnection.createAnswer())
    .then(sdp => peerConnection.setLocalDescription(sdp))
    .then(() => {
      socket.emit("answer", id, peerConnection.localDescription);
    });
  peerConnection.ontrack = event => {
    video.srcObject = event.streams[0];
  };
  peerConnection.onicecandidate = event => {
    if (event.candidate) {
      socket.emit("candidate", id, event.candidate);
    }
  };
});

socket.on("candidate", (id, candidate) => {
  peerConnection
    .addIceCandidate(new RTCIceCandidate(candidate))
    .catch(e => console.error(e));
});

socket.on("connect", () => {
  socket.emit("watcher");
});

socket.on("broadcaster", () => {
  socket.emit("watcher");
});

socket.on("disconnectPeer", () => {
  peerConnection.close();
});

window.onunload = window.onbeforeunload = () => {
  socket.close();
};

rtp-to-webrtc does exactly what you want.
Unfortunately you will need to run some sort of server to make this happen; it can't all be in the browser. You could also upload the media via other protocols (captured via MediaRecorder) if you don't want to use WebRTC.
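To sketch the ffmpeg side of that: once a server-side peer forwards the track's RTP packets to a local UDP port, you can describe that stream in a small SDP file and have ffmpeg republish it as a URL your AI can open. This is only a sketch under assumptions; the port (5004), payload type (96) and codec (VP8) must match whatever your forwarder actually sends.
video.sdp:
v=0
o=- 0 0 IN IP4 127.0.0.1
s=WebRTC track
c=IN IP4 127.0.0.1
t=0 0
m=video 5004 RTP/AVP 96
a=rtpmap:96 VP8/90000
Then republish it, for example as MPEG-TS over UDP:
ffmpeg -protocol_whitelist file,udp,rtp -i video.sdp -c:v libx264 -preset ultrafast -f mpegts udp://127.0.0.1:1234
An object-detection pipeline (OpenCV and friends) can then open udp://127.0.0.1:1234 like any other video URL.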

Related

Deno oak websocket must have a method that returns an async iterator

I am trying to set up a WebSocket server with oak (not the native Deno one).
The following code is how I build the server.
import { Application, Router, Context, send } from "https://deno.land/x/oak@v10.6.0/mod.ts";

const runWS = async (ctx: Context, next: () => Promise<unknown>) => {
  try {
    const ws = await ctx.upgrade();
    ws.onopen = () => {
      chatConnection(ws);
    };
    ws.onclose = () => { console.log('Disconnected from the client!'); };
  } catch {
    await next();
  }
}

let sockets = new Map<string, WebSocket>();

const chatConnection = async (ws: WebSocket) => {
  console.log('new websocket, ws: ', ws);
  const uid = globalThis.crypto.randomUUID();
  sockets.set(uid, ws);
  console.log('socket: ', sockets);
  for await (const ev of ws) {
    console.log('ev: ', ev);
  }
}

export const wsRoutes = new Router()
  .get('/ws', runWS);
But in the for loop (at the end), ws produces the error Type 'WebSocket' must have a '[Symbol.asyncIterator]()' method that returns an async iterator. What's the deal with this and how do I fix it?
The error message is providing you with useful information: the WebSocket is not AsyncIterable, which means that it cannot be used with a for await...of loop.
Here is the type documentation for WebSocket in Deno. It is (for the most part) the same as the WHATWG standard WebSocket that is documented on MDN.
If your intention is to respond to incoming message events, you'll need to attach an event listener:
webSocket.addEventListener("message", (messageEvent) => {
  // Do something in response to each message event
});
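If you specifically want the for await...of style back, you can adapt the event-based API yourself. Here's a sketch of a small helper (messagesOf is my name, not a Deno or oak API) that buffers message events into an async generator:
// Sketch: turn a WebSocket's "message" events into an async iterable.
async function* messagesOf(ws) {
  const queue = [];
  let wake = null;
  let closed = false;
  ws.addEventListener("message", (ev) => {
    queue.push(ev);
    if (wake) wake();
  });
  ws.addEventListener("close", () => {
    closed = true;
    if (wake) wake();
  });
  while (true) {
    if (queue.length > 0) {
      yield queue.shift();
    } else if (closed) {
      return;
    } else {
      // Park until the next message or close event arrives.
      await new Promise((resolve) => (wake = resolve));
      wake = null;
    }
  }
}

// Usage inside chatConnection:
// for await (const ev of messagesOf(ws)) { console.log('ev: ', ev.data); }
Note this is just sugar over the listener approach above.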
Additional:
Here's an observation based on the code you've shown, but not in response to your question:
It's probably more ergonomic to store the sockets as the keys of your map, and the associated state data in the values (the inverse of what you've shown). Here's an example of why:
import {
  Router,
  type RouterMiddleware,
} from "https://deno.land/x/oak@v10.6.0/mod.ts";

// You seem to want to log data to the console.
// This function will help you easily log only certain properties of objects:

/**
 * Functional implementation of the type utility
 * [`Pick<Type, Keys>`](https://www.typescriptlang.org/docs/handbook/utility-types.html#picktype-keys)
 */
function pick<T, K extends keyof T>(
  obj: T,
  keys: readonly K[],
): Pick<T, K> {
  const result = {} as Pick<T, K>;
  for (const key of keys) result[key] = obj[key];
  return result;
}

type SocketData = { id: string };
const socketMap = new Map<WebSocket, SocketData>();

// Do something when a connection is opened
function handleOpen(ev: Event, ws: WebSocket) {
  const socketData: SocketData = { id: window.crypto.randomUUID() };
  socketMap.set(ws, socketData);
  console.log({
    event: pick(ev, ["type"]),
    socketData,
  });
}

// Do something when an error occurs
function handleError(ev: Event, ws: WebSocket) {
  const socketData = socketMap.get(ws);
  console.log({
    event: pick(ev, ["type"]),
    socketData,
  });
  socketMap.delete(ws);
}

// Do something when a connection is closed
function handleClose(ev: CloseEvent, ws: WebSocket) {
  ev.code; // number
  ev.reason; // string
  ev.wasClean; // boolean
  const socketData = socketMap.get(ws);
  console.log({
    event: pick(ev, ["type", "code", "reason", "wasClean"]),
    socketData,
  });
  socketMap.delete(ws);
}

// Do something when a message is received
// Change `unknown` to the type of message payloads used in your application.
// (for example, JSON messages are `string`)
function handleMessage(ev: MessageEvent<unknown>, ws: WebSocket) {
  ev.data; // unknown
  ev.lastEventId; // string
  ev.ports; // readonly MessagePort[]
  const socketData = socketMap.get(ws);
  if (socketData) {
    socketData.id; // string
  }
  console.log({
    event: pick(ev, ["type", "data", "lastEventId", "ports"]),
    socketData,
  });
}

const webSocketMiddleware: RouterMiddleware<"/ws"> = async (ctx, next) => {
  const ws = ctx.upgrade();
  ws.addEventListener("open", (ev) => handleOpen(ev, ws));
  ws.addEventListener("error", (ev) => handleError(ev, ws));
  ws.addEventListener("close", (ev) => handleClose(ev, ws));
  ws.addEventListener("message", (ev) => handleMessage(ev, ws));
  await next();
};

export const router = new Router();
router.get("/ws", webSocketMiddleware);
This is my updated code; it avoids the problem entirely:
import { Application, Router, Context, send } from "https://deno.land/x/oak@v10.6.0/mod.ts";

interface BroadcastObj {
  name: string,
  mssg: string
}

const runWS = async (ctx: Context, next: () => Promise<unknown>) => {
  if (!ctx.isUpgradable) {
    ctx.throw(501);
  }
  const uid = globalThis.crypto.randomUUID();
  try {
    const ws = await ctx.upgrade();
    ws.onopen = () => {
      chatConnection(ws, uid); // pass the uid along, or chatConnection has nothing to store it under
    };
    ws.onmessage = (m) => {
      let mssg = m.data as string;
      if (typeof (mssg) === 'string') {
        chatMessage(JSON.parse(mssg));
      }
    };
    ws.onerror = (e) => { console.log('error occurred: ', e); };
    ws.onclose = () => { chatDisconnect(uid); };
  } catch {
    await next();
  }
}

let sockets = new Map<string, WebSocket>();

const chatConnection = (ws: WebSocket, uid: string) => {
  sockets.set(uid, ws); // Map.set is synchronous; no await needed
}

const chatMessage = (msg: BroadcastObj) => {
  sockets.forEach((ws: WebSocket) => {
    ws.send(JSON.stringify(msg));
  });
}

const chatDisconnect = (uid: string) => {
  sockets.delete(uid);
}

export const wsRoutes = new Router()
  .get('/ws', runWS);

How to connect peers using simple-peer with React and Node.js?

I am trying to make a video call in my project and I am using simple-peer to connect two peers. I can see all of the console.logs on Node.js but cannot get console.log(4) on React. I don't fully understand WebRTC yet, so please bear with my broken code...
This is my code with React.
const myVideo = useRef();
const userVideo = useRef();
const connectionRef = useRef();
const roomName = "123";
const [stream, setStream] = useState();
let creator = false;

useEffect(() => {
  const socket = io("url");
  socket.emit("joinRoom", roomName);

  socket.on("created", () => {
    creator = true;
    navigator.mediaDevices
      .getUserMedia({ video: true, audio: true })
      .then((stream) => {
        setStream(stream);
        myVideo.current.srcObject = stream;
      });
  });

  socket.on("joined", () => {
    navigator.mediaDevices
      .getUserMedia({ video: true, audio: true })
      .then((stream) => {
        setStream(stream);
        userVideo.current.srcObject = stream;
      });
    socket.emit("ready", roomName);
  });

  socket.on("ready", () => {
    if (creator) {
      const peer = new Peer({ initiator: true, trickle: false, stream });
      peer.on("signal", (signal) => {
        socket.emit("sendingSignal", {
          signal,
          roomName,
        });
      });
      peer.on("stream", (stream) => {
        userVideo.current.srcObject = stream;
      });
      socket.on("receivingSignal", (signal) => {
        peer.signal(signal);
      });
      connectionRef.current = peer;
    }
  });

  socket.on("offer", (incomingSignal) => {
    console.log(incomingSignal);
    if (!creator) {
      const peer = new Peer({ initiator: false, trickle: false, stream });
      console.log(peer);
      console.log(1);
      peer.on("signal", (signal) => {
        socket.emit("returningSignal", { signal, roomName });
        console.log(2);
      });
      console.log(3);
      peer.on("stream", (stream) => {
        userVideo.current.srcObject = stream;
        console.log(4);
      });
      console.log(5);
      peer.signal(incomingSignal);
      console.log(6);
      connectionRef.current = peer;
    }
  });
}, []);
And this is my code with Node.js.
io.on("connection", (socket) => {
  console.log(1)
  socket.on('joinRoom', (roomName) => {
    let rooms = io.sockets.adapter.rooms;
    let room = rooms.get(roomName);
    console.log(2)
    if (room == undefined) {
      console.log(2.1)
      socket.join(roomName);
      socket.emit("created");
      console.log(2.2)
    } else if (room?.size == 1) {
      console.log(2.3)
      socket.join(roomName);
      socket.emit("joined");
      console.log(2.4)
    } else {
      socket.emit("full");
      console.log(2.5)
    }
  });
  socket.on("ready", function (roomName) {
    socket.broadcast.to(roomName).emit("ready");
  });
  socket.on('sendingSignal', ({ signal, roomName }) => {
    console.log(3)
    console.log({ signal, roomName })
    io.to(roomName).emit("offer", signal)
    console.log(3.5)
  })
  socket.on("returningSignal", ({ signal, roomName }) => {
    console.log({ signal, roomName })
    console.log(4)
    io.to(roomName).emit("receivingSignal", signal)
    console.log(4.5)
  });
})

how to send react-native-audio-record recorded audio file to server?

I need to record audio and upload it to a server, and for recording audio I am using the "react-native-audio-record" React Native package.
When I use file_get_contents($request->file('inputFile')) in Laravel, file_get_contents returns a 500 internal server error every time.
I tried form-data and a blob object.
Here is my React Native code, along with everything I tried to solve this:
onStartRecord = async () => {
  this.setState({ isPlaying: false })
  let dirs = RNFetchBlob.fs.dirs
  if (Platform.OS === 'android') {
    try {
      const granted = await PermissionsAndroid.request(
        PermissionsAndroid.PERMISSIONS.WRITE_EXTERNAL_STORAGE,
        {
          title: 'Permissions for write access',
          message: 'Give permission to your storage to write a file',
          buttonPositive: 'ok',
        },
      );
      if (granted === PermissionsAndroid.RESULTS.GRANTED) {
        console.log('You can use the storage');
      } else {
        console.log('permission denied');
        return;
      }
    } catch (err) {
      console.warn(err);
      return;
    }
  }
  if (Platform.OS === 'android') {
    try {
      const granted = await PermissionsAndroid.request(
        PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
        {
          title: 'Permissions for write access',
          message: 'Give permission to your storage to write a file',
          buttonPositive: 'ok',
        },
      );
      if (granted === PermissionsAndroid.RESULTS.GRANTED) {
        console.log('You can use the camera');
      } else {
        console.log('permission denied');
        return;
      }
    } catch (err) {
      console.warn(err);
      return;
    }
  }
  const path = Platform.select({
    ios: 'hello.m4a',
    //android: dirs.DocumentDir + '/hello.aac',
    android: 'sdcard/hello.mp3',
  });
  const audioSet: AudioSet = {
    // AudioEncoderAndroid: AudioEncoderAndroidType.AAC,
    // AudioSourceAndroid: AudioSourceAndroidType.MIC,
    // AVEncoderAudioQualityKeyIOS: AVEncoderAudioQualityIOSType.high,
    // AVNumberOfChannelsKeyIOS: 2,
    // AVFormatIDKeyIOS: AVEncodingOption.aac,
  };
  //console.log('audioSet', audioSet);
  const uri = await this.audioRecorderPlayer.startRecorder(path);
  console.log("URI => ", uri);
  // RNFS.readFile(uri, 'base64')
  //   .then(res => {
  //     console.log(res);
  //   });
  // RNFetchBlob.fs.writeFile(path, base64Str, 'base64');
  // RNFetchBlob.android.actionViewIntent(path, 'application/aac');
  this.audioRecorderPlayer.addRecordBackListener((e: any) => {
    //console.log("E ====>>>>>>>>>", e);
    this.setState({
      recordSecs: e.current_position,
      recordTime: this.audioRecorderPlayer.mmssss(
        Math.floor(e.current_position),
      ),
    });
  });
  //alert(`uri: ${uri}`);
  // var body = new FormData();
  // //console.log("BODY",abc);
  // body.append('file', uri);
  //
  // console.log("+++++++=========body=========++++++", body);
  var body = new FormData();
  //console.log("BODY",abc);
  body.append('inputFile', {
    name: 'sound.mp4',
    type: 'audio/mp3',
    uri: uri
  });
  console.log("+++++++=========body=========++++++", body);
  // console.log("BODY", body);
  // RNFS.readFile(uri, "base64").then(data => {
  //   // binary data
  //   console.log("+++++++=========URI=========++++++", data);
  // });
  // const formData = [];
  // formData.push({
  //   name: "sound",
  //   filename: `sound.mp4`,
  //   data: RNFetchBlob.wrap(uri)
  // });
  const blob = await (await fetch(uri)).blob();
  // const file = new File(this.state.recordTime, `me-at-thevoice${1}.mp3`, {
  //   type: blob.type,
  //   lastModified: Date.now()
  // });
  // console.log("Blob data file", file);
  var bodyData = new FormData();
  //console.log("BODY",abc);
  bodyData.append('inputFile', { blob });
  //
  // console.log("RNFetchBlob blob", blob);
  // await new Promise(resolve => {
  //   var reader = new FileReader();
  //   reader.readAsDataURL(blob);
  //   reader.onloadend = () => {
  //     var base64data = reader.result;
  //     console.log("reader", reader);
  //     console.log("base64data =--->>>", base64data);
  //     // let pth = path
  //     // RNFetchBlob.fs.writeFile(pth, reader.result.substr(base64data.indexOf(',') + 1), 'base64').then((res) => {
  //     //   console.log("RNFetchBlob res", res);
  //     //   blob.close()
  //     //   resolve();
  //     // });
  //
  this.props.setLoader(true);
  this.props.uploadAudio(bodyData).then(result => {
    console.log("this.props.audioRecordingResponse |||||=====|||||", this.props.audioRecordingResponse);
    if (this.props.audioRecordingResponse.success) {
      this.handler('success', 'Success', this.props.audioRecordingResponse.message);
      // this.refs["sign"].resetImage();
      // this.setState({
      //   signatures: [],
      //   isDragged: false,
      //   signatureCount: 0
      // })
      //this.props.navigation.navigate('AudioRecording', { templateId: templateId, documentId: documentId });
    } else {
      this.props.setLoader(false);
      this.handler('error', 'Error', this.props.audioRecordingResponse.message);
    }
  })
  // }
  // })
};
Please let me know if anyone has a solution for this.
I am not sure whether this answers your specific case, but this is how I send my audio from a React Native app:
import AudioRecord from 'react-native-audio-record';
import * as RNFS from 'react-native-fs'
.....
record = () => {
  if (!this.state.recording) {
    this.setState({ recording: true }, () => {
      AudioRecord.start()
    })
  } else {
    AudioRecord.stop().then(r => {
      this.setState({ recording: false })
      RNFS.readFile(r, 'base64') // r is the path to the .wav file on the phone
        .then((data) => {
          this.context.socket.emit('sendingAudio', {
            sound: data
          });
        })
    });
  }
}
I use sockets for my implementation, but you can use pretty much anything, as all I am sending is a long string. On the server side I then decode the string like so:
export function sendingAudio(data) {
  let fileName = `sound.wav`
  let buff = Buffer.from(data.sound, 'base64');
  // writeFileSync is synchronous, so there is nothing to await here
  fs.writeFileSync(fileName, buff)
}
So basically I create a wav file on the phone, read it into a base64 encoding, send that to the server, and on the server decode it from base64 back into a .wav file.
For Laravel I believe this could help you: Decode base64 audio. Just don't save it as mp3 but as wav.
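If you prefer plain HTTP over sockets, the same decode works in an Express route. A minimal sketch (the /upload-audio route and inputFile field name are my assumptions, not names from your app):
const express = require('express');
const fs = require('fs');

const app = express();
// Base64-encoded audio bodies can be large, so raise the JSON body limit.
app.use(express.json({ limit: '20mb' }));

app.post('/upload-audio', (req, res) => {
  // Expects a JSON body like { "inputFile": "<base64 string>" }
  const buff = Buffer.from(req.body.inputFile, 'base64');
  fs.writeFileSync('sound.wav', buff);
  res.json({ success: true, message: 'saved' });
});

app.listen(3000);
The client side would then send JSON.stringify({ inputFile: data }) with a Content-Type: application/json header instead of FormData.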

sendResponse in Port.postMessage()

I have the following code
browser.runtime.onConnect.addListener(function (externalPort) {
  externalPort.onMessage.addListener((message, sender, sendResponse) => {
    sendResponse(42);
  });
});
However, it seems that listeners for Port.onMessage are not called with a sendResponse callback the way listeners for browser.runtime.onMessage are.
Any idea how to send responses to messages on ports?
Port-based messaging doesn't use sendResponse. Simply post another message to the port.
Here's a very simplified example of a port-based messaging system. It doesn't transfer errors or exceptions and doesn't have a timeout. The idea is to pass an id, save the callback for that id in a map, and use the same id in the response to call the saved callback.
Unlike browser.runtime.sendMessage, which creates a new port each time (a relatively expensive operation if you send a lot of messages), this reuses the same port.
sender:
const port = browser.runtime.connect({ name: 'foo' });
const portMap = new Map();
let portMessageId = 0;

port.onMessage.addListener(msg => {
  const { id, data } = msg;
  const resolve = portMap.get(id);
  portMap.delete(id);
  resolve(data);
});

function send(data) {
  return new Promise(resolve => {
    const id = ++portMessageId;
    portMap.set(id, resolve);
    port.postMessage({ id, data });
  });
}
usage:
(async () => {
  const response = await send({ foo: 'whatever' });
  console.log(response);
})();
receiver:
/** @param {chrome.runtime.Port} port */
browser.runtime.onConnect.addListener(port => {
  if (port.name === 'foo') {
    port.onMessage.addListener(msg => {
      const { id, data } = msg;
      port.postMessage({ id, data: processMessage(data) });
    });
  }
});
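processMessage above is not defined anywhere; it stands for whatever your extension actually does with a request. A trivial stand-in for testing (hypothetical, not part of any API):
// Hypothetical request handler: echo the payload back with an answer field.
function processMessage(data) {
  return { ...data, answer: 42 };
}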
Port.postMessage() is a push-only messaging method, so you need to use the regular runtime.sendMessage() method in parallel. Here is an example:
manifest.json:
{
  "name": "panel example",
  "version": "1",
  "manifest_version": 2,
  "background": {
    "scripts": ["background.js"]
  },
  "browser_action": {
    "default_title": "panel",
    "default_popup": "panel.html"
  },
  "permissions": [
    "tabs"
  ]
}
background.js:
browser.runtime.onConnect.addListener(port => {
  let tabId;
  const listenerForPort = (message, sender) => {
    if (message &&
        typeof message == 'object' &&
        message.portName == port.name) {
      switch (message.type) {
        case 'get-tabId':
          return Promise.resolve(tabId);
      }
    }
  };
  browser.runtime.onMessage.addListener(listenerForPort);
  port.onMessage.addListener(message => {
    if (message &&
        typeof message == 'object' &&
        message.tabId)
      tabId = message.tabId;
  });
  port.onDisconnect.addListener(port => {
    browser.runtime.onMessage.removeListener(listenerForPort);
    if (tabId)
      browser.tabs.remove(tabId);
  });
});
panel.html:
<!DOCTYPE html>
<script type="application/javascript" src="panel.js"></script>
<button id="button">Click Me</button>
panel.js:
browser.windows.getCurrent({ populate: true }).then(win => {
  const portName = `port for window ${win.id}`;
  const activeTab = win.tabs.find(tab => tab.active);
  const port = browser.runtime.connect({
    name: portName
  });
  port.postMessage({ tabId: activeTab.id });
  const button = document.getElementById('button');
  button.addEventListener('click', async event => {
    const tabIdFromBackground = await browser.runtime.sendMessage({
      type: 'get-tabId',
      portName
    });
    button.textContent = tabIdFromBackground;
  });
});
In this example, there is one listener per connection, and it is designed to respond only to messages sent with the corresponding port name.

WebRTC connect peers on call function

I know this probably is a longshot, but I was hoping for someone to point me in the right direction here.
I've made a simple peer-to-peer video connection and wrapped it inside a function, so I can call it on a button click, but it's not running.
When it's not wrapped inside the activateVideoStream function but just runs on load, it works fine. I have a feeling the issue is around the async function, but I can't wrap my head around it.
Here is the code:
let isAlreadyCalling = false;
const remoteVideo = document.getElementById("remote-video");
const {
  RTCPeerConnection,
  RTCSessionDescription
} = window;
let peerConnection;

function activateVideoStream() {
  const configuration = {
    "iceServers": [{
      "urls": "stun:stun.l.google.com:19302"
    }]
  };
  console.log("Activate Video Stream");
  peerConnection = new RTCPeerConnection(configuration);
  navigator.getUserMedia({
      video: true,
      audio: true
    },
    stream => {
      const localVideo = document.getElementById("local-video");
      if (localVideo) {
        localVideo.srcObject = stream;
      }
      stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));
    },
    error => {
      console.warn(error.message);
    }
  );
  peerConnection.ontrack = function ({
    streams: [stream]
  }) {
    if (remoteVideo) {
      remoteVideo.srcObject = stream;
    }
  };
}

async function callUser(socketId) {
  console.log("Call User");
  remoteVideo.style.display = "block";
  const offer = await peerConnection.createOffer();
  await peerConnection.setLocalDescription(new RTCSessionDescription(offer));
  socket.emit("callUser", {
    offer,
    to: socketId
  });
}

socket.on("callMade", async data => {
  console.log("Call made");
  await peerConnection.setRemoteDescription(
    new RTCSessionDescription(data.offer)
  );
  const answer = await peerConnection.createAnswer();
  await peerConnection.setLocalDescription(new RTCSessionDescription(answer));
  remoteVideo.style.display = "block";
  socket.emit("makeAnswer", {
    answer,
    to: data.socket
  });
});

socket.on("answerMade", async data => {
  console.log("Answer made");
  await peerConnection.setRemoteDescription(
    new RTCSessionDescription(data.answer)
  );
  if (!isAlreadyCalling) {
    callUser(data.socket);
    isAlreadyCalling = true;
  }
});
I've noticed that peerConnection.connectionState inside callUser is set to "new", but without the function wrapper it's set to "complete", so that's probably the issue.
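If that diagnosis is right, the likely cause is ordering: navigator.getUserMedia is callback-based, so activateVideoStream returns before any tracks are added, and an offer created at that moment has nothing to negotiate. A sketch of one way to sequence it, using the promise-based getUserMedia and otherwise leaving the code unchanged (an assumption about the fix, not a tested solution):
// Sketch: make activateVideoStream awaitable so tracks exist before any offer.
async function activateVideoStream() {
  const configuration = {
    iceServers: [{ urls: "stun:stun.l.google.com:19302" }]
  };
  peerConnection = new RTCPeerConnection(configuration);
  peerConnection.ontrack = ({ streams: [stream] }) => {
    if (remoteVideo) remoteVideo.srcObject = stream;
  };
  // Resolves once the camera/microphone stream is actually available.
  const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  const localVideo = document.getElementById("local-video");
  if (localVideo) localVideo.srcObject = stream;
  stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));
}

// Button click handler: wait for the stream before creating an offer.
// await activateVideoStream();
// callUser(someSocketId);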
