How to send a react-native-audio-record recorded audio file to the server? - Laravel

I need to record audio and upload it to a server, and for recording I am using the "react-native-audio-record" React Native package.
Whenever I call file_get_contents($request->file('inputFile')) in Laravel, the request fails with a 500 Internal Server Error.
I tried form-data and a Blob object.
Here is my React Native code, along with everything I have tried so far:
onStartRecord = async () => {
  this.setState({ isPlaying: false });
  let dirs = RNFetchBlob.fs.dirs;

  if (Platform.OS === 'android') {
    // Storage permission, so the recording can be written to disk.
    try {
      const granted = await PermissionsAndroid.request(
        PermissionsAndroid.PERMISSIONS.WRITE_EXTERNAL_STORAGE,
        {
          title: 'Permissions for write access',
          message: 'Give permission to your storage to write a file',
          buttonPositive: 'ok',
        },
      );
      if (granted === PermissionsAndroid.RESULTS.GRANTED) {
        console.log('You can use the storage');
      } else {
        console.log('permission denied');
        return;
      }
    } catch (err) {
      console.warn(err);
      return;
    }

    // Microphone permission, so audio can be recorded.
    try {
      const granted = await PermissionsAndroid.request(
        PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
        {
          title: 'Permission to record audio',
          message: 'Give permission to use the microphone to record audio',
          buttonPositive: 'ok',
        },
      );
      if (granted === PermissionsAndroid.RESULTS.GRANTED) {
        console.log('You can use the microphone');
      } else {
        console.log('permission denied');
        return;
      }
    } catch (err) {
      console.warn(err);
      return;
    }
  }

  const path = Platform.select({
    ios: 'hello.m4a',
    // android: dirs.DocumentDir + '/hello.aac',
    android: 'sdcard/hello.mp3',
  });

  const audioSet: AudioSet = {
    // AudioEncoderAndroid: AudioEncoderAndroidType.AAC,
    // AudioSourceAndroid: AudioSourceAndroidType.MIC,
    // AVEncoderAudioQualityKeyIOS: AVEncoderAudioQualityIOSType.high,
    // AVNumberOfChannelsKeyIOS: 2,
    // AVFormatIDKeyIOS: AVEncodingOption.aac,
  };

  const uri = await this.audioRecorderPlayer.startRecorder(path);
  console.log('URI => ', uri);

  // Other things I tried with the recorded file:
  // RNFS.readFile(uri, 'base64').then(res => console.log(res));
  // RNFetchBlob.fs.writeFile(path, base64Str, 'base64');
  // RNFetchBlob.android.actionViewIntent(path, 'application/aac');

  this.audioRecorderPlayer.addRecordBackListener((e: any) => {
    this.setState({
      recordSecs: e.current_position,
      recordTime: this.audioRecorderPlayer.mmssss(
        Math.floor(e.current_position),
      ),
    });
  });

  // Attempt 1: append the file URI directly to FormData.
  var body = new FormData();
  body.append('inputFile', {
    name: 'sound.mp4',
    type: 'audio/mp3',
    uri: uri,
  });
  console.log('body', body);

  // Attempt 2: wrap the path with RNFetchBlob.
  // const formData = [];
  // formData.push({
  //   name: 'sound',
  //   filename: `sound.mp4`,
  //   data: RNFetchBlob.wrap(uri),
  // });

  // Attempt 3: fetch the file and append it as a blob.
  const blob = await (await fetch(uri)).blob();
  // const file = new File(this.state.recordTime, `me-at-thevoice${1}.mp3`, {
  //   type: blob.type,
  //   lastModified: Date.now(),
  // });
  var bodyData = new FormData();
  bodyData.append('inputFile', { blob });

  // Attempt 4: read the blob back into base64 with a FileReader.
  // await new Promise(resolve => {
  //   var reader = new FileReader();
  //   reader.readAsDataURL(blob);
  //   reader.onloadend = () => {
  //     var base64data = reader.result;
  //     console.log('base64data =--->>>', base64data);
  //     // RNFetchBlob.fs.writeFile(path, reader.result.substr(base64data.indexOf(',') + 1), 'base64')
  //     //   .then(res => { blob.close(); resolve(); });
  //   };
  // });

  this.props.setLoader(true);
  this.props.uploadAudio(bodyData).then(result => {
    console.log('audioRecordingResponse', this.props.audioRecordingResponse);
    if (this.props.audioRecordingResponse.success) {
      this.handler('success', 'Success', this.props.audioRecordingResponse.message);
      // this.props.navigation.navigate('AudioRecording', { templateId: templateId, documentId: documentId });
    } else {
      this.props.setLoader(false);
      this.handler('error', 'Error', this.props.audioRecordingResponse.message);
    }
  });
};
Please let me know if anyone has a solution for this.

I am not sure whether this answers your specific case, but this is how I send audio from my React Native app:
import AudioRecord from 'react-native-audio-record';
import * as RNFS from 'react-native-fs'
.....
record = () => {
  if (!this.state.recording) {
    this.setState({ recording: true }, () => {
      AudioRecord.start();
    });
  } else {
    AudioRecord.stop().then(r => {
      this.setState({ recording: false });
      RNFS.readFile(r, 'base64') // r is the path to the .wav file on the phone
        .then((data) => {
          this.context.socket.emit('sendingAudio', {
            sound: data
          });
        });
    });
  }
}
I use sockets for my implementation, but you can use pretty much anything, as all I am sending is a long string. On the server side I then decode the string like so:
import fs from 'fs';

export async function sendingAudio(data) {
  let fileName = `sound.wav`;
  let buff = Buffer.from(data.sound, 'base64');
  fs.writeFileSync(fileName, buff); // writeFileSync is synchronous, so no await is needed
}
So basically I create a wav file on the phone, read it into a base64 encoding, send that to the server, and on the server decode it from base64 back into a .wav file.
For Laravel I believe this could help you: Decode base64 audio. Just don't save it as mp3 but as wav.
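Since all that travels over the wire is a long base64 string, the same idea should carry over to a plain HTTP upload for Laravel. Below is a minimal sketch of the client side, assuming a hypothetical POST /api/audio route (the URL and field name are placeholders); on the Laravel side you would base64_decode the sound field and write it out with a .wav extension:

// Sketch: stop recording, read the file as base64, POST it as JSON.
AudioRecord.stop().then(path => {
  RNFS.readFile(path, 'base64').then(base64Sound => {
    fetch('https://your-server.test/api/audio', { // placeholder URL
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ sound: base64Sound }),
    })
      .then(res => res.json())
      .then(json => console.log('upload response', json))
      .catch(err => console.warn('upload failed', err));
  });
});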

Related

Firebase Function Returns Before All Callback functions complete execution

I'm using the Google Storage Node.js client library to list GCS bucket paths.
Here's the code for the Firebase Function:
import * as functions from 'firebase-functions';
import { Storage } from '@google-cloud/storage';
import { globVars } from '../admin/admin';

const projectId = process.env.GCLOUD_PROJECT;

// shared global variables setup
const { keyFilename } = globVars;

// Storage set up
const storage = new Storage({
  projectId,
  keyFilename,
});

export const gcsListPath = functions
  .region('europe-west2')
  .runWith({ timeoutSeconds: 540, memory: '256MB' })
  .https.onCall(async (data, context) => {
    if (context.auth?.token.email_verified) {
      const { bucketName, prefix, pathList = false, fileList = false } = data;
      let list;
      const options = {
        autoPaginate: false,
        delimiter: '',
        prefix,
      };
      if (pathList) {
        options.delimiter = '/';
        let test: any[] = [];
        const callback = (_err: any, _files: any, nextQuery: any, apiResponse: any) => {
          test = test.concat(apiResponse.prefixes);
          console.log('test : ', test);
          console.log('nextQuery : ', nextQuery);
          if (nextQuery) {
            storage.bucket(bucketName).getFiles(nextQuery, callback);
          } else {
            // prefixes = the finished array of prefixes
            list = test;
          }
        };
        storage.bucket(bucketName).getFiles(options, callback);
      }
      if (fileList) {
        const [files] = await storage
          .bucket(bucketName)
          .getFiles(options);
        list = files.map((file) => file.name);
      }
      return { list }; // returns null because it executes before the callbacks finish
    } else {
      return {
        error: { message: 'Bad Request', status: 'INVALID_ARGUMENT' },
      };
    }
  });
My problem is that my Firebase Function returns the list (null) before all the callback functions finish execution.
Could someone spot what needs to be changed or added so the function waits for all the callbacks to finish? I've tried adding async/await but can't seem to get it right.
The reason for your error is that you use a callback, which is never awaited in the code. I would recommend turning the callback code into a promise. Something like this:
import * as functions from "firebase-functions";
import { Storage } from "@google-cloud/storage";
import { globVars } from "../admin/admin";

const projectId = process.env.GCLOUD_PROJECT;

// shared global variables setup
const { keyFilename } = globVars;

// Storage set up
const storage = new Storage({
  projectId,
  keyFilename,
});

const getList = (bucketName, options) => {
  return new Promise((resolve, reject) => {
    let list;
    let test: any[] = [];
    const callback = (
      _err: any,
      _files: any,
      nextQuery: any,
      apiResponse: any
    ) => {
      test = test.concat(apiResponse.prefixes);
      console.log("test : ", test);
      console.log("nextQuery : ", nextQuery);
      if (nextQuery) {
        storage.bucket(bucketName).getFiles(nextQuery, callback);
      } else {
        // prefixes = the finished array of prefixes
        list = test;
      }
      resolve(list);
    };
    try {
      storage.bucket(bucketName).getFiles(options, callback);
    } catch (error) {
      reject(error);
    }
  });
};

export const gcsListPath = functions
  .region("europe-west2")
  .runWith({ timeoutSeconds: 540, memory: "256MB" })
  .https.onCall(async (data, context) => {
    if (context.auth?.token.email_verified) {
      const { bucketName, prefix, pathList = false, fileList = false } = data;
      let list;
      const options = {
        autoPaginate: false,
        delimiter: "",
        prefix,
      };
      if (pathList) {
        options.delimiter = "/";
        list = await getList(bucketName, options);
      }
      if (fileList) {
        const [files] = await storage.bucket(bucketName).getFiles(options);
        list = files.map((file) => file.name);
      }
      return { list };
    } else {
      return {
        error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
      };
    }
  });
I'm not sure if the part with fileList will work as expected; it looks like that API may only support callbacks rather than await. Here is a revised version that resolves inside the callback once the last page arrives, and adds proper typings:
import * as functions from "firebase-functions";
import { GetFilesOptions, Storage } from "@google-cloud/storage";
import { globVars } from "../admin/admin";

const projectId = process.env.GCLOUD_PROJECT;

// shared global variables setup
const { keyFilename } = globVars;

// Storage set up
const storage = new Storage({
  projectId,
  keyFilename,
});

const getList = (bucketName: string, options: GetFilesOptions) => {
  return new Promise((resolve, reject) => {
    let list: any[] = [];
    const callback = (
      _err: any,
      _files: any,
      nextQuery: any,
      apiResponse: any
    ) => {
      list = list.concat(apiResponse.prefixes);
      console.log("list : ", list);
      console.log("nextQuery : ", nextQuery);
      if (nextQuery) {
        storage.bucket(bucketName).getFiles(nextQuery, callback);
      } else {
        // no more pages: resolve with the finished array of prefixes
        resolve(list);
      }
    };
    try {
      storage.bucket(bucketName).getFiles(options, callback);
    } catch (error) {
      reject(error);
    }
  });
};

export const gcsListPath = functions
  .region("europe-west2")
  .runWith({ timeoutSeconds: 540, memory: "256MB" })
  .https.onCall(async (data, context) => {
    if (context.auth?.token.email_verified) {
      const { bucketName, prefix, pathList = false, fileList = false } = data;
      let list;
      const options = {
        autoPaginate: false,
        delimiter: "",
        prefix,
      };
      if (pathList) {
        options.delimiter = "/";
        list = await getList(bucketName, options);
      }
      if (fileList) {
        const [files] = await storage.bucket(bucketName).getFiles(options);
        list = files.map((file) => file.name);
      }
      return { list };
    } else {
      return {
        error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
      };
    }
  });
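As a side note, the same pagination loop can also be written without an explicit callback. This is only a sketch, assuming the promise form of getFiles resolves to a [files, nextQuery, apiResponse] tuple when autoPaginate is false (mirroring the callback signature above):

const getPrefixes = async (bucketName, options) => {
  let prefixes = [];
  let query = options;
  // keep requesting pages until nextQuery comes back empty
  while (query) {
    const [, nextQuery, apiResponse] = await storage
      .bucket(bucketName)
      .getFiles(query);
    prefixes = prefixes.concat(apiResponse.prefixes || []);
    query = nextQuery;
  }
  return prefixes;
};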

How can I add a custom video to a broadcast in OpenTok?

I want to add a video while broadcasting. To do this I am referring to this link:
https://github.com/opentok/opentok-web-samples/tree/main/Publish-Video
After OT.initPublisher I publish this publisher into the session with session.publish, but the video is not showing in the live stream.
Can anybody help me with this?
We can publish a custom audio source and video source from a video element, using the captureStream() / mozCaptureStream() methods, as in the code snippet below:
const contentVideoElement = document.createElement('VIDEO');
let screenPublisher = null;

contentVideoElement.autoplay = true;
contentVideoElement.controls = true;
contentVideoElement.classList.add('cameraContainer');

const url = URL.createObjectURL(file); // video file chosen from an input file control
contentVideoElement.src = url;

try {
  await contentVideoElement.play();
} catch (error) {
  console.log(error);
  return;
}

let mediaStream = null;
if (contentVideoElement.captureStream) {
  mediaStream = contentVideoElement.captureStream();
} else if (contentVideoElement.mozCaptureStream) {
  mediaStream = contentVideoElement.mozCaptureStream(); // Firefox
} else {
  console.error('Stream capture is not supported');
  mediaStream = null;
  return;
}

const videoTracks = mediaStream.getVideoTracks();
const audioTracks = mediaStream.getAudioTracks();

if (videoTracks.length > 0 && audioTracks.length > 0) {
  screenPublisher = window.OT.initPublisher(
    'content-video-element-id',
    {
      insertMode: 'append',
      videoSource: videoTracks[0],
      audioSource: audioTracks[0],
      fitMode: 'contain', // using the default
      width: '100%',
      height: '100%',
      showControls: false,
      name: `Guest (Video)`,
    },
    (error) => {
      if (error) {
        contentVideoElement.pause();
        console.log(error);
      } else {
        contentVideoElement.play();
        // openToksession is your connected OpenTok Session object
        openToksession.publish(screenPublisher, (error) => {
          if (error) {
            console.log(error);
          } else {
            // code to run after the video stream publishes successfully
          }
        });
      }
    },
  );

  screenPublisher.on({
    streamDestroyed: ({ stream }) => {
      contentVideoElement.pause();
    },
  });

  contentVideoElement.addEventListener(
    'ended',
    () => {
      console.log('Shared video ended');
    },
    false,
  );
}
For capturing a MediaStream in ReactJS: click here

Convert WebRTC track stream to a URL (RTSP/UDP/RTP/HTTP) in a video tag

I am new to WebRTC, and I have a client/server connection working: on the client I choose a webcam and post the stream to the server using a track, and on the server side I receive that track and assign its stream to a video source. Everything works fine up to this point, but now I want to add AI (artificial intelligence) object detection, and it needs a URL (maybe UDP/RTSP/RTP etc.) rather than a track. I don't know how to convert the track stream to a URL.
There are a couple of packages, like https://ffmpeg.org/ and rtp-to-webrtc, that might help. I am using Node.js, Socket.io and WebRTC; below you can check my client and server side code for getting and posting the stream. I am following this GitHub code: https://github.com/Basscord/webrtc-video-broadcast.
My main concern is to expose the track as a URL for a video tag. Is it possible or not? Any suggestion would be appreciated.
Server.js
This is the Node.js server code:
const express = require("express");
const app = express();

let broadcaster;
const port = 4000;

const http = require("http");
const server = http.createServer(app);
const io = require("socket.io")(server);

app.use(express.static(__dirname + "/public"));

io.sockets.on("error", e => console.log(e));
io.sockets.on("connection", socket => {
  socket.on("broadcaster", () => {
    broadcaster = socket.id;
    socket.broadcast.emit("broadcaster");
  });
  socket.on("watcher", () => {
    socket.to(broadcaster).emit("watcher", socket.id);
  });
  socket.on("offer", (id, message) => {
    socket.to(id).emit("offer", socket.id, message);
  });
  socket.on("answer", (id, message) => {
    socket.to(id).emit("answer", socket.id, message);
  });
  socket.on("candidate", (id, message) => {
    socket.to(id).emit("candidate", socket.id, message);
  });
  socket.on("disconnect", () => {
    socket.to(broadcaster).emit("disconnectPeer", socket.id);
  });
});

server.listen(port, () => console.log(`Server is running on port ${port}`));
Broadcast.js
This is the code that emits the stream (track):
const peerConnections = {};
const config = {
  iceServers: [
    {
      urls: ["stun:stun.l.google.com:19302"]
    }
  ]
};

const socket = io.connect(window.location.origin);

socket.on("answer", (id, description) => {
  peerConnections[id].setRemoteDescription(description);
});

socket.on("watcher", id => {
  const peerConnection = new RTCPeerConnection(config);
  peerConnections[id] = peerConnection;
  let stream = videoElement.srcObject;
  stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));
  peerConnection.onicecandidate = event => {
    if (event.candidate) {
      socket.emit("candidate", id, event.candidate);
    }
  };
  peerConnection
    .createOffer()
    .then(sdp => peerConnection.setLocalDescription(sdp))
    .then(() => {
      socket.emit("offer", id, peerConnection.localDescription);
    });
});

socket.on("candidate", (id, candidate) => {
  peerConnections[id].addIceCandidate(new RTCIceCandidate(candidate));
});

socket.on("disconnectPeer", id => {
  peerConnections[id].close();
  delete peerConnections[id];
});

window.onunload = window.onbeforeunload = () => {
  socket.close();
};

// Get camera and microphone
const videoElement = document.querySelector("video");
const audioSelect = document.querySelector("select#audioSource");
const videoSelect = document.querySelector("select#videoSource");

audioSelect.onchange = getStream;
videoSelect.onchange = getStream;

getStream()
  .then(getDevices)
  .then(gotDevices);

function getDevices() {
  return navigator.mediaDevices.enumerateDevices();
}

function gotDevices(deviceInfos) {
  window.deviceInfos = deviceInfos;
  for (const deviceInfo of deviceInfos) {
    const option = document.createElement("option");
    option.value = deviceInfo.deviceId;
    if (deviceInfo.kind === "audioinput") {
      option.text = deviceInfo.label || `Microphone ${audioSelect.length + 1}`;
      audioSelect.appendChild(option);
    } else if (deviceInfo.kind === "videoinput") {
      option.text = deviceInfo.label || `Camera ${videoSelect.length + 1}`;
      videoSelect.appendChild(option);
    }
  }
}

function getStream() {
  if (window.stream) {
    window.stream.getTracks().forEach(track => {
      track.stop();
    });
  }
  const audioSource = audioSelect.value;
  const videoSource = videoSelect.value;
  const constraints = {
    audio: { deviceId: audioSource ? { exact: audioSource } : undefined },
    video: { deviceId: videoSource ? { exact: videoSource } : undefined }
  };
  return navigator.mediaDevices
    .getUserMedia(constraints)
    .then(gotStream)
    .catch(handleError);
}

function gotStream(stream) {
  window.stream = stream;
  audioSelect.selectedIndex = [...audioSelect.options].findIndex(
    option => option.text === stream.getAudioTracks()[0].label
  );
  videoSelect.selectedIndex = [...videoSelect.options].findIndex(
    option => option.text === stream.getVideoTracks()[0].label
  );
  videoElement.srcObject = stream;
  socket.emit("broadcaster");
}

function handleError(error) {
  console.error("Error: ", error);
}
RemoteServer.js
This code receives the track and assigns it to the video tag:
let peerConnection;
const config = {
  iceServers: [
    {
      urls: ["stun:stun.l.google.com:19302"]
    }
  ]
};

const socket = io.connect(window.location.origin);
const video = document.querySelector("video");

socket.on("offer", (id, description) => {
  peerConnection = new RTCPeerConnection(config);
  peerConnection
    .setRemoteDescription(description)
    .then(() => peerConnection.createAnswer())
    .then(sdp => peerConnection.setLocalDescription(sdp))
    .then(() => {
      socket.emit("answer", id, peerConnection.localDescription);
    });
  peerConnection.ontrack = event => {
    video.srcObject = event.streams[0];
  };
  peerConnection.onicecandidate = event => {
    if (event.candidate) {
      socket.emit("candidate", id, event.candidate);
    }
  };
});

socket.on("candidate", (id, candidate) => {
  peerConnection
    .addIceCandidate(new RTCIceCandidate(candidate))
    .catch(e => console.error(e));
});

socket.on("connect", () => {
  socket.emit("watcher");
});

socket.on("broadcaster", () => {
  socket.emit("watcher");
});

socket.on("disconnectPeer", () => {
  peerConnection.close();
});

window.onunload = window.onbeforeunload = () => {
  socket.close();
};
rtp-to-webrtc does exactly what you want.
Unfortunately, you will need to run some sort of server to make this happen; it can't all be in the browser. You could also upload via other protocols (captured via MediaRecorder) if you don't want to use WebRTC.
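For the MediaRecorder route, a minimal sketch of the browser side might look like this; the /upload endpoint is a placeholder you would implement on your server, and the supported mimeType varies by browser:

// Sketch: record the local MediaStream and upload encoded chunks over HTTP.
function uploadViaMediaRecorder(stream) {
  const recorder = new MediaRecorder(stream, { mimeType: "video/webm" });
  recorder.ondataavailable = event => {
    if (event.data.size > 0) {
      // each chunk is a Blob of encoded WebM data
      fetch("/upload", { method: "POST", body: event.data })
        .catch(err => console.error("chunk upload failed", err));
    }
  };
  recorder.start(1000); // emit a chunk roughly every second
  return recorder; // call recorder.stop() when finished
}

In the broadcaster code above you could call uploadViaMediaRecorder(videoElement.srcObject) once the stream is live; the server could then hand the stored file to ffmpeg for the AI pipeline.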

WebRTC: connect peers on call function

I know this is probably a long shot, but I was hoping someone could point me in the right direction here.
I've made a simple peer-to-peer video connection and wrapped it inside a function, so I can call it on a button click, but it's not running.
When it's not wrapped inside the "activateVideoStream" function and just runs on load, it works fine. I have a feeling that the issue is around the async function, but I can't wrap my head around it.
Here is the code:
let isAlreadyCalling = false;

const remoteVideo = document.getElementById("remote-video");
const {
  RTCPeerConnection,
  RTCSessionDescription
} = window;

let peerConnection;

function activateVideoStream() {
  const configuration = {
    "iceServers": [{
      "urls": "stun:stun.l.google.com:19302"
    }]
  };
  console.log("Activate Video Stream");
  peerConnection = new RTCPeerConnection(configuration);
  navigator.getUserMedia({
      video: true,
      audio: true
    },
    stream => {
      const localVideo = document.getElementById("local-video");
      if (localVideo) {
        localVideo.srcObject = stream;
      }
      stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));
    },
    error => {
      console.warn(error.message);
    }
  );
  peerConnection.ontrack = function ({
    streams: [stream]
  }) {
    if (remoteVideo) {
      remoteVideo.srcObject = stream;
    }
  };
}

async function callUser(socketId) {
  console.log("Call User");
  remoteVideo.style.display = "block";
  const offer = await peerConnection.createOffer();
  await peerConnection.setLocalDescription(new RTCSessionDescription(offer));
  socket.emit("callUser", {
    offer,
    to: socketId
  });
}

socket.on("callMade", async data => {
  console.log("Call made");
  await peerConnection.setRemoteDescription(
    new RTCSessionDescription(data.offer)
  );
  const answer = await peerConnection.createAnswer();
  await peerConnection.setLocalDescription(new RTCSessionDescription(answer));
  remoteVideo.style.display = "block";
  socket.emit("makeAnswer", {
    answer,
    to: data.socket
  });
});

socket.on("answerMade", async data => {
  console.log("Answer made");
  await peerConnection.setRemoteDescription(
    new RTCSessionDescription(data.answer)
  );
  if (!isAlreadyCalling) {
    callUser(data.socket);
    isAlreadyCalling = true;
  }
});
I've noticed that "peerConnection.connectionState" inside "callUser" is set to "new", but when the code is not wrapped in the function it's set to "complete", so that's probably the issue.
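Not a definitive fix, but one way to test that suspicion is to make activateVideoStream async and await the promise-based navigator.mediaDevices.getUserMedia, so the tracks are attached to the connection before callUser can create an offer. A sketch, reusing the variables from the code above:

async function activateVideoStream() {
  const configuration = {
    iceServers: [{ urls: "stun:stun.l.google.com:19302" }]
  };
  peerConnection = new RTCPeerConnection(configuration);
  // Wait for the media before anything can create an offer.
  const stream = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });
  const localVideo = document.getElementById("local-video");
  if (localVideo) {
    localVideo.srcObject = stream;
  }
  stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));
  peerConnection.ontrack = ({ streams: [stream] }) => {
    if (remoteVideo) {
      remoteVideo.srcObject = stream;
    }
  };
}

The button handler would then await activateVideoStream() before any call is made, so the offer includes the added tracks.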

How come the Drive API returns a result when using an invalid access token?

My scenario
I'm using the Google Drive API to create a file and to get a list of files.
My problem
1. No matter what value I put in my access_token, the API keeps working.
2. If I change the order of events and call createDriveFile before I call listDriveFiles, I get this error:
Error: Invalid Credentials
    at Gaxios._request (/Users/tamirklein/superquery/bd/lambda/node_modules/googleapis-common/node_modules/google-auth-library/node_modules/gaxios/src/gaxios.ts:109:15)
    at <anonymous>
    at process._tickDomainCallback (internal/process/next_tick.js:228:7)
My code
if (!global._babelPolyfill) {
  var a = require("babel-polyfill");
}
import {google} from 'googleapis';

describe('Run query with API', async () => {

  it('check Drive APIs', async () => {
    process.env.x_region = 'us-east-1';
    let result = await test('start');
  });

  async function test(p1) {
    let auth = getBasicAuthObj();
    auth.setCredentials({
      access_token: "anyValueWork",
      refresh_token: "Replace With a valid refresh Token"
    });
    let fileList = await listDriveFiles(auth);
    let newFile = await createDriveFile(auth);
  }

  async function listDriveFiles(auth) {
    return new Promise((resolved) => {
      const {google} = require('googleapis');
      const drive = google.drive({version: 'v3', auth});
      drive.files.list({
        pageSize: 10,
        fields: 'nextPageToken, files(id, name)',
        q: 'trashed=false'
      }, (err, res) => {
        if (err) {
          console.log('The API returned an error: ' + err);
          resolved([err, null]);
        } else {
          const files = res.data.files;
          if (files.length) {
            console.log(`We fetched ${files.length} Files`);
            // files.map((file) => {
            //   console.log(`${file.name} (${file.id})`);
            // });
          } else {
            console.log('No files found.');
          }
          resolved([err, res]);
        }
      });
    });
  }

  async function createDriveFile(auth) {
    return new Promise(async (resolved) => {
      //const fs = require('fs');
      const {google} = require('googleapis');
      const drive = google.drive({version: 'v3', auth});

      let data = {
        value: 'value'
      };
      let fileName = 'fileName.txt';
      let fileMetadata = {
        'name': fileName
      };

      // create buffer
      let stream = require('stream');
      let bufferStream = new stream.PassThrough();
      bufferStream.end(Buffer.from(JSON.stringify(data)));

      let media = {
        mimeType: 'application/json',
        body: bufferStream // fs.createReadStream("test.txt")
      };

      drive.files.create({
        resource: fileMetadata,
        media: media,
        fields: 'id'
      }, function (err, file) {
        if (err) {
          // Handle error
          console.error("Error: savePasswordInDrive" + err);
        } else {
          console.log('File Id: ', file.data.id);
        }
        resolved([err, file]);
      });
    });
  }

  async function _wait(milliseconds) {
    return new Promise(resolved => {
      setTimeout(() => {
        resolved();
      }, milliseconds);
    });
  }

  /**
   * Create oAuth object
   * @returns {OAuth2Client}
   */
  function getBasicAuthObj() {
    let clientId = 'Replace With a valid clientId';
    let clientSecret = 'Replace With a valid clientSecret';
    let redirectUrl = 'URL';
    return new google.auth.OAuth2(
      clientId,
      clientSecret,
      redirectUrl
    );
  }
});
Any ideas on how to resolve this?
