How to parse binary data ("multipart/form-data") in Koa?

If I send a POST request with plain text fields, everything works.
Request from the front end:
const request = require("request")
const options = {
  method: 'POST',
  url: 'http://localhost:4000/user',
  form: { data: '12345' }
}
On the server side (Koa) I can read the parsed data of the above request:
ctx.request.method: "POST"
ctx.request.originalUrl: "user"
ctx.request.body.data: "12345"
But if I send a POST request with binary data (a file):
const fs = require("fs");
const request = require("request");
const options = {
  method: 'POST',
  url: 'http://localhost:4000/user',
  headers: { 'content-type': 'multipart/form-data' },
  formData: {
    '': {
      value: fs.createReadStream("F:\\image.jpg"),
      options: {
        filename: 'F:\\image.jpg',
        contentType: null
      }
    }
  }
};
I don't know how to access this binary data (image.jpg) on the server side (Koa); I can't find any field in ctx.request that contains it.

You can use busboy for this. I originally wrote this as a gist, but I'll embed it here with some comments.
Let's create a helper for parsing out the file in a promise-friendly way.
// parse.js
import Busboy from 'busboy'

/**
 * Parses a single file from a Node request.
 *
 * @param {http.IncomingMessage} req
 * @return {Promise<{ file: Stream, filename: string }>}
 */
export default function parse (req) {
  return new Promise((resolve, reject) => {
    const busboy = new Busboy({
      headers: req.headers,
      limits: {
        files: 1 // allow only a single upload at a time
      }
    })

    busboy.once('file', _onFile)
    busboy.once('error', _onError)
    req.pipe(busboy)

    function _cleanup () {
      busboy.removeListener('file', _onFile)
      busboy.removeListener('error', _onError)
    }

    function _onFile (fieldname, file, filename) {
      _cleanup()
      resolve({ file, filename })
    }

    function _onError (err) {
      _cleanup()
      reject(err)
    }
  })
}
Now we need to use it. Let's assume you want to upload to AWS S3.
import Koa from 'koa'
import parse from './parse'
import AWS from 'aws-sdk'

const app = new Koa()
const s3 = new AWS.S3({
  params: { Bucket: 'myBucket' }
})

// Assuming this is a route handler.
app.use(async (ctx) => {
  const { file, filename } = await parse(ctx.req)
  // `file` is a Stream. Pass it to S3, Azure Blob Storage, or whatever you want.
  // `filename` is the file name specified by the client.
  const result = await s3.upload({
    Key: filename,
    Body: file
  }).promise()
  ctx.body = result
})
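If you'd rather skip S3, the same stream can be written straight to disk. A minimal sketch, assuming an existing uploads/ directory (the directory name is my own choice, not from the original gist):
import fs from 'fs'
import path from 'path'

// Alternative handler: persist the upload to local disk instead of S3.
app.use(async (ctx) => {
  const { file, filename } = await parse(ctx.req)
  // basename() guards against client-supplied directory segments in the name.
  const dest = path.join('uploads', path.basename(filename))
  await new Promise((resolve, reject) => {
    file.pipe(fs.createWriteStream(dest))
      .once('finish', resolve)
      .once('error', reject)
  })
  ctx.body = { filename }
})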
For brevity's sake, this is how you upload the file using axios on the client:
// `file` is a DOM File object.
function upload (file) {
  const data = new window.FormData()
  data.append('file', file, file.name)
  return axios.post('/upload', data)
}
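Note the client posts to /upload, while the server snippet above registers a bare app.use middleware that handles every request. In a real app you'd likely scope it to one route; a sketch with koa-router (assuming that's the router you use):
import Router from 'koa-router'

const router = new Router()
// Only handle multipart uploads on POST /upload, matching the client code.
router.post('/upload', async (ctx) => {
  const { file, filename } = await parse(ctx.req)
  // ...stream `file` to S3 or disk as shown above
  ctx.body = { filename }
})
app.use(router.routes())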

Related

POST bulk.json with axios to Elasticsearch

I am trying to make a bulk POST with Elasticsearch and axios, and I have a problem with the POST request:
axios.put('http://localhost:9200/indexrandom881', {'products-bulk.json'
});
Adding and deleting an index works.
Deleting an index:
axios.delete('http://localhost:9200/indexrandom'+x, {
});
Adding an index:
axios.put('http://localhost:9200/indexrandom881'+x, {
});
Does anyone have an idea? In brief, I need this command in axios form:
curl -H "Content-Type: application/x-ndjson" -XPOST http://localhost:9200/products/_bulk --data-binary "@products-bulk.json"
Thanks
Thanks @Joe Sorocin, but that works only in Node.js. I need to implement it in React, and there it shows the error fs.readFile is not a function.
The full code is:
File: App.js
function App() {
  const axios = require('axios');
  const fs = require('fs');
  const filePath = __dirname + '/national-es-bulk-index22.json';
  const indexName = 'indexrandom881';
  const url = `http://localhost:9200/${indexName}/_bulk`;
  fs.readFile(filePath, async (err, jsonData) => {
    if (err) {
      console.error({ err });
      return;
    }
    const { data } = await axios.post(url, jsonData, {
      headers: {
        'Content-Type': 'application/x-ndjson'
      }
    });
    console.info({ data });
  });
  return (
    <div className="App">
      <h1>test</h1>
    </div>
  );
}
export default App;
Use post instead of put. Also, you'll need to first read the file using fs before you pass it along to Elasticsearch with the application/x-ndjson header:
const axios = require('axios');
const fs = require('fs');

const filePath = __dirname + '/products-bulk.json';
const indexName = 'indexrandom881';
const url = `http://localhost:9200/${indexName}/_bulk`;

fs.readFile(filePath, async (err, jsonData) => {
  if (err) {
    console.error({ err });
    return;
  }
  const { data } = await axios.post(url, jsonData, {
    headers: {
      'Content-Type': 'application/x-ndjson'
    }
  });
  console.info({ data });
});
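Regarding the React update: fs only exists in Node.js, so in the browser you have to get the file's contents some other way. One hedged sketch: serve the bulk file as a static asset (e.g. from the public/ folder, so the dev server exposes it) and fetch it over HTTP before posting it to Elasticsearch. The asset path below is an assumption:
const axios = require('axios');

async function bulkIndex() {
  const indexName = 'indexrandom881';
  const url = `http://localhost:9200/${indexName}/_bulk`;
  // Hypothetical static asset: put products-bulk.json in public/ so it's served over HTTP.
  const { data: ndjson } = await axios.get('/products-bulk.json', {
    // Keep the body as raw text; NDJSON is not valid JSON and must not be parsed.
    transformResponse: [(data) => data]
  });
  const { data } = await axios.post(url, ndjson, {
    headers: { 'Content-Type': 'application/x-ndjson' }
  });
  console.info({ data });
}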

gapi.client.drive.files.create does not work

I'm writing a Vue app. I read this sample code and wrote the following:
const apiKey = 'mykey';
const discoveryDocs = ["https://www.googleapis.com/discovery/v1/apis/drive/v3/rest"]
const clientId = 'myclientid'
const scopes = 'https://www.googleapis.com/auth/drive.appdata'

function handleClientLoad() {
  gapi.load('client:auth2', initClient);
}

function initClient() {
  gapi.client.init({
    apiKey,
    discoveryDocs,
    clientId,
    scope: scopes
  }).then(function () {
    createFile()
  });
}

function createFile() {
  console.log('createFile')
  var fileMetadata = {
    'name': 'config.json',
    'parents': ['appDataFolder']
  };
  var media = {
    mimeType: 'application/json',
    body: "body"
  };
  gapi.client.drive.files.create({
    resource: fileMetadata,
    media,
    fields: 'id'
  }, function (err, file) {
    console.log('function in createFile')
    if (err) {
      console.error(err);
    } else {
      console.log('Folder Id:', file.id);
    }
  });
}

window.onload = handleClientLoad()
In the console, 'createFile' is logged but 'function in createFile' is not, so I think the function (err, file) callback never runs.
What is wrong?
I want the sample code to work.
I had the same issue. The function create() returns a promise; to actually execute the request, it seems to need a then(). See also this post.
The example code still does not work, though: you will get a 403 The user does not have sufficient permissions for this file error. This seems to happen because the example code creates the file not in appDataFolder but in the root directory.
I managed to get it to work using the following code. Putting all request parameters flat into the object passed to create() seems to do the trick.
import { Readable } from "stream"; // Node stream; in a browser build this comes from the bundler's polyfill

const s = new Readable();
s.push("beep"); // the string you want
s.push(null);

gapi.client.drive.files
  .create({
    name: "config.json",
    parents: ["appDataFolder"],
    mimeType: "application/json",
    upload_type: "media",
    fields: "id",
    body: s,
  })
  .then(function (response) {
    if (response.status === 200) {
      var file = response.result;
      console.log(file);
    }
  })
  .catch(function (error) {
    console.error(error);
  });

How to take a photo and upload it to the server with nativescript-camera

I am new to NativeScript-Vue development. I am trying to take a photo and send it to the server. My code works fine on Android, but when I run it on iOS, errors occur: the image doesn't even appear on the page, and it doesn't upload to the server.
import * as camera from "nativescript-camera";
import * as bghttp from "nativescript-background-http";
const firebase = require("nativescript-plugin-firebase");

var session = bghttp.session("image-upload");

takePicture() {
  camera.requestPermissions()
    .then(() => {
      camera.takePicture({ width: 300, height: 300, keepAspectRatio: true, saveToGallery: true })
        .then(imageAsset => {
          this.img = imageAsset.android;
        })
        .catch(e => {
          console.log('error:', e);
        });
    })
    .catch(e => {
      console.log('Error requesting permission');
    });
}
upload() {
  var file = this.img;
  var url = "https://bocorp.ru/assets/mobileNewOrder.php";
  var name = file.substr(file.lastIndexOf("/") + 1);

  // upload configuration
  var bghttp = require("nativescript-background-http");
  var session = bghttp.session("image-upload");
  var request = {
    url: url,
    method: "POST",
    headers: {
      "Content-Type": "application/octet-stream",
      "File-Name": name,
    },
    content: JSON.stringify({
      Title: title
    }),
    description: "Uploading " + name
  };
  var task = session.uploadFile(file, request);
}
I understand that something else should be used in place of "this.img = imageAsset.android;", but I don't understand how I can get the photo from the iPhone camera. I would be glad of any pointer.
We save our images to the device, and then upload later as a multipart upload. You might be able to skip the file saving part, but it does allow us to keep from reading in the entire image for uploading later in our app flow (I guess if you already have the image source for display you could reuse it for upload on the same page).
Hope you find this helpful.
const imageSource = require('tns-core-modules/image-source')
// ...
camera.takePicture(cameraOpts)
  .then(imageAsset => {
    return imageSource.fromAsset(imageAsset)
  })
  .then(imageSource => {
    let pathDest = '/path/on/device' // you define
    console.log(`Created image source with width=${imageSource.width} height=${imageSource.height} at ${pathDest}`)
    imageSource.saveToFile(pathDest, 'jpg', 50)
    return pathDest // save this to look up later
  })
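The '/path/on/device' value is left for you to define; one way to pick a destination that works on both iOS and Android is to build it from a known folder. A sketch using tns-core-modules/file-system (the file-name pattern is my own):
const fsModule = require('tns-core-modules/file-system')

// Hypothetical helper: a writable, platform-independent destination for the photo.
function buildPhotoPath () {
  const fileName = `photo-${Date.now()}.jpg`
  return fsModule.path.join(fsModule.knownFolders.temp().path, fileName)
}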
Then, when we need to upload:
const mime = require('mime-types')
import * as bghttp from 'nativescript-background-http'
// ...
let session = bghttp.session('image-upload')
let request = {
  url: 'https://yourendpoint.com/here',
  method: 'POST',
  androidAutoDeleteAfterUpload: true,
  headers: {
    'Content-Type': 'application/octet-stream',
  }
}
// photoPath is known somehow. We use Vuex, but somehow it makes it to this page.
let params = [
  { name: 'photo1', filename: photoPath, mimeType: mime.lookup(photoPath) }
]
return new Promise((resolve, reject) => {
  let task = session.multipartUpload(params, request)
  task.on('error', (e) => {
    reject(e)
  })
  task.on('complete', res => {
    resolve()
  })
})

How can I encode a Cypress stub response with Protobuf?

I have some fixtures to stub a server that encodes its messages with protobuf (I'm using protobufjs). I'd like to keep the fixtures decoded so they're easy to manipulate, and have Cypress encode the stub body before sending the response to the client. How can I do that?
[UPDATE] It's now available as a Cypress plugin.
Here's my solution:
cypress/plugins/protobufjs/index.js (where the protobuf definitions are loaded):
const path = require("path");
const protobufjs = require("protobufjs");
const definition = path.join(__dirname, "../../../public/escrow/ui.proto");
const proto = protobufjs.loadSync(definition);
module.exports = {
Status: proto.lookupType("escrow.Status"),
};
cypress/plugins/index.js (where the encoding happens, in a custom Cypress task):
const { StringDecoder } = require("string_decoder");
const Messages = require("./protobufjs");

module.exports = on => {
  on("task", {
    protobufEncode: ({ data, encoderName }) => {
      const decoder = new StringDecoder("utf8");
      const bufferValue = Messages[encoderName].encode(data).finish();
      return decoder.end(Buffer.from(bufferValue));
    }
  });
};
In your test:
cy.fixture("YOUR_FIXTURE.json").then(async json => {
  cy.task("protobufEncode", { encoderName: "Status", data: json }).then(result => {
    cy.route({
      headers: {
        "content-type": "application/octet-stream"
      },
      method: "GET",
      response: result,
      status: 200,
      url: `**/YOUR_URL`
    }).as("route_status_one_usb_key");
  });
});
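If you want to sanity-check that the task encodes correctly, you can round-trip a fixture through protobufjs in plain Node, reusing the Messages module from above (the fixture fields are placeholders):
const { Status } = require("./protobufjs");

// Encode a plain-object fixture, then decode it back and compare.
const encoded = Status.encode({ /* your fixture fields */ }).finish();
const decoded = Status.toObject(Status.decode(encoded));
console.log(decoded);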

redux saga ajax call - not working as expected

I have this redux-saga code where everything works okay until the promise; after that, things start to go wrong.
Here's the relevant code:
const firstApiRequest = () => {
  return $.ajax({
    url: myUrl,
    type: 'POST',
    headers: {
      "Accept": "application/json",
      "Content-Type": "application/json",
    },
    data: JSON.stringify(bodyData),
    success: function (res) {
      console.log(res);
      return res;
    }
  })
};
export function* startCheckout() {
  try {
    yield put(showLoading());
    const data = yield call(firstApiRequest);
    yield put({ type: FIRST_REQUEST_DONE, payload: data });
  } catch (err) {
    yield put(firstRequestFail(err));
  }
}

export function* checkout() {
  yield takeEvery(SEND_FIRST_REQUEST, startCheckout);
}
The problem is that after the return res in firstApiRequest, I wanted to use the data to dispatch the FIRST_REQUEST_DONE action, but instead the flow goes to FIRST_REQUEST_FAIL and sets error to true.
The API response comes back successfully, yet when the flow reaches the FIRST_REQUEST_FAIL branch of the reducer, the data shows up as the error.
Here's the reducer code. The flow goes to:
case 'FIRST_REQUEST_FAIL':
  return {
    loading: false,
    error: true,
    errorMessage: action.err,
  };
instead of going to:
case 'FIRST_REQUEST_DONE':
  return {
    id: action.id,
  };
So what's wrong with the code here? Why does it show an error even after a successful response from the server?
You shouldn't be defining a success callback in your API request; $.ajax returns a promise on its own:
const firstApiRequest = () => (
  $.ajax({
    url: myUrl,
    type: 'POST',
    headers: {
      "Accept": "application/json",
      "Content-Type": "application/json",
    },
    data: JSON.stringify(bodyData),
  }));
Also, why are you using jQuery for making the API requests? I'd suggest using axios or fetch
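For example, a minimal sketch of the same request with axios, reusing your myUrl and bodyData (axios stringifies the JSON body for you):
import axios from 'axios'

// Same call as firstApiRequest, without jQuery and without a success callback.
const firstApiRequest = () =>
  axios.post(myUrl, bodyData, {
    headers: { 'Accept': 'application/json', 'Content-Type': 'application/json' }
  }).then(res => res.data)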
Here is an approach to handling API requests using redux-saga. First, create a request helper:
import 'whatwg-fetch';

function parseJSON(response) {
  return response.json ? response.json() : response;
}

/**
 * Checks if a network request came back fine, and throws an error if not.
 *
 * @param {object} response A response from a network request
 *
 * @return {object|undefined} Returns either the response, or throws an error
 */
function checkStatus(response, checkToken = true) {
  if (response.status >= 200 && response.status < 300) {
    return response;
  }
  return parseJSON(response).then(responseFormatted => {
    const error = new Error(response.statusText);
    error.response = response;
    error.response.payload = responseFormatted;
    throw error;
  });
}

/**
 * Requests a URL, returning a promise
 *
 * @param {string} url The URL we want to request
 * @param {object} [options] The options we want to pass to "fetch"
 *
 * @return {object} The response data
 */
export default function request(url, options = {}) {
  // Set headers
  if (!options.headers) {
    options.headers = Object.assign({
      'Content-Type': 'application/json',
    }, options.headers, {});
  }
  // Stringify body object
  if (options && options.body) {
    options.body = JSON.stringify(options.body);
  }
  return fetch(url, options)
    .then(checkStatus)
    .then(parseJSON)
}
In your saga:
import { call, fork, put, takeLatest } from 'redux-saga/effects';
import request from 'utils/request';
import { submitSuccess, submitError } from './actions'; // path to your actions
import { SUBMIT } from './constants'; // the event you're listening for

export function* submitData(action) {
  try {
    const response = yield call(request, 'your_url', { method: 'POST', body: action.body });
    yield put(submitSuccess(response));
  } catch (err) {
    yield put(submitError(err.response.payload.message));
  }
}

export function* defaultSaga() {
  yield fork(takeLatest, SUBMIT, submitData);
}

export default defaultSaga;
Reducer
import { fromJS } from 'immutable';

const initialState = fromJS({
  submitSuccess: false,
  submitResponse: '',
  errorMessage: '',
});

function fooReducer(state = initialState, action) {
  switch (action.type) {
    case SUBMIT_SUCCESS:
      return state
        .update('submitSuccess', () => true)
        .update('submitResponse', () => action.response);
    case SUBMIT_ERROR:
      return state.update('errorMessage', () => action.errorMessage);
    // ...
  }
}
With this structure you should be able to catch both the success and the error when you're making your request.
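To kick off the whole flow, dispatch the SUBMIT action from a component; submitData reads the payload from action.body. The action-creator name here is my own:
// Hypothetical action creator matching what the saga expects.
const submit = (body) => ({ type: SUBMIT, body });

// e.g. in a connected component:
dispatch(submit({ name: 'foo' }));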
