MEAN: Serve File from GridFS

I am using the MEAN stack and I am trying to serve a file stored with MongoDB GridFS.
This is the code I have tried so far:
exports.getFile = function(req, res) {
    var fileId = req.params.fileId;
    gfs.findOne({ _id: fileId }, function (err, file) {
        if (err) { res.status(500).send(err) }
        console.log('file meta', file);
        res.header("Content-Type", file.contentType);
        var readstream = gfs.createReadStream({
            filename: file.filename
        });
        // error handling, e.g. file does not exist
        readstream.on('error', function (err) {
            console.log('An error occurred!', err);
            throw err;
        });
        readstream.on('close', function(file) {
            console.log('file content', file);
        });
        readstream.pipe(res);
    });
};
This is what I get on the console:
file meta { _id: 566c0b6ddfb625ae52ce99c7,
filename: 'profile_picture_by_kyo_tux-d4hrimy.png',
contentType: 'binary/octet-stream',
length: 0,
chunkSize: 261120,
uploadDate: Sat Dec 12 2015 17:26:29 GMT+0530 (IST),
aliases: null,
metadata: null,
md5: 'd41d8cd98f00b204e9800998ecf8427e' }
file content undefined
GET /api/users/file/566c0b6ddfb625ae52ce99c7 200 9ms
I am not getting the file on the webpage.
What am I doing wrong?
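One detail worth noticing in the logged metadata: length is 0, and d41d8cd98f00b204e9800998ecf8427e is the MD5 of empty input, so the stored file itself appears to be empty; the problem is likely on the upload side rather than in this handler. For the serving side, here is a minimal sketch (assuming the same gridfs-stream gfs handle as in the question) that streams by _id and guards against a missing or empty file:

exports.getFile = function (req, res) {
    gfs.findOne({ _id: req.params.fileId }, function (err, file) {
        if (err) { return res.status(500).send(err); } // return so we don't fall through
        if (!file || file.length === 0) {
            return res.status(404).send('File not found or empty');
        }
        res.header('Content-Type', file.contentType);
        // Stream by _id rather than filename, since filenames need not be unique.
        gfs.createReadStream({ _id: file._id }).pipe(res);
    });
};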

Related

async/await, Node.js readFile, and then do other work

I tried to read a file using async/await and then update the credentials, but I got "HI" logged first and then "hello" in the console, and the credentials were not updated on oauth2Client.
const getFile = async (req, res, next) => {
    await fs.readFile(TOKEN_PATH, (err, token) => {
        if (err) return null;
        console.log("hello");
        console.log(JSON.parse(token));
        oauth2Client.credentials = JSON.parse(token);
    });
    console.log("HI");
    var service = google.drive({
        version: 'v3',
        encoding: null
    });
    console.log(oauth2Client);
    await service.files.get({
        auth: oauth2Client,
        fileId: "1ZR8kkvb2JYVxcUjmlgfBJD2IYnisaiFn",
        alt: 'media'
    }, function(err, response) {
        if (err) {
            console.log('The API returned an error: ' + err);
            return;
        }
        responder(res)(null, response);
    });
}
Is there a way to make everything run in order?
Thank you.
The reason you are seeing this behavior is that the method you are running is asynchronous and takes a callback, so await has no effect on it. You should instead use the synchronous alternative:
let token;
try {
    token = fs.readFileSync(TOKEN_PATH, 'utf8');
} catch (err) {
    console.error(err);
}
if (!token) { return; }
oauth2Client.credentials = JSON.parse(token); // parse the JSON string, as in the original code
const service = google.drive({
    version: 'v3',
    encoding: null
});
await service.files.get({
    auth: oauth2Client,
    fileId: "1ZR8kkvb2JYVxcUjmlgfBJD2IYnisaiFn",
    alt: 'media'
}, function(err, response) {
    if (err) {
        console.log('The API returned an error: ' + err);
        return;
    }
    responder(res)(null, response);
});
Ref: https://nodejs.dev/learn/reading-files-with-nodejs
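If you would rather keep the read non-blocking, a sketch using the promise-based fs API also works (assuming a Node version with fs.promises and the same TOKEN_PATH and oauth2Client as above):

const fs = require('fs').promises; // promise-based API on modern Node

const getFile = async (req, res, next) => {
    let token;
    try {
        // await actually waits here: readFile returns a promise
        token = await fs.readFile(TOKEN_PATH, 'utf8');
    } catch (err) {
        console.error(err);
        return;
    }
    oauth2Client.credentials = JSON.parse(token);
    console.log("HI"); // logged only after the token has been read and applied
    // ...continue with google.drive() and service.files.get as before
};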

gapi.client.drive.files.create does not work

I'm writing a Vue app. I read this sample code and wrote code like this:
const apiKey = 'mykey';
const discoveryDocs = ["https://www.googleapis.com/discovery/v1/apis/drive/v3/rest"];
const clientId = 'myclientid';
const scopes = 'https://www.googleapis.com/auth/drive.appdata';

function handleClientLoad() {
    gapi.load('client:auth2', initClient);
}

function initClient() {
    gapi.client.init({
        apiKey,
        discoveryDocs,
        clientId,
        scope: scopes
    }).then(function () {
        createFile();
    });
}

function createFile() {
    console.log('createFile');
    var fileMetadata = {
        'name': 'config.json',
        'parents': ['appDataFolder']
    };
    var media = {
        mimeType: 'application/json',
        body: "body"
    };
    gapi.client.drive.files.create({
        resource: fileMetadata,
        media,
        fields: 'id'
    }, function (err, file) {
        console.log('function in createFile');
        if (err) {
            console.error(err);
        } else {
            console.log('Folder Id:', file.id);
        }
    });
}

window.onload = handleClientLoad();
In the console, 'createFile' is logged but 'function in createFile' is not, so I think the function (err, file) callback never runs.
What is wrong? I want the sample code to work.
I had the same issue. The create() function returns a promise; to execute the request, it seems to need a .then(). See also this post.
Even then, the example code does not work: you will get a "403: The user does not have sufficient permissions for this file" error. This seems to happen because the example code creates the file not in appDataFolder but in the root directory.
I managed to get it to work using the following code. Putting all request parameters flat into the object passed to create() seems to do the trick.
const { Readable } = require("stream"); // Node's stream module; in a browser build this assumes a bundler polyfill

const s = new Readable();
s.push("beep"); // the string you want
s.push(null);   // signal end of stream
gapi.client.drive.files
    .create({
        name: "config.json",
        parents: ["appDataFolder"],
        mimeType: "application/json",
        upload_type: "media",
        fields: "id",
        body: s,
    })
    .then(function (response) {
        if (response.status === 200) {
            var file = response.result;
            console.log(file);
        }
    })
    .catch(function (error) {
        console.error(error);
    });
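As a side note, the 403 in the root directory is consistent with the question's https://www.googleapis.com/auth/drive.appdata scope: that scope only grants access to the application data folder, so a create() that ends up targeting the root of My Drive is rejected.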

What is the correct field to pass --staging-location parameter for a Dataflow job in Node.js?

I wonder if I've hit a bug. I wrote a Node.js piece of code to trigger a "GCS Text to PubSub" Dataflow job. The function is triggered upon file upload into a GCS bucket.
But it never executes successfully: "textPayload: "problem running dataflow template, error was: { Error: Invalid JSON payload received. Unknown name "staging_location": Cannot find field."" It is an issue with the syntax with which I specify the staging location for the job. I have tried "staginglocation", "stagingLocation", etc.; none of them have worked.
Here's my code. Thanks for your help.
var {google} = require('googleapis');

exports.moveDataFromGCStoPubSub = (event, callback) => {
    const file = event.data;
    const context = event.context;

    console.log(`Event ${context.eventId}`);
    console.log(`  Event Type: ${context.eventType}`);
    console.log(`  Bucket: ${file.bucket}`);
    console.log(`  File: ${file.name}`);
    console.log(`  Metageneration: ${file.metageneration}`);
    console.log(`  Created: ${file.timeCreated}`);
    console.log(`  Updated: ${file.updated}`);

    google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
            throw err;
        }
        console.log(projectId);
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        console.log(`gs://${file.bucket}/${file.name}`);
        dataflow.projects.templates.create({
            projectId: projectId,
            resource: {
                parameters: {
                    inputFile: `gs://${file.bucket}/${file.name}`,
                    outputTopic: `projects/iot-fitness-198120/topics/MemberFitnessData`,
                },
                jobName: 'CStoPubSub',
                gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
                stagingLocation: 'gs://fitnessanalytics-tmp/tmp'
            }
        }, function(err, response) {
            if (err) {
                console.error("problem running dataflow template, error was: ", err);
            }
            console.log("Dataflow template response: ", response);
            callback();
        });
    });

    callback();
};
I don't think this is actually possible.
Looking at the documentation for the Dataflow API itself, there's nothing like a staging location in the parameter section, and the library you're using is basically a wrapper for this API.
I'm a bit surprised it changes the name of the parameter though.
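To make the shape concrete, here is a hedged sketch of the request body templates.create accepts in v1b3 (names like my-project and my-bucket are placeholders): template-specific values go under parameters, while the staging/temp directory belongs to the runtime environment object, which is the closest equivalent of --staging-location.

// Sketch only: request body shape for dataflow.projects.templates.create (v1b3)
dataflow.projects.templates.create({
    projectId: 'my-project',                    // placeholder
    resource: {
        jobName: 'my-job',
        gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
        parameters: {                           // template parameters only
            inputFilePattern: 'gs://my-bucket/input.txt',
            outputTopic: 'projects/my-project/topics/my-topic'
        },
        environment: {                          // runtime settings live here
            tempLocation: 'gs://my-bucket/tmp'
        }
    }
}, callback);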
So I finally got this to work. It was indeed a syntax issue in the parameters section. The code below works like a charm:
var {google} = require('googleapis');

exports.moveDataFromGCStoPubSub = (event, callback) => {
    const file = event.data;
    const context = event.context;

    console.log(`Event ${context.eventId}`);
    console.log(`  Event Type: ${context.eventType}`);
    console.log(`  Bucket: ${file.bucket}`);
    console.log(`  File: ${file.name}`);
    console.log(`  Metageneration: ${file.metageneration}`);
    console.log(`  Created: ${file.timeCreated}`);
    console.log(`  Updated: ${file.updated}`);

    google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
            throw err;
        }
        console.log(projectId);
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        console.log(`gs://${file.bucket}/${file.name}`);
        dataflow.projects.templates.create({
            gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
            projectId: projectId,
            resource: {
                parameters: {
                    inputFilePattern: `gs://${file.bucket}/${file.name}`,
                    outputTopic: 'projects/iot-fitness-198120/topics/MemberFitnessData2'
                },
                environment: {
                    tempLocation: 'gs://fitnessanalytics-tmp/tmp'
                },
                jobName: 'CStoPubSub',
                //gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
            }
        }, function(err, response) {
            if (err) {
                console.error("problem running dataflow template, error was: ", err);
            }
            console.log("Dataflow template response: ", response);
            callback();
        });
    });

    callback();
};

uploading profile pic in hapi.js 17.0

I am using hapi.js version 17.0.1. I am trying to upload an image using an AJAX request to a hapi.js route. Here is my AJAX code to upload a profile pic:
var image_file_input = document.getElementById("user_profile_upload");
image_file_input.onchange = function () {
    if (this.files != undefined) {
        if (this.files[0] != undefined) {
            var formData = tests.formdata ? new FormData() : null;
            if (tests.formdata) {
                //alert(file)
                formData.append('image_file', this.files[0]);
                formData.append('userId', user_id);
                formData.append('memberId', member_id);
            }
            $.ajax({
                url: "/v1/User/uploadUserPic",
                data: formData,
                type: "POST",
                dataType: "json",
                contentType: false,
                processData: false,
                contentType: "multipart/form-data",
                success: function (data) {
                    console.log(data);
                    var errMsg = null;
                    var resData = null;
                    if (data.statusCode == 200) {
                        resData = data.result;
                    } else {
                        alert(data.message);
                    }
                },
                error: function (error) {
                    alert(error);
                }
            });
        }
    }
};
And here is my hapi.js route code:
var uploadUserPic = {
    method: 'POST',
    path: '/v1/Module/uploadUserPic',
    config: {
        description: 'Update Image For User',
        tags: ['api', 'User'],
        auth: 'session',
        payload: {
            output: 'stream',
            parse: true,
            allow: 'multipart/form-data'
        },
        validate: {
            payload: {
                userId: Joi.string().regex(/^[a-f\d]{24}$/i).required(),
                memberId: Joi.string().required(),
                image_file: Joi.object().required(),
            },
            failAction: FailCallBack
        }
    },
    handler: function (request, reply) {
        var resultData = null;
        var error = null;
        return new Promise(function (resolve) {
            var multiparty = require('multiparty');
            var fs = require('fs');
            var form = new multiparty.Form();
            form.parse(request.payload, function (err, fields, files) {
                if (err) {
                    error = err;
                    resolve();
                } else {
                    var mkdirp = require('mkdirp');
                    var img_dir = "./files/users/";
                    mkdirp(img_dir, function (err) {
                        if (err) {
                            error = err;
                            console.error(err);
                            resolve();
                        } else {
                            var oldpath = files.image_file.path;
                            var newpath = "./files/users/" + requestPayload.userId + ".png";
                            fs.rename(oldpath, newpath, function (err) {
                                if (err) {
                                    error = err;
                                }
                                resolve();
                            });
                        }
                    });
                }
            });
        }).then(function (err, result) {
            if (err) return sendError(err);
            if (error) return sendError(error);
            return {
                "statusCode": 200,
                "success": true
            };
        });
    }
}
The above code gives me the following error: cannot read property 'content-length' of undefined on the line form.parse(request.payload, function (err, fields, files) {});
Please let me know if I am doing something wrong. If I replace the URL in the AJAX request with another URL that I have written in PHP, it works perfectly, which means that something is wrong with my hapi.js/Node.js code.
There's a good post on how to handle file uploads in hapi.js (written for version 16): https://scotch.io/bar-talk/handling-file-uploads-with-hapi-js
Since you are using payload.parse = true, I don't see a particular reason why you have to use multiparty. I have the following working code that saves files (of any type) uploaded from the client into the uploads directory on the server (please do not use this directly in production, as no sanitization is done):
{
    path: '/upload',
    method: 'POST',
    config: {
        payload: {
            output: 'stream',
            parse: true,
            allow: 'multipart/form-data'
        },
        validate: {
            payload: {
                files: Joi.array().single()
            }
        }
    },
    handler: function (request) {
        const p = request.payload, files = p.files;
        if (files) {
            console.log(`${files.length} files`);
            files.forEach(async file => {
                const filename = file.hapi.filename;
                console.log(`Saving ${filename} to ./uploads`);
                const out = fs.createWriteStream(`./uploads/${filename}`);
                await file.pipe(out);
            });
        }
        return { result: 'ok' };
    }
}
You can use the following curl command to test:
curl http://localhost:8080/upload -F 'files=@/path/to/a/note.txt' -F 'files=@/path/to/test.png' -vvv
There are a few issues with your code. First, in your $.ajax call you have specified contentType twice; it's not a syntax error, but it's careless to code like that. Second, the function signature inside your .then() block is incorrect: you are mixing the idea of promises and callbacks, since a .then() callback receives a single fulfillment value, not (err, result). I don't think the following line will ever be triggered:
if (err) return sendError(err);
One last trivial thing: you said you are using hapi 17, but based on the handler function's signature
handler: function (request, reply) {
...
it seems you are not fully onboard with hapi 17, as the new signature is
handler: function (request, h) {
And it's not just the rename of reply to h.
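For illustration, a minimal hapi 17-style handler (a sketch only): instead of calling reply(), you return the response value directly, or use the response toolkit h for explicit control over status codes and headers.

handler: async function (request, h) {
    // Returning a plain value is enough:
    // return { statusCode: 200, success: true };
    // Or use the toolkit for explicit control:
    return h.response({ statusCode: 200, success: true }).code(200);
}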

MongoDB/Multer Query DB then upload File

I want to query a database and, if a result is found, upload a file. At the moment it's the other way around: first the image is uploaded, then the DB is searched and updated.
A value I need, cid (req.body.cid), can only be accessed after upload() is called, which complicates things. This is my current, working code:
var multer = require('multer');
var upload = multer({
    dest: './public/images/usercontent',
    limits: {
        files: 1,
        fields: 1,
        fileSize: 10000
    },
    fileFilter: function fileFilter(req, file, cb) {
        if (file.mimetype !== 'image/png' && file.mimetype !== 'image/jpg' && file.mimetype !== 'image/jpeg') {
            req.multerImageValidation = 'wrong type';
            return cb(null, false);
        }
        return cb(null, true);
    }
}).single('uploads[]');

router.post('/uploadimg', isLoggedIn, function(req, res, next) {
    upload(req, res, function(err) {
        if (err) {
            if (err.code === 'LIMIT_FILE_SIZE') {
                return res.send({
                    statusText: 'fileSize'
                });
            }
            return res.send({
                statusText: 'failed'
            });
        }
        if (req.multerImageValidation) {
            return res.send({
                statusText: 'wrongType'
            });
        }
        Company.findOneAndUpdate({
            ownedBy: req.user.local.username,
            _id: req.body.cid // value that is sent with the image
        }, {
            icon: 'images/usercontent/' + req.file.filename
        }, function(err, result) {
            if (err) {
                return res.send({
                    statusText: 'failed'
                });
            }
            if (!result) {
                return res.send({
                    statusText: 'failed'
                });
            }
            res.send({
                statusText: 'success'
            });
        });
        //
    });
});
Why don't you use the success callback of the DB query to upload the image, and then use the success callback of the upload function to update the DB record with cid? At a very high level, this would be:
query the DB for the record
// if the record is found, in the success callback
attempt to upload the image
// if the image uploaded successfully and cid is available, in the success callback
update the DB record
You can use MongoDB's update operation to ensure the update is atomic. A sketch of this query-first flow follows below.
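A minimal sketch of that flow, with one adjustment the question makes necessary: since req.body.cid only exists after the multipart body is parsed, the sketch assumes the client sends cid as a route parameter instead, so the record can be checked before the file is accepted (Company, isLoggedIn, and upload as defined above):

router.post('/uploadimg/:cid', isLoggedIn, function (req, res) {
    // 1. Query first: cid comes from the URL, not the multipart body.
    Company.findOne({
        ownedBy: req.user.local.username,
        _id: req.params.cid
    }, function (err, company) {
        if (err || !company) {
            return res.send({ statusText: 'failed' });
        }
        // 2. Record exists, so it is safe to accept the upload now.
        upload(req, res, function (err) {
            if (err || req.multerImageValidation) {
                return res.send({ statusText: 'failed' });
            }
            // 3. Update the record we already fetched.
            company.icon = 'images/usercontent/' + req.file.filename;
            company.save(function (err) {
                if (err) {
                    return res.send({ statusText: 'failed' });
                }
                res.send({ statusText: 'success' });
            });
        });
    });
});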
