NodeJS Web App File Upload Chops Off Beginning Of File - ajax

I'm working on a project in NodeJS which involves file upload. The upload is done on the client side with the code:
$('#file-upload').bind('change focus click', function() {
    var file = jQuery(this)[0].files[0];
    if (file && file.fileName) {
        var xhr = new XMLHttpRequest();
        xhr.upload.addEventListener('progress', onProgressHandler, false);
        xhr.upload.addEventListener('load', transferComplete, false);
        xhr.open('POST', '/upload', true);
        xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest');
        xhr.setRequestHeader('X-File-Name', encodeURIComponent(file.fileName));
        xhr.setRequestHeader('Content-Type', 'application/octet-stream');
        xhr.send(file);

        function onProgressHandler(evt) {
            // was reading the global `event`; use the handler's own argument
            var percentage = evt.loaded / evt.total * 100;
            console.log(percentage);
        }

        function transferComplete(evt) {
            console.log('Done');
        }
    }
});
And on the server-side, I use:
app.post('/upload', function(req, res, next) {
    if (req.xhr) {
        console.log('Uploading...');
        var fName = req.header('x-file-name');
        var fSize = req.header('x-file-size');
        var fType = req.header('x-file-type');
        var ws = fs.createWriteStream('./' + fName);
        req.on('data', function(data) {
            console.log('DATA');
            ws.write(data);
        });
        req.on('end', function() {
            ws.end(); // close the stream so the last chunk is flushed
            console.log('All Done!!!!');
        });
    }
});
This code works on its own, but when combined with the rest of my much larger project it seems to chop off the beginning of large files and ignore small files altogether. If I upload a small file, console.log('DATA') never fires; for large files it does fire, but not for the beginning of the file. I believe the browser starts sending the file early, and by the time my handler picks it up, the beginning (or, in the case of a small file, the entire thing) has already been sent. I don't know what would be causing this, though.
Thanks!

I figured it out. There was so much logic between my route being defined and the actual file upload code running that the request's 'data' events had already started firing before my handler was listening for them.
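In case it helps anyone else, a minimal sketch of the shape of the fix (illustrative, not my project's actual code): attach the 'data' listener as soon as the request arrives and buffer the chunks, so any slow setup that runs afterwards can't miss the start of the stream.

var fs = require('fs');

app.post('/upload', function(req, res) {
    var chunks = [];
    // Attach the 'data' listener immediately, before any slow setup work,
    // so chunks that arrive early are buffered here instead of being lost.
    req.on('data', function(chunk) {
        chunks.push(chunk);
    });
    req.on('end', function() {
        var fName = req.header('x-file-name'); // header set by the client code above
        var ws = fs.createWriteStream('./' + fName);
        chunks.forEach(function(chunk) {
            ws.write(chunk);
        });
        ws.end();
        res.end('done');
    });
});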

I am having this exact same problem. It bothers me that having too much logic between the request and the on('data') handler is the cause. I'm testing with a local server, and the amount of logic between the start of the request and registering the 'data' handler is negligible; yet the fact that I don't need to cross the internet to do my upload seems to make the problem that much worse. Are you still experiencing this issue?

Related

a file upload progress bar with node (socket.io and formidable) and ajax

I was in the middle of teaching myself some Ajax, and this lesson required building a simple file upload form locally. I'm running XAMPP on Windows 7, with a virtual host set up for http://test. The solution in the book was to use node and an almost unknown package called "multipart" which was supposed to parse the form data, but it was crapping out on me.
I looked for the best package for the job, and that seems to be formidable. It does the trick: my file uploads locally and I get all the details back through Ajax. BUT, it won't play nice with the simple JS code from the book, which was supposed to display the upload progress in a progress element. SO, I looked around and people suggested using socket.io to emit the progress info back to the client page.
I've managed to get formidable working locally, and I've managed to get socket.io working with some basic tutorials. Now, I can't for the life of me get them to work together. I can't even get a simple console log message to be sent back to my page from socket.io while formidable does its thing.
First, here is the file upload form by itself. The script inside the upload.html page:
document.getElementById("submit").onclick = handleButtonPress;

var httpRequest;

function handleResponse() {
    if (httpRequest.readyState == 4 && httpRequest.status == 200) {
        document.getElementById("results").innerHTML = httpRequest.responseText;
    }
}

function handleButtonPress(e) {
    e.preventDefault();
    var form = document.getElementById("myform");
    var formData = new FormData(form);
    httpRequest = new XMLHttpRequest();
    httpRequest.onreadystatechange = handleResponse;
    httpRequest.open("POST", form.action);
    httpRequest.send(formData);
}
And here's the corresponding node script (the important part being form.on('progress')):
var http = require('http'),
    util = require('util'),
    formidable = require('formidable');

http.createServer(function(req, res) {
    if (req.url == '/upload' && req.method.toLowerCase() == 'post') {
        var form = new formidable.IncomingForm(),
            files = [],
            fields = [];
        form.uploadDir = './files/';
        form.keepExtensions = true;
        form
            .on('progress', function(bytesReceived, bytesExpected) {
                console.log('Progress so far: ' + (bytesReceived / bytesExpected * 100).toFixed(0) + "%");
            })
            .on('file', function(name, file) {
                files.push([name, file]);
            })
            .on('error', function(err) {
                console.log('ERROR!');
                res.end();
            })
            .on('end', function() {
                console.log('-> upload done');
                res.writeHead(200, "OK", {
                    "Content-Type": "text/html",
                    "Access-Control-Allow-Origin": "http://test"
                });
                res.end('received files: ' + util.inspect(files));
            });
        form.parse(req);
    } else {
        res.writeHead(404, {'content-type': 'text/plain'});
        res.end('404');
    }
    return;
}).listen(8080);

console.log('listening');
Ok, so that all works as expected. Now here's the simplest socket.io script which I'm hoping to infuse into the previous two to emit the progress info back to my page. Here's the client-side code:
var socket = io.connect('http://test:8080');
socket.on('news', function(data) {
    console.log('server sent news:', data);
});
And here's the server-side node script:
var http = require('http'),
    fs = require('fs');

var server = http.createServer(function(req, res) {
    fs.createReadStream('./socket.html').pipe(res);
});

var io = require('socket.io').listen(server);
io.sockets.on('connection', function(socket) {
    socket.emit('news', {hello: "world"});
});

server.listen(8080);
So this works fine by itself, but my problem comes when I try to place the socket.io code inside my form handling. I've tried placing it anywhere it might remotely make sense, and I've tried the asynchronous mode of fs.readFile too, but it just won't send anything back to the client, while the file upload portion still works fine. Do I need to establish some sort of handshake between the two packages? Help me out here. I'm a front-end guy, so I'm not too familiar with this back-end stuff. I'll put this aside for now and move on to other lessons.
Maybe you can create a room for one single client and then broadcast the percentage to this room.
I explained it here: How to connect formidable file upload to socket.io in Node.js
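A minimal sketch of that room-per-client idea, assuming the socket.io 0.x API used above (the key exchange, route, and event names here are illustrative, not from the linked answer): the client connects over socket.io, joins a room keyed by a value it chooses, and appends the same key to the upload URL so the server knows which room should receive the progress events.

var http = require('http'),
    url = require('url'),
    formidable = require('formidable');

var server = http.createServer(function(req, res) {
    if (req.url.split('?')[0] == '/upload' && req.method.toLowerCase() == 'post') {
        // Hypothetical convention: the client uploads to /upload?key=<its key>.
        var key = url.parse(req.url, true).query.key;
        var form = new formidable.IncomingForm();
        form.on('progress', function(bytesReceived, bytesExpected) {
            var pct = (bytesReceived / bytesExpected * 100).toFixed(0);
            // Emit only to the room holding this one uploading client.
            io.sockets.in('upload-' + key).emit('progress', pct);
        });
        form.parse(req, function() {
            res.end('done');
        });
    } else {
        res.writeHead(404);
        res.end();
    }
});

var io = require('socket.io').listen(server);
io.sockets.on('connection', function(socket) {
    // The client emits the same key right after connecting to claim its room.
    socket.on('watch-upload', function(key) {
        socket.join('upload-' + key);
    });
});

server.listen(8080);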

Node: Download raw bytes of jpeg without piping output

Here is what I'm trying to do:
Retrieve raw data of an image (jpeg) from a URL given to me by an API
Pass the raw data or buffer to a function that uploads it to another server
NEVER PIPE THE IMAGE TO THE DISK
I've followed every example I can find (that doesn't pipe to disk), but still the content comes out corrupted. I have tried forcing various "accept-encodings" (gzip, deflate) but they basically resolve to the same data, just compressed.
I believe this has something to do with the response encoding rather than how I am asking for the data.
Here's the code so far:
var parsedUrl = require('url').parse(PATH_TO_IMAGE);
var params = {
    hostname: parsedUrl.hostname,
    path: parsedUrl.path,
};

return http.get(params, function(photo_res) {
    var photoData = '';
    res.setEncoding('binary');
    photo_res.on('data', function(chunk) {
        photoData += chunk;
    });
    photo_res.on('end', function() {
        // DO STUFF TO UPLOAD IMAGE
    });
    photo_res.on('error', function(err) {
        console.error('Unable to download photo:', err);
        return done(err);
    });
});
You have a simple typographic error which may be causing Node to interpret your data stream with the incorrect type. Your error is in this line:
res.setEncoding('binary');
To avoid confusion you should keep the response variable named res consistently (your callback names it photo_res, but this line refers to res), and since your data is in binary format, it is better to keep it as a buffer.
http.get(options, function(res) {
    var photoData = [];
    // Note: no res.setEncoding() here; without an encoding, each chunk
    // arrives as a Buffer, which is what Buffer.concat() expects.
    res.on('data', function(chunk) {
        photoData.push(chunk);
    });
    res.on('end', function() {
        var photo = Buffer.concat(photoData);
    });
    res.on('error', function(err) {
        console.error('Unable to download photo:', err);
    });
});
In the example, I store all chunks of data in an array, then use Buffer.concat() to create a single buffer. It is better this way because you were originally appending your image's data onto a string, which may have caused the corruption.
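As a follow-up, a hedged sketch of the forwarding step (the destination host, path, and header values are placeholders, not from the original post): once the whole image is in a single Buffer, it can be re-POSTed to another server without ever touching the disk.

var http = require('http');

// `photo` is the Buffer assembled in the 'end' handler above.
function uploadPhoto(photo, callback) {
    var req = http.request({
        hostname: 'example.com',   // placeholder destination
        path: '/photos',           // placeholder path
        method: 'POST',
        headers: {
            'Content-Type': 'image/jpeg',
            'Content-Length': photo.length
        }
    }, function(res) {
        callback(null, res.statusCode);
    });
    req.on('error', callback);
    req.end(photo);                // send the raw bytes and finish
}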

Clear IE cache when using AJAX without a cache busting querystring, but using http response header

I'm having the classic IE-caches-everything-in-Ajax issue. I have a bit of data that refreshes every minute.
Having researched the forums the solutions boil down to these options (http://stackoverflow.com/questions/5997857/grails-best-way-to-send-cache-headers-with-every-ajax-call):
add a cache-busting token to the query string (like ?time=[timestamp])
send an HTTP response header that specifically forbids IE from caching the request
use an ajax POST instead of a GET
Unfortunately the obvious querystring or "cache: false" setting will not work for me, as the updated data file is hosted on Akamai NetStorage, which cannot accept querystrings. I don't want to use POST either.
What I want to do is send an HTTP response header that specifically forbids IE from caching the request.
Does anyone know how this might be done, or know of another cache-busting solution? Any help would be much appreciated.
Here is my code:
(function ($) {
    var timer = 0;
    var Browser = {
        Version: function () {
            var version = 999;
            if (navigator.appVersion.indexOf("MSIE") != -1) version = parseFloat(navigator.appVersion.split("MSIE")[1]);
            return version;
        }
    };

    $.fn.serviceboard = function (options) {
        var settings = { "refresh": 60 };
        return this.each(function () {
            if (options) $.extend(settings, options);
            var obj = $(this);
            GetLatestData(obj, settings.refresh); // was misspelled "GetLatesData"
            if (settings.refresh > 9 && Browser.Version() > 6) {
                timer = setInterval(function () { GetLatestData(obj, settings.refresh); }, settings.refresh * 1000);
            }
        });
    };

    function GetLatestData(obj, refresh) {
        var _url = "/path/updated-data.htm";
        $.ajax({
            url: _url,
            dataType: "html",
            complete: function () {},
            success: function (data) {
                obj.empty().append(data);
            }
        }); // was missing the closing of the $.ajax() call
    }
})(jQuery);
Add a random number to the GET request so that IE will not identify it as "the same" in its cache. This number could be a timestamp:
new Date().getTime()
EDIT: perhaps make the requested URL:
var _url = "/path/updated-data.htm?" + new Date().getTime()
This shouldn't cause any errors I believe.
EDIT 2: Sorry, I just read your post a bit better and saw that this is not an option for you.
You say the file "is hosted on Akamai and cannot accept querystrings", but why not?
I've never heard of a page that won't accept an additional "?blabla", even when it's plain HTML.
This was driving me crazy. I tried many cache-busting techniques and cache headers; most either did not work or were wild goose chases. The only solution I found that worked correctly in testing was sending the header:
Pragma: no-cache
I hope it saves others with IE headaches.
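For reference, a minimal sketch of sending that header from a plain Node server (illustrative only; it assumes you control the origin serving the file, which may not apply to Akamai NetStorage):

var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
    if (req.url === '/path/updated-data.htm') {
        fs.readFile('./updated-data.htm', function (err, data) {
            if (err) { res.writeHead(500); return res.end(); }
            res.writeHead(200, {
                'Content-Type': 'text/html',
                // Headers that stop IE from caching the AJAX-fetched resource:
                'Pragma': 'no-cache',
                'Cache-Control': 'no-cache, no-store, must-revalidate',
                'Expires': '0'
            });
            res.end(data);
        });
    } else {
        res.writeHead(404);
        res.end();
    }
}).listen(8080);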

Fastest way to check for existence of a file in NodeJs

I'm building a super simple server in node and in my onRequest listener I'm trying to determine if I should serve a static file (off the disk) or some json (probably pulled from mongo) based on the path in request.url.
Currently I'm trying to stat the file first (because I use mtime elsewhere) and if that doesn't fail then I read the contents from disk. Something like this:
fs.stat(request.url.pathname, function(err, stat) {
    if (!err) {
        fs.readFile(request.url.pathname, function(err, contents) {
            // serve file
        });
    } else {
        // either pull data from mongo or serve 404 error
    }
});
Other than caching the result of fs.stat for the request.url.pathname, is there something that could speed this check up? For example, would it be just as fast to see if fs.readFile errors out instead of the stat? Or to use fs.createReadStream instead of fs.readFile? Or could I potentially check for the file using something in child_process.spawn? Basically I just want to make sure I'm not spending any extra time messing with file I/O when the request should be sent to mongo for data...
Thanks!
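For what it's worth, a hedged sketch of the "skip the stat entirely" idea raised above (the helper name and fallback are illustrative): open the read stream directly and fall back to MongoDB when the open fails with ENOENT, so the happy path touches the file system only once.

var fs = require('fs');

// Hypothetical helper: serve the file if it exists, otherwise hand the
// request off to the database path. One file-system call either way.
function serveFileOrFallback(pathname, res, fallback) {
    var stream = fs.createReadStream(pathname);
    stream.on('error', function(err) {
        if (err.code === 'ENOENT') {
            fallback(res);          // no such file: query mongo / send 404
        } else {
            res.statusCode = 500;   // a real I/O error
            res.end();
        }
    });
    stream.pipe(res);               // streams the contents when the open succeeds
}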
var fs = require('fs');

fs.exists(file, function(exists) {
    if (exists) {
        // serve file
    } else {
        // mongodb
    }
});
I don't think you should be worrying about that, but rather about how you can improve the caching mechanism. fs.stat is really OK for file checking; doing that in another child process would probably slow you down rather than help you here.
Connect implemented the staticCache() middleware a few months ago, as described in this blog post: http://tjholowaychuk.com/post/9682643240/connect-1-7-0-fast-static-file-memory-cache-and-more
A Least-Recently-Used (LRU) cache algo is implemented through the Cache object, simply rotating cache objects as they are hit. This means that increasingly popular objects maintain their positions while others get shoved out of the stack and garbage collected.
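As a rough illustration of that rotate-on-hit idea (a sketch only, not Connect's actual Cache implementation):

// A tiny LRU sketch: a hit moves the key to the front of the order list,
// and the least-recently-used entry is dropped once the cache is full.
function LRU(limit) {
    this.limit = limit;
    this.keys = [];   // order of use, most recent first
    this.store = {};
}

LRU.prototype.get = function(key) {
    var i = this.keys.indexOf(key);
    if (i === -1) return undefined;
    this.keys.splice(i, 1);
    this.keys.unshift(key); // rotate the hit entry to the front
    return this.store[key];
};

LRU.prototype.set = function(key, value) {
    if (this.keys.indexOf(key) === -1 && this.keys.length >= this.limit) {
        var evicted = this.keys.pop(); // shove out the least recently used
        delete this.store[evicted];
    }
    if (this.keys.indexOf(key) === -1) {
        this.keys.unshift(key);
    } else {
        this.get(key); // existing key: just rotate it to the front
    }
    this.store[key] = value;
};

// Usage: with a limit of 2, touching 'a' keeps it alive while 'b' is evicted.
var cache = new LRU(2);
cache.set('a', 1);
cache.set('b', 2);
cache.get('a');
cache.set('c', 3); // evicts 'b'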
Other resources:
http://senchalabs.github.com/connect/middleware-staticCache.html
The source code for staticCache
This snippet can help you:
var fs = require('fs');

var path = 'sth';
fs.stat(path, function(err, stat) {
    if (err) {
        if ('ENOENT' == err.code) {
            // file didn't exist, so for example send 404 to client
        } else {
            // it is a server error, so for example send 500 to client
        }
    } else {
        // everything was OK, so for example you can read it and send it to client
    }
});
In case you want to serve a file using Express, I would recommend just using the sendFile error handler of Express:
const app = require("express")();

const options = {};
options.root = process.cwd();

var sendFiles = function(res, files) {
    res.sendFile(files.shift(), options, function(err) {
        if (err) {
            console.log(err);
            console.log(files);
            if (files.length === 0) {
                res.status(err.status).end();
            } else {
                sendFiles(res, files);
            }
        } else {
            console.log("Image Sent");
        }
    });
};

app.get("/getPictures", function(req, res, next) {
    const files = [
        "file-does-not-exist.jpg",
        "file-does-not-exist-also.jpg",
        "file-exists.jpg",
        "file-does-not-exist.jpg"
    ];
    sendFiles(res, files);
});

app.listen(8080);
If a file does not exist, sendFile invokes its error callback, which then tries the next file in the list.
I made a github repo here https://github.com/dmastag/ex_fs/blob/master/index.js

NodeJS unable to response.write to the browser when there is delay in callbacks

I'm using the simple MVC structure by Nathan Broslawsky. I have the code below.
ArticleProviderDBController.prototype.Show = function(data) {
    // Init Model
    var res = this.Response;
    var model = this.getModel();
    var view = this.getView("ArticleProviderDB");
    model.findAll(function(error, article_collections) {
        if (error) console.log(error);
        view.renderGH(res, data, article_collections); // should serve an HTML file with data from the DB, but it is not working
        res.write('inside callback'); // this is not shown in my browser
        // res.end();
    });
    // console.log(_self.Response);
    res.write('outside callback'); // this IS shown in my browser
    // res.end();
};
Actually, I tried to follow what people have done using Express:
app.get('/', function(req, res) {
    articleProvider.findAll(function(error, docs) {
        res.render('index.jade', {
            locals: {
                title: 'Blog',
                articles: docs
            }
        });
    });
});
but it seems like it is not working.
I also saw a post, NodeJS response.write not working within callback, posted recently, but his solution is not working for me. My main objective is to use a simple MVC structure built with Node.js, without other frameworks such as expressjs, to serve HTML with DB query results. Thank you.
