How can I save a base64-encoded image to disk?

My Express app is receiving a base64-encoded PNG from the browser (generated from a canvas with toDataURL()) and writing it to a file. But the file isn't a valid image file, and the "file" utility simply identifies it as "data".
var body = req.rawBody,
    base64Data = body.replace(/^data:image\/png;base64,/, ""),
    binaryData = new Buffer(base64Data, 'base64').toString('binary');

require("fs").writeFile("out.png", binaryData, "binary", function(err) {
    console.log(err); // writes out file without error, but it's not a valid image
});

I think you are converting the data a bit more than you need to. Once you create the buffer with the proper encoding, you just need to write the buffer to the file.
var base64Data = req.rawBody.replace(/^data:image\/png;base64,/, "");

require("fs").writeFile("out.png", base64Data, 'base64', function(err) {
    console.log(err);
});
new Buffer(..., 'base64') will convert the input string to a Buffer, which is just an array of bytes, by interpreting the input as a base64 encoded string. Then you can just write that byte array to the file.
Update
As mentioned in the comments, req.rawBody is no longer a thing. If you are using express/connect then you should use the bodyParser() middleware and use req.body, and if you are doing this using standard Node then you need to aggregate the incoming data event Buffer objects and do this image data parsing in the end callback.
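For example, a minimal sketch of the modern approach, assuming Express 4.16+ (where express.json() replaces bodyParser()) and assuming the client POSTs JSON with an image field holding the data URL; the /upload route and field name are just placeholders:

const express = require('express');
const fs = require('fs');

const app = express();
// Raise the body size limit, since base64-encoded images can be large.
app.use(express.json({ limit: '10mb' }));

app.post('/upload', function (req, res) {
    // Strip the data-URL prefix and decode the remaining base64 payload.
    const base64Data = req.body.image.replace(/^data:image\/png;base64,/, '');
    const buffer = Buffer.from(base64Data, 'base64');

    fs.writeFile('out.png', buffer, function (err) {
        if (err) return res.sendStatus(500);
        res.sendStatus(200);
    });
});

app.listen(3000);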

This is my full solution, which reads any base64 image format and saves it to disk in the proper format:
// Save base64 image to disk
try
{
    // Decoding base-64 image
    // Source: http://stackoverflow.com/questions/20267939/nodejs-write-base64-image-file
    function decodeBase64Image(dataString)
    {
        var matches = dataString.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
        var response = {};

        if (!matches || matches.length !== 3)
        {
            throw new Error('Invalid input string');
        }

        response.type = matches[1];
        response.data = Buffer.from(matches[2], 'base64');

        return response;
    }

    // Regular expression for the image type:
    // this regular expression extracts the "jpeg" from "image/jpeg"
    var imageTypeRegularExpression = /\/(.*?)$/;

    // Generate a random string
    var crypto = require('crypto');
    var seed = crypto.randomBytes(20);
    var uniqueSHA1String = crypto
        .createHash('sha1')
        .update(seed)
        .digest('hex');

    var base64Data = 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEAZABkAAD/4Q3zaHR0cDovL25zLmFkb2JlLmN...';

    var imageBuffer = decodeBase64Image(base64Data);
    var userUploadedFeedMessagesLocation = '../img/upload/feed/';

    var uniqueRandomImageName = 'image-' + uniqueSHA1String;

    // This variable is actually the array returned by match();
    // the [1] value is the real image extension
    var imageTypeDetected = imageBuffer
        .type
        .match(imageTypeRegularExpression);

    var userUploadedImagePath = userUploadedFeedMessagesLocation +
        uniqueRandomImageName +
        '.' +
        imageTypeDetected[1];

    // Save decoded binary image to disk
    try
    {
        require('fs').writeFile(userUploadedImagePath, imageBuffer.data,
            function(err)
            {
                if (err)
                {
                    console.log('ERROR:', err);
                    return;
                }
                console.log('DEBUG - feed:message: Saved to disk image attached by user:', userUploadedImagePath);
            });
    }
    catch(error)
    {
        console.log('ERROR:', error);
    }
}
catch(error)
{
    console.log('ERROR:', error);
}

This did it for me simply and perfectly.
Excellent explanation by Scott Robinson
From image to base64 string
const fs = require('fs');

let buff = fs.readFileSync('stack-abuse-logo.png');
let base64data = buff.toString('base64');
From base64 string to image
let buff = Buffer.from(data, 'base64');
fs.writeFileSync('stack-abuse-logo-out.png', buff);
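If you prefer the non-blocking API, here is a minimal sketch of the same round trip with fs.promises (the file names are the same placeholders as above):

const fs = require('fs').promises;

async function roundTrip() {
    // Read the image and encode it as a base64 string.
    const buff = await fs.readFile('stack-abuse-logo.png');
    const base64data = buff.toString('base64');

    // Decode the base64 string back into bytes and write it out.
    await fs.writeFile('stack-abuse-logo-out.png', Buffer.from(base64data, 'base64'));
}

roundTrip().catch(console.error);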

UPDATE
I found this interesting link showing how to solve your problem in PHP. I think you forgot to replace spaces with + as shown in the link.
I took this circle from http://images-mediawiki-sites.thefullwiki.org/04/1/7/5/6204600836255205.png as sample which looks like:
Next I put it through http://www.greywyvern.com/code/php/binary2base64 which returned me:
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAAAAACPAi4CAAAAB3RJTUUH1QEHDxEhOnxCRgAAAAlwSFlzAAAK8AAACvABQqw0mAAAAXBJREFUeNrtV0FywzAIxJ3+K/pZyctKXqamji0htEik9qEHc3JkWC2LRPCS6Zh9HIy/AP4FwKf75iHEr6eU6Mt1WzIOFjFL7IFkYBx3zWBVkkeXAUCXwl1tvz2qdBLfJrzK7ixNUmVdTIAB8PMtxHgAsFNNkoExRKA+HocriOQAiC+1kShhACwSRGAEwPP96zYIoE8Pmph9qEWWKcCWRAfA/mkfJ0F6dSoA8KW3CRhn3ZHcW2is9VOsAgoqHblncAsyaCgcbqpUZQnWoGTcp/AnuwCoOUjhIvCvN59UBeoPZ/AYyLm3cWVAjxhpqREVaP0974iVwH51d4AVNaSC8TRNNYDQEFdlzDW9ob10YlvGQm0mQ+elSpcCCBtDgQD7cDFojdx7NIeHJkqi96cOGNkfZOroZsHtlPYoR7TOp3Vmfa5+49uoSSRyjfvc0A1kLx4KC6sNSeDieD1AWhrJLe0y+uy7b9GjP83l+m68AJ72AwSRPN5g7uwUAAAAAElFTkSuQmCC
I saved this string to a file named base64, which I read in my code.
var fs = require('fs'),
    data = fs.readFileSync('base64', 'utf8'),
    base64Data,
    binaryData;

base64Data = data.replace(/^data:image\/png;base64,/, "");
base64Data = base64Data.replace(/ /g, '+'); // put back the '+' characters that were turned into spaces
binaryData = new Buffer(base64Data, 'base64').toString('binary');

fs.writeFile("out.png", binaryData, "binary", function (err) {
    console.log(err);
});
I get a circle back, but the funny thing is that the filesize has changed :)...
END
When you read the image back, I think you need to set up headers.
Take for example imagepng from the PHP manual:
<?php
$im = imagecreatefrompng("test.png");
header('Content-Type: image/png');
imagepng($im);
imagedestroy($im);
?>
I think the second line, header('Content-Type: image/png');, is important; otherwise your image will not be displayed in the browser, and you'll just see a bunch of binary data instead.
In Express you would simply use something like the snippet below. I am going to display your gravatar, which is located at http://www.gravatar.com/avatar/cabf735ce7b8b4471ef46ea54f71832d?s=32&d=identicon&r=PG
and is a JPEG file, as curl --head http://www.gravatar.com/avatar/cabf735ce7b8b4471ef46ea54f71832d?s=32&d=identicon&r=PG confirms. I only request the headers because otherwise curl would display a bunch of binary data in the console (Google Chrome immediately goes to download):
curl --head "http://www.gravatar.com/avatar/cabf735ce7b8b4471ef46ea54f71832d?s=32&d=identicon&r=PG"
HTTP/1.1 200 OK
Server: nginx
Date: Wed, 03 Aug 2011 12:11:25 GMT
Content-Type: image/jpeg
Connection: keep-alive
Last-Modified: Mon, 04 Oct 2010 11:54:22 GMT
Content-Disposition: inline; filename="cabf735ce7b8b4471ef46ea54f71832d.jpeg"
Access-Control-Allow-Origin: *
Content-Length: 1258
X-Varnish: 2356636561 2352219240
Via: 1.1 varnish
Expires: Wed, 03 Aug 2011 12:16:25 GMT
Cache-Control: max-age=300
Source-Age: 1482
$ mkdir -p ~/tmp/6922728
$ cd ~/tmp/6922728/
$ touch app.js
app.js
var app = require('express').createServer();

app.get('/', function (req, res) {
    res.contentType('image/jpeg');
    res.sendfile('cabf735ce7b8b4471ef46ea54f71832d?s=32&d=identicon&r=PG');
});

app.get('/binary', function (req, res) {
    res.sendfile('cabf735ce7b8b4471ef46ea54f71832d?s=32&d=identicon&r=PG');
});

app.listen(3000);
$ wget "http://www.gravatar.com/avatar/cabf735ce7b8b4471ef46ea54f71832d?s=32&d=identicon&r=PG"
$ node app.js
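As an aside, createServer() and res.sendfile() come from an old Express version; on current Express the equivalent would look roughly like this (only a sketch, assuming the downloaded gravatar has been renamed to avatar.jpg and sits next to app.js):

const express = require('express');
const path = require('path');

const app = express();

app.get('/', function (req, res) {
    // res.sendFile (capital F) replaces the old res.sendfile and requires an absolute path;
    // setting the type explicitly mirrors the original res.contentType call.
    res.type('image/jpeg');
    res.sendFile(path.join(__dirname, 'avatar.jpg'));
});

app.listen(3000);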

I also had to save Base64 encoded images that are part of data URLs, so I ended up making a small npm module to do it in case I (or someone else) needed to do it again in the future. It's called ba64.
Simply put, it takes a data URL with a Base64 encoded image and saves the image to your file system. It can save synchronously or asynchronously. It also has two helper functions, one to get the file extension of the image, and the other to separate the Base64 encoding from the data: scheme prefix.
Here's an example:
var ba64 = require("ba64"),
    data_url = "data:image/jpeg;base64,[Base64 encoded image goes here]";

// Save the image synchronously.
ba64.writeImageSync("myimage", data_url); // Saves myimage.jpeg.

// Or save the image asynchronously.
ba64.writeImage("myimage", data_url, function(err){
    if (err) throw err;
    console.log("Image saved successfully");
    // do stuff
});
Install it: npm i ba64 -S. Repo is on GitHub: https://github.com/HarryStevens/ba64.
P.S. It occurred to me later that ba64 is probably a bad name for the module since people may assume it does Base64 encoding and decoding, which it doesn't (there are lots of modules that already do that). Oh well.

The function below saves a base64-encoded file: just pass it your base64 data URL and it returns the filename, which you can save in your DB.
import fs from 'fs';
const uuid = require('uuid/v1');

/* Saves the base64 image on the server and returns the filename of the image. */
function saveImage(baseImage) {
    /* Path of the folder where your project is saved. (In my case I got it from a config file, the root path of the project.) */
    const uploadPath = "/home/documents/project";
    // Path of the folder where you want to save the image.
    const localPath = `${uploadPath}/uploads/images/`;
    // Find the extension of the file.
    const ext = baseImage.substring(baseImage.indexOf("/") + 1, baseImage.indexOf(";base64"));
    const fileType = baseImage.substring("data:".length, baseImage.indexOf("/"));
    // Form a regex to strip the data-URL prefix from the file.
    const regex = new RegExp(`^data:${fileType}\/${ext};base64,`, 'gi');
    // Extract the base64 data.
    const base64Data = baseImage.replace(regex, "");
    const filename = `${uuid()}.${ext}`;
    // Check whether the directories are present, and create them if not.
    if (!fs.existsSync(`${uploadPath}/uploads/`)) {
        fs.mkdirSync(`${uploadPath}/uploads/`);
    }
    if (!fs.existsSync(localPath)) {
        fs.mkdirSync(localPath);
    }
    fs.writeFileSync(localPath + filename, base64Data, 'base64');
    return filename;
}
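A usage sketch (the req.body.image field name and the Upload model are just assumptions about how the data URL arrives and where the filename gets stored):

// Somewhere in an async route handler, assuming the client sent a data URL in req.body.image:
const filename = saveImage(req.body.image);
// Persist the returned filename; `Upload` is a hypothetical model.
await Upload.create({ image: filename });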

You can use a third-party library like base64-img or base64-to-image.
base64-img
const base64Img = require('base64-img');
const data = 'data:image/png;base64,...';
const destpath = 'dir/to/save/image';
const filename = 'some-filename';
base64Img.img(data, destpath, filename, (err, filepath) => {}); // Asynchronous usage
const filepath = base64Img.imgSync(data, destpath, filename); // Synchronous usage
base64-to-image
const base64ToImage = require('base64-to-image');
const base64Str = 'data:image/png;base64,...';
const path = 'dir/to/save/image/'; // Add trailing slash
const optionalObj = { fileName: 'some-filename', type: 'png' };
const { imageType, fileName } = base64ToImage(base64Str, path, optionalObj); // Synchronous usage only

Converting a file containing a base64 string to a PNG image.
Four variants that work:
var { promisify } = require('util');
var fs = require("fs");
var readFile = promisify(fs.readFile);
var writeFile = promisify(fs.writeFile);

async function run() {
    // variant 1
    var d = await readFile('./1.txt', 'utf8');
    await writeFile("./1.png", d, 'base64');

    // variant 2
    var d = await readFile('./2.txt', 'utf8');
    var dd = Buffer.from(d, 'base64');
    await writeFile("./2.png", dd);

    // variant 3
    var d = await readFile('./3.txt');
    await writeFile("./3.png", d.toString('utf8'), 'base64');

    // variant 4
    var d = await readFile('./4.txt');
    var dd = Buffer.from(d.toString('utf8'), 'base64');
    await writeFile("./4.png", dd);
}

run();

An easy way to convert a base64 image into a file and save it under some random id or name.
// to create some random id or name for your image name
const imgname = new Date().getTime().toString();

// to declare some path to store your converted image
const path = './yourpath/' + imgname + '.png';

// image taken from the body which you uploaded
const imgdata = req.body.image;

// to strip the data-URL prefix from the base64 data
const base64Data = imgdata.replace(/^data:([A-Za-z-+/]+);base64,/, '');

fs.writeFile(path, base64Data, 'base64', (err) => {
    console.log(err);
});

// assigning the converted image name to your database field
req.body.coverImage = imgname;

It's very simple:
const path = require('path');
const { readFile, writeFile } = require("fs/promises");

(async () => {
    try {
        const contents = await readFile(path.join(__dirname, 'clau.jpg'), { encoding: 'base64' });
        console.log(contents);
        await writeFile(path.join(__dirname, 'claumia.jpg'), Buffer.from(contents, 'base64'));
    } catch (error) {
        console.log(error);
    }
})();

Related

React Native base64 Image to Uint8ClampedArray

I have functionality to pick an image of a QR code from the CameraRoll on Android and iOS in react-native. Once the user has picked an image, I use something like jsQR to decode it and validate whether it's a real QR code or not.
But the jsQR lib says it needs to accept a Uint8ClampedArray to decode the image and read the QR code. I already have a function to get the base64 image, but I can't find how to convert it properly to a Uint8ClampedArray.
Here is my code below:
const handleImportScan = useCallback(async () => {
    try {
        const base64Image = await RNFS.readFile(
            photos[selected].node.image.uri,
            'base64',
        );
        console.log('base64img:', base64Image);
        // First argument below should be a 'Uint8ClampedArray'
        const code = jsQR(base64Image, width, height);
        if (code) {
            console.log('Found QR code', code);
        }
    } catch (error) {
        console.log('err:', error);
    }
}, [photos, selected]);
I'm trying to find a library or third-party package to convert my base64 image to a Uint8ClampedArray.
Mostly I save the user-generated QR images as PNG.
Appreciate it if someone could help.
Thanks
Note that base64data should be the base64-encoded image, not a data URI (i.e. without the 'data:image/png;base64,' prefix, if you have a URI).
const byteCharacters = atob(base64data);
const byteNumbers = new Array(byteCharacters.length);
for (let i = 0; i < byteCharacters.length; i++) {
    byteNumbers[i] = byteCharacters.charCodeAt(i);
}
const byteArray = new Uint8ClampedArray(byteNumbers);
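If what you have is a full data URI, here is a minimal sketch of stripping the prefix before running the conversion above (the dataUri value is just a truncated placeholder):

const dataUri = 'data:image/png;base64,iVBORw0KGgo...'; // placeholder
// Keep only the part after the comma, i.e. the raw base64 payload.
const base64data = dataUri.substring(dataUri.indexOf(',') + 1);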

Drag an image from table cell to an iFrame droppable area

I have problems dragging an image out of a dynamic table in one iframe (1) to a droppable area in another iframe (2). I think there is no permission issue, but a "type" issue. The drop area in iframe (2) works with files from anywhere except from iframe (1). Iframe (1) is hosted by localhost; iframe (2) is hosted by a different domain.
I tested three different functions I found to convert a data URI to a File or Blob. The setData arguments have also been tested with all possibilities, with no success so far.
Interestingly, having opened the site with the two iframes in Chrome and Firefox, I am able to drag and drop from Firefox to Chrome, but the image gets converted to bmp type and renamed! The other way around it is not working.
What are the right arguments for setData / getData to drop an image data URI on a drop area?
Do I have to convert the data URI to a File resp. Blob object?
If so, again, what arguments will do the job?
Any help would be appreciated very much!
var dataUri;
var file;

function mouseDown(event){
    toDataURL(event.target.src, function(dataUrl) {
        console.log('RESULT:', dataUrl.replace('data:text/plain;base64,', ''));
        dataUri = dataUrl.replace('data:text/plain;base64,', '');
        file = urltoFile(dataUrl, 'hello.jpg', 'image/jpeg')
            .then(function(file){ console.log(file); });
    })
}

function drag(event) {
    // event.stopPropagation();
    console.log(file);
    console.log(dataUri);
    event.dataTransfer.setData("text/plain", file);
    //event.dataTransfer.mozSetDataAt("application/x-moz-file", file, 0);
    event.dataTransfer.effectAllowed = "move";
}

function drop(event) {
    //event.preventDefault();
    console.log(file);
    console.log(dataUri);
    var data = event.dataTransfer.getData("text/plain");
    //var data = event.dataTransfer.mozGetDataAt("application/x-moz-file", 0);
    event.target.appendChild(document.getElementById(data));
    console.log(data);
}

function allowDrop(event) {
    event.preventDefault();
}

function dataURLtoFile(dataurl, filename) {
    var arr = dataurl.split(','), mime = arr[0].match(/:(.*?);/)[1],
        bstr = atob(arr[1]), n = bstr.length, u8arr = new Uint8Array(n);
    while(n--){
        u8arr[n] = bstr.charCodeAt(n);
    }
    return new File([u8arr], filename, {type: mime});
}

function urltoFile(url, filename, mimeType){
    return (fetch(url)
        .then(function(res){ return res.arrayBuffer(); })
        .then(function(buf){ return new File([buf], filename, {type: mimeType}); })
    );
}

function dataURItoBlob(dataURI) {
    // convert base64 to raw binary data held in a string
    // doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
    var byteString = atob(dataURI.split(',')[1]);

    // separate out the mime component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];

    // write the bytes of the string to an ArrayBuffer
    var ab = new ArrayBuffer(byteString.length);
    var dw = new DataView(ab);
    for(var i = 0; i < byteString.length; i++) {
        dw.setUint8(i, byteString.charCodeAt(i));
    }

    // write the ArrayBuffer to a blob, and you're done
    return new Blob([ab], {type: mimeString});
}

Parse Server S3 Adaptor saves image as .txt

I am using the S3 adaptor to save images to my S3 bucket. My client code is:
var saveFileToParse = function(imageData) {
    var parseFile = new Parse.File(Parse.User.current().id + " Image", {
        base64: imageData
    });
    $ionicLoading.show();
    parseFile.save().then(function(response) {
        if (angular.isNumber($scope.activeExercise.images[0])) {
            $scope.activeExercise.images = [];
        }
        var imageUrl = response._url;
        $scope.activeExercise.images.push(imageUrl);
        $scope.$apply();
        $scope.customImage = true;
        $ionicLoading.hide();
    }).then(function(imageUrl) {
        var file = new Parse.Object("Files");
        file.set("file", parseFile);
        file.save().then(function(response) {
            $ionicLoading.hide();
        });
    },
    function(error) {
        $ionicLoading.hide();
        errorFactory.checkError(error);
    });
};
The file is being saved, however it is being saved as a .txt file, I expect because of the base64; this was never an issue on parse.com, though. Can I explicitly make this save as .jpg?
EDIT:
I employed the code below and this gives a different filename and shows as an image in S3. However, when downloaded it still has a .txt file extension. I have even changed my save command to include the mime type: var parseFile = new Parse.File(name, file, "image/jpeg");
The issue here was caused by the old parse.com server automatically applying %20 when there was a space in the file URL.
Removing the space made it work.
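In other words, something along these lines when naming the Parse.File should avoid the problem (a sketch, not the exact code from the question):

// Strip or replace whitespace in the file name before handing it to Parse.File,
// so the generated S3 URL never needs a %20.
var safeName = (Parse.User.current().id + " Image").replace(/\s+/g, "_");
var parseFile = new Parse.File(safeName, { base64: imageData });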
Honestly, you just need to read the documentation thoroughly. The answer is already available there, so just pass the file object.
var fileUploadControl = $("#profilePhotoFileUpload")[0];
if (fileUploadControl.files.length > 0) {
    var file = fileUploadControl.files[0];
    var name = "photo.jpg";
    var parseFile = new Parse.File(name, file);
    parseFile.save().then(function () {
        // The file has been saved to Parse.
    }, function (error) {
        // The file either could not be read, or could not be saved to Parse.
    });
}

Node.js Base64 Image decoding and writing to file

I'm sending the contents of this Flex form (don't ask why) over to Node. There is a POST parameter called "photo" which is a base64-encoded image.
The contents of photo get sent over OK. The problem is when I am trying to decode the content and write it to a file.
var fs = require("fs");
fs.writeFile("arghhhh.jpg", new Buffer(request.body.photo, "base64").toString(), function(err) {});
I've tried toString("binary") as well, but it seems Node doesn't decode all of the content. It seems it only decodes the JPG header info and leaves the rest.
Can anyone please help me with this?
Thanks
Try removing the .toString() entirely and just write the buffer directly.
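In other words, something along these lines (a sketch; Buffer.from is the non-deprecated replacement for new Buffer):

var fs = require("fs");

// Write the decoded bytes directly; converting the Buffer to a string corrupts the binary data.
fs.writeFile("arghhhh.jpg", Buffer.from(request.body.photo, "base64"), function(err) {
    if (err) console.log(err);
});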
This is my full solution, which reads any base64 image format, decodes it, and saves it in the proper format; it is the same decodeBase64Image solution posted in full earlier on this page.
In Node.js 8.11.3, new Buffer(string, encoding) is deprecated; the new way to do this is Buffer.from(string, encoding), always without .toString().
For more details, read the documentation in the Node.js docs: Buffer
Remove .toString()
Here you decode the base64 to a buffer, which is fine, but then you convert the buffer into a string. This means that it is a string object whose code points are bytes of the buffer.

Node.js proxy, dealing with gzip DEcompression

I'm currently working on a proxy server where we, in this case, have to modify the data (using regexps) that we push through it.
In most cases it works fine, except for websites that use gzip as the content-encoding (I think). I've come across a module called compress and tried to push the chunks that I receive through a decompress/gunzip stream, but it isn't really turning out as I expected (see below for code).
I figured I'd post some code to support my problem; this is the proxy that gets loaded with MVC (Express):
module.exports = {
    index: function(request, response){
        var iframe_url = "www.nu.nl"; // site with gzip encoding
        var http = require('http');
        var httpClient = http.createClient(80, iframe_url);
        var headers = request.headers;
        headers.host = iframe_url;

        var remoteRequest = httpClient.request(request.method, request.url, headers);

        request.on('data', function(chunk) {
            remoteRequest.write(chunk);
        });
        request.on('end', function() {
            remoteRequest.end();
        });

        remoteRequest.on('response', function (remoteResponse){
            var body_regexp = new RegExp("<head>"); // regex to find first head tag
            var href_regexp = new RegExp('\<a href="(.*)"', 'g'); // regex to find hrefs

            response.writeHead(remoteResponse.statusCode, remoteResponse.headers);

            remoteResponse.on('data', function (chunk) {
                var body = doDecompress(new compress.GunzipStream(), chunk);
                body = body.replace(body_regexp, "<head><base href=\"http://"+ iframe_url +"/\">");
                body = body.replace(href_regexp, '<a href="#" onclick="javascript:return false;"');
                response.write(body, 'binary');
            });
            remoteResponse.on('end', function() {
                response.end();
            });
        });
    }
};
At the var body part I want to read the body and, in this case for example, remove all hrefs by replacing them with a #. The problem here, of course, is that when we have a site which is gzip encoded/compressed, it's all gibberish and we can't apply the regexps.
Now, I've already tried to mess around with the node-compress module:
doDecompress(new compress.GunzipStream(), chunk);
which refers to
function doDecompress(decompressor, input) {
    var d1 = input.substr(0, 25);
    var d2 = input.substr(25);

    sys.puts('Making decompression requests...');
    var output = '';
    decompressor.setInputEncoding('binary');
    decompressor.setEncoding('utf8');
    decompressor.addListener('data', function(data) {
        output += data;
    }).addListener('error', function(err) {
        throw err;
    }).addListener('end', function() {
        sys.puts('Decompressed length: ' + output.length);
        sys.puts('Raw data: ' + output);
    });
    decompressor.write(d1);
    decompressor.write(d2);
    decompressor.close();
    sys.puts('Requests done.');
}
But it fails, since the chunk input is an object, so I tried supplying it as chunk.toString(), which also fails with invalid input data.
I was wondering whether I am heading in the right direction at all?
The decompressor expects binary-encoded input. The chunk that your response receives is an instance of Buffer, whose toString() method by default gives you back a UTF-8 encoded string.
So you have to use chunk.toString('binary') to make it work; this can also be seen in the demo.
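As an aside, on current Node versions the same idea can be sketched with the built-in zlib module instead of node-compress. This is only a sketch under the assumption that it replaces the remoteResponse 'data'/'end' handlers in the proxy above, and it buffers the whole body before rewriting it:

var zlib = require('zlib');

// Pipe the gzipped response through a gunzip stream, collect the decompressed
// chunks, and run the regexps on the complete body at the end.
var gunzip = zlib.createGunzip();
var chunks = [];

remoteResponse.pipe(gunzip);

gunzip.on('data', function (chunk) {
    chunks.push(chunk);
});

gunzip.on('end', function () {
    var body = Buffer.concat(chunks).toString('utf8');
    body = body.replace(body_regexp, '<head><base href="http://' + iframe_url + '/">');
    body = body.replace(href_regexp, '<a href="#" onclick="javascript:return false;"');

    // The modified body no longer matches the original Content-Length or Content-Encoding,
    // so drop those headers before answering the client.
    delete remoteResponse.headers['content-encoding'];
    delete remoteResponse.headers['content-length'];
    response.writeHead(remoteResponse.statusCode, remoteResponse.headers);
    response.end(body);
});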
