I have problems dragging an image from one iframe (1), out of a dynamic table, to a droppable area in another iframe (2). I think there is no permission issue, but a "type" issue. The drop area in iframe (2) works with files from anywhere except from iframe (1). Iframe (1) is hosted on localhost; iframe (2) is hosted on a different domain.
I tested three different functions I found for converting a data URI to a File or Blob. I have also tried all the possible setData arguments, with no success so far.
Interestingly, with the site containing the two iframes open in both Chrome and Firefox, I am able to drag and drop from Firefox to Chrome, but the image gets converted to BMP and renamed! The other way around does not work.
What are the right arguments for setData/getData to drop an image data URI on a drop area? Do I have to convert the data URI to a File or Blob object? If so, again, what arguments will do the job?
Any help would be appreciated very much!
var dataUri;
var file;

function mouseDown(event) {
  // convert the dragged image's src to a data URL (toDataURL is a helper defined elsewhere)
  toDataURL(event.target.src, function (dataUrl) {
    console.log('RESULT:', dataUrl.replace('data:text/plain;base64,', ''));
    dataUri = dataUrl.replace('data:text/plain;base64,', '');
    file = urltoFile(dataUrl, 'hello.jpg', 'image/jpeg')
      .then(function (file) { console.log(file); });
  });
}

function drag(event) {
  // event.stopPropagation();
  console.log(file);
  console.log(dataUri);
  // note: 'file' here is the Promise returned by urltoFile(...).then(...), not a File
  event.dataTransfer.setData("text/plain", file);
  //event.dataTransfer.mozSetDataAt("application/x-moz-file", file, 0);
  event.dataTransfer.effectAllowed = "move";
}

function drop(event) {
  //event.preventDefault();
  console.log(file);
  console.log(dataUri);
  var data = event.dataTransfer.getData("text/plain");
  //var data = event.dataTransfer.mozGetDataAt("application/x-moz-file", 0);
  event.target.appendChild(document.getElementById(data));
  console.log(data);
}

function allowDrop(event) {
  event.preventDefault();
}
function dataURLtoFile(dataurl, filename) {
  var arr = dataurl.split(','),
      mime = arr[0].match(/:(.*?);/)[1],
      bstr = atob(arr[1]),
      n = bstr.length,
      u8arr = new Uint8Array(n);
  while (n--) {
    u8arr[n] = bstr.charCodeAt(n);
  }
  return new File([u8arr], filename, { type: mime });
}
function urltoFile(url, filename, mimeType) {
  return (fetch(url)
    .then(function (res) { return res.arrayBuffer(); })
    .then(function (buf) { return new File([buf], filename, { type: mimeType }); })
  );
}

function dataURItoBlob(dataURI) {
  // convert base64 to raw binary data held in a string
  // doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
  var byteString = atob(dataURI.split(',')[1]);

  // separate out the mime component
  var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];

  // write the bytes of the string to an ArrayBuffer
  var ab = new ArrayBuffer(byteString.length);
  var dw = new DataView(ab);
  for (var i = 0; i < byteString.length; i++) {
    dw.setUint8(i, byteString.charCodeAt(i));
  }

  // write the ArrayBuffer to a blob, and you're done
  return new Blob([ab], { type: mimeString });
}
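For reference, a minimal sketch of one way to pass the image between the frames: put the data URI string itself into the drag data and rebuild an img element from it on the drop side. The handler names and wiring below are assumptions, not taken from the code above.

// Drag source (iframe 1): store the data URI string in the drag data
function dragStart(event) {
  // dataUri is the data: URL produced for the dragged image
  event.dataTransfer.setData("text/uri-list", dataUri);
  event.dataTransfer.setData("text/plain", dataUri);
  event.dataTransfer.effectAllowed = "copy";
}

// Drop target (iframe 2): read the string back and rebuild an <img>
// (the target also needs a dragover handler calling preventDefault, like allowDrop above)
function dropImage(event) {
  event.preventDefault();
  var uri = event.dataTransfer.getData("text/uri-list") ||
            event.dataTransfer.getData("text/plain");
  if (!uri) { return; }
  var img = document.createElement("img");
  img.src = uri;
  event.target.appendChild(img);
}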
I am using the following sample to resize uploaded images with Blazor WebAssembly: https://www.prowaretech.com/Computer/Blazor/Examples/WebApi/UploadImages.
I still need the original file to be converted to base64 as well, and I don't know how I can access it. I tried to find the file's original width and height to pass them to the RequestImageFileAsync function, but with no success. I need to store both files: the original one and the resized one.
Can you help me, please? Thank you very much!
The InputFile control emits an IBrowserFile type. RequestImageFileAsync is a convenience method on IBrowserFile to resize the image and convert the type. The result is still an IBrowserFile.
One way to do what you are asking is with SixLabors.ImageSharp. Based on the ProWareTech example, something like this...
async Task OnChange(InputFileChangeEventArgs e)
{
    var files = e.GetMultipleFiles(); // get the files selected by the user
    foreach (var file in files)
    {
        // Original-sized file
        var buf1 = new byte[file.Size];
        using (var stream = file.OpenReadStream())
        {
            await stream.ReadAsync(buf1); // copy the stream to the buffer
        }
        origFilesBase64.Add(new ImageFile { base64data = Convert.ToBase64String(buf1), contentType = file.ContentType, fileName = file.Name }); // convert to a base64 string

        // Resized file
        var resizedFile = await file.RequestImageFileAsync(file.ContentType, 640, 480); // resize the image file
        var buf = new byte[resizedFile.Size]; // allocate a buffer to fill with the file's data
        using (var stream = resizedFile.OpenReadStream())
        {
            await stream.ReadAsync(buf); // copy the stream to the buffer
        }
        filesBase64.Add(new ImageFile { base64data = Convert.ToBase64String(buf), contentType = file.ContentType, fileName = file.Name }); // convert to a base64 string
    }

    // To get the image sizes for the first image (Image.Load comes from SixLabors.ImageSharp)
    using var origImage = Image.Load(Convert.FromBase64String(origFilesBase64[0].base64data));
    int origImgHeight = origImage.Height;
    int origImgWidth = origImage.Width;

    using var resizedImage = Image.Load(Convert.FromBase64String(filesBase64[0].base64data));
    int resizedImgHeight = resizedImage.Height;
    int resizedImgWidth = resizedImage.Width;
}
I am trying to compress an image client-side (Angular 2/Ionic 3), and when I log the file that is created by the camera, it says "type": null when it should say "type": "image/jpeg".
I am using the Ionic camera plugin to handle taking a picture or choosing one from the photo library; after that (I assume) the file is created without a type. Every compression tool I have tried has had this problem, and I have run out of options. Is there a way to change the type of a File object?
I created a new Blob with this method and made sure to give it an image type:
dataURItoBlob(dataURI, callback): Promise<any> {
  return new Promise((resolve, reject) => {
    let byteString = atob(dataURI);
    //console.log(byteString);

    // separate out the mime component
    //let mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0]

    // write the bytes of the string to an ArrayBuffer
    let ab = new ArrayBuffer(byteString.length);
    let ia = new Uint8Array(ab);
    for (let i = 0; i < byteString.length; i++) {
      ia[i] = byteString.charCodeAt(i);
    }

    // write the ArrayBuffer to a blob, and you're done
    let bb = new Blob([ab], {type: 'image/jpeg'});
    resolve(bb);
  });
}
I used it after reading the contents of the image to base64 like this and got the resulting blob with image type:
var readerZ = new FileReader();
readerZ.onload = (e) => {
  let data = readerZ.result.split(',')[1];
  //console.log(data);
  self.dataURItoBlob(data, null).then(blob => {
    // the blob now has type 'image/jpeg'; hand it to the compression tool here
  });
};
readerZ.readAsDataURL(file); // 'file' is the File returned by the camera plugin (name assumed)
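As an aside (not from the original answer): if the runtime supports the File constructor, a minimal sketch of changing the type without a manual base64 round-trip is to re-wrap the original File:

// Re-wrap the camera's File with an explicit MIME type ('originalFile' is an assumed name)
var typedFile = new File([originalFile], originalFile.name, { type: 'image/jpeg' });
console.log(typedFile.type); // "image/jpeg"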
I'm trying to allow users to upload images and then store the images, base64-encoded, in Firebase. I'm trying to structure my Firebase data as follows:
|--Feed
|----Feed Item
|------username
|------epic
|---------name,etc.
|------images
|---------image1, image 2, etc.
However, I can't get the remote data in Firebase to mirror the local data in the client. When I print the array of images to the console in the client, it shows that the uploaded images have been added to the array of images... but these images never make it to Firebase. I've tried doing this multiple ways to no avail. I tried using implicit syncing, explicit syncing, and a mixture of both. I can't for the life of me figure out why this isn't working, and I'm getting pretty frustrated. Here's my code:
$scope.complete = function (epicName) {
  for (var i = 0; i < $scope.activeEpics.length; i++) {
    if ($scope.activeEpics[i].id === epicName) {
      var epicToAdd = $scope.activeEpics[i];
    }
  }
  var epicToAddToFeed = {epic: epicToAdd, username: $scope.currUser.username, userImage: $scope.currUser.image, props: 0, images: ['empty']};
  //connect to feed data
  var feedUrl = "https://myfirebaseurl.com/feed";
  $scope.feed = angularFireCollection(new Firebase(feedUrl));
  //add epic
  var added = $scope.feed.add(epicToAddToFeed).name();
  //connect to added epic in firebase
  var addedUrl = "https://myfirebaseurl.com/feed/" + added;
  var addedRef = new Firebase(addedUrl);
  angularFire(addedRef, $scope, 'added').then(function () {
    // for each image uploaded, add image to the added epic's array of images
    for (var i = 0, f; f = $scope.files[i]; i++) {
      var reader = new FileReader();
      reader.onload = (function (theFile) {
        return function (e) {
          var filePayload = e.target.result;
          $scope.added.images.push(filePayload);
        };
      })(f);
      reader.readAsDataURL(f);
    }
  });
};
EDIT: Figured it out, had to connect to "https://myfirebaseurl.com/feed/" + added + "/images"
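For anyone hitting the same issue, a minimal sketch of that fix, keeping the question's variable names and the old angularFire(ref, $scope, name) binding style (the 'addedImages' model name is an assumption):

// Bind the images array at its own path so local pushes sync to Firebase
var imagesRef = new Firebase("https://myfirebaseurl.com/feed/" + added + "/images");
angularFire(imagesRef, $scope, 'addedImages').then(function () {
  // inside each FileReader onload callback, push the data URL to the bound model
  $scope.addedImages.push(filePayload);
});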
Can we copy (Ctrl+C) and paste (Ctrl+V) an image from the user's system (desktop/any folder) onto a canvas using Fabric.js? I have seen the copy and paste program inside the canvas, and I found this example while searching Google, but I didn't find any relevant example for desktop to canvas. Here is the snippet for copy and paste:
function onKeyDownHandler(event) {
  //event.preventDefault();
  var key;
  if (window.event) {
    key = window.event.keyCode;
  }
  else {
    key = event.keyCode;
  }
  switch (key) {
    //////////////
    // Shortcuts
    //////////////
    // Copy (Ctrl+C)
    case 67: // Ctrl+C
      if (ableToShortcut()) {
        if (event.ctrlKey) {
          event.preventDefault();
          copy();
        }
      }
      break;
    // Paste (Ctrl+V)
    case 86: // Ctrl+V
      if (ableToShortcut()) {
        if (event.ctrlKey) {
          event.preventDefault();
          paste();
        }
      }
      break;
    default:
      // TODO
      break;
  }
}

function ableToShortcut() {
  /*
  TODO check all cases for this
  if($("textarea").is(":focus")){
    return false;
  }
  if($(":text").is(":focus")){
    return false;
  }
  */
  return true;
}

function copy() {
  if (canvas.getActiveGroup()) {
    for (var i in canvas.getActiveGroup().objects) {
      var object = fabric.util.object.clone(canvas.getActiveGroup().objects[i]);
      object.set("top", object.top + 5);
      object.set("left", object.left + 5);
      copiedObjects[i] = object;
    }
  }
  else if (canvas.getActiveObject()) {
    var object = fabric.util.object.clone(canvas.getActiveObject());
    object.set("top", object.top + 5);
    object.set("left", object.left + 5);
    copiedObject = object;
    copiedObjects = new Array();
  }
}

function paste() {
  if (copiedObjects.length > 0) {
    for (var i in copiedObjects) {
      canvas.add(copiedObjects[i]);
    }
  }
  else if (copiedObject) {
    canvas.add(copiedObject);
  }
  canvas.renderAll();
}
Is this actually possible? I have heard that it may not be possible. Can anyone guide me on how to do it, please?
If you're targeting modern browsers, you can combine two new (but widely adopted) HTML5 features to accomplish your task:
You can create a dropzone on your page using the dragover and drop events.
Then you can use the FileReader API to read the image files into an image object.
Then it's back to FabricJS to load the image as usual.
Here's a tutorial describing how to do the hard bits (#1,#2): http://www.html5rocks.com/en/tutorials/file/dndfiles/
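A minimal sketch of those steps, assuming a drop target element with the id dropZone and the question's fabric.Canvas instance named canvas:

var dropZone = document.getElementById("dropZone"); // assumed drop target element
dropZone.addEventListener("dragover", function (e) { e.preventDefault(); });
dropZone.addEventListener("drop", function (e) {
  e.preventDefault();
  var file = e.dataTransfer.files[0];
  if (!file || file.type.indexOf("image") === -1) { return; }
  // read the dropped image file as a data URL
  var reader = new FileReader();
  reader.onload = function (evt) {
    // hand the data URL to Fabric as usual
    fabric.Image.fromURL(evt.target.result, function (img) {
      canvas.add(img);
      canvas.renderAll();
    });
  };
  reader.readAsDataURL(file);
});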
[ Added code that SOMETIMES allows cut/paste of image files ]
Most modern browsers support binding the “paste” event.
// listen for the paste event
window.addEventListener("paste",pasteImage);
But...!!
Support for non-text MIME types (i.e. “image”) is scarce. Chrome seems to support it “off-and-on”.
…And browsers are constantly revising their cut/paste capabilities because of security concerns.
Here is code that sometimes works in Chrome.
// listen for the paste event
window.addEventListener("paste",pasteImage);
function pasteImage(event) {
  // get the raw clipboardData
  var cbData = event.clipboardData;
  for (var i = 0; i < cbData.items.length; i++) {
    // get the clipboard item
    var cbDataItem = cbData.items[i];
    var type = cbDataItem.type;
    // warning: most browsers don't support image data type
    if (type.indexOf("image") != -1) {
      // grab the imageData (as a blob)
      var imageData = cbDataItem.getAsFile();
      // format the imageData into a URL
      var imageURL = window.webkitURL.createObjectURL(imageData);
      // We've got an imageURL, add code to use it as needed
      // the imageURL can be used as src for an Image object
    }
  }
}
I'm sending the contents of this Flex form (don't ask why) over to Node. There is a POST parameter called "photo" which is a base64-encoded image.
The contents of photo get sent over OK. The problem is when I try to decode the content and write it to a file.
var fs = require("fs");
fs.writeFile("arghhhh.jpg", new Buffer(request.body.photo, "base64").toString(), function(err) {});
I've tried toString("binary") as well, but it seems Node doesn't decode all of the content. It looks like it only decodes the JPG header info and leaves the rest.
Can anyone please help me with this?
Thanks
Try removing the .toString() entirely and just write the buffer directly.
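In other words, something like this (a minimal sketch using the question's variable names; Buffer.from is the non-deprecated form discussed further down):

var fs = require("fs");

// decode the base64 payload and write the raw bytes; no .toString()
fs.writeFile("arghhhh.jpg", Buffer.from(request.body.photo, "base64"), function (err) {
  if (err) { console.error(err); }
});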
This is my full solution, which reads any base64 image format, decodes it, and saves it on disk in the proper format:
// Save base64 image to disk
try
{
    // Decoding base-64 image
    // Source: http://stackoverflow.com/questions/20267939/nodejs-write-base64-image-file
    function decodeBase64Image(dataString)
    {
        var matches = dataString.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
        var response = {};

        if (!matches || matches.length !== 3)
        {
            throw new Error('Invalid input string');
        }

        response.type = matches[1];
        response.data = new Buffer(matches[2], 'base64'); // new Buffer is deprecated; see the Buffer.from note below

        return response;
    }

    // Regular expression for image type:
    // This regular expression extracts the "jpeg" from "image/jpeg"
    var imageTypeRegularExpression = /\/(.*?)$/;

    // Generate random string
    var crypto = require('crypto');
    var seed = crypto.randomBytes(20);
    var uniqueSHA1String = crypto
        .createHash('sha1')
        .update(seed)
        .digest('hex');

    var base64Data = 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEAZABkAAD/4Q3zaHR0cDovL25zLmFkb2JlLmN...';

    var imageBuffer = decodeBase64Image(base64Data);
    var userUploadedFeedMessagesLocation = '../img/upload/feed/';

    var uniqueRandomImageName = 'image-' + uniqueSHA1String;

    // imageTypeDetected is the regex match array;
    // the [1] capture group is the real image extension
    var imageTypeDetected = imageBuffer
        .type
        .match(imageTypeRegularExpression);

    var userUploadedImagePath = userUploadedFeedMessagesLocation +
        uniqueRandomImageName +
        '.' +
        imageTypeDetected[1];

    // Save decoded binary image to disk
    try
    {
        require('fs').writeFile(userUploadedImagePath, imageBuffer.data,
            function()
            {
                console.log('DEBUG - feed:message: Saved to disk image attached by user:', userUploadedImagePath);
            });
    }
    catch(error)
    {
        console.log('ERROR:', error);
    }
}
catch(error)
{
    console.log('ERROR:', error);
}
In Node.js 8.11.3, new Buffer(string, encoding) is deprecated; the new way to do this is Buffer.from(string, encoding), again without .toString().
For more details, read the Buffer documentation in the Node.js docs.
Remove the .toString().
Here you decode the base64 into a buffer, which is fine, but then you convert the buffer into a string. That conversion treats the binary image data as text and mangles it, which is why only part of the image survives; write the Buffer directly instead.