I need to take a picture, convert the file to an image to crop, and then convert the image back to a file to then run into a tflite model (currently just displaying an image on another screen).
As it stands I am using a simple camera app (https://flutter.dev/docs/cookbook/plugins/picture-using-camera?source=post_page---------------------------)
and stacking a container on the preview screen to use as a viewfinder. I use the rect_getter package to get the container coordinates for the copyCrop() function from the Image package.
Attempting to convert my file to an image (so the copyCrop() function can be run) and then back to a file (cropSaveFile.path) to later be used in a tflite model is resulting in an error: The following FileSystemException was thrown resolving an image codec:
(followed by unreadable binary garbage — the raw pixel bytes were being interpreted as a file path)
// Capture a frame; the camera plugin saves an encoded image on disk and
// returns a handle whose .path points at it.
final image = await _controller.takePicture();
////////////////////////////////////////////
final xpath = image.path;
// Read the encoded file and decode it into a pixel buffer for cropping.
final bytes = await File(xpath).readAsBytes();
final img.Image? newImage = img.decodeImage(bytes);
////////////////////////////////////////////
img.Image crop =
img.copyCrop(newImage!, _proX, _proY, _proW, _proH);
print('Crop: $crop');
// NOTE(review): getBytes() returns *raw* pixel bytes (no JPEG/PNG header),
// and File.fromRawPath() interprets its argument as a byte-encoded file
// *path*, not as file contents — hence the unreadable "file" and the
// garbled codec error. Encode the crop (img.encodeJpg) and write it with
// writeAsBytes() to a real path instead.
final newBytes = crop.getBytes();
final File cropSaveFile = File.fromRawPath(newBytes);
I'm not sure what kind of file I'm really getting back. It is unreadable. Any Ideas? The full code to run is below:
import 'dart:async';
import 'dart:io';
import 'package:universal_io/io.dart';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:rect_getter/rect_getter.dart';
import 'package:image/image.dart' as img;
import 'package:path_provider/path_provider.dart';
/// App entry point: discovers the device cameras and launches the capture
/// UI with the first one found.
Future<void> main() async {
  // Plugin services must be initialized before `availableCameras()` runs.
  WidgetsFlutterBinding.ensureInitialized();

  // Pick the first camera reported by the device.
  final camerasOnDevice = await availableCameras();
  final chosenCamera = camerasOnDevice.first;

  runApp(
    MaterialApp(
      theme: ThemeData.dark(),
      // Hand the chosen camera to the capture screen.
      home: TakePictureScreen(camera: chosenCamera),
    ),
  );
}
// A screen that allows users to take a picture using a given camera.
/// A screen that allows users to take a picture using a given camera.
class TakePictureScreen extends StatefulWidget {
  const TakePictureScreen({
    Key? key,
    required this.camera,
  }) : super(key: key);

  /// The camera this screen drives; supplied by [main].
  final CameraDescription camera;

  // BUG FIX: Dart annotations use `@`, not `#` — `#override` does not compile.
  @override
  TakePictureScreenState createState() => TakePictureScreenState();
}
/// State for [TakePictureScreen]: shows the live preview with a yellow
/// viewfinder box, and on shutter press crops the capture to (roughly)
/// the box and pushes [DisplayPictureScreen].
class TakePictureScreenState extends State<TakePictureScreen> {
  late CameraController _controller;
  late Future<void> _initializeControllerFuture;

  // NOTE(review): fields should be lowerCamelCase (`containerKey`);
  // name kept so nothing referencing it breaks.
  var ContainerKey = RectGetter.createGlobalKey();

  // Screen coordinates of the viewfinder box, captured on shutter press.
  late int _proX;
  late int _proY;
  late int _proW;
  late int _proH;

  @override // BUG FIX: `#override` -> `@override` (Dart annotation syntax).
  void initState() {
    super.initState();
    // Create a controller to display the current camera output.
    _controller = CameraController(
      widget.camera,
      ResolutionPreset.medium,
    );
    // Initialization is async; build() gates the preview on this future.
    _initializeControllerFuture = _controller.initialize();
  }

  @override // BUG FIX: `#override` -> `@override`.
  void dispose() {
    // Release the camera when the widget is disposed.
    _controller.dispose();
    super.dispose();
  }

  @override // BUG FIX: `#override` -> `@override`.
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Take a picture')),
      // Wait for controller initialization before showing the preview.
      body: FutureBuilder<void>(
        future: _initializeControllerFuture,
        builder: (context, snapshot) {
          if (snapshot.connectionState == ConnectionState.done) {
            // Preview with the viewfinder box stacked on top.
            return Column(
              children: [
                AspectRatio(
                  aspectRatio: 1 / _controller.value.aspectRatio,
                  child: Stack(
                    children: [
                      CameraPreview(_controller),
                      Padding(
                        padding: const EdgeInsets.fromLTRB(
                          50.0,
                          8.0,
                          16.0,
                          8.0,
                        ),
                        child: Column(
                          crossAxisAlignment: CrossAxisAlignment.center,
                          mainAxisAlignment: MainAxisAlignment.center,
                          children: [
                            // The viewfinder; its on-screen rect is read
                            // back through ContainerKey on shutter press.
                            Container(
                              key: ContainerKey,
                              height: 175,
                              width: 175,
                              decoration: BoxDecoration(
                                border: Border.all(
                                  width: 10,
                                  color: Colors.yellow,
                                ),
                                borderRadius: BorderRadius.circular(10.0),
                              ),
                            ),
                            Text(
                              'Place Image in Box Above',
                              style: TextStyle(
                                fontSize: 20.0,
                                fontWeight: FontWeight.bold,
                              ),
                            ),
                          ],
                        ),
                      ),
                    ],
                  ),
                ),
              ],
            );
          } else {
            // Otherwise, display a loading indicator.
            return const Center(child: CircularProgressIndicator());
          }
        },
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: () async {
          // Take the picture inside try/catch so failures are logged.
          try {
            await _initializeControllerFuture;
            // BUG FIX: setFlashMode returns a Future; await it so the
            // flash is actually off before the shot is taken.
            await _controller.setFlashMode(FlashMode.off);

            final Rect? imageRect = RectGetter.getRectFromKey(ContainerKey);
            setState(() {
              _proX = imageRect!.left.toInt();
              _proY = imageRect.top.toInt();
              // NOTE(review): right/bottom are absolute screen coordinates,
              // but copyCrop() expects a width and height — this crops a
              // larger region than the box. TODO confirm intent.
              _proW = imageRect.right.toInt();
              _proH = imageRect.bottom.toInt();
            });
            print(_proX);
            print(_proY);
            print(_proW);
            print(_proH);

            // Take a picture; `image.path` is where the plugin saved it.
            final image = await _controller.takePicture();
            final xpath = image.path;
            final bytes = await File(xpath).readAsBytes();
            final img.Image? newImage = img.decodeImage(bytes);
            img.Image crop =
                img.copyCrop(newImage!, _proX, _proY, _proW, _proH);
            print('Crop: $crop');
            // BUG FIX: the original passed crop.getBytes() (raw RGBA pixel
            // data) to File.fromRawPath(), which treats the bytes as a file
            // *path*, yielding an unreadable "file". Encode to JPEG and
            // write it to a real path instead.
            final newBytes = img.encodeJpg(crop);
            final File cropSaveFile =
                await File(xpath).writeAsBytes(newBytes);

            // Display the cropped picture on a new screen.
            await Navigator.of(context).push(
              MaterialPageRoute(
                builder: (context) => DisplayPictureScreen(
                  imagePath: cropSaveFile.path,
                ),
              ),
            );
          } catch (e) {
            // If an error occurs, log it to the console.
            print(e);
          }
        },
        child: const Icon(Icons.camera_alt),
      ),
    );
  }
}
// A widget that displays the picture taken by the user.
/// A widget that displays the picture taken by the user.
class DisplayPictureScreen extends StatelessWidget {
  /// Path of the image file on the device to display.
  final String imagePath;

  const DisplayPictureScreen({Key? key, required this.imagePath})
      : super(key: key);

  // BUG FIX: `#override` -> `@override` (Dart annotation syntax).
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Display the Picture')),
      // The image lives on disk; render it via the `Image.file` constructor.
      body: Image.file(File(imagePath)),
    );
  }
}
In the following code
final image = await _controller.takePicture();
final xpath = image.path;
//xpath store the path of the file captured by the app
just use
final File file = File(image.path);
//which is a jpg file for verification just console print the image.path and check for the extension.
By the way, I didn't get why you need a JPEG file: under the hood, in the code snippet you shared, you are already working on a JPEG file. The only difference is that you read it from device memory as bytes to process and crop it, and then generate a file from those bytes again.
Edit: Future me found the following package much better than the code below... https://pub.dev/packages/mask_for_camera_view
This works for me.
////////////////////////////////////////////
final xpath = image.path;
// Decode the captured file into a pixel buffer.
final bytes = await File(xpath).readAsBytes();
final img.Image? newImage = img.decodeImage(bytes);
////////////////////////////////////////////
// NOTE(review): _proY is used for both the x and y crop offsets here —
// presumably the box's left coordinate was intended for x; confirm.
img.Image crop = img.copyCrop(newImage!, _proY, _proY, 175, 175);
// Encode back to JPEG before writing — this is what makes the saved file
// a readable image (unlike raw getBytes() output).
final jpg = img.encodeJpg(crop);
File cropSaveFile = File(xpath);
// NOTE(review): writeAsBytes returns a Future; await it before using the
// file elsewhere, or the read may race the write.
cropSaveFile.writeAsBytes(jpg);
If anybody is interested in full code to crop an image from the camera based on a viewfinder container it is below.
import 'dart:io';
import 'package:universal_io/io.dart';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:rect_getter/rect_getter.dart';
import 'package:image/image.dart' as img;
/// Entry point for the crop-from-viewfinder demo app.
Future<void> main() async {
  // `availableCameras()` requires the binding to be initialized first.
  WidgetsFlutterBinding.ensureInitialized();

  // Enumerate cameras and launch the UI with the first one.
  final cameras = await availableCameras();

  runApp(
    MaterialApp(
      theme: ThemeData.dark(),
      home: TakePictureScreen(camera: cameras.first),
    ),
  );
}
// A screen that allows users to take a picture using a given camera.
/// A screen that allows users to take a picture using a given camera.
class TakePictureScreen extends StatefulWidget {
  const TakePictureScreen({
    Key? key,
    required this.camera,
  }) : super(key: key);

  /// The camera this screen drives; supplied by [main].
  final CameraDescription camera;

  // BUG FIX: `#override` -> `@override` (Dart annotation syntax).
  @override
  TakePictureScreenState createState() => TakePictureScreenState();
}
/// State for [TakePictureScreen]: live preview plus a yellow viewfinder
/// box; on shutter press, crops the capture to a 175x175 region and shows
/// it on [DisplayPictureScreen].
class TakePictureScreenState extends State<TakePictureScreen> {
  late CameraController _controller;
  late Future<void> _initializeControllerFuture;

  // NOTE(review): fields should be lowerCamelCase (`containerKey`);
  // name kept so nothing referencing it breaks.
  var ContainerKey = RectGetter.createGlobalKey();

  // Top coordinate of the viewfinder box, captured on shutter press.
  late int _proY;

  @override // BUG FIX: `#override` -> `@override` (Dart annotation syntax).
  void initState() {
    super.initState();
    // Create a controller to display the current camera output.
    _controller = CameraController(
      widget.camera,
      ResolutionPreset.medium,
    );
    // Initialization is async; build() gates the preview on this future.
    _initializeControllerFuture = _controller.initialize();
  }

  @override // BUG FIX: `#override` -> `@override`.
  void dispose() {
    // Release the camera when the widget is disposed.
    _controller.dispose();
    super.dispose();
  }

  /// Writes [crop]'s pixel buffer to [croppedImagePath].
  ///
  /// NOTE(review): getBytes() returns *raw* pixel data with no image
  /// header, so the resulting file is not a decodable image. This helper
  /// is unused here — the shutter handler encodes to JPEG instead.
  Future<File> writeImageWidgetToFile(
      img.Image crop, String croppedImagePath) async {
    // BUG FIX: getBytes() is synchronous; the stray `await` did nothing.
    final imgByteData = crop.getBytes();
    final buffer = imgByteData.buffer;
    return File(croppedImagePath).writeAsBytes(buffer.asUint8List(
        imgByteData.offsetInBytes, imgByteData.lengthInBytes));
  }

  @override // BUG FIX: `#override` -> `@override`.
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Take a picture')),
      // Wait for controller initialization before showing the preview.
      body: FutureBuilder<void>(
        future: _initializeControllerFuture,
        builder: (context, snapshot) {
          if (snapshot.connectionState == ConnectionState.done) {
            // Preview with the viewfinder box stacked on top.
            return Column(
              children: [
                AspectRatio(
                  aspectRatio: 1 / _controller.value.aspectRatio,
                  child: Stack(
                    children: [
                      CameraPreview(_controller),
                      Padding(
                        padding: const EdgeInsets.fromLTRB(
                          16.0,
                          8.0,
                          16.0,
                          8.0,
                        ),
                        child: Column(
                          crossAxisAlignment: CrossAxisAlignment.center,
                          mainAxisAlignment: MainAxisAlignment.center,
                          children: [
                            // The viewfinder; its on-screen rect is read
                            // back through ContainerKey on shutter press.
                            Container(
                              key: ContainerKey,
                              height: 175,
                              width: 175,
                              decoration: BoxDecoration(
                                border: Border.all(
                                  width: 10,
                                  color: Colors.yellow,
                                ),
                                borderRadius: BorderRadius.circular(10.0),
                              ),
                            ),
                            Text(
                              'Place Image in Box Above',
                              style: TextStyle(
                                fontSize: 20.0,
                                fontWeight: FontWeight.bold,
                              ),
                            ),
                          ],
                        ),
                      ),
                    ],
                  ),
                ),
              ],
            );
          } else {
            // Otherwise, display a loading indicator.
            return const Center(child: CircularProgressIndicator());
          }
        },
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: () async {
          // Take the picture inside try/catch so failures are logged.
          try {
            await _initializeControllerFuture;
            // BUG FIX: setFlashMode returns a Future; await it so the
            // flash is actually off before the shot is taken.
            await _controller.setFlashMode(FlashMode.off);

            final Rect? imageRect = RectGetter.getRectFromKey(ContainerKey);
            setState(() {
              _proY = imageRect!.top.toInt();
            });
            print('Top Left Corner of Rect: $_proY');

            // Take a picture; `image.path` is where the plugin saved it.
            final image = await _controller.takePicture();
            final xpath = image.path;
            final bytes = await File(xpath).readAsBytes();
            final img.Image? newImage = img.decodeImage(bytes);
            // NOTE(review): _proY is used for both the x and y crop
            // offsets — presumably the box's left coordinate was intended
            // for x. TODO confirm.
            img.Image crop = img.copyCrop(newImage!, _proY, _proY, 175, 175);
            final jpg = img.encodeJpg(crop);
            File cropSaveFile = File(xpath);
            // BUG FIX: await the write so navigation (and the subsequent
            // read by Image.file) cannot race a partially-written file.
            await cropSaveFile.writeAsBytes(jpg);

            // Display the cropped picture on a new screen.
            await Navigator.of(context).push(
              MaterialPageRoute(
                builder: (context) => DisplayPictureScreen(
                  imagePath: cropSaveFile.path,
                ),
              ),
            );
          } catch (e) {
            // If an error occurs, log it to the console.
            print(e);
          }
        },
        child: const Icon(Icons.camera_alt),
      ),
    );
  }
}
// A widget that displays the picture taken by the user.
/// A widget that displays the picture taken by the user.
class DisplayPictureScreen extends StatelessWidget {
  /// Path of the image file on the device to display.
  final String imagePath;

  const DisplayPictureScreen({Key? key, required this.imagePath})
      : super(key: key);

  // BUG FIX: `#override` -> `@override` (Dart annotation syntax).
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Display the Picture')),
      // The image lives on disk; render it via the `Image.file` constructor.
      body: Image.file(File(imagePath)),
    );
  }
}
I am making a face in Flutter using different facial images, and I want to export it as a JPG when the face creation is done. What could I use to achieve this?
You can see here a face is created and i want to export only face as a jpeg.
In this article, use GlobalKey with your widget and save image by following code:
/// Captures the widget under `previewContainer` as a PNG, previews it in
/// `_image2`, and saves it to the app documents directory as
/// `screenshot.png`, confirming with a snackbar.
takeScreenShot() async {
  RenderRepaintBoundary boundary =
      previewContainer.currentContext.findRenderObject();
  // Scale the capture so it matches the original asset's pixel width.
  double pixelRatio = originalSize / MediaQuery.of(context).size.width;
  ui.Image image = await boundary.toImage(pixelRatio: pixelRatio);
  ByteData byteData = await image.toByteData(format: ui.ImageByteFormat.png);
  Uint8List pngBytes = byteData.buffer.asUint8List();
  setState(() {
    _image2 = Image.memory(pngBytes.buffer.asUint8List());
  });
  final directory = (await getApplicationDocumentsDirectory()).path;
  File imgFile = File('$directory/screenshot.png');
  // BUG FIX: await the write so the snackbar does not report success
  // before the file actually exists on disk.
  await imgFile.writeAsBytes(pngBytes);
  final snackBar = SnackBar(
    content: Text('Saved to ${directory}'),
    action: SnackBarAction(
      label: 'Ok',
      onPressed: () {
        // Some code
      },
    ),
  );
  // BUG FIX: Scaffold.of(context).showSnackBar was removed in Flutter 2.0;
  // snackbars are shown through the ScaffoldMessenger now.
  ScaffoldMessenger.of(context).showSnackBar(snackBar);
}
I uploaded an image on firebase storage. and to access said image on my flutter app, I first download it to the device and then use FileImage to display the image. However, if the image is changed, it still displays the previous one... Here's my code
// Holds the last error thrown by getImage(), if any.
var error;

// Picks a new profile photo, uploads it to Firebase Storage under the
// user's uid, downloads it back to the local `path`, and displays it.
Future getImage() async {
  try {
    var image = await ImagePicker.pickImage(
      source: ImageSource.gallery,
    );
    // Upload the picked file, overwriting the previous object at this ref.
    await FirebaseStorage.instance
        .ref()
        .child(userMap['uid'])
        .putFile(image)
        .onComplete;
    // Download the stored file back to the same local `path`.
    await FirebaseStorage.instance
        .ref()
        .child(userMap['uid'])
        .writeToFile(File(path))
        .future;
    setState(() {
      // NOTE(review): FileImage is keyed by file path, and `path` never
      // changes between uploads — Flutter's image cache can therefore keep
      // serving the old bytes, which matches the stale-image symptom
      // described in the question. Evicting the cache entry (or using a
      // fresh path/URL per upload) would be needed.
      profile = FileImage(File(path));
    });
  } catch (e) {
    error = e;
  }
}
The following code displays the image...
// Avatar that doubles as the photo picker: tapping it runs getImage(),
// and `profile` (a FileImage set by getImage) is the background image.
GestureDetector(
  onTap: () {
    getImage();
  },
  child: CircleAvatar(
    child: Icon(
      Icons.add_a_photo,
      color: color2.withOpacity(0.5),
    ),
    radius: widget.height * 0.05,
    backgroundColor: color3,
    // `profile` is updated by getImage() inside setState().
    backgroundImage: profile,
  ),
),
please help
I could recommend you another way to solve your issue.
First, you don't need to use FileImage, because it's very difficult to control your cache with it.
Try the next code:
/// Uploads [image] to Firebase Storage under the current user's uid and
/// returns its public download URL as a string.
Future<String> uploadImage(File image) async {
  // BUG FIX: typo `FirebaseStorage.insance` -> `FirebaseStorage.instance`.
  var reference = FirebaseStorage.instance.ref().child(userMap['uid']);
  var uploadTask = reference.putFile(image); // putFile uploads directly
  var snapshot = await uploadTask.onComplete;
  // The download URL is what callers store and render.
  var location = await snapshot.ref.getDownloadURL();
  // BUG FIX: the return statement was missing its semicolon.
  return location.toString();
}
/// Picks an image from the gallery, uploads it via [uploadImage], and
/// stores the resulting download URL in `profile`; errors land in `error`.
Future getImage() async {
  try {
    var image = await ImagePicker.pickImage(
      source: ImageSource.gallery,
    );
    // BUG FIX: this statement was missing its semicolon.
    profile = await uploadImage(image);
    // Rebuild so the avatar picks up the new URL.
    setState(() {});
  } catch (e) {
    error = e;
  }
}
after you get your url address with uploadImage, please use library: https://pub.dev/packages/cached_network_image. It's perfect for work with images.
// Avatar that doubles as the photo picker; `profile` now holds the
// download URL returned by uploadImage().
GestureDetector(
  onTap: () {
    getImage();
  },
  child: CircleAvatar(
    child: Icon(
      Icons.add_a_photo,
      color: color2.withOpacity(0.5),
    ),
    radius: widget.height * 0.05,
    backgroundColor: color3,
    // BUG FIX: CachedNetworkImageProvider takes the URL as a positional
    // argument and has no `imageUrl`/`placeholder`/`errorWidget`
    // parameters — those belong to the CachedNetworkImage *widget*.
    backgroundImage: CachedNetworkImageProvider(profile),
  ),
),
I'm trying to upload images and other different types of files using Flutter mobile sdk and Laravel api. Here is my Flutter code :
/// Home-page state: lets the user pick an image from the gallery or
/// camera, preview it, and upload it as multipart/form-data to the API.
class _MyHomePageState extends State<MyHomePage> {
  // The currently selected image; null until the user picks one.
  // NOTE(review): this snippet is pre-null-safety (File would need `File?`
  // on a null-safe SDK); kept consistent with the rest of the snippet.
  File _image;

  /// Picks an image from the gallery and shows it.
  Future getImageGallery() async {
    var imageFile = await ImagePicker.pickImage(source: ImageSource.gallery);
    setState(() {
      _image = imageFile;
    });
  }

  /// Takes a photo with the camera and shows it.
  Future getImageCamera() async {
    var imageFile = await ImagePicker.pickImage(source: ImageSource.camera);
    setState(() {
      _image = imageFile;
    });
  }

  // BUG FIX: `#override` -> `@override` (Dart annotation syntax).
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text(widget.title),
      ),
      body: SingleChildScrollView(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            Center(
              child: _image == null
                  ? Text('No image selected')
                  : Image.file(_image),
            ),
            // Pick from gallery.
            RaisedButton(
              child: Icon(Icons.image),
              onPressed: getImageGallery,
            ),
            // Take with camera.
            RaisedButton(
              child: Icon(Icons.camera_alt),
              onPressed: getImageCamera,
            ),
            // Upload the currently selected image.
            RaisedButton(
              child: Icon(Icons.file_upload),
              onPressed: () {
                upload(_image);
              },
            ),
          ],
        ),
      ),
    );
  }

  /// Uploads [imageFile] as multipart/form-data to the upload endpoint.
  Future upload(File imageFile) async {
    print(imageFile.path);
    // Stream the file body rather than loading it all into memory.
    var stream = http.ByteStream(DelegatingStream.typed(imageFile.openRead()));
    var length = await imageFile.length();
    var uri = Uri.parse('https://api.tredlr.com/api/upload');
    var request = http.MultipartRequest('POST', uri);
    // Field name "image" must match what the server side reads.
    var multipartFile = http.MultipartFile('image', stream, length,
        filename: basename(imageFile.path));
    request.files.add(multipartFile);
    var response = await request.send();
    print(response);
    print(response.stream);
    print(response.statusCode);
    if (response.statusCode == 200) {
      print('uploaded');
    } else {
      print('not uploaded');
    }
  }
}
and here is my Laravel code :
// Grab the uploaded file from the multipart field named "image"
// (must match the field name sent by the Flutter client).
$photo = $request->file("image");
$ext = $photo->getClientOriginalExtension();
// NOTE(review): rand()-based names can collide; a uuid or hash of the
// contents would be safer for generated filenames.
$fileName = rand(10000, 50000) . '.' .$ext;
$thumbSm = 'thumb_sm' . rand(10000, 50000) . '.' .$ext;
$image = Image::make($request->file('image'));
// Save the full-size image first, then a 120x120 thumbnail.
$image->save(base_path().'/public/'. $fileName);
$image->resize(120, 120);
$image->save(base_path().'/public/'. $thumbSm);
/// Uploads [file] as multipart/form-data ("image" field) using Dio.
Future _test(File file) async {
  Dio dio = Dio(); // `new` is optional and conventionally omitted.
  // NOTE(review): the original called existsSync() and discarded the
  // result — presumably it was meant to guard the upload; confirm.
  file.existsSync();
  String fileName = file.path.split('/').last;
  FormData formData = FormData.fromMap({
    "image": await MultipartFile.fromFile(file.path, filename: fileName)
  });
  // BUG FIX: `response` was assigned without ever being declared in this
  // snippet; declare it locally.
  final response =
      await dio.post("http://you ip address/api/route", data: formData);
}
I'm using qr_flutter to create QrImage. It's ok but I would like to convert QrImage into image in order to create a PDF file to print on the printer. Please kindly help!
// QrImage only paints the QR code into the widget tree; to export it as
// image bytes it must be wrapped in a RepaintBoundary and rendered via
// RenderRepaintBoundary.toImage().
QrImage(
  data: qrString,
  size: 300.0,
  version: 10,
  backgroundColor: Colors.white,
),
Use a RepaintBoundary widget with a key to export the widget to a a b64 string which then you can export as an image.
Example:
/// Renders the widget under `_renderObjectKey` to PNG bytes and returns
/// them (also logs the base64 length for debugging).
Future<Uint8List> _getWidgetImage() async {
  try {
    RenderRepaintBoundary boundary =
        _renderObjectKey.currentContext.findRenderObject();
    ui.Image image = await boundary.toImage(pixelRatio: 3.0);
    ByteData byteData =
        await image.toByteData(format: ui.ImageByteFormat.png);
    var pngBytes = byteData.buffer.asUint8List();
    var bs64 = base64Encode(pngBytes);
    debugPrint(bs64.length.toString());
    return pngBytes;
  } catch (exception) {
    // BUG FIX: the original swallowed the exception and fell off the end
    // of a Future<Uint8List> (returning null); propagate it instead so
    // callers can tell the capture failed.
    rethrow;
  }
}
// BUG FIX: `#override` -> `@override` (Dart annotation syntax).
@override
Widget build(BuildContext context) {
  return Scaffold(
    body: Column(children: [
      // The RepaintBoundary (keyed) is what _getWidgetImage() captures.
      RepaintBoundary(
        key: _renderObjectKey,
        child: QrImage(
          data: "some text",
          size: 300.0,
          version: 10,
          backgroundColor: Colors.white,
        ),
      ),
      // NOTE(review): RaisedButton is deprecated on current Flutter SDKs
      // (ElevatedButton); kept to match the rest of this snippet.
      RaisedButton(onPressed: () {
        _getWidgetImage();
      })
    ]),
  );
}
/// Renders [text] as a 300x300 QR code and returns its PNG bytes.
Future<Uint8List> toQrImageData(String text) async {
  // BUG FIX: the original wrapped this in `try { ... } catch (e) { throw e; }`,
  // which only destroys the stack trace — dropping the catch (equivalent to
  // `rethrow`) preserves it.
  final image = await QrPainter(
    data: text,
    version: QrVersions.auto,
    gapless: false,
    color: hexToColor('#000000'),
    emptyColor: hexToColor('#ffffff'),
  ).toImage(300);
  // toByteData can return null on failure; fail loudly at the conversion
  // rather than dereferencing null downstream.
  final a = await image.toByteData(format: ImageByteFormat.png);
  return a!.buffer.asUint8List();
}
A more updated, typed answer that adds responsibility segregation and null-safety, extending the correct one from @Zroq, would be:
/// Renders the widget attached to [renderKey] into PNG bytes.
///
/// Throws if the key has no mounted context or the render object is not a
/// [RenderRepaintBoundary]; errors from rendering are rethrown unchanged.
Future<Uint8List> createImageFromRenderKey({GlobalKey<State<StatefulWidget>>? renderKey}) async {
  try {
    final boundary = renderKey?.currentContext?.findRenderObject()! as RenderRepaintBoundary;
    final rendered = await boundary.toImage(pixelRatio: 3);
    final ByteData? encoded = await rendered.toByteData(format: ui.ImageByteFormat.png);
    return encoded!.buffer.asUint8List();
  } catch (_) {
    // Propagate unchanged — callers decide how to handle capture failures.
    rethrow;
  }
}
The idea is based on the same principle: using the global render key to create the ByteData that allows you to create the Uint8List buffer. However, the new versions of Flutter changed the return type of findRenderObject() to RenderObject? instead of RenderRepaintBoundary.
The rethrow is a (dirty) way of bypassing the limitation/small bug where the RepaintBoundary may still be in use by the UI to repaint the boundary (exposed as boundary.debugNeedsPaint), so it can potentially throw an unhandled exception or create a low-quality image buffer. So if the view is in use, I rethrow from the method.
More details about the stack trace: https://github.com/theyakka/qr.flutter/issues/112