Decode base64 data to an image - UIImage

This code doesn't work in Swift 3 anymore:
imageData = NSData(base64EncodedString: mediaFile, options: NSDataBase64DecodingOptions.fromRaw(0)!)
Neither does this one:
imageData = NSData(base64EncodedString: mediaFile, options: .allZeros)

Instead of NSData, use Swift 3's native Data directly:
if let decodedData = Data(base64Encoded: mediaFile, options: .ignoreUnknownCharacters) {
    let image = UIImage(data: decodedData)
}
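Going the other direction (UIImage back to a base64 string) is symmetric; a small sketch, assuming an existing UIImage called image and the same Swift 3-era API:
// Sketch only: encode a UIImage into a base64 string.
if let pngData = UIImagePNGRepresentation(image) {
    let base64String = pngData.base64EncodedString()
    print(base64String)
}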

Swift 4.1:
Sometimes the string has a data:image/png;base64, prefix, which makes base64Encoded return nil. For that situation:
extension String {
    func base64ToImage() -> UIImage? {
        if let url = URL(string: self),
           let data = try? Data(contentsOf: url),
           let image = UIImage(data: data) {
            return image
        }
        return nil
    }
}
Demo code:
let results = text.matches(for: "data:image\\/([a-zA-Z]*);base64,([^\\\"]*)")
for imageString in results {
    autoreleasepool {
        let image = imageString.base64ToImage()
    }
}
extension String {
    func matches(for regex: String) -> [String] {
        do {
            let regex = try NSRegularExpression(pattern: regex)
            let results = regex.matches(in: self, range: NSRange(self.startIndex..., in: self))
            return results.map {
                //self.substring(with: Range($0.range, in: self)!)
                String(self[Range($0.range, in: self)!])
            }
        } catch let error {
            print("invalid regex: \(error.localizedDescription)")
            return []
        }
    }
}
PS: For more about data URIs:
https://en.wikipedia.org/wiki/Data_URI_scheme
https://github.com/nodes-vapor/data-uri
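As an alternative to the URL-based approach above, you can strip the data URI prefix yourself and then call Data(base64Encoded:) directly. A minimal sketch; the helper name and the handling of strings without a prefix are my own assumptions, not part of the original answer:
import UIKit

extension String {
    // Hypothetical helper: drop everything up to and including the first comma
    // (i.e. a "data:image/png;base64," style prefix), then decode the remainder.
    func strippedBase64ToImage() -> UIImage? {
        var payload = self
        if let commaRange = payload.range(of: ",") {
            payload = String(payload[commaRange.upperBound...])
        }
        guard let data = Data(base64Encoded: payload, options: .ignoreUnknownCharacters) else {
            return nil
        }
        return UIImage(data: data)
    }
}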

In Swift
Swift 3.0 no longer recommends using the NS-prefixed Foundation types, and the same applies to NSData:
if let decodedImageData = Data(base64Encoded: mediaFile, options: .ignoreUnknownCharacters) {
    let image = UIImage(data: decodedImageData)
}
In Objective-C
NSURL *url = [NSURL URLWithString:base64String];
NSData *decodedImageData = [NSData dataWithContentsOfURL:url];
UIImage *image = [UIImage imageWithData:decodedImageData];

I implemented this as a UIImage extension:
extension UIImage {
    /// Decode a base64 string into a UIImage.
    static func decodeBase64(toImage strEncodeData: String?) -> UIImage {
        if let encoded = strEncodeData, !encoded.isEmpty,
           let decData = Data(base64Encoded: encoded, options: .ignoreUnknownCharacters),
           let image = UIImage(data: decData) {
            return image
        }
        return UIImage()
    }
}
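Usage is then a single call (the imageView here is only illustrative):
let decoded = UIImage.decodeBase64(toImage: mediaFile)
imageView.image = decoded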

You can also write it this way:
let data = NSData(base64Encoded: mediaFile, options: NSData.Base64DecodingOptions(rawValue: 0))
Hope it helps.

Related

Swift 3: UICollectionView download video snapshot. UI freezes while trying to download the snapshot

I'm trying to download a video preview based on a remote video URL. In my project the server cannot return a snapshot image of the videos, which is why I have to generate it manually.
In the collection view, I have code like this in cellForItemAt to get the video preview:
DataManager.sharedInstance.getCachedImage(url: movie.url!, handler: { (image) in
    cell.ivCover.image = image
})
and my getCachedImage function in DataManager:
func getCachedImage(url: String, handler: @escaping (_ result: UIImage) -> Void) {
    DispatchQueue.global().async {
        let userDefaults = UserDefaults.standard
        if let imageData = userDefaults.object(forKey: url) {
            if let finalImg = UIImage(data: imageData as! Data) {
                DispatchQueue.main.async {
                    handler(finalImg)
                }
                print("USING CACHED IMG")
            } else {
                DispatchQueue.main.async {
                    handler(#imageLiteral(resourceName: "cover"))
                }
                print("Cannot parse cached data to image. Use default")
            }
        } else {
            let asset = AVURLAsset(url: URL(string: url)!)
            let generate = AVAssetImageGenerator(asset: asset)
            generate.appliesPreferredTrackTransform = true
            var thumbTime = asset.duration
            thumbTime.value = 1
            var imgRef: CGImage?
            do {
                print("Downloading thumb from url: \(url)")
                imgRef = try generate.copyCGImage(at: thumbTime, actualTime: nil)
            } catch let error {
                print("Error downloading thumb: \(error.localizedDescription)")
            }
            var finalImg: UIImage
            if let _ = imgRef {
                finalImg = UIImage(cgImage: imgRef!)
                userDefaults.set(UIImagePNGRepresentation(finalImg), forKey: url)
            } else {
                finalImg = #imageLiteral(resourceName: "cover")
                print("Downloading thumbnail failed. Use default")
            }
            DispatchQueue.main.async {
                handler(finalImg)
            }
        }
    }
}
The problem is that sometimes when I scroll the collection view the UI freezes, and sometimes it doesn't. Please note that the video is on a REMOTE SERVER, NOT a local video.
I've spent days trying to figure out the issue but still can't find out what went wrong. Please help!
Or is there any existing library I can use?
import UIKit
import Photos

class ImportViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
    }

    @IBAction func myButton(_ sender: Any) {
        let videoURL = "https://youerdomin.com/file.mp4"
        print("1")
        DispatchQueue.global(qos: .background).async {
            print("2")
            if let url = URL(string: videoURL), let urlData = NSData(contentsOf: url) {
                print("3")
                let documentsPath =
                    NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
                let filePath = "\(documentsPath)/tempFile.mp4"
                DispatchQueue.main.async {
                    print("4")
                    urlData.write(toFile: filePath, atomically: true)
                    PHPhotoLibrary.shared().performChanges({
                        print("5")
                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: filePath))
                    }) { completed, error in
                        print("6")
                        if completed {
                            print("Video is saved!")
                        } else {
                            print("7: ", filePath)
                        }
                    }
                }
            } else {
                print("8")
            }
        }
    }
}
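A side note on the freezing problem above: AVAssetImageGenerator also has an asynchronous API, so the thumbnail can be produced without blocking whichever thread the cache lookup runs on. This is only a minimal sketch under that assumption, not a confirmed fix for the freeze:
import AVFoundation
import UIKit

// Sketch: generate a remote video thumbnail asynchronously instead of with the
// synchronous copyCGImage(at:actualTime:) used in getCachedImage above.
func generateThumbnailAsync(for url: URL, completion: @escaping (UIImage?) -> Void) {
    let asset = AVURLAsset(url: url)
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    let time = CMTime(seconds: 1, preferredTimescale: 600)   // arbitrary sample point
    generator.generateCGImagesAsynchronously(forTimes: [NSValue(time: time)]) { _, cgImage, _, _, _ in
        DispatchQueue.main.async {
            completion(cgImage.map { UIImage(cgImage: $0) })
        }
    }
}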

Upload collection view images to a server using Swift

I'm developing an app that has a UICollectionView like the zhangao0086/DKImagePickerController example on GitHub. Now I need to upload the displayed UICollectionViewCell images to a server. Can anyone suggest the right tutorials for uploading? Thanks in advance.
The UICollectionView code is as follows:
func collectionView(collectionView: UICollectionView, cellForItemAtIndexPath indexPath: NSIndexPath) -> UICollectionViewCell {
    let asset = self.assets![indexPath.row]
    var cell: UICollectionViewCell?
    var imageView: UIImageView?
    if asset.isVideo {
        cell = collectionView.dequeueReusableCellWithReuseIdentifier("CellVideo", forIndexPath: indexPath)
        imageView = cell?.contentView.viewWithTag(1) as? UIImageView
    } else {
        cell = collectionView.dequeueReusableCellWithReuseIdentifier("CellImage", forIndexPath: indexPath)
        imageView = cell?.contentView.viewWithTag(1) as? UIImageView
    }
    if let cell = cell, imageView = imageView {
        let layout = collectionView.collectionViewLayout as! UICollectionViewFlowLayout
        let tag = indexPath.row + 1
        cell.tag = tag
        asset.fetchImageWithSize(layout.itemSize.toPixel(), completeBlock: { image, info in
            if cell.tag == tag {
                imageView.image = image
            }
        })
    }
    return cell!
}
Uploading the image to the server:
func barButtonItemClicked(barButtonItem: UIBarButtonItem) {
    let myUrl = NSURL(string: "http://moneymonkey.tokiiyo.com/api/signature")
    let typeItem: InsuranceType = InsuranceManager.sharedInstance.TypeArray[0]
    let compItem: Companies = InsuranceManager.sharedInstance.CompArray[0]
    let request = NSMutableURLRequest(URL: myUrl!)
    request.HTTPMethod = "POST"
    let param = [
        "api_key"     : "AiK58j67",
        "api_secret"  : "a#9rJkmbOea90-",
        "phone"       : "\(mobile)",
        "policy_type" : "\(typeItem.name)",
        "company"     : "\(compItem.cname)"
    ]
    print("Policy_type: \(typeItem.name)")
    let boundary = generateBoundaryString()
    request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
    let imageData = UIImagePNGRepresentation(?) //here what imageView
    if (imageData == nil) { return }
    request.HTTPBody = createBodyWithParameters(param, filePathKey: "file", imageDataKey: imageData!, boundary: boundary)
    let task = NSURLSession.sharedSession().dataTaskWithRequest(request) {
        data, response, error in
        if error != nil {
            print("error=\(error)")
            return
        }
        // You can print out the response object
        print("******* response = \(response)")
        // Print out the response body
        let responseString = NSString(data: data!, encoding: NSUTF8StringEncoding)
        print("****** response data = \(responseString!)")
        do {
            _ = try NSJSONSerialization.JSONObjectWithData(data!, options: .MutableContainers) as? NSDictionary
            dispatch_async(dispatch_get_main_queue(), {
            })
        }
        catch {
            // report error
            print("Oops!! Something went wrong: \(error)")
        }
    }
    task.resume()
}
func createBodyWithParameters(parameters: [String: String]?, filePathKey: String?, imageDataKey: NSData, boundary: String) -> NSData {
    let body = NSMutableData()
    if parameters != nil {
        for (key, value) in parameters! {
            body.appendString("--\(boundary)\r\n")
            body.appendString("Content-Disposition: form-data; name=\"\(key)\"\r\n\r\n")
            body.appendString("\(value)\r\n")
        }
    }
    let filename = "image.png"
    let mimetype = "image/png"
    body.appendString("--\(boundary)\r\n")
    body.appendString("Content-Disposition: form-data; name=\"\(filePathKey!)\"; filename=\"\(filename)\"\r\n")
    body.appendString("Content-Type: \(mimetype)\r\n\r\n")
    body.appendData(imageDataKey)
    body.appendString("\r\n")
    body.appendString("--\(boundary)--\r\n")
    return body
}

func generateBoundaryString() -> String {
    return "Boundary-\(NSUUID().UUIDString)"
}
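Note that createBodyWithParameters calls appendString on NSMutableData, which is not a built-in method, so the snippet presumably relies on an extension along these lines (a common helper, reconstructed here as an assumption in the same Swift 2 style):
extension NSMutableData {
    // Assumed helper: append a UTF-8 encoded string to the multipart body.
    func appendString(string: String) {
        if let data = string.dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: true) {
            appendData(data)
        }
    }
}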
You can use Alamofire: https://github.com/Alamofire/Alamofire
Use it like this:
Alamofire.upload(.POST, "YourURl", file: YourFile)
    .progress { bytesWritten, totalBytesWritten, totalBytesExpectedToWrite in
        print(totalBytesWritten)
        // This closure is NOT called on the main queue for performance
        // reasons. To update your UI, dispatch to the main queue.
        dispatch_async(dispatch_get_main_queue()) {
            print("Total bytes written on main queue: \(totalBytesWritten)")
        }
    }
    .responseJSON { response in
        debugPrint(response)
    }
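Since the question is about uploading an image rather than a file on disk, a multipart upload may fit better. A minimal sketch using Alamofire 3's Swift 2-era multipart API; the endpoint is reused from the question, and imageToUpload and the "file" field name are placeholders of my own:
// Sketch only: upload a UIImage as multipart form data (Alamofire 3, Swift 2 syntax).
if let imageData = UIImagePNGRepresentation(imageToUpload) {
    Alamofire.upload(
        .POST,
        "http://moneymonkey.tokiiyo.com/api/signature",
        multipartFormData: { multipartFormData in
            multipartFormData.appendBodyPart(data: imageData, name: "file",
                                             fileName: "image.png", mimeType: "image/png")
        },
        encodingCompletion: { encodingResult in
            switch encodingResult {
            case .Success(let upload, _, _):
                upload.responseJSON { response in
                    debugPrint(response)
                }
            case .Failure(let encodingError):
                print(encodingError)
            }
        }
    )
}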

Swift 2: retrieving images from Firebase

I am trying to read/display an image from Firebase. I first encode the image and then post the encoded string to Firebase. That part runs fine. But when I try to decode the string from Firebase and convert it back to an image, I get a nil value exception.
This is how I am saving the image to Firebase:
var base64String: NSString!

func imagePickerController(picker: UIImagePickerController, didFinishPickingImage image: UIImage, editingInfo: [String : AnyObject]?) {
    self.dismissViewControllerAnimated(true, completion: nil)
    imageToPost.image = image
    var uploadImage = image as! UIImage
    var imageData = UIImagePNGRepresentation(uploadImage)!
    self.base64String = imageData.base64EncodedStringWithOptions(NSDataBase64EncodingOptions.Encoding64CharacterLineLength)
    let ref = Firebase(url: "https://XXX.firebaseio.com")
    var quoteString = ["string": self.base64String]
    var usersRef = ref.childByAppendingPath("goalImages")
    var users = ["image": quoteString]
    usersRef.setValue(users)
    displayAlert("Image Posted", message: "Your image has been successfully posted!")
}
This is how I am trying to read the image from Firebase
// ViewController.swift

import UIKit
import Firebase

class ViewController: UIViewController {
    @IBOutlet weak var image: UIImageView!
    var base64String: NSString!

    @IBAction func buttonClicked(sender: AnyObject) {
        sender.setTitle("\(sender.tag)", forState: UIControlState.Normal)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        let ref = Firebase(url: "https://XXX.firebaseio.com/goalImages/image/string")
        ref.observeEventType(.Value, withBlock: { snapshot in
            self.base64String = snapshot.value as! NSString
            let decodedData = NSData(base64EncodedString: self.base64String as String, options: NSDataBase64DecodingOptions())
            // Next line is giving the error
            var decodedImage = UIImage(data: decodedData!)
            self.image.image = decodedImage
        }, withCancelBlock: { error in
            print(error.description)
        })
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
The error says: "fatal error: unexpectedly found nil while unwrapping an Optional value"; decodedData is nil. Could someone explain what is going wrong?
Instead of
let decodedData = NSData(base64EncodedString: self.base64String as String,
                         options: NSDataBase64DecodingOptions())
try adding IgnoreUnknownCharacters:
NSDataBase64DecodingOptions.IgnoreUnknownCharacters
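Putting the two together, the decoding line becomes:
let decodedData = NSData(base64EncodedString: self.base64String as String,
                         options: NSDataBase64DecodingOptions.IgnoreUnknownCharacters)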
Usage example: encode a jpg, store it in Firebase, and read it back.
Encode and write our favorite starship:
if let image = NSImage(named: "Enterprise.jpeg") {
    let imageData = image.TIFFRepresentation
    let base64String = imageData!.base64EncodedStringWithOptions(.Encoding64CharacterLineLength)
    let imageRef = myRootRef.childByAppendingPath("image_path")
    imageRef.setValue(base64String)
}
Read and decode:
imageRef.observeEventType(.Value, withBlock: { snapshot in
    let base64EncodedString = snapshot.value
    let imageData = NSData(base64EncodedString: base64EncodedString as! String,
                           options: NSDataBase64DecodingOptions.IgnoreUnknownCharacters)
    let decodedImage = NSImage(data: imageData!)
    self.myImageView.image = decodedImage
}, withCancelBlock: { error in
    print(error.description)
})
EDIT 2019_05_17
Update to Swift 5 and Firebase 6
func writeImage() {
    if let image = NSImage(named: "Enterprise.jpg") {
        let imageData = image.tiffRepresentation
        if let base64String = imageData?.base64EncodedString() {
            let imageRef = self.ref.child("image_path")
            imageRef.setValue(base64String)
        }
    }
}

func readImage() {
    let imageRef = self.ref.child("image_path")
    imageRef.observeSingleEvent(of: .value, with: { snapshot in
        let base64EncodedString = snapshot.value as! String
        let imageData = Data(base64Encoded: base64EncodedString, options: Data.Base64DecodingOptions.ignoreUnknownCharacters)!
        let decodedImage = NSImage(data: imageData)
        self.myImageView.image = decodedImage
    })
}
Firebase Engineer here:
I highly recommend using the new Firebase Storage API for uploading images to Firebase. It's simple to use, low cost, and backed by Google Cloud Storage for huge scale.
You can upload from NSData or an NSURL pointing to a local file (I'll show NSData, but the principle is the same):
// Data in memory
let data: NSData = ...
// Create a reference to the file you want to upload
let riversRef = storageRef.child("images/rivers.jpg")
// Upload the file to the path "images/rivers.jpg"
let uploadTask = riversRef.putData(data, metadata: nil) { metadata, error in
    if (error != nil) {
        // Uh-oh, an error occurred!
    } else {
        // Metadata contains file metadata such as size, content-type, and download URL.
        let downloadURL = metadata!.downloadURL
        // This can be stored in the Firebase Realtime Database
        // It can also be used by image loading libraries like SDWebImage
    }
}
You can even pause and resume uploads, and you can easily monitor uploads for progress:
// Upload data
let uploadTask = storageRef.putData(...)
// Add a progress observer to an upload task
uploadTask.observeStatus(.Progress) { snapshot in
    // Upload reported progress
    if let progress = snapshot.progress {
        let percentComplete = 100.0 * Double(progress.completedUnitCount) / Double(progress.totalUnitCount)
    }
}
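For the original question (getting the image back out), the same API generation also lets you download the data directly. A minimal sketch, assuming the Swift 2-era Firebase Storage naming used above and the question's image outlet; the 1 MB cap is an arbitrary choice:
// Sketch: read the uploaded image back into memory.
let riversRef = storageRef.child("images/rivers.jpg")
riversRef.dataWithMaxSize(1 * 1024 * 1024) { data, error in
    if let error = error {
        print("Download failed: \(error)")
    } else if let data = data, image = UIImage(data: data) {
        self.image.image = image   // 'image' is the question's UIImageView outlet
    }
}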

iOS 9 (Swift 2.0): cannot invoke 'dataTaskWithURL' with an argument list of type '(NSURL, (_, _, _) throws -> Void)'

My app worked perfectly fine with iOS 8, but yesterday I upgraded to iOS 9 and Xcode 7 and now my app breaks. The error message is: cannot invoke 'dataTaskWithURL' with an argument list of type '(NSURL, (_, _, _) throws -> Void)'. I googled it and found a similar question here, but the solution didn't really work (the solution was to add do/catch blocks around the code). Can anyone help me with my problem? Thank you!!!
Here's my code
import UIKit
import Foundation
import CoreLocation

class GoogleDataProvider {
    let apiKey = "AIzaSyCQo-clIkek87N99RVh2lmFX9Mu9QPhAtA"
    let serverKey = "AIzaSyBzmv7wPFcPAe1ucy5o6dqaXnda9i9MqjE"
    var photoCache = [String:UIImage]()
    var placesTask = NSURLSessionDataTask()
    var session: NSURLSession {
        return NSURLSession.sharedSession()
    }

    func fetchPlacesNearCoordinate(coordinate: CLLocationCoordinate2D, radius: Double, types: [String], keyword: String, completion: (([GooglePlace]) -> Void)) -> () {
        var urlString = "https://maps.googleapis.com/maps/api/place/nearbysearch/json?key=\(serverKey)&location=\(coordinate.latitude),\(coordinate.longitude)&radius=\(radius)&keyword=\(keyword)&rankby=prominence&sensor=true"
        let typesString = types.count > 0 ? types.joinWithSeparator("|") : "food"
        urlString += "&types=\(typesString)"
        urlString = urlString.stringByAddingPercentEscapesUsingEncoding(NSUTF8StringEncoding)!
        if placesTask.taskIdentifier > 0 && placesTask.state == .Running {
            placesTask.cancel()
        }
        UIApplication.sharedApplication().networkActivityIndicatorVisible = true
        do {
            //******************Here's the line that displays the error
            placesTask = session.dataTaskWithURL(NSURL(string: urlString)!) {
                (data, response, error) -> Void in
                UIApplication.sharedApplication().networkActivityIndicatorVisible = false
                var placesArray = [GooglePlace]()
                if let json = try NSJSONSerialization.JSONObjectWithData(data!, options: []) as? NSDictionary {
                    if let results = json["results"] as? NSArray {
                        for rawPlace: AnyObject in results {
                            let place = GooglePlace(dictionary: rawPlace as! NSDictionary, acceptedTypes: types)
                            placesArray.append(place)
                            if let reference = place.photoReference {
                                self.fetchPhotoFromReference(reference) { image in
                                    place.photo = image
                                }
                            }
                        }
                    }
                }
                dispatch_async(dispatch_get_main_queue()) {
                    completion(placesArray)
                }
            }
        } catch {
        }
        placesTask.resume()
    }

    func fetchDirectionsFrom(from: CLLocationCoordinate2D, to: CLLocationCoordinate2D, completion: ((String?) -> Void)) -> () {
        let urlString = "https://maps.googleapis.com/maps/api/directions/json?key=\(serverKey)&origin=\(from.latitude),\(from.longitude)&destination=\(to.latitude),\(to.longitude)&mode=walking"
        UIApplication.sharedApplication().networkActivityIndicatorVisible = true
        do {
            //******************************Here too ****************************
            session.dataTaskWithURL(NSURL(string: urlString)!) {
                (data, response, error) -> Void in
                UIApplication.sharedApplication().networkActivityIndicatorVisible = false
                var encodedRoute: String?
                if let json = try NSJSONSerialization.JSONObjectWithData(data!, options: []) as? [String:AnyObject] {
                    if let routes = json["routes"] as AnyObject? as? [AnyObject] {
                        if let route = routes.first as? [String : AnyObject] {
                            if let polyline = route["overview_polyline"] as AnyObject? as? [String : String] {
                                if let points = polyline["points"] as AnyObject? as? String {
                                    encodedRoute = points
                                }
                            }
                        }
                    }
                }
                dispatch_async(dispatch_get_main_queue()) {
                    completion(encodedRoute)
                }
            }.resume()
        } catch {
        }
    }
}
Sorry, this is my first time posting; the code style is a little confusing, sorry about the indentation mess :)
Thanks again!!!
dataTaskWithURL(_:completionHandler:) does not throw. Put the do and catch inside the completion handler instead.
For example:
session.dataTaskWithURL(NSURL(string: urlString)!) {
    (data, response, error) -> Void in
    UIApplication.sharedApplication().networkActivityIndicatorVisible = false
    var encodedRoute: String?
    do {
        if let json = try NSJSONSerialization.JSONObjectWithData(data!, options: []) as? [String:AnyObject] {
            if let routes = json["routes"] as AnyObject? as? [AnyObject] {
                if let route = routes.first as? [String : AnyObject] {
                    if let polyline = route["overview_polyline"] as AnyObject? as? [String : String] {
                        if let points = polyline["points"] as AnyObject? as? String {
                            encodedRoute = points
                        }
                    }
                }
            }
        }
    } catch {
    }
    dispatch_async(dispatch_get_main_queue()) {
        completion(encodedRoute)
    }
}.resume()

Compress video size before attaching it to an email in Swift

Previously, I asked how to attach a video and send it via email, and it is working now, thanks to advice from some friends on this site.
I found a new problem: the video file is very large, larger than when the same file is sent with the default iOS Mail app.
Please advise me how to compress the video file before attaching it to the email.
Thank you everyone.
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject]) {
    if let myImage = info[UIImagePickerControllerOriginalImage] as? UIImage {
        image = info[UIImagePickerControllerOriginalImage] as! UIImage
        self.dismissViewControllerAnimated(false, completion: nil)
        sendmail()
    }
    else {
        //picker.videoQuality = UIImagePickerControllerQualityTypeLow
        videoURL = info[UIImagePickerControllerMediaURL] as! NSURL
        self.dismissViewControllerAnimated(true, completion: nil)
        sendmailVDO()
    }
}
Below is the code to compress a video to about half of its actual size:
var assetWriter: AVAssetWriter?
var assetReader: AVAssetReader?
let bitrate: NSNumber = NSNumber(value: 250000)

func compressFile(urlToCompress: URL, outputURL: URL, completion: @escaping (URL) -> Void) {
    //video file to make the asset
    var audioFinished = false
    var videoFinished = false

    let asset = AVAsset(url: urlToCompress)
    let duration = asset.duration
    let durationTime = CMTimeGetSeconds(duration)
    print("Video Actual Duration -- \(durationTime)")

    //create asset reader
    do {
        assetReader = try AVAssetReader(asset: asset)
    } catch {
        assetReader = nil
    }
    guard let reader = assetReader else {
        fatalError("Could not initialize asset reader, probably failed its try/catch")
    }

    let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first!
    let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first!
    let videoReaderSettings: [String: Any] = [(kCVPixelBufferPixelFormatTypeKey as String?)!: kCVPixelFormatType_32ARGB]

    // ADJUST BIT RATE OF VIDEO HERE
    let videoSettings: [String: Any] = [
        AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey: self.bitrate],
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoHeightKey: videoTrack.naturalSize.height,
        AVVideoWidthKey: videoTrack.naturalSize.width
    ]

    let assetReaderVideoOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
    let assetReaderAudioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)

    if reader.canAdd(assetReaderVideoOutput) {
        reader.add(assetReaderVideoOutput)
    } else {
        fatalError("Couldn't add video output reader")
    }
    if reader.canAdd(assetReaderAudioOutput) {
        reader.add(assetReaderAudioOutput)
    } else {
        fatalError("Couldn't add audio output reader")
    }

    let audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
    let videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
    videoInput.transform = videoTrack.preferredTransform

    //we need to add samples to the video input
    let videoInputQueue = DispatchQueue(label: "videoQueue")
    let audioInputQueue = DispatchQueue(label: "audioQueue")

    do {
        assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
    } catch {
        assetWriter = nil
    }
    guard let writer = assetWriter else {
        fatalError("assetWriter was nil")
    }

    writer.shouldOptimizeForNetworkUse = true
    writer.add(videoInput)
    writer.add(audioInput)
    writer.startWriting()
    reader.startReading()
    writer.startSession(atSourceTime: kCMTimeZero)

    let closeWriter: () -> Void = {
        if (audioFinished && videoFinished) {
            self.assetWriter?.finishWriting(completionHandler: {
                print("------ Finish Video Compressing")
                self.checkFileSize(sizeUrl: (self.assetWriter?.outputURL)!, message: "The file size of the compressed file is: ")
                completion((self.assetWriter?.outputURL)!)
            })
            self.assetReader?.cancelReading()
        }
    }

    audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
        while (audioInput.isReadyForMoreMediaData) {
            let sample = assetReaderAudioOutput.copyNextSampleBuffer()
            if (sample != nil) {
                audioInput.append(sample!)
            } else {
                audioInput.markAsFinished()
                DispatchQueue.main.async {
                    audioFinished = true
                    closeWriter()
                }
                break
            }
        }
    }

    videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
        //request data here
        while (videoInput.isReadyForMoreMediaData) {
            let sample = assetReaderVideoOutput.copyNextSampleBuffer()
            if (sample != nil) {
                let timeStamp = CMSampleBufferGetPresentationTimeStamp(sample!)
                let timeSecond = CMTimeGetSeconds(timeStamp)
                let per = timeSecond / durationTime
                print("Duration --- \(per)")
                DispatchQueue.main.async {
                    self.progress.progress = Float(per)
                }
                videoInput.append(sample!)
            } else {
                videoInput.markAsFinished()
                DispatchQueue.main.async {
                    videoFinished = true
                    self.progress.progress = 1.0
                    closeWriter()
                }
                break
            }
        }
    }
}
You can also display the progress of video compression.
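A hedged usage sketch: the temporary output URL and the attachment step via MFMailComposeViewController are my own illustration, not part of the original answer:
import MessageUI

// Sketch: compress the picked video into a temp file, then attach the result to a mail draft.
// Assumes this runs inside a UIViewController that conforms to MFMailComposeViewControllerDelegate
// and that videoURL is the NSURL captured in imagePickerController above.
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("compressed.mov")
try? FileManager.default.removeItem(at: outputURL)   // AVAssetWriter fails if the file already exists

compressFile(urlToCompress: videoURL as URL, outputURL: outputURL) { compressedURL in
    DispatchQueue.main.async {
        guard MFMailComposeViewController.canSendMail(),
              let videoData = try? Data(contentsOf: compressedURL) else { return }
        let mail = MFMailComposeViewController()
        mail.mailComposeDelegate = self
        mail.addAttachmentData(videoData, mimeType: "video/quicktime", fileName: "compressed.mov")
        self.present(mail, animated: true)
    }
}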
