Cancelling Remaining AssetExports - swift

I'm trying to allow a user to cancel the export of a series of videos while the exports are still in progress (goal: cancel the remaining, not-yet-exported videos).
Code for button:
var cancelExportButton = UIButton()
cancelExportButton.addTarget(self, action: #selector(cancelExport(sender:)), for: .touchUpInside)
@objc func cancelExport(sender: UIButton!) {
print("export cancelled")
//cancel remaining exports code
}
for i in 0...videoURLs.count - 1 {
overlayVideo(titleImage: titleImage, captionImage: captionImage, videoURL: videoURLs[i])
}
func overlayVideo(titleImage: UIImage, captionImage: UIImage, videoURL: URL) {
//...composition and mixing code, not important to exporting
// Exporting
let number = Int.random(in: 0...99999)
let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/\(number).mp4")
do { // delete old video
try FileManager.default.removeItem(at: savePathUrl)
} catch { print(error.localizedDescription) }
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.videoComposition = layerComposition
assetExport.outputFileType = AVFileType.mov
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
print("success")
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
default:
print("complete")
}
}
}
What code (using queues or by monitoring properties) would work in cancelExport to cancel all remaining in-flight exports and unexported assets?
I've tried turning assetExport into a class property and setting it to nil after each successful export, but the exports run concurrently, so a single property gets overwritten and that approach breaks.
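One way to do this (a sketch, not from the original thread; activeExportSessions, exportsCancelled, and exportAccessQueue are assumed property names): keep every AVAssetExportSession you create in a collection guarded by a serial queue, set a cancellation flag in cancelExport, call cancelExport() on the sessions that are still running, and check the flag before starting each new export.

// Sketch: assumed class-level state for tracking and cancelling exports.
var activeExportSessions = [AVAssetExportSession]()
var exportsCancelled = false
let exportAccessQueue = DispatchQueue(label: "exportAccess")   // serializes access to the state above

@objc func cancelExport(sender: UIButton!) {
    print("export cancelled")
    exportAccessQueue.async {
        self.exportsCancelled = true
        self.activeExportSessions.forEach { $0.cancelExport() }   // cancels exports already in flight
        self.activeExportSessions.removeAll()
    }
}

// In overlayVideo, just before calling exportAsynchronously:
// let shouldStart: Bool = exportAccessQueue.sync {
//     if self.exportsCancelled { return false }                  // skip exports started after cancellation
//     self.activeExportSessions.append(assetExport)
//     return true
// }
// if !shouldStart { return }
// ...and in the completion handler, remove the finished session from activeExportSessions.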

Related

copyNextSampleBuffer hanging for AVAssetReaderTrackOutput

I am trying to create a video compression tool that simply takes in a video file URL, compresses it, and returns the new URL, in Swift 5 for iOS 16.
I have been following a few tutorials online, but they all use functions that have since been deprecated, so I have put together the code below, refactored from various sources:
import Foundation
import AVFoundation
class VideoCompressorModel: ObservableObject {
let bitrate = 2_500_000 // target average video bit rate in bits per second (2.5 Mbps)
func compressFile(urlToCompress: URL, outputURL: URL, completionHandler: @escaping (URL?) -> Void) async {
let asset = AVAsset(url: urlToCompress);
//create asset reader
var assetReader: AVAssetReader?
do{
assetReader = try AVAssetReader(asset: asset)
} catch{
assetReader = nil
}
guard let reader = assetReader else{
completionHandler(nil)
return
}
var videoTrack: AVAssetTrack?
var audioTrack: AVAssetTrack?
do {
videoTrack = try await asset.loadTracks(withMediaType: AVMediaType.video).first
audioTrack = try await asset.loadTracks(withMediaType: AVMediaType.audio).first
} catch {
completionHandler(nil)
return
}
guard let videoTrack, let audioTrack else {
completionHandler(nil)
return
}
let videoReaderSettings: [String:Any] = [kCVPixelBufferPixelFormatTypeKey as String:kCVPixelFormatType_32ARGB ]
// ADJUST BIT RATE OF VIDEO HERE
var videoSettings: [String:Any]?
do {
videoSettings = await [
AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey:self.bitrate],
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoHeightKey: try videoTrack.load(.naturalSize).height,
AVVideoWidthKey: try videoTrack.load(.naturalSize).width
]
} catch {
completionHandler(nil)
return
}
guard let videoSettings else {
completionHandler(nil)
return
}
let audioSettings: [String: Any] = [
AVSampleRateKey: 44100,
AVFormatIDKey: kAudioFormatLinearPCM
]
let assetReaderVideoOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
let assetReaderAudioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings)
if reader.canAdd(assetReaderVideoOutput){
reader.add(assetReaderVideoOutput)
}else{
completionHandler(nil)
return
}
if reader.canAdd(assetReaderAudioOutput){
reader.add(assetReaderAudioOutput)
} else{
completionHandler(nil)
return
}
let audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
let videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
do {
videoInput.transform = try await videoTrack.load(.preferredTransform)
} catch{
completionHandler(nil)
return
}
//we need to add samples to the video input
let videoInputQueue = DispatchQueue(label: "videoQueue")
let audioInputQueue = DispatchQueue(label: "audioQueue")
var assetWriter: AVAssetWriter?
do{
assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
} catch {
assetWriter = nil
}
guard let assetWriter else{
completionHandler(nil)
return
}
assetWriter.shouldOptimizeForNetworkUse = true
assetWriter.add(videoInput)
assetWriter.add(audioInput)
assetWriter.startWriting()
reader.startReading()
assetWriter.startSession(atSourceTime: CMTime.zero)
let closeWriterBothDone:()->Void = {
assetWriter.finishWriting(completionHandler: {
completionHandler((assetWriter.outputURL))
})
reader.cancelReading()
}
print("STARTING ")
let closeWriterAudioDone:()->Void = {
print("FINISHED AUDIO")
videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
while(videoInput.isReadyForMoreMediaData){
let sample = assetReaderVideoOutput.copyNextSampleBuffer()
if (sample != nil){
videoInput.append(sample!)
} else{
videoInput.markAsFinished()
DispatchQueue.main.async {
closeWriterBothDone()
}
break;
}
}
}
}
audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
while(audioInput.isReadyForMoreMediaData){
let sample = assetReaderAudioOutput.copyNextSampleBuffer()
print("hi")
if (sample != nil){
audioInput.append(sample!)
} else{
audioInput.markAsFinished()
DispatchQueue.main.async {
closeWriterAudioDone()
}
break;
}
}
}
}
}
The issue is that copyNextSampleBuffer for the audio track output hangs after a few calls. I test this simply by having a print("hi"), which only gets called a handful of times before the code blocks.
I have tried changing the audio options passed to AVAssetReaderTrackOutput, which I originally had as nil, and this only increased the number of times copyNextSampleBuffer gets called before it freezes.
Perhaps there is a better way overall to compress video in newer versions of Swift, as this approach seems somewhat inelegant? If not, are there any bugs in my code causing it to hang?
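For what it's worth, a common cause of this symptom (a hedged diagnosis, not from the original thread) is draining one track output to the end before pulling anything from the other: AVAssetReader reads the file roughly in presentation order and buffers only a limited number of samples per output, so copyNextSampleBuffer on the audio output can block once unconsumed video samples fill the reader's internal buffer. A sketch of driving both writer inputs concurrently and finishing the writer when both are done, reusing the names from the code above:

// Sketch: pump audio and video at the same time instead of audio-then-video.
let finishGroup = DispatchGroup()

finishGroup.enter()
videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
    while videoInput.isReadyForMoreMediaData {
        if let sample = assetReaderVideoOutput.copyNextSampleBuffer() {
            videoInput.append(sample)
        } else {
            videoInput.markAsFinished()
            finishGroup.leave()
            break
        }
    }
}

finishGroup.enter()
audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
    while audioInput.isReadyForMoreMediaData {
        if let sample = assetReaderAudioOutput.copyNextSampleBuffer() {
            audioInput.append(sample)
        } else {
            audioInput.markAsFinished()
            finishGroup.leave()
            break
        }
    }
}

finishGroup.notify(queue: .main) {
    reader.cancelReading()                       // same cleanup as the original closeWriterBothDone
    assetWriter.finishWriting {
        completionHandler(assetWriter.outputURL)
    }
}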

AVAssetExportSession progress not updating

I make use of AVAssetExportSession to export movies to MP4 format using the codes below:
Step 1: User clicks a button to start conversion
@IBAction func clickConvert(_ sender: UIButton) {
self.convertProgress?.progress = 0
self.convertProgress?.isHidden = false
var preset = AVAssetExportPresetHighestQuality
switch self.qualitySelection?.selectedSegmentIndex {
case 0:
preset = AVAssetExportPresetLowQuality
break
case 1:
preset = AVAssetExportPresetMediumQuality
break
case 2:
preset = AVAssetExportPresetHighestQuality
break
default:
break
}
DispatchQueue.global(qos: .background).async {
let formatter = DateFormatter()
formatter.dateFormat = "yyyyMMddHHmmss"
let fileName = formatter.string(from: Date()) + ".mp4"
let convertGroup = DispatchGroup()
convertGroup.enter()
do {
let documentDirectory = try self.fileManager.url(for: .documentDirectory, in: .userDomainMask, appropriateFor:nil, create:false)
let filePath = documentDirectory.appendingPathComponent(fileName)
if(self.videoURL != nil) {
self.convertVideo(fromURL: self.videoURL!, toURL: filePath, preset: preset, dispatchGroup: convertGroup)
} else {
print("nil Video URL")
}
convertGroup.notify(queue: DispatchQueue.main) {
// reset Convert button state
self.convertButton?.titleLabel?.text = "Convert"
self.convertButton?.isEnabled = true
self.delegate?.updateFileList()
// Take back to old VC, update file list
if let navController = self.navigationController {
navController.popViewController(animated: true)
}
}
} catch {
print(error)
}
}
}
Step 2: Trigger convert video function
func convertVideo(fromURL: URL, toURL: URL, preset:String, dispatchGroup: DispatchGroup) {
let outFileType = AVFileType.mp4
let inAsset = AVAsset(url: fromURL)
let startDate = Date()
AVAssetExportSession.determineCompatibility(ofExportPreset: preset, with: inAsset, outputFileType: outFileType, completionHandler: { (isCompitable) in
if !isCompitable {
return
}
guard let export = AVAssetExportSession(asset: inAsset, presetName: preset) else {
return
}
export.outputFileType = outFileType
export.outputURL = toURL
export.shouldOptimizeForNetworkUse = true
let start = CMTimeMakeWithSeconds(0.0, preferredTimescale: 0)
let range = CMTimeRangeMake(start: start, duration: inAsset.duration)
export.timeRange = range
// Timer for progress updates
self.exportTimer = Timer()
if #available(iOS 10.0, *) {
print("start exportTimer")
self.exportTimer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true, block: { _ in
let progress = Float(export.progress)
print("Export Progress: \(progress)")
self.updateProgressDisplay(progress: progress)
if progress < 0.99 {
let dict:[String: Float] = ["progress": progress]
NotificationCenter.default.post(name: Notification.Name(Constants.Notifications.ConvertProgress.rawValue), object: nil, userInfo: dict)
}
})
}
export.exportAsynchronously { () -> Void in
// Handle export results
switch export.status {
case .exporting:
print("Exporting...")
self.updateProgressDisplay(progress: export.progress)
break
case .failed:
print("Error: %#!", export.error!)
break
case .cancelled:
print("export cancelled")
break
case .completed:
let endDate = Date()
let elapsed = endDate.timeIntervalSince(startDate)
print("Elapsed: \(elapsed)")
print("successful")
self.exportTimer?.invalidate() // Stop the timer
self.generateThumbnail(path: toURL)
break
default:
break
}
dispatchGroup.leave()
}
})
}
However, the status updates are not working: the exportTimer never fires (attempt 1), and the .exporting case never fires (attempt 2).
P.S. The video itself converts without any problem.
P.S. The notification observer was added in viewDidLoad() as follows:
NotificationCenter.default.addObserver(self, selector: #selector(onDidReceiveConvertProgress(_:)), name: Notification.Name(Constants.Notifications.ConvertProgress.rawValue), object: nil)
The status update functions (both attempts) are as follows:
@objc func onDidReceiveConvertProgress(_ notification: Notification) {
print ("onDidReceiveConvertProgress")
if let data = notification.userInfo as? [String:Float] {
print("Progress: \(String(describing: data["progress"]))")
self.convertProgress?.progress = data["progress"]!
}
}
func updateProgressDisplay(progress: Float) {
print("updateProgressDisplay")
self.convertProgress?.progress = progress
}
What did I miss?
I'm not sure if you figured this out, but just in case someone else tries your code: the reason the progress timer is not firing is that you missed two things.
You never called the function to start the timer, e.g. self.exportTimer.fire().
You have to make sure to update this timer on the main queue.
I had the same problems, and doing these two things fixed my issue.
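A minimal sketch of the main-queue point (reusing the question's export, exportTimer, and updateProgressDisplay; this is an assumption about how to apply the advice, not code from the thread): create the timer on the main queue so it is attached to a run loop that is actually running, and invalidate it once the export finishes. In the question, convertVideo is called from a background dispatch block, so the scheduled timer never had a running run loop to fire on.

// Sketch: schedule the progress timer on the main run loop.
DispatchQueue.main.async {
    self.exportTimer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { timer in
        let progress = export.progress
        self.updateProgressDisplay(progress: progress)
        if export.status == .completed || export.status == .failed || export.status == .cancelled {
            timer.invalidate()                  // stop polling once the export has ended
        }
    }
}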

How can I trim an audio file stored via FileManager in Swift for iOS?

With AVFoundation, can we edit (trim) an audio file that was recorded earlier and stored via the file manager, simply by giving a start and end time?
This solved my problem.
func trimSelectedAudio(){
let name = browseData.name!
if let asset = AVURLAsset(url: getDirectory().appendingPathComponent("\(tfTitle.text!).m4a")) as? AVAsset{
exportAsset(asset, fileName: name)
}
}
func exportAsset(_ asset: AVAsset, fileName:String){
let trimmedSoundFileUrl = getDirectory().appendingPathComponent("\(tfTitle.text!)_trimmed.m4a")
print("Saving to \(trimmedSoundFileUrl.absoluteString)")
if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A){
exporter.outputFileType = AVFileType.m4a
exporter.outputURL = trimmedSoundFileUrl
let duration = CMTimeGetSeconds(asset.duration)
if duration < 5.0{
print("Audio is not song long")
return
}
let startTime = CMTimeMake(Int64(selectAudioRange.selectedMinValue), 1)
let stopTime = CMTimeMake(Int64(selectAudioRange.selectedMaxValue), 1)
exporter.timeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
exporter.exportAsynchronously(completionHandler: {
print("export complete \(exporter.status)")
switch exporter.status {
case AVAssetExportSessionStatus.failed:
if let e = exporter.error {
print("export failed \(e)")
}
case AVAssetExportSessionStatus.cancelled:
print("export cancelled \(String(describing: exporter.error))")
default:
print("export complete")
self.deleteFileAlreadyPresent()
// change core data data here
}
})
} else{
print("cannot create AVAssetExportSession for asset \(asset)")
}
}
func deleteFileAlreadyPresent(){
let PresentAudioUrl = getDirectory().appendingPathComponent("\(previousAudioName).m4a")
if FileManager.default.fileExists(atPath: PresentAudioUrl.path){
print("Sound exists, removing \(PresentAudioUrl.path)")
do{
if try PresentAudioUrl.checkResourceIsReachable(){
print("is reachable")
self.deleteRecordingFile(audioName: "\(previousAudioName).m4a")
self.saveTrimmedData()
}
// try FileManager.default.removeItem(atPath: trimmedSoundFileUrl.absoluteString)
} catch{
print("Could not remove \(PresentAudioUrl.absoluteString)")
}
}
}
func saveTrimmedData(){
DispatchQueue.main.async {
self.browseData.image = (self.imgToSave.image?.jpeg!)!
self.browseData.note = self.tfNotes.text
self.browseData.name = "\(self.tfTitle.text!)_trimmed"
do{
try self.context.save()
self.goToParentVC()
} catch let error as NSError{
print("Could not save \(error) \(error.userInfo)")
}
}
}
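As a side note (not part of the original answer): the CMTimeMake and CMTimeRangeFromTimeToTime calls above use older signatures. On current SDKs the equivalent trim range can be written with the labeled initializers, assuming the same exporter and selectAudioRange control from the answer, with values in seconds:

// Sketch: the same trim range with the current CMTime / CMTimeRange initializers.
let startTime = CMTime(seconds: Double(selectAudioRange.selectedMinValue), preferredTimescale: 600)
let stopTime  = CMTime(seconds: Double(selectAudioRange.selectedMaxValue), preferredTimescale: 600)
exporter.timeRange = CMTimeRange(start: startTime, end: stopTime)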

After compressing my audio file, why can I not play the file?

Audio file will not play after reducing it using AVAssetReader/AVAssetWriter.
At the moment, the whole function executes fine, with no errors thrown.
For some reason, when I go into the simulator's document directory via the terminal, the audio file will not play in iTunes, and QuickTime shows the error "QuickTime Player can't open "test1.m4a"" when I try to open it.
Does anyone specialise in this area and understand why this isn't working?
protocol FileConverterDelegate {
func fileConversionCompleted()
}
class WKAudioTools: NSObject {
var delegate: FileConverterDelegate?
var url: URL?
var assetReader: AVAssetReader?
var assetWriter: AVAssetWriter?
func convertAudio() {
let documentDirectory = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
let exportURL = documentDirectory.appendingPathComponent(Assets.soundName1).appendingPathExtension("m4a")
url = Bundle.main.url(forResource: Assets.soundName1, withExtension: Assets.mp3)
guard let assetURL = url else { return }
let asset = AVAsset(url: assetURL)
//reader
do {
assetReader = try AVAssetReader(asset: asset)
} catch let error {
print("Error with reading >> \(error.localizedDescription)")
}
let assetReaderOutput = AVAssetReaderAudioMixOutput(audioTracks: asset.tracks, audioSettings: nil)
//let assetReaderOutput = AVAssetReaderTrackOutput(track: track!, outputSettings: nil)
guard let assetReader = assetReader else {
print("reader is nil")
return
}
if assetReader.canAdd(assetReaderOutput) == false {
print("Can't add output to the reader ☹️")
return
}
assetReader.add(assetReaderOutput)
// writer
do {
assetWriter = try AVAssetWriter(outputURL: exportURL, fileType: .m4a)
} catch let error {
print("Error with writing >> \(error.localizedDescription)")
}
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout.size(ofValue: channelLayout))
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo
// use different values to affect the downsampling/compression
let outputSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100.0,
AVNumberOfChannelsKey: 2,
AVEncoderBitRateKey: 128000,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout))]
let assetWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings)
guard let assetWriter = assetWriter else { return }
if assetWriter.canAdd(assetWriterInput) == false {
print("Can't add asset writer input ☹️")
return
}
assetWriter.add(assetWriterInput)
assetWriterInput.expectsMediaDataInRealTime = false
// MARK: - File conversion
assetWriter.startWriting()
assetReader.startReading()
let audioTrack = asset.tracks[0]
let startTime = CMTime(seconds: 0, preferredTimescale: audioTrack.naturalTimeScale)
assetWriter.startSession(atSourceTime: startTime)
// We need to do this on another thread, so let's set up a dispatch group...
var convertedByteCount = 0
let dispatchGroup = DispatchGroup()
let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
//... and go
dispatchGroup.enter()
assetWriterInput.requestMediaDataWhenReady(on: mediaInputQueue) {
while assetWriterInput.isReadyForMoreMediaData {
let nextBuffer = assetReaderOutput.copyNextSampleBuffer()
if nextBuffer != nil {
assetWriterInput.append(nextBuffer!) // FIXME: Handle this safely
convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer!)
} else {
// done!
assetWriterInput.markAsFinished()
assetReader.cancelReading()
dispatchGroup.leave()
DispatchQueue.main.async {
// Notify delegate that conversion is complete
self.delegate?.fileConversionCompleted()
print("Process complete 🎧")
if assetWriter.status == .failed {
print("Writing asset failed ☹️ Error: ", assetWriter.error)
}
}
break
}
}
}
}
}
You need to call finishWriting on your AVAssetWriter to get the output completely written:
assetWriter.finishWriting {
DispatchQueue.main.async {
// Notify delegate that conversion is complete
self.delegate?.fileConversionCompleted()
print("Process complete 🎧")
if assetWriter.status == .failed {
print("Writing asset failed ☹️ Error: ", assetWriter.error)
}
}
}
If exportURL exists before you start the conversion, you should remove it, otherwise the conversion will fail:
try! FileManager.default.removeItem(at: exportURL)
As @matt points out, why do the buffer-level work when you could do the conversion more simply with an AVAssetExportSession, and why convert one of your own bundled assets at runtime when you could ship it in the desired format in the first place?
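A hedged sketch of that simpler route, reusing the asset, exportURL, and delegate from the question's convertAudio() (the delete-before-export point above applies here as well):

// Sketch: same MP3-to-M4A conversion via AVAssetExportSession instead of the reader/writer pipeline.
guard let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else {
    print("Could not create export session")
    return
}
try? FileManager.default.removeItem(at: exportURL)   // export fails if the file already exists
exporter.outputFileType = .m4a
exporter.outputURL = exportURL
exporter.exportAsynchronously {
    switch exporter.status {
    case .completed:
        DispatchQueue.main.async { self.delegate?.fileConversionCompleted() }
    case .failed, .cancelled:
        print("Export failed: \(String(describing: exporter.error))")
    default:
        break
    }
}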

UIImagePickerController allowsEditing = YES, getting untrimmed video after trimming

Problem:
When I record a video with my UIImagePickerController with allowsEditing set to YES, and afterwards trim the video using the trim interface that appears after capture, I get back the original video instead of the trimmed one.
Setup:
I am using a UIImagePickerController for video capture, with the allowsEditing property set to YES. In the delegate method didFinishPickingMediaWithInfo, I use UIImagePickerControllerMediaURL from the info NSDictionary to get the path URL. Unfortunately, the official Apple docs don't mention any edited-video URL.
Code:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
NSString *mediaType = [info objectForKey: UIImagePickerControllerMediaType];
if (CFStringCompare ((__bridge CFStringRef) mediaType, kUTTypeMovie, 0)
== kCFCompareEqualTo) {
self.tempVideoPath = [[info objectForKey:
UIImagePickerControllerMediaURL] path];
}
}
I realise this question is similar to others posted here on SO, but there was no definitive answer as to why it doesn't work or why the option is even there. If this is intended behaviour, I don't understand why the picker has an allowsEditing property at all.
EDIT: In the info dictionary I got back are the following keys:
info: {
UIImagePickerControllerMediaType = "public.movie";
UIImagePickerControllerMediaURL = "file://localhost/private/var/mobile/Applications/F12E4608-FE5A-4EE3-B4E2-8F7D2508C4C8/tmp/capture-T0x21d810.tmp.wabFCC/capturedvideo.MOV";
"_UIImagePickerControllerVideoEditingEnd" = "5.498333333333333";
"_UIImagePickerControllerVideoEditingStart" = "4.273402690887451";
}
Does this mean we have to trim it ourselves using this data? If so, the Apple documentation isn't very clear about it; do you know a good practice for doing this?
Take a look at the highlighted answer on this post:
How to trim the video using AVFoundation
I think it's exactly what you want. The answer uses UIImagePickerController too.
Hope it helps,
Mário
Here's a quick and dirty Swift 5 example of how to trim the video from UIImagePickerController
extension ViewController: UIImagePickerControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
let mediaType = info[.mediaType] as! String
dismiss(animated: true) { [weak self] in
// Handle a movie capture
if mediaType == kUTTypeMovie as String {
guard let videoURL = info[.mediaURL] as? URL else {
SwiftyBeaver.error("Could not get URL for movie")
return
}
let editingEnd = UIImagePickerController.InfoKey(rawValue: "_UIImagePickerControllerVideoEditingEnd")
let editingStart = UIImagePickerController.InfoKey(rawValue: "_UIImagePickerControllerVideoEditingStart")
let startMilliseconds: Double?
let endMilliseconds: Double?
if let start = info[editingStart] as? Double, let end = info[editingEnd] as? Double {
startMilliseconds = start
endMilliseconds = end
} else {
startMilliseconds = nil
endMilliseconds = nil
}
let alert = UIAlertController(title: "Creating", message: "File is being processed", preferredStyle: .alert)
self?.present(alert, animated: true)
self?.process(srcVideoURL: videoURL, startSeconds: startMilliseconds, endSeconds: endMilliseconds) { (error) in
DispatchQueue.main.async {
if let error = error {
alert.title = "Whoops"
alert.message = "\(error)"
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: { _ in
self?.dismiss(animated: true, completion: nil)
}))
return
}
self?.dismiss(animated: true, completion: nil)
}
}
}
}
}
}
enum VideoError: Error {
case error(message: String)
}
extension ViewController {
func process(srcVideoURL: URL, startSeconds: Double?, endSeconds: Double?, completed: @escaping (_ error: Error?) -> ()) {
DispatchQueue.global(qos: .userInitiated).async {
let dstVideoURL = FileManager.default.temporaryDirectory.appendingPathComponent(UUID().uuidString + ".mov") // placeholder destination URL; use whatever destination you need
do {
try self.handleNewVideo(srcVideoURL: srcVideoURL, dstVideoURL: dstVideoURL, startSeconds: startSeconds, endSeconds: endSeconds)
completed(nil)
} catch {
completed(error)
}
}
}
func handleNewVideo(srcVideoURL: URL, dstVideoURL: URL, startSeconds: Double?, endSeconds: Double?) throws {
guard let start = startSeconds, let end = endSeconds else {
print("No video editing information. Copying file.")
try FileManager.default.moveItem(at: srcVideoURL, to: dstVideoURL)
return
}
print("Video editing information. Processing start \(start) end \(end).")
let videoAsset = AVURLAsset(url: srcVideoURL)
let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exportSession.outputURL = dstVideoURL
exportSession.outputFileType = AVFileType.mov
let timeRange = CMTimeRange(start: CMTime(seconds: start, preferredTimescale: 1000), duration: CMTime(seconds: end - start, preferredTimescale: 1000))
exportSession.timeRange = timeRange
var error: Error? = nil
let dispatchGroup = DispatchGroup()
dispatchGroup.enter()
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .completed:
break
case .failed:
error = exportSession.error ?? VideoError.error(message: "Unknown failed error")
case .cancelled:
error = exportSession.error ?? VideoError.error(message: "Video Cancelled")
case .exporting:
error = exportSession.error ?? VideoError.error(message: "Video still exporting")
case .unknown:
error = exportSession.error ?? VideoError.error(message: "Unknown unknown error")
case .waiting:
error = exportSession.error ?? VideoError.error(message: "Waiting error")
@unknown default:
error = exportSession.error ?? VideoError.error(message: "Future error")
}
dispatchGroup.leave()
})
dispatchGroup.wait()
if let error = error {
throw error
}
}
}
You will need to use a UIVideoEditorController for this. Its delegate protocol specifies a method videoEditorController:didSaveEditedVideoToPath:, which seems to be what you want.
There is sample code available here, as referenced in this SO question.
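A brief sketch of that approach (hedged; the class name TrimViewController, the presentTrimmer helper, and the videoPath parameter are placeholders, not from the answer above): check that the file can be edited, present the editor, and pick up the trimmed file in the delegate callback.

import UIKit

// Sketch: trimming with UIVideoEditorController.
class TrimViewController: UIViewController, UIVideoEditorControllerDelegate, UINavigationControllerDelegate {

    func presentTrimmer(for videoPath: String) {
        guard UIVideoEditorController.canEditVideo(atPath: videoPath) else { return }
        let editor = UIVideoEditorController()
        editor.videoPath = videoPath
        editor.delegate = self
        present(editor, animated: true)
    }

    func videoEditorController(_ editor: UIVideoEditorController,
                               didSaveEditedVideoToPath editedVideoPath: String) {
        editor.dismiss(animated: true)
        print("Trimmed video saved at \(editedVideoPath)")   // use the trimmed file from here on
    }

    func videoEditorControllerDidCancel(_ editor: UIVideoEditorController) {
        editor.dismiss(animated: true)
    }
}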