Unable to export AVMutableComposition using AVAssetExportSession, only on iOS 7 (Swift)

I'm trying to export an AVMutableComposition to an mp4 file in NSTemporaryDirectory() using AVAssetExportSession. It works great on iOS 8, but on iOS 7 I get a -1100 error in exportAsynchronouslyWithCompletionHandler:
The requested URL was not found on this server.
My composition contains two AVAssets, both sourced from the web: one for the video, one for the audio.
At first, I thought this was a folder problem, but I can create/delete a file using NSFileManager without trouble.
I don't really understand why it works on iOS 8 but not on iOS 7, and I didn't spot any "introduced=8.0" annotation on the APIs involved.
Here is my code:
func compose() {
    let composition = AVMutableComposition()
    let videoTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    let audioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    let videoAsset = AVAsset.assetWithURL(videoUrl) as! AVAsset //Not a local URL
    let audioAsset = AVAsset.assetWithURL(audioUrl) as! AVAsset //Not a local URL

    //Insert the video track at the start of the composition
    let videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
    var videoError: NSError?
    if let tracks = videoAsset.tracksWithMediaType(AVMediaTypeVideo) as? [AVAssetTrack] where tracks.isEmpty == false {
        videoTrack.insertTimeRange(videoTimeRange, ofTrack: tracks[0], atTime: kCMTimeZero, error: &videoError)
    }

    //Insert the audio track at the start of the composition
    let audioTimeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
    var audioError: NSError?
    if let tracks = audioAsset.tracksWithMediaType(AVMediaTypeAudio) as? [AVAssetTrack] where tracks.isEmpty == false {
        audioTrack.insertTimeRange(audioTimeRange, ofTrack: tracks[0], atTime: kCMTimeZero, error: &audioError)
    }

    self.export(composition)
}
func export(composition: AVMutableComposition) {
    self.exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
    let filename = "composition.mp4"
    let outputPath = NSTemporaryDirectory().stringByAppendingPathComponent(filename)

    //Check if the file already exists and delete it if needed
    if let fileUrl = NSURL(fileURLWithPath: outputPath) {
        let manager = NSFileManager.defaultManager()
        if manager.fileExistsAtPath(outputPath) {
            var error: NSError? = nil
            if manager.removeItemAtPath(outputPath, error: &error) == true {
                println("Removed")
            }
        }

        //File should be 30 seconds max
        let maxDuration = CMTimeMakeWithSeconds(30.0, composition.duration.timescale)
        let exportInterval = CMTimeRangeMake(kCMTimeZero, maxDuration)

        self.exporter.outputFileType = AVFileTypeMPEG4
        self.exporter.outputURL = fileUrl
        self.exporter.timeRange = exportInterval

        self.exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
            dispatch_async(dispatch_get_main_queue(), {
                if self.exporter.status == AVAssetExportSessionStatus.Completed {
                    println("Success")
                }
                else {
                    println(self.exporter.error?.localizedDescription)
                    //The requested URL was not found on this server.
                }
            })
        })
    }
}
I've been stuck on this problem for a while now, so any advice would be much appreciated!
Thanks in advance,
Cordially, Louis.
Edit 7th July:
I found out that it is not possible to do this, since AVMutableComposition does not support web URLs on iOS 7.
For now, I haven't been able to find a workaround and would really appreciate any advice.
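A possible workaround (untested on iOS 7; the helper below is only a rough sketch and its names are placeholders) is to download both remote files into NSTemporaryDirectory() with NSURLSession first, and then build the composition from the resulting local file URLs:
func downloadAsset(remoteUrl: NSURL, filename: String, completion: (NSURL?) -> Void) {
    //Download the remote file and move it into the temporary directory
    let task = NSURLSession.sharedSession().downloadTaskWithURL(remoteUrl) { location, response, error in
        if location == nil {
            completion(nil)
            return
        }
        let destinationPath = NSTemporaryDirectory().stringByAppendingPathComponent(filename)
        if let destinationUrl = NSURL(fileURLWithPath: destinationPath) {
            let manager = NSFileManager.defaultManager()
            manager.removeItemAtURL(destinationUrl, error: nil) //Ignore "file not found"
            var moveError: NSError?
            manager.moveItemAtURL(location, toURL: destinationUrl, error: &moveError)
            completion(moveError == nil ? destinationUrl : nil)
        } else {
            completion(nil)
        }
    }
    task.resume()
}
compose() would then be called with the two local file URLs once both downloads have completed.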

Related

Error Domain=NSOSStatusErrorDomain Code=-12780 "(null)"

When I try to export the asset with AVAssetExportSession, I get the following error, apparently only on videos received through WhatsApp.
I could not find a working solution. I also tried implementing code to normalize the video duration, but that did not fix it.
The error is:
Error Domain=NSOSStatusErrorDomain Code=-12780 "(null)"
Here is the code:
PHCachingImageManager().requestAVAsset(forVideo: asset.phAsset!, options: nil, resultHandler: { (AVAssetRecivied, audioMix, info) in
let AVAssetMy = AVAssetRecivied!.normalizingMediaDuration()
let exportSession : AVAssetExportSession?
if (AVAssetMy as? AVURLAsset) != nil {
exportSession = AVAssetExportSession(asset: (AVAssetMy as? AVURLAsset)!, presetName: AVAssetExportPresetMediumQuality)
}
else {
exportSession = AVAssetExportSession(asset: (AVAssetMy as? AVComposition)!, presetName: AVAssetExportPresetMediumQuality)
}
exportSession?.outputURL = URL(fileURLWithPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.audioMix = audioMix
exportSession?.shouldOptimizeForNetworkUse = true
exportSession?.exportAsynchronously { () -> Void in
if exportSession?.status == .completed {
self.getFileSize(url: exportSession!.outputURL!)
if self.myMediaArray == nil {
self.myMediaArray = [["Video" : AVAsset(url: exportSession!.outputURL!)]]
DispatchQueue.main.async {
self.collectionViewImage.reloadData()
}
} else {
self.myMediaArray?.append(["Video" : AVAsset(url: exportSession!.outputURL!)])
DispatchQueue.main.async {
self.collectionViewImage.reloadData()
}
}}
}
})
Here is the method for adjusting the duration of the video (it lives in an AVAsset extension):
extension AVAsset {
func normalizingMediaDuration() -> AVAsset? {
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
guard let video = tracks(withMediaType: AVMediaTypeVideo).first else {
return nil
}
guard let audio = tracks(withMediaType: AVMediaTypeAudio).first else {
return nil
}
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let duration = video.timeRange.duration.seconds > audio.timeRange.duration.seconds ? audio.timeRange.duration : video.timeRange.duration
do{
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,duration), of: video, at: kCMTimeZero)
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, duration), of: audio, at: kCMTimeZero)
}catch{
return nil
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,duration)
return mixComposition
}
}
I have checked that the file is:
1) exportable
2) compatible with the chosen presets and output format
(a sketch of how checks 1 and 2 can be made follows this list)
I also tried:
3) moving the file to the Documents directory before exporting
4) changing the file extension.
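For checks 1 and 2, something along these lines can be used (a rough sketch, assuming asset is the AVAsset being exported):
import AVFoundation

// Rough sketch of the exportability / preset-compatibility checks
func logExportCompatibility(for asset: AVAsset) {
    print("Exportable: \(asset.isExportable)")
    // Presets AVFoundation reports as compatible with this asset
    print("Compatible presets: \(AVAssetExportSession.exportPresets(compatibleWith: asset))")
    // Check one specific preset / output file type combination
    AVAssetExportSession.determineCompatibility(ofExportPreset: AVAssetExportPresetMediumQuality,
                                                with: asset,
                                                outputFileType: AVFileTypeQuickTimeMovie) { compatible in
        print("MediumQuality + QuickTimeMovie compatible: \(compatible)")
    }
}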
It’s a bug.
Bug report: https://bugreport.apple.com/web/?problemID=34574848
Alternatives are welcome...
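Since alternatives were asked for: one untested option is to let Photos hand back a preconfigured export session instead of requesting the AVAsset and building one manually. A sketch, callable with asset.phAsset! from the code above:
import Photos
import AVFoundation

// Untested sketch: ask Photos for an export session directly
func exportViaPhotos(phAsset: PHAsset) {
    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    PHImageManager.default().requestExportSession(forVideo: phAsset,
                                                  options: options,
                                                  exportPreset: AVAssetExportPresetMediumQuality) { exportSession, _ in
        guard let exportSession = exportSession else { return }
        exportSession.outputURL = URL(fileURLWithPath: NSTemporaryDirectory() + NSUUID().uuidString + ".mov")
        exportSession.outputFileType = AVFileTypeQuickTimeMovie
        exportSession.exportAsynchronously {
            print("status: \(exportSession.status.rawValue), error: \(String(describing: exportSession.error))")
        }
    }
}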
I ran into the same problem, and got the same error code -12780.
The only thing that fixed it for me was assigning an NSURL variable to exportSession?.outputURL, cast as URL.
I have no idea why it worked and I hope you'll find it helpful as well.
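If I read that right, the change looks something like this (untested; it reuses exportSession and the temporary file name from the question):
// Build the output URL as an NSURL first, then bridge it when assigning it to the session
let outputNSURL = NSURL(fileURLWithPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
exportSession?.outputURL = outputNSURL as URL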

Attach a video to an email using AVAssetExportSession

I'm rather new to programming, so I apologize if my code is way off. I'm trying to use AVAssetExportSession to convert an AVAsset (video) into Data so that I can attach it to an email using MFMailComposer. I'm having a hell of a time trying to figure out how this is done.
I cannot figure out how to access the exported file, so I don't know what to set the data to before I launch the mail composer.
Am I close? Any help would be incredibly appreciated! THANKS.
func createVideo() {
let uiImages = self.images
let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecH264, width: (uiImages[0].cgImage?.width)!, height: (uiImages[0].cgImage?.height)!)
let movieMaker = CXEImagesToVideo(videoSettings: settings)
movieMaker.createMovieFrom(images: uiImages) { (fileURL:URL) in
var video = AVAsset(url: fileURL)
let exportPath = NSTemporaryDirectory().appendingFormat("/video.mp4")
let exportURL = URL(fileURLWithPath: exportPath)
let exporter = AVAssetExportSession(asset: video, presetName: AVAssetExportPresetMediumQuality)
exporter!.outputFileType = AVFileTypeMPEG4
exporter?.outputURL = exportURL
exporter?.exportAsynchronously(completionHandler: {
if exporter?.status == AVAssetExportSessionStatus.completed {
if MFMailComposeViewController.canSendMail() {
let mail = MFMailComposeViewController()
mail.mailComposeDelegate = self
var subject = "Here's your GIF"
var body = "<p>Here is your GIF!!</p>"
if let emailDescription = UserDefaults.standard.object(forKey: "emailDescription") as? [String : String] {
if let theSubject = emailDescription["subject"] {
subject = theSubject
}
if let theBody = emailDescription["body"] {
body = theBody
}
}
mail.setSubject(subject)
mail.setMessageBody(body, isHTML: true)
// Read the exported file from disk so it can be attached as Data
if let data = try? Data(contentsOf: exportURL) {
mail.addAttachmentData(data, mimeType: "video/mp4", fileName: "gif.mp4")
}
// The mail composer then still needs to be presented (on the main thread) for the draft to appear
}
}
})
}
}

(Cocoa error -1) When attempting to save video created with AVCaptureSession

I'm generating a video using an AVCaptureSession and then using an AVVideoCompositionCoreAnimationTool to add a simple overlay. I then use an AVAssetExportSession to output the file. This all seems to work, but when I attempt to save it to the photo library using PHPhotoLibrary (because ALAssetsLibrary has been deprecated), it fails with the message "Can't complete operation (Cocoa error -1)". After extensive googling and checking the docs I can't work out what's going wrong.
Any help would be great thanks.
func videoOutput() {
videoToMake = AVAsset(URL: videoToMakeURL!)
if (videoToMake == nil) {
return
}
//This holds the different tracks of the video like audio and the layers
let mixComposition = AVMutableComposition()
let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
print("The duration of the video to create is \(videoToMake!.duration.seconds)")
do{
try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero,videoToMake!.duration), ofTrack: videoToMake!.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: kCMTimeZero)
}catch let error as NSError{
print("Error inserting time range on video track \(error.localizedDescription)")
return
}catch{
print("An unknown error occured")
}
//Make the instructions for the other layers
let mainInstrucation = AVMutableVideoCompositionInstruction()
mainInstrucation.timeRange = CMTimeRangeMake(kCMTimeZero, videoToMake!.duration)
//Create the layer instructions
let videoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
let videoAssetTrack = videoToMake!.tracksWithMediaType(AVMediaTypeVideo)[0]
let assetInfo = orientationFromTransform(videoAssetTrack.preferredTransform)
// Size it with respect to the video orientation.
videoLayerInstruction.setTransform(videoAssetTrack.preferredTransform, atTime: kCMTimeZero)
videoLayerInstruction.setOpacity(0.0, atTime:videoToMake!.duration)
//Add the instructions
mainInstrucation.layerInstructions = [videoLayerInstruction]
let mainCompositionInst = AVMutableVideoComposition()
var naturalSize:CGSize
if assetInfo.isPortrait {
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
}else{
naturalSize = videoAssetTrack.naturalSize
}
let renderWidth = naturalSize.width
let renderHeight = naturalSize.height
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight)
mainCompositionInst.instructions = [mainInstrucation]
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
//So now the main composition has been created add the video affects
applyVideoEffectsToComposition(mainCompositionInst, size: naturalSize)
let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory,.UserDomainMask, true)
let documentsDirectory = paths[0]
let random = Int(arc4random_uniform(1000))
let url = NSURL(fileURLWithPath:documentsDirectory).URLByAppendingPathComponent("FinalVideo\(random)")
//Create the exporter
let exporter = AVAssetExportSession(asset: mixComposition, presetName:AVAssetExportPresetHighestQuality)
exporter!.outputURL = url
exporter!.outputFileType = AVFileTypeMPEG4
exporter!.shouldOptimizeForNetworkUse = true
exporter!.videoComposition = mainCompositionInst
//Perform the export
exporter!.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.exportDidFinish(exporter!)
})
}
}
Well it turns out I was missing the extension from the end of my movie name:
let url = NSURL(fileURLWithPath:documentsDirectory).URLByAppendingPathComponent("FinalVideo\(random)")
So it should have been "FinalVideo\(random).mov"
Hope this helps somebody one day.
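In other words, roughly (and if the export still complains, an extension that matches the outputFileType, ".mp4" for AVFileTypeMPEG4, may be what is needed):
//Output file name with an explicit extension
let url = NSURL(fileURLWithPath:documentsDirectory).URLByAppendingPathComponent("FinalVideo\(random).mov")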

My system sound in Swift is not played

This is what I use:
if let filePath = NSBundle.mainBundle().pathForResource("Aurora", ofType: "aiff") {
//it never gets here
let fileURL = NSURL(fileURLWithPath: filePath)
var soundID:SystemSoundID = 0
AudioServicesCreateSystemSoundID(fileURL, &soundID)
AudioServicesPlaySystemSound(soundID)
}
How can I do this? I've read a lot about this, but nothing has worked.
I'm testing on a real device.
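Since the comment says the block is never entered, pathForResource is returning nil, which usually means the file is not bundled with the app target (check the file's Target Membership). A small diagnostic sketch, assuming the same file name:
import AudioToolbox

//Diagnostic sketch: report whether Aurora.aiff is actually in the main bundle
if let fileURL = NSBundle.mainBundle().URLForResource("Aurora", withExtension: "aiff") {
    var soundID: SystemSoundID = 0
    AudioServicesCreateSystemSoundID(fileURL, &soundID)
    AudioServicesPlaySystemSound(soundID)
} else {
    print("Aurora.aiff is not in the main bundle; check its Target Membership")
}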
You can use the following code:
func getSoundID() -> SystemSoundID {
var soundID: SystemSoundID = 0
let soundURL = CFBundleCopyResourceURL(CFBundleGetMainBundle(), "Glass", "mp3", nil)
AudioServicesCreateSystemSoundID(soundURL, &soundID)
return soundID
}
let soundId: SystemSoundID = getSoundID()
AudioServicesPlaySystemSound(soundId)
You can also check whether your device plays system sounds at all by passing a built-in sound ID:
AudioServicesPlaySystemSound(1200)
Please also check the device's system volume and silent switch.

How can I trim a video at particular start and end points?

How can I trim a video between particular start and end points?
I also want to use a slider to pick the trim start and end points (a sketch mapping slider values to trim points follows the code below).
func trimVideo(sourceURL: NSURL, destinationURL: NSURL, trimPoints: TrimPoints, completion: TrimCompletion?) {
assert(sourceURL.fileURL)
assert(destinationURL.fileURL)
let options = [ AVURLAssetPreferPreciseDurationAndTimingKey: true ]
let asset = AVURLAsset(URL: sourceURL, options: options)
let preferredPreset = AVAssetExportPresetPassthrough
if verifyPresetForAsset(preferredPreset, asset: asset) {
let composition = AVMutableComposition()
let videoCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let audioCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
let assetVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo).first as! AVAssetTrack
let assetAudioTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first as! AVAssetTrack
var compError: NSError?
var accumulatedTime = kCMTimeZero
for (startTimeForCurrentSlice, endTimeForCurrentSlice) in trimPoints {
let durationOfCurrentSlice = CMTimeSubtract(endTimeForCurrentSlice, startTimeForCurrentSlice)
let timeRangeForCurrentSlice = CMTimeRangeMake(startTimeForCurrentSlice, durationOfCurrentSlice)
videoCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetVideoTrack, atTime: accumulatedTime, error: &compError)
audioCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetAudioTrack, atTime: accumulatedTime, error: &compError)
if compError != nil {
NSLog("error during composition: \(compError)")
if let completion = completion {
completion(compError)
}
}
accumulatedTime = CMTimeAdd(accumulatedTime, durationOfCurrentSlice)
}
let exportSession = AVAssetExportSession(asset: composition, presetName: preferredPreset)
exportSession.outputURL = destinationURL
exportSession.outputFileType = AVFileTypeAppleM4V
exportSession.shouldOptimizeForNetworkUse = true
removeFileAtURLIfExists(destinationURL)
exportSession.exportAsynchronouslyWithCompletionHandler({ () -> Void in
if let completion = completion {
completion(exportSession.error)
}
})
} else {
NSLog("Could not find a suitable export preset for the input video")
let error = NSError(domain: "org.linuxguy.VideoLab", code: -1, userInfo: nil)
if let completion = completion {
completion(error)
}
}
}
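To drive this from a slider: a rough sketch, assuming TrimPoints is a list of (CMTime, CMTime) pairs and that the two UISliders hold values from 0.0 to 1.0 for the start and end positions (the names here are illustrative):
import UIKit
import AVFoundation

//Convert two slider positions (0.0 ... 1.0, start <= end assumed) into one trim range
func trimRange(startSlider: UISlider, endSlider: UISlider, asset: AVAsset) -> (CMTime, CMTime) {
    let durationSeconds = CMTimeGetSeconds(asset.duration)
    let start = CMTimeMakeWithSeconds(Float64(startSlider.value) * durationSeconds, asset.duration.timescale)
    let end = CMTimeMakeWithSeconds(Float64(endSlider.value) * durationSeconds, asset.duration.timescale)
    return (start, end)
}
The returned pair can then be wrapped in an array and passed as the trimPoints argument of trimVideo above.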