I'm rather new to programming, so I apologize if my code is way off. I'm trying to use AVAssetExportSession to convert an AVAsset (video) into Data so that I can attach it to an email using MFMailComposer. I'm having a hell of a time trying to figure out how this is done.
I can not figure out how to access the exported file so I don't know what to set the data to before I launch the mailComposer.
Am I close? Any help would be incredibly appreciated! THANKS.
func createVideo() {
    // Builds a movie from `self.images`, re-encodes it as MP4 into the temp
    // directory, loads the finished file into Data, and attaches it to an email.
    let uiImages = self.images
    let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecH264, width: (uiImages[0].cgImage?.width)!, height: (uiImages[0].cgImage?.height)!)
    let movieMaker = CXEImagesToVideo(videoSettings: settings)
    movieMaker.createMovieFrom(images: uiImages) { (fileURL: URL) in
        let video = AVAsset(url: fileURL)
        let exportPath = NSTemporaryDirectory().appendingFormat("/video.mp4")
        let exportURL = URL(fileURLWithPath: exportPath)
        // AVAssetExportSession fails if a file already exists at outputURL,
        // so clear out any leftover from a previous run (best-effort).
        try? FileManager.default.removeItem(at: exportURL)
        guard let exporter = AVAssetExportSession(asset: video, presetName: AVAssetExportPresetMediumQuality) else { return }
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.outputURL = exportURL
        exporter.exportAsynchronously {
            guard exporter.status == AVAssetExportSessionStatus.completed else { return }
            // This is "what to set the data to": read the exported file from
            // disk into memory so it can be handed to the mail composer.
            guard let data = try? Data(contentsOf: exportURL) else { return }
            // UI work (creating/presenting the composer) belongs on the main
            // queue; the export completion handler is not guaranteed to be there.
            DispatchQueue.main.async {
                guard MFMailComposeViewController.canSendMail() else { return }
                let mail = MFMailComposeViewController()
                mail.mailComposeDelegate = self
                var subject = "Here's your GIF"
                var body = "<p>Here is your GIF!!</p>"
                // Optional user-configured overrides for subject/body.
                if let emailDescription = UserDefaults.standard.object(forKey: "emailDescription") as? [String : String] {
                    if let theSubject = emailDescription["subject"] {
                        subject = theSubject
                    }
                    if let theBody = emailDescription["body"] {
                        body = theBody
                    }
                }
                mail.setSubject(subject)
                mail.setMessageBody(body, isHTML: true)
                mail.addAttachmentData(data, mimeType: "video/mp4", fileName: "gif.mp4")
                // NOTE(review): the original never presented the composer;
                // assumes `self` is a UIViewController — confirm at the call site.
                self.present(mail, animated: true, completion: nil)
            }
        }
    }
}
Related
I'm trying to Export my Database.
I've tried by Email and by sharing it.
The thing is that I want to export the current state of the dataBase (with all the information in it).
I've tried this code :
func exportDatabase(){
    // Locates WalletDatabase.sqlite in the app's Documents directory and,
    // if the file exists on disk, hands its path to the mail composer.
    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).last! as String
    let databasePath = documentsPath + "/WalletDatabase.sqlite"
    guard FileManager.default.fileExists(atPath: databasePath) else {
        print("error")
        return
    }
    sendMail(sUrl: databasePath)
}
func sendMail(sUrl:String){
    // Presents a mail composer with the SQLite database attached.
    // - Parameter sUrl: absolute file-system path of the database file.
    guard MFMailComposeViewController.canSendMail() else {
        print("error")
        return
    }
    print("Can send email.")
    let mailComposer = MFMailComposeViewController()
    mailComposer.setToRecipients(["***#gmail.com"])
    mailComposer.mailComposeDelegate = self
    //Set the subject and message of the email
    mailComposer.setSubject("Lorem Ipsum")
    mailComposer.setMessageBody("Lorem Ipsum.", isHTML: false)
    if let fileData = FileManager.default.contents(atPath: sUrl) {
        print("File data loaded.")
        // Include the ".sqlite" extension in the attachment name so the
        // receiving client knows the file type (the original attachment
        // arrived with no type because the extension was missing).
        mailComposer.addAttachmentData(fileData, mimeType: "application/x-sqlite3", fileName: "WalletDatabase.sqlite")
    }
    // NOTE(review): the original never presented the composer; assumes `self`
    // is a UIViewController — confirm before shipping.
    present(mailComposer, animated: true, completion: nil)
}
But :
The file that is sent doesn't have any file type (no extension)
The database is empty, only the table and col remain
Could you guys give me a little help
I fix it by adding this following code :
static func migrateStoreSwift() -> NSURL
{
    // Migrates (copies) the current Core Data store to "Database.sqlite" in
    // the Documents directory, producing a consistent snapshot suitable for
    // export. migratePersistentStore checkpoints pending journal data, which
    // is why exporting the raw file directly appeared "empty".
    // Failures still trap (as the original try!/as! chain did), but now with
    // a diagnostic message instead of a bare crash.
    guard let lApp = UIApplication.shared.delegate as? AppController else {
        fatalError("migrateStoreSwift: app delegate is not an AppController")
    }
    guard let lCurrentStore = lApp.persistentStoreCoordinator.persistentStores.last else {
        fatalError("migrateStoreSwift: no persistent store is loaded")
    }
    let lNewDataBase = "Database.sqlite"
    guard let lNewStoreURL = lApp.applicationDocumentsDirectory()?.appendingPathComponent(lNewDataBase) else {
        fatalError("migrateStoreSwift: could not build destination URL for \(lNewDataBase)")
    }
    do {
        try lApp.persistentStoreCoordinator.migratePersistentStore(lCurrentStore, to: lNewStoreURL as URL, options: nil, withType: NSSQLiteStoreType)
    } catch {
        fatalError("migrateStoreSwift: store migration failed: \(error)")
    }
    return lNewStoreURL as NSURL
}
This code creates a copy of the current version of the database.
When I try to export the asset with AVAssetExportSession I get the following error — apparently only on videos received through WhatsApp.
I could not find a working solution. I've also tried implementing code to fix the video duration, but it did not fix the problem.
Error is:
Error Domain=NSOSStatusErrorDomain Code=-12780 \"(null)\"
Here code
// Fetch the AVAsset backing a Photos-library video, normalize its duration,
// re-export it to a temp file, and append the result to the media array.
// NOTE(review): `asset.phAsset!` and `AVAssetRecivied!` force-unwrap values
// that can legitimately be nil (e.g. iCloud-only assets) — confirm upstream.
PHCachingImageManager().requestAVAsset(forVideo: asset.phAsset!, options: nil, resultHandler: { (AVAssetRecivied, audioMix, info) in
let AVAssetMy = AVAssetRecivied!.normalizingMediaDuration()
let exportSession : AVAssetExportSession?
// Pick the initializer input based on the concrete asset subclass returned.
if (AVAssetMy as? AVURLAsset) != nil {
exportSession = AVAssetExportSession(asset: (AVAssetMy as? AVURLAsset)!, presetName: AVAssetExportPresetMediumQuality)
}
else {
// normalizingMediaDuration() returns an AVMutableComposition, so this
// branch is the one usually taken; the `as? AVComposition` force-unwrap
// traps if the asset is neither subclass.
exportSession = AVAssetExportSession(asset: (AVAssetMy as? AVComposition)!, presetName: AVAssetExportPresetMediumQuality)
}
// Unique temp-file destination; extension ".m4v" with a QuickTime container.
exportSession?.outputURL = URL(fileURLWithPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.audioMix = audioMix
exportSession?.shouldOptimizeForNetworkUse = true
exportSession?.exportAsynchronously { () -> Void in
if exportSession?.status == .completed {
self.getFileSize(url: exportSession!.outputURL!)
// First video creates the array; later ones append. UI reloads must
// happen on the main queue — the export callback is on a background one.
if self.myMediaArray == nil {
self.myMediaArray = [["Video" : AVAsset(url: exportSession!.outputURL!)]]
DispatchQueue.main.async {
self.collectionViewImage.reloadData()
}
} else {
self.myMediaArray?.append(["Video" : AVAsset(url: exportSession!.outputURL!)])
DispatchQueue.main.async {
self.collectionViewImage.reloadData()
}
}}
}
})
Here is the method for adjusting the duration of the video
func normalizingMediaDuration() -> AVAsset? {
    // Rebuilds this asset as a composition whose audio and video tracks are
    // trimmed to the same length (the shorter of the two), working around
    // exports that fail when track durations disagree.
    // - Returns: the trimmed composition, or nil if the asset has no video
    //   track or a time range cannot be inserted.
    let mixComposition : AVMutableComposition = AVMutableComposition()
    guard let video = tracks(withMediaType: AVMediaTypeVideo).first else {
        return nil
    }
    // Bug fix: the original returned nil for assets with no audio track,
    // rejecting silent videos entirely. Audio is now optional.
    let audio = tracks(withMediaType: AVMediaTypeAudio).first
    // Use the shorter track's duration so both tracks end together.
    let duration: CMTime
    if let audio = audio {
        duration = video.timeRange.duration.seconds > audio.timeRange.duration.seconds ? audio.timeRange.duration : video.timeRange.duration
    } else {
        duration = video.timeRange.duration
    }
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, duration), of: video, at: kCMTimeZero)
        if let audio = audio {
            let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, duration), of: audio, at: kCMTimeZero)
        }
    } catch {
        return nil
    }
    // The original also built an AVMutableVideoCompositionInstruction here,
    // but it was never attached to anything — dead code, removed.
    return mixComposition
}
}
The file is:
1) Exportable
2) Presets and format are compatible
3) I tried to move the file to the document's before I export 4) I tried to change the file extension.
It’s a bug.
Bug report : https://bugreport.apple.com/web/?problemID=34574848
Alternatives are welcome...
I ran into the same problem, and got the same error code -12780.
The only thing that fixed it for me was:
assigning `exportSession?.outputURL` from an NSURL variable cast with `as URL`.
I have no idea why it worked and I hope you'll find it helpful as well.
I searched for how to read the EXIF data from picture files and write it back, in Swift. But I could only find predefined libraries for different languages.
I also found references to "CFDictionaryGetValue", but which keys do I need to get the data? And how can I write it back?
I'm using this to get EXIF infos from an image file:
import ImageIO
// Placeholder: the file URL of the image whose metadata we want to read.
let fileURL = theURLToTheImageFile
// Reading straight from the file preserves the on-disk metadata (EXIF, GPS,
// TIFF, color profile, ...) — unlike re-encoding the UIImage to JPEG first.
if let imageSource = CGImageSourceCreateWithURL(fileURL as CFURL, nil) {
// Index 0 = first image in the file; nil options = defaults.
let imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil)
// Bridge the CFDictionary to a Swift dictionary for easy inspection;
// the EXIF sub-dictionary lives under the "{Exif}" key.
if let dict = imageProperties as? [String: Any] {
print(dict)
}
}
It gives you a dictionary containing various informations like the color profile - the EXIF info specifically is in dict["{Exif}"].
Swift 4
extension UIImage {
    /// Returns the ImageIO property dictionary for this image (the EXIF
    /// sub-dictionary is under the "{Exif}" key), or nil on failure.
    /// - Note: this re-encodes the image as JPEG first, so metadata from the
    ///   original file on disk is NOT preserved; read the file with
    ///   CGImageSourceCreateWithURL if you need the original EXIF.
    func getExifData() -> CFDictionary? {
        var exifData: CFDictionary? = nil
        if let data = self.jpegData(compressionQuality: 1.0) {
            data.withUnsafeBytes {(bytes: UnsafePointer<UInt8>)->Void in
                // Bug fix: guard the image source instead of force-unwrapping
                // it — CGImageSourceCreateWithData returns nil for data that
                // ImageIO cannot parse, and `source!` crashed in that case.
                if let cfData = CFDataCreate(kCFAllocatorDefault, bytes, data.count),
                   let source = CGImageSourceCreateWithData(cfData, nil) {
                    exifData = CGImageSourceCopyPropertiesAtIndex(source, 0, nil)
                }
            }
        }
        return exifData
    }
}
Swift 5
extension UIImage {
    /// Returns the ImageIO property dictionary for this image (the EXIF
    /// sub-dictionary is under the "{Exif}" key), or nil on failure.
    /// Re-encodes the image as JPEG before probing it with ImageIO.
    func getExifData() -> CFDictionary? {
        // No JPEG representation means nothing to inspect.
        guard let data = self.jpegData(compressionQuality: 1.0) else {
            return nil
        }
        var exifData: CFDictionary? = nil
        data.withUnsafeBytes { rawBuffer in
            let base = rawBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self)
            // Wrap the raw bytes in CFData and open an image source on it;
            // bail out quietly if either step fails.
            guard let cfData = CFDataCreate(kCFAllocatorDefault, base, data.count),
                  let source = CGImageSourceCreateWithData(cfData, nil) else {
                return
            }
            exifData = CGImageSourceCopyPropertiesAtIndex(source, 0, nil)
        }
        return exifData
    }
}
You can use AVAssetExportSession to write metadata.
// Re-export an existing movie file with new metadata items attached.
let asset = AVAsset(url: existingUrl)
// The initializer returns nil when the preset can't handle the asset,
// hence the optional chaining on every subsequent line.
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
exportSession?.outputURL = newURL
exportSession?.metadata = [
// whatever [AVMetadataItem] you want to write
]
exportSession?.exportAsynchronously {
// respond to file writing completion
}
I'm generating a video using an AVCapture session and then using an AVVideoCompositionCoreAnimationTool to add a simple overlay. I then use an AVAssetExportSession to output the file. This all seems to work, but when I attempt to save it to the Photo Library using PHPhotoLibrary (because ALAssetsLibrary has been deprecated) it fails with the message: "Can't complete operation, Cocoa error -1". After extensive Google use and checking the docs I can't work out what's going wrong.
Any help would be great thanks.
func videoOutput() {
    // Composites `videoToMake` with a Core Animation overlay and exports the
    // result to the Documents directory as an MPEG-4 file. (Swift 2 APIs,
    // matching the rest of this file.)
    videoToMake = AVAsset(URL: videoToMakeURL!)
    if (videoToMake == nil) {
        return
    }
    //This holds the different tracks of the video like audio and the layers
    let mixComposition = AVMutableComposition()
    let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    print("The duration of the video to create is \(videoToMake!.duration.seconds)")
    do{
        try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero,videoToMake!.duration), ofTrack: videoToMake!.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: kCMTimeZero)
    }catch let error as NSError{
        print("Error inserting time range on video track \(error.localizedDescription)")
        return
    }catch{
        print("An unknown error occured")
    }
    //Make the instructions for the other layers
    let mainInstrucation = AVMutableVideoCompositionInstruction()
    mainInstrucation.timeRange = CMTimeRangeMake(kCMTimeZero, videoToMake!.duration)
    //Create the layer instructions
    let videoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let videoAssetTrack = videoToMake!.tracksWithMediaType(AVMediaTypeVideo)[0]
    let assetInfo = orientationFromTransform(videoAssetTrack.preferredTransform)
    // sort size it in respect to the video orientation.
    videoLayerInstruction.setTransform(videoAssetTrack.preferredTransform, atTime: kCMTimeZero)
    videoLayerInstruction.setOpacity(0.0, atTime:videoToMake!.duration)
    //Add the instructions
    mainInstrucation.layerInstructions = [videoLayerInstruction]
    let mainCompositionInst = AVMutableVideoComposition()
    var naturalSize:CGSize
    if assetInfo.isPortrait {
        // Portrait assets report a landscape naturalSize plus a rotation
        // transform, so swap width/height for the render size.
        naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    }else{
        naturalSize = videoAssetTrack.naturalSize
    }
    let renderWidth = naturalSize.width
    let renderHeight = naturalSize.height
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight)
    mainCompositionInst.instructions = [mainInstrucation]
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);
    //So now the main composition has been created add the video affects
    applyVideoEffectsToComposition(mainCompositionInst, size: naturalSize)
    let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory,.UserDomainMask, true)
    let documentsDirectory = paths[0]
    let random = Int(arc4random_uniform(1000))
    // Bug fix: the output URL must carry a file extension matching the
    // outputFileType — with no extension the export/Photos save fails with
    // the Cocoa -1 error. ".mp4" matches AVFileTypeMPEG4 below.
    let url = NSURL(fileURLWithPath:documentsDirectory).URLByAppendingPathComponent("FinalVideo\(random).mp4")
    //Create the exporter
    let exporter = AVAssetExportSession(asset: mixComposition, presetName:AVAssetExportPresetHighestQuality)
    exporter!.outputURL = url
    exporter!.outputFileType = AVFileTypeMPEG4
    exporter!.shouldOptimizeForNetworkUse = true
    exporter!.videoComposition = mainCompositionInst
    //Perform the export
    exporter!.exportAsynchronouslyWithCompletionHandler() {
        // Hop back to the main queue before touching UI / Photos completion.
        dispatch_async(dispatch_get_main_queue(), { () -> Void in
            self.exportDidFinish(exporter!)
        })
    }
}
Well it turns out I was missing the extension from the end of my movie name:
let url = NSURL(fileURLWithPath:documentsDirectory).URLByAppendingPathComponent("FinalVideo\(random)")
So it should have been "FinalVideo(random).mov"
Hope this helps somebody one day.
How can I trim a video by specifying particular start and end points?
Also I want to use slider to point out trimming start and end points.
func trimVideo(sourceURL: NSURL, destinationURL: NSURL, trimPoints: TrimPoints, completion: TrimCompletion?) {
    // Copies the requested (start, end) slices of the source movie into a new
    // composition and exports it with a passthrough preset (no re-encode).
    // - Parameters:
    //   - sourceURL / destinationURL: must be file URLs.
    //   - trimPoints: sequence of (startTime, endTime) CMTime pairs.
    //   - completion: called once with nil on success or the failing NSError.
    assert(sourceURL.fileURL)
    assert(destinationURL.fileURL)
    let options = [ AVURLAssetPreferPreciseDurationAndTimingKey: true ]
    let asset = AVURLAsset(URL: sourceURL, options: options)
    let preferredPreset = AVAssetExportPresetPassthrough
    if verifyPresetForAsset(preferredPreset, asset: asset) {
        let composition = AVMutableComposition()
        let videoCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
        let audioCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let assetVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo).first as! AVAssetTrack
        let assetAudioTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first as! AVAssetTrack
        var compError: NSError?
        var accumulatedTime = kCMTimeZero
        for (startTimeForCurrentSlice, endTimeForCurrentSlice) in trimPoints {
            let durationOfCurrentSlice = CMTimeSubtract(endTimeForCurrentSlice, startTimeForCurrentSlice)
            let timeRangeForCurrentSlice = CMTimeRangeMake(startTimeForCurrentSlice, durationOfCurrentSlice)
            videoCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetVideoTrack, atTime: accumulatedTime, error: &compError)
            audioCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetAudioTrack, atTime: accumulatedTime, error: &compError)
            if compError != nil {
                NSLog("error during composition: \(compError)")
                if let completion = completion {
                    completion(compError)
                }
                // Bug fix: bail out on the first failed slice. The original
                // kept looping and still ran the export, which could also
                // invoke `completion` a second time.
                return
            }
            accumulatedTime = CMTimeAdd(accumulatedTime, durationOfCurrentSlice)
        }
        let exportSession = AVAssetExportSession(asset: composition, presetName: preferredPreset)
        exportSession.outputURL = destinationURL
        exportSession.outputFileType = AVFileTypeAppleM4V
        exportSession.shouldOptimizeForNetworkUse = true
        // Export fails if the destination already exists, so remove it first.
        removeFileAtURLIfExists(destinationURL)
        exportSession.exportAsynchronouslyWithCompletionHandler({ () -> Void in
            // nil error means the export completed successfully.
            if let completion = completion {
                completion(exportSession.error)
            }
        })
    } else {
        NSLog("Could not find a suitable export preset for the input video")
        let error = NSError(domain: "org.linuxguy.VideoLab", code: -1, userInfo: nil)
        if let completion = completion {
            completion(error)
        }
    }
}