Merging audio and video files into one video - Swift

I wrote a program in Swift. I want to merge a video with an audio file, but I get this error:
"failed Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo=0x17da4230 {NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The operation is not supported for this media.}"
Code
func mergeVideoWithAudio(videoUrl: URL, audioUrl: URL, success: @escaping ((URL) -> Void), failure: @escaping ((Error?) -> Void)) {
let mixComposition: AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
let aAudioAsset: AVAsset = AVAsset(url: audioUrl)
if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
mutableCompositionVideoTrack.append(videoTrack)
mutableCompositionAudioTrack.append(audioTrack)
if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first, let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: .audio).first {
do {
try mutableCompositionVideoTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
try mutableCompositionAudioTrack.first?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
videoTrack.preferredTransform = aVideoAssetTrack.preferredTransform
} catch{
print(error)
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero,duration: videoTrack.timeRange.duration)
}
}
let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mutableVideoComposition.renderSize = CGSize(width: 480, height: 640)
if let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first {
let outputURL = URL(fileURLWithPath: documentsPath).appendingPathComponent("fileName.mp4")
do {
if FileManager.default.fileExists(atPath: outputURL.path) {
try FileManager.default.removeItem(at: outputURL)
}
} catch { }
print("outputURL:", outputURL)
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) {
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mov
exportSession.shouldOptimizeForNetworkUse = true
// try to export the file and handle the status cases
exportSession.exportAsynchronously(completionHandler: {
switch exportSession.status {
case .failed:
if let _error = exportSession.error {
print("failedddd",_error)
failure(_error)
}
case .cancelled:
if let _error = exportSession.error {
print("cancellled")
failure(_error)
}
default:
print("finished")
success(outputURL)
}
})
} else {
print("failurrrrrrrr")
failure(nil)
}
}
}
Here is how the function is called. The actual URL values at runtime are:
vidUrl = file:///private/var/mobile/Containers/Data/PluginKitPlugin/C6BD4529-DF11-4A67-B56E-C3A841D86D75/tmp/trim.CC7EABA8-08EC-4C33-8E6A-E85A5EBCFB64.MOV
outputURL (the audio file) = file:///var/mobile/Containers/Data/Application/9115DCFE-4134-48AD-84BF-452AC0D33278/Documents/y2matecom-ChallaOfficialFullVideoKhanSaabAYMediaRecordsLatestPunjabiSongs2016.mp3
mergeVideoWithAudio(videoUrl: vidUrl!, audioUrl: outputURL!) { resultUrl in
print("success", resultUrl)
} failure: { error in
print(error?.localizedDescription ?? "unknown error")
}
Note: the failure block is called every time, with "Operation Stopped".
My guess is that a media type like MPEG-4 is the problem here. Where is the problem? What am I missing?

Related

Swift AVFoundation instructions don't set the opacity

I have a function that merges videos. All videos merge properly, and the first two play perfectly, but then only the audio for the third video plays. I assume the video is there but just hidden behind the second video. I'm confused, though, because I'm using instructions to set the opacity of each asset to 0 when its video is done, but it doesn't work. Even if I don't set any instructions, the first video disappears to let the second video play, but the second video never disappears. What is going on!?
func mergeVideo(completion: @escaping (_ url: URL?, _ error: Error?) -> Void) {
let mixComposition = AVMutableComposition()
let videoComposition = AVMutableVideoComposition()
guard let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
completion(nil, nil)
return
}
let outputURL = documentDirectory.appendingPathComponent("\(id).mov")
do {
if FileManager.default.fileExists(atPath: outputURL.path) {
try FileManager.default.removeItem(at: outputURL)
}
} catch {
print(error.localizedDescription)
}
// If there is only one video, save export time.
if let video = videos.first, videos.count == 1 {
do {
if let url = URL(string: video.videoURL) {
try FileManager().copyItem(at: url, to: outputURL)
completion(outputURL, nil)
mergedVideoURL = outputURL.lastPathComponent
}
} catch let error {
completion(nil, error)
}
return
}
var currentTime = CMTime.zero
let renderSize = CGSize(width: 1280.0, height: 720.0)
let mainInstruction = AVMutableVideoCompositionInstruction()
videos.enumerated().forEach { index, video in
if let vidURL = URL(string: video.videoURL)?.lastPathComponent {
let url = documentDirectory.appendingPathComponent(vidURL)
let asset = AVAsset(url: url)
guard let assetTrack = asset.tracks.first else { return }
mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: CMTimeAdd(mixComposition.duration, asset.duration))
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: assetTrack)
instruction.setOpacity(0.0, at: asset.duration)
mainInstruction.layerInstructions.append(instruction)
do {
let timeRange = CMTimeRangeMake(start: .zero, duration: asset.duration)
try mixComposition.insertTimeRange(timeRange, of: asset, at: currentTime)
currentTime = CMTimeAdd(currentTime, asset.duration)
} catch let error {
completion(nil, error)
}
}
}//forEach
videoComposition.instructions = [mainInstruction]
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
videoComposition.renderSize = renderSize
guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough) else {
completion(nil, nil)
return
}
exporter.outputURL = outputURL
exporter.outputFileType = .mov
// Pass Video Composition to the Exporter.
exporter.videoComposition = videoComposition
exporter.exportAsynchronously {
DispatchQueue.main.async {
switch exporter.status {
case .completed:
completion(exporter.outputURL, nil)
case .failed:
completion(exporter.outputURL, exporter.error)
case .cancelled:
completion(exporter.outputURL, exporter.error)
case .unknown:
completion(exporter.outputURL, exporter.error)
case .waiting:
print("waiting")
case .exporting:
print("exporting")
@unknown default:
completion(exporter.outputURL, exporter.error)
}
}
}
}

Change volume of audio track within AVMutableComposition

I'm trying to merge a pre-existing video with a newly recorded audio voiceover track. The user can set the relative volume of the two audio tracks (the audio belonging to the video, and the new audio).
This code works in terms of merging everything into one new video file, but I can't figure out how to adjust track volume. I tried some code (commented out below) but don't understand how to use AVMutableAudioMixInputParameters with the code I already have.
static func mergeFilesWithUrl(videoUrl: URL, videoVolume: Float, audioUrl: URL, audioVolume: Float, completion: @escaping (URL?, Error?) -> Void) {
let mixComposition: AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
var mutableCompositionAudioOfVideoTrack: [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
let aAudioAsset: AVAsset = AVAsset(url: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)!)
mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)!)
mutableCompositionAudioOfVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)!)
let aAudioOfVideoTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaType.audio)[0]
let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaType.video)[0]
let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaType.audio)[0]
do {
try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioOfVideoTrack, at: CMTime.zero)
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
} catch {
}
//TODO: how to set audio track volume
// let audioMixInputParams = AVMutableAudioMixInputParameters()
// audioMixInputParams.trackID = aAudioAssetTrack.trackID
// audioMixInputParams.setVolume(0.0, at: CMTime.zero)
// aAudioAssetTrack.inputParameters.append(audioMixInputParams)
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mutableVideoComposition.renderSize = CGSize(width: 720, height: 1280)
//find your video on this URl
let savePathUrl: NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
do { // delete old video
try FileManager.default.removeItem(at: savePathUrl as URL)
} catch {
print(error.localizedDescription)
}
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = savePathUrl as URL
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously {
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
print("success")
completion(assetExport.outputURL, nil)
case AVAssetExportSessionStatus.failed:
print("failed \(String(describing: assetExport.error))")
completion(nil, assetExport.error)
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(String(describing: assetExport.error))")
completion(nil, assetExport.error)
default:
print("complete")
}
}
}
Here is the code I used to change volume of a track:
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []
let assetAudioFromVideo: AVAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.audio)[0]
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetAudioFromVideo)
videoParam.trackID = videoAudioTrack!.trackID
videoParam.setVolume(inputs.levels.videoVolume, at: CMTime.zero)
audioMixParam.append(videoParam)
audioMix.inputParameters = audioMixParam
//...
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = savePathUrl as URL
assetExport.shouldOptimizeForNetworkUse = true
assetExport.audioMix = audioMix
assetExport.videoComposition = videoComposition
assetExport.exportAsynchronously { //...

Composing Video and Audio - Video's audio is gone

My question: I am using the function below to compose a video and an audio file. I want to keep the video's original sound, but somehow it goes away, and I have no clue why.
I got this function from this answer
I tried to change volumes right after appending AVMutableCompositionTracks but it did not work
For instance;
mutableVideoCompositionTrack.preferredVolume = 1.0
mutableAudioCompositionTrack.preferredVolume = 0.05
But still, all you can hear is the audio file.
The function:
private func mergeAudioAndVideo(audioUrl: URL, videoUrl: URL, completion: @escaping (Bool) -> Void) {
let mixComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
let videoAsset = AVAsset(url: videoUrl)
let audioAsset = AVAsset(url: audioUrl)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack[0].preferredVolume = 0.05
mutableCompositionVideoTrack[0].preferredVolume = 1.0
let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do {
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: kCMTimeZero)
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration), of: audioAssetTrack, at: kCMTimeZero)
}catch{
print("ERROR#1")
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration)
let mutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)
//exporting
savePathUrl = try! FileManager.default.url(for: FileManager.SearchPathDirectory.documentDirectory, in: FileManager.SearchPathDomainMask.userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("merged").appendingPathExtension("mov")
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl
assetExport.shouldOptimizeForNetworkUse = true
do {
try FileManager.default.removeItem(at: savePathUrl)
}catch {
print(error)
}
assetExport.exportAsynchronously {
switch assetExport.status{
case .completed:
print("completed")
completion(true)
default:
print("failed \(assetExport.error!)")
completion(false)
}
}
}
You can adjust the volume for the video and the audio separately, @Faruk. Here is a little code for that:
//Extract audio from the video and the music
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []
let assetVideoTrack: AVAssetTrack = assetVideo.tracksWithMediaType(AVMediaTypeAudio)[0]
let assetMusicTrack: AVAssetTrack = assetMusic.tracksWithMediaType(AVMediaTypeAudio)[0]
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = compositionAudioVideo.trackID
let musicParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetMusicTrack)
musicParam.trackID = compositionAudioMusic.trackID
//Set final volume of the audio record and the music
videoParam.setVolume(volumeVideo, atTime: kCMTimeZero)
musicParam.setVolume(volumeAudio, atTime: kCMTimeZero)
//Add setting
audioMixParam.append(musicParam)
audioMixParam.append(videoParam)
//Add audio on final record
//First: the audio of the record and Second: the music
do {
try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideoTrack, atTime: kCMTimeZero)
} catch _ {
assertionFailure()
}
do {
try compositionAudioMusic.insertTimeRange(CMTimeRangeMake(CMTimeMake(Int64(startAudioTime * 10000), 10000), assetVideo.duration), ofTrack: assetMusicTrack, atTime: kCMTimeZero)
} catch _ {
assertionFailure()
}
//Add parameter
audioMix.inputParameters = audioMixParam
let completeMovie = "\(docsDir)/\(randomString(5)).mp4"
let completeMovieUrl = NSURL(fileURLWithPath: completeMovie)
let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
exporter.outputURL = completeMovieUrl
exporter.outputFileType = AVFileTypeMPEG4
exporter.audioMix = audioMix
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case AVAssetExportSessionStatus.Completed:
print("success with output url \(completeMovieUrl)")
case AVAssetExportSessionStatus.Failed:
print("failed \(String(exporter.error))")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(String(exporter.error))")
default:
print("complete")
}
})
}
Here's the combined code of all the answers. It took me a while to decode those answers so I decided to add it here for any future users:
Swift 5:
enum MixError: Error {
case TimeRangeFailure
case ExportFailure
}
var selectedVideoLevel: Float = 1.0
var selectedMusicLevel: Float = 1.0
func mix(videoUrl: URL, musicUrl: URL, completion: ((Result<URL, Error>) -> Void)?) {
let videoAsset = AVAsset(url: videoUrl)
let musicAsset = AVAsset(url: musicUrl)
let audioVideoComposition = AVMutableComposition()
let audioMix = AVMutableAudioMix()
var mixParameters = [AVMutableAudioMixInputParameters]()
let videoCompositionTrack = audioVideoComposition
.addMutableTrack(withMediaType: .video, preferredTrackID: .init())!
let audioCompositionTrack = audioVideoComposition
.addMutableTrack(withMediaType: .audio, preferredTrackID: .init())!
let musicCompositionTrack = audioVideoComposition
.addMutableTrack(withMediaType: .audio, preferredTrackID: .init())!
let videoAssetTrack = videoAsset.tracks(withMediaType: .video)[0]
let audioAssetTrack = videoAsset.tracks(withMediaType: .audio).first
let musicAssetTrack = musicAsset.tracks(withMediaType: .audio)[0]
let audioParameters = AVMutableAudioMixInputParameters(track: audioAssetTrack)
audioParameters.trackID = audioCompositionTrack.trackID
let musicParameters = AVMutableAudioMixInputParameters(track: musicAssetTrack)
musicParameters.trackID = musicCompositionTrack.trackID
audioParameters.setVolume(selectedVideoLevel, at: .zero)
musicParameters.setVolume(selectedMusicLevel, at: .zero)
mixParameters.append(audioParameters)
mixParameters.append(musicParameters)
audioMix.inputParameters = mixParameters
/// prevents video from unnecessary rotations
videoCompositionTrack.preferredTransform = videoAssetTrack.preferredTransform
do {
let timeRange = CMTimeRange(start: .zero, duration: videoAsset.duration)
try videoCompositionTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: .zero)
if let audioAssetTrack = audioAssetTrack {
try audioCompositionTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
}
try musicCompositionTrack.insertTimeRange(timeRange, of: musicAssetTrack, at: .zero)
} catch {
completion?(.failure(MixError.TimeRangeFailure))
return
}
let exportUrl = FileManager.default
.urls(for: .applicationSupportDirectory, in: .userDomainMask).first?
.appendingPathComponent("\(Date().timeIntervalSince1970)-video.mp4")
let exportSession = AVAssetExportSession(
asset: audioVideoComposition,
presetName: AVAssetExportPresetHighestQuality
)
exportSession?.audioMix = audioMix
exportSession?.outputFileType = .mp4
exportSession?.outputURL = exportUrl
exportSession?.exportAsynchronously(completionHandler: {
guard let status = exportSession?.status else { return }
switch status {
case .completed:
completion?(.success(exportUrl!))
case .failed:
completion?(.failure(MixError.ExportFailure))
default:
print(status)
}
})
}
I figured it out. It seems an AVAsset that loads a video holds the audio and the video as separate tracks, so you can reach them by writing:
videoAsset.tracks(withMediaType: AVMediaTypeAudio)[0] //audio of a video
videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0] //video of a video(without sound)
So I added these lines to the code and it worked!
var mutableCompositionBackTrack : [AVMutableCompositionTrack] = []
mutableCompositionBackTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
try mutableCompositionBackTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration), of: backAssetTrack, at: kCMTimeZero)
There is still one missing point that I do not know how to handle: setting the volumes of these audio assets. I will update this answer as soon as I figure it out.

Exporting mp4 through AVAssetExportSession fails

Let me start by saying that I spent a lot of time searching through documentation and posts here and elsewhere, but I can't figure out the solution to this problem.
I'm using AVAssetExportSession for exporting an .mp4 file stored in a AVAsset instance.
What I do is:
1. I check the isExportable property of the AVAsset.
2. I get the array of export presets compatible with the AVAsset instance.
3. I take AVAssetExportPreset1920x1080 or, if it is not present, I try to export the media with AVAssetExportPresetPassthrough (FYI, 100% of the time the preset I need is in the list, but I also tried the passthrough option and it doesn't work either).
The outputFileType is AVFileTypeMPEG4, and I also tried assigning the .mp4 extension to the file, but nothing makes it work.
I always receive this error:
Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped"
UserInfo={NSUnderlyingError=0x600000658c30 {Error
Domain=NSOSStatusErrorDomain Code=-12109 "(null)"},
NSLocalizedFailureReason=The operation is not supported for this
media., NSLocalizedDescription=Operation Stopped}
Below is the code I'm using
func _getDataFor(_ item: AVPlayerItem, completion: @escaping (Data?) -> ()) {
guard item.asset.isExportable else {
completion(nil)
return
}
let compatiblePresets = AVAssetExportSession.exportPresets(compatibleWith: item.asset)
var preset: String = AVAssetExportPresetPassthrough
if compatiblePresets.contains(AVAssetExportPreset1920x1080) { preset = AVAssetExportPreset1920x1080 }
guard
let exportSession = AVAssetExportSession(asset: item.asset, presetName: preset),
exportSession.supportedFileTypes.contains(AVFileTypeMPEG4) else {
completion(nil)
return
}
var tempFileUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("temp_video_data.mp4", isDirectory: false)
tempFileUrl = URL(fileURLWithPath: tempFileUrl.path)
exportSession.outputURL = tempFileUrl
exportSession.outputFileType = AVFileTypeMPEG4
let startTime = CMTimeMake(0, 1)
let timeRange = CMTimeRangeMake(startTime, item.duration)
exportSession.timeRange = timeRange
exportSession.exportAsynchronously {
print("\(exportSession.error)")
let data = try? Data(contentsOf: tempFileUrl)
_ = try? FileManager.default.removeItem(at: tempFileUrl)
completion(data)
}
}
It seems that converting the AVAsset instance into an AVMutableComposition did the trick. If anyone knows the reason why this works, please let me know; presumably rebuilding the media as a plain composition of one video and one audio track drops whatever track or metadata the exporter could not handle.
This is the new _getDataFor(_:completion:) method implementation
func _getDataFor(_ item: AVPlayerItem, completion: @escaping (Data?) -> ()) {
guard item.asset.isExportable else {
completion(nil)
return
}
let composition = AVMutableComposition()
let compositionVideoTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
let compositionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
let sourceVideoTrack = item.asset.tracks(withMediaType: AVMediaTypeVideo).first!
let sourceAudioTrack = item.asset.tracks(withMediaType: AVMediaTypeAudio).first!
do {
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, item.duration), of: sourceVideoTrack, at: kCMTimeZero)
try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, item.duration), of: sourceAudioTrack, at: kCMTimeZero)
} catch(_) {
completion(nil)
return
}
let compatiblePresets = AVAssetExportSession.exportPresets(compatibleWith: composition)
var preset: String = AVAssetExportPresetPassthrough
if compatiblePresets.contains(AVAssetExportPreset1920x1080) { preset = AVAssetExportPreset1920x1080 }
guard
let exportSession = AVAssetExportSession(asset: composition, presetName: preset),
exportSession.supportedFileTypes.contains(AVFileTypeMPEG4) else {
completion(nil)
return
}
var tempFileUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("temp_video_data.mp4", isDirectory: false)
tempFileUrl = URL(fileURLWithPath: tempFileUrl.path)
exportSession.outputURL = tempFileUrl
exportSession.outputFileType = AVFileTypeMPEG4
let startTime = CMTimeMake(0, 1)
let timeRange = CMTimeRangeMake(startTime, item.duration)
exportSession.timeRange = timeRange
exportSession.exportAsynchronously {
print("\(tempFileUrl)")
print("\(exportSession.error)")
let data = try? Data(contentsOf: tempFileUrl)
_ = try? FileManager.default.removeItem(at: tempFileUrl)
completion(data)
}
}
Swift 5:
import Foundation
import AVKit
func getDataFor(_ asset: AVAsset, completion: @escaping (Data?) -> ()) {
guard asset.isExportable,
let sourceVideoTrack = asset.tracks(withMediaType: .video).first,
let sourceAudioTrack = asset.tracks(withMediaType: .audio).first else {
completion(nil)
return
}
let composition = AVMutableComposition()
let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
do {
try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration), of: sourceVideoTrack, at: .zero)
try compositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration), of: sourceAudioTrack, at: .zero)
} catch {
completion(nil)
return
}
let compatiblePresets = AVAssetExportSession.exportPresets(compatibleWith: composition)
var preset = AVAssetExportPresetPassthrough
let preferredPreset = AVAssetExportPreset1920x1080
if compatiblePresets.contains(preferredPreset) {
preset = preferredPreset
}
let fileType: AVFileType = .mp4
guard let exportSession = AVAssetExportSession(asset: composition, presetName: preset),
exportSession.supportedFileTypes.contains(fileType) else {
completion(nil)
return
}
let tempFileUrl = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("temp_video_data.mp4")
exportSession.outputURL = tempFileUrl
exportSession.outputFileType = fileType
let startTime = CMTimeMake(value: 0, timescale: 1)
let timeRange = CMTimeRangeMake(start: startTime, duration: asset.duration)
exportSession.timeRange = timeRange
exportSession.exportAsynchronously {
print(tempFileUrl)
print(String(describing: exportSession.error))
let data = try? Data(contentsOf: tempFileUrl)
try? FileManager.default.removeItem(at: tempFileUrl)
completion(data)
}
}
Check that you have set the resourceLoader delegate property of your AVURLAsset correctly:
[self.playerAsset.resourceLoader setDelegate:self queue:dispatch_get_main_queue()];
And conform to AVAssetResourceLoaderDelegate protocol.
That is all you need to do.
I had this same issue because I was adding an audio track to a video without audio. Removing the audio track fixed it.
I ran into this problem because the Microphone permission was off/denied. Once I set turned it on this error went away.
I solved this problem by removing from the AVMutableComposition any composition track with media type .audio whose segments were empty:
if let audioTrack = exportComposition.tracks(withMediaType: .audio).first,
audioTrack.segments.isEmpty {
exportComposition.removeTrack(audioTrack)
}
I had the same error: when I tried to export with AVAssetExportPresetPassthrough, the session always failed, and in my case I couldn't use another preset because I had to keep the same resolution as the original video. I fixed it like this:
let asset = AVAsset(url: originUrl)
let videoTrack = asset.tracks( withMediaType: .video ).first! as AVAssetTrack
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(
width: videoTrack.naturalSize.width,
height: videoTrack.naturalSize.height
)
videoComposition.frameDuration = CMTime(
value: 1,
timescale: videoTrack.naturalTimeScale
)
let transformer = AVMutableVideoCompositionLayerInstruction(
assetTrack: videoTrack
)
transformer.setOpacity(1.0, at: CMTime.zero)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = timeRange
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
guard let exportSession = AVAssetExportSession(
asset: asset,
presetName: AVAssetExportPresetMediumQuality
) else {
return handleFailure(error: .mediaSavingError, completion: completion)
}
exportSession.videoComposition = videoComposition
exportSession.outputURL = outputUrl
exportSession.outputFileType = .mp4
exportSession.timeRange = timeRange
exportSession.exportAsynchronously { [weak self] in
"your code"
}
Works great for me, and it saves the video at the same resolution as before.

How can I trim a video to start and end at particular points?

How can I trim a video so that it starts and ends at particular points? I also want to use a slider to set the trimming start and end points.
func trimVideo(sourceURL: NSURL, destinationURL: NSURL, trimPoints: TrimPoints, completion: TrimCompletion?) {
assert(sourceURL.fileURL)
assert(destinationURL.fileURL)
let options = [ AVURLAssetPreferPreciseDurationAndTimingKey: true ]
let asset = AVURLAsset(URL: sourceURL, options: options)
let preferredPreset = AVAssetExportPresetPassthrough
if verifyPresetForAsset(preferredPreset, asset: asset) {
let composition = AVMutableComposition()
let videoCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let audioCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
let assetVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo).first as! AVAssetTrack
let assetAudioTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first as! AVAssetTrack
var compError: NSError?
var accumulatedTime = kCMTimeZero
for (startTimeForCurrentSlice, endTimeForCurrentSlice) in trimPoints {
let durationOfCurrentSlice = CMTimeSubtract(endTimeForCurrentSlice, startTimeForCurrentSlice)
let timeRangeForCurrentSlice = CMTimeRangeMake(startTimeForCurrentSlice, durationOfCurrentSlice)
videoCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetVideoTrack, atTime: accumulatedTime, error: &compError)
audioCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetAudioTrack, atTime: accumulatedTime, error: &compError)
if compError != nil {
NSLog("error during composition: \(compError)")
if let completion = completion {
completion(compError)
}
}
accumulatedTime = CMTimeAdd(accumulatedTime, durationOfCurrentSlice)
}
let exportSession = AVAssetExportSession(asset: composition, presetName: preferredPreset)
exportSession.outputURL = destinationURL
exportSession.outputFileType = AVFileTypeAppleM4V
exportSession.shouldOptimizeForNetworkUse = true
removeFileAtURLIfExists(destinationURL)
exportSession.exportAsynchronouslyWithCompletionHandler({ () -> Void in
if let completion = completion {
completion(exportSession.error)
}
})
} else {
NSLog("Could not find a suitable export preset for the input video")
let error = NSError(domain: "org.linuxguy.VideoLab", code: -1, userInfo: nil)
if let completion = completion {
completion(error)
}
}
}