Rotate video 90 degrees in Swift 4

I've been trying to search for this all day, but all answers point to older versions of Swift or Obj-C.
I tried layer instructions, but AVMutableComposition has no `instructions` member. I remember this being really easy with just an affine transform, but I can no longer find where I saw it.
var mainVideoURL: URL!
let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let tempPath = paths[0] + "/mainVideo.mp4"

if FileManager.default.fileExists(atPath: tempPath) {
    guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
        print("remove path failed")
        self.enableButtons(enabled: true)
        return
    }
}

mainVideoURL = URL(fileURLWithPath: tempPath)
let firstAsset = AVURLAsset(url: fileURL)
let mixComposition = AVMutableComposition()

// Repeat the video a number of times
let videoRepeat = photoVideoRepeats
for i in 0 ..< videoRepeat {
    do {
        try mixComposition.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
                                           of: firstAsset,
                                           at: CMTimeMultiply(firstAsset.duration, Int32(i)))
    } catch {
        print("Failed to load first track")
    }
}

guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }

After my video was recorded, I was able to apply a CGAffineTransform to an AVMutableCompositionTrack.
In my case I needed to merge an audio track with the video, but you can see where the transforms take place:
func mergeVideoAndAudio(videoUrl: URL, audioUrl: URL) -> AVAsset {
    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    // Start merge
    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: .video,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: .audio,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: .audio,
                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaType.video)[0]
    let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: AVMediaType.audio).first
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaType.audio)[0]

    // By default, keep the source track's transformation
    compositionAddVideo?.preferredTransform = aVideoAssetTrack.preferredTransform

    var transforms = aVideoAssetTrack.preferredTransform
    // Note: the translation offsets below assume a 1280x720 recording;
    // adjust them to your track's natural size.
    if UIDevice.current.orientation == UIDeviceOrientation.landscapeLeft {
        transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(-90.0 * .pi / 180)))
        transforms = transforms.concatenating(CGAffineTransform(translationX: 1280, y: 0))
    } else if UIDevice.current.orientation == UIDeviceOrientation.landscapeRight {
        transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0 * .pi / 180)))
        transforms = transforms.concatenating(CGAffineTransform(translationX: 1280, y: 0))
    } else if UIDevice.current.orientation == UIDeviceOrientation.portraitUpsideDown {
        transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(180.0 * .pi / 180)))
        transforms = transforms.concatenating(CGAffineTransform(translationX: 0, y: 720))
    }
    compositionAddVideo?.preferredTransform = transforms

    mutableCompositionVideoTrack.append(compositionAddVideo!)
    mutableCompositionAudioTrack.append(compositionAddAudio!)
    mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo!)

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                            duration: aVideoAssetTrack.timeRange.duration),
                                                            of: aVideoAssetTrack,
                                                            at: CMTime.zero)
        // In my case the audio file is longer than the video file, so I took
        // the videoAsset duration instead of the audioAsset duration
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                            duration: aVideoAssetTrack.timeRange.duration),
                                                            of: aAudioAssetTrack,
                                                            at: CMTime.zero)
        // Add the video's own audio (if it exists) to the final composition
        if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
            try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                                       duration: aVideoAssetTrack.timeRange.duration),
                                                                       of: aAudioOfVideoAssetTrack,
                                                                       at: CMTime.zero)
        }
    } catch {
        print(error.localizedDescription)
    }

    return mixComposition
}
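
For reference, a minimal export sketch for the composition returned above; the asset URL names and the output path are placeholders, not part of the original answer. Since the rotation lives in the track's preferredTransform, no AVVideoComposition should be needed for players and exports that honor the transform.

// Usage sketch; recordedVideoURL / backgroundAudioURL are hypothetical names.
let merged = mergeVideoAndAudio(videoUrl: recordedVideoURL, audioUrl: backgroundAudioURL)
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merged.mp4")
guard let exporter = AVAssetExportSession(asset: merged, presetName: AVAssetExportPresetHighestQuality) else { return }
exporter.outputURL = outputURL
exporter.outputFileType = .mp4
exporter.exportAsynchronously {
    if exporter.status == .completed {
        print("exported to \(outputURL)")
    } else if let error = exporter.error {
        print("export failed: \(error)")
    }
}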

Related

AVURLAsset reverting to original orientation even though the video file is mirrored

I am trying to merge videos that are recorded to local files. I want to support both the front and back cameras, but saw that recording with the front camera causes the video to flip. So when recording I used...
func startRecording() {
    if isCameraButtonDisabled == false {
        // MARK: Temporary URL for recording Video
        let tempURL = NSTemporaryDirectory() + "\(Date()).mov"
        if self.videoDeviceInput.device.position == .back {
            output.startRecording(to: URL(fileURLWithPath: tempURL), recordingDelegate: self)
        } else {
            output.connection(with: AVMediaType.video)?.isVideoMirrored = true
            output.startRecording(to: URL(fileURLWithPath: tempURL), recordingDelegate: self)
        }
        withAnimation(.spring(response: 0.5, dampingFraction: 0.5, blendDuration: 1)) {
            isRecording = true
        }
    }
}
However, when I return just the outputFileURL, the front-facing video is mirrored as I intended. But after I convert each video into an AVURLAsset in order to combine the videos, the orientation reverts to the original (non-mirrored) version. I am very confused as to why this happens and how I can keep the mirrored version when joining videos using AVURLAsset. Here are the function that returns the video file and the one that merges them.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if let error = error {
        print(error.localizedDescription)
        return
    }
    withAnimation {
        self.recordedURLs.append(outputFileURL)
    }
    if self.recordedURLs.count == 1 {
        self.previewURL = outputFileURL
        return
    }
    let assets = recordedURLs.compactMap { url -> AVURLAsset in
        return AVURLAsset(url: url)
    }
    for asset in assets {
        print(asset.preferredTransform)
    }
    self.previewURL = nil
    mergeVideos(assets: assets) { exporter in
        exporter.exportAsynchronously {
            if exporter.status == .failed {
                // HANDLE ERROR
                print(exporter.error!)
            } else {
                if let finalURL = exporter.outputURL {
                    print(finalURL)
                    DispatchQueue.main.async {
                        self.previewURL = finalURL
                    }
                }
            }
        }
    }
}
func mergeVideos(assets: [AVURLAsset], completion: @escaping (_ exporter: AVAssetExportSession) -> ()) {
    let composition = AVMutableComposition()
    var lastTime: CMTime = .zero

    guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
    guard let audioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }

    for asset in assets {
        // Linking Audio and Video
        do {
            try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: asset.tracks(withMediaType: .video)[0], at: lastTime)
            // Safe check whether the video has audio
            if !asset.tracks(withMediaType: .audio).isEmpty {
                try audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: asset.tracks(withMediaType: .audio)[0], at: lastTime)
            }
        } catch {
            print(error.localizedDescription)
        }
        lastTime = CMTimeAdd(lastTime, asset.duration)
    }

    // MARK: Temp Output URL
    let tempURL = URL(fileURLWithPath: NSTemporaryDirectory() + "\(Date()).mp4")

    let layerInstructions = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    var transform = CGAffineTransform.identity
    transform = transform.rotated(by: 90 * (.pi / 180))
    transform = transform.translatedBy(x: 0, y: -videoTrack.naturalSize.height)
    layerInstructions.setTransform(transform, at: .zero)

    let instructions = AVMutableVideoCompositionInstruction()
    instructions.timeRange = CMTimeRange(start: .zero, duration: lastTime)
    instructions.layerInstructions = [layerInstructions]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: videoTrack.naturalSize.height, height: videoTrack.naturalSize.width)
    videoComposition.instructions = [instructions]
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)

    guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputFileType = .mp4
    exporter.outputURL = tempURL
    exporter.videoComposition = videoComposition
    completion(exporter)
}
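
A likely culprit, for anyone hitting the same symptom: mergeVideos applies one fixed rotation to the entire composition track and never consults each asset's preferredTransform, which is where AVCaptureMovieFileOutput typically records the front-camera mirroring. A hedged sketch of a per-clip alternative, reusing the videoTrack and assets variables from mergeVideos in place of the fixed-transform block:

// Sketch (assumption): stamp each clip's own preferredTransform onto the layer
// instruction at the clip's start time, instead of one rotation for all clips.
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
var cursor = CMTime.zero
for asset in assets {
    if let sourceTrack = asset.tracks(withMediaType: .video).first {
        // preferredTransform carries the rotation and the front-camera mirror
        layerInstruction.setTransform(sourceTrack.preferredTransform, at: cursor)
    }
    cursor = CMTimeAdd(cursor, asset.duration)
}

The renderSize would also need to follow each clip's oriented dimensions; this sketch only restores the transform metadata.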

Play video synthesized by AVMutableComposition in AVPlayer with error

I am creating an iOS video editing tool with AVMutableComposition.
If I combine multiple videos into a single video mutable track, it works fine. However, when I create a mutable track per video, synthesize the video, and play it in AVPlayer, only the sound plays and the picture freezes when playback moves on to the next video. If I seek, though, it plays normally. What's the cause?
The following is the code where the current situation occurs.
static func mergeClips(
    videos: [ExCAVideoLayer]?,
    audios: [ExCAAudioLayer]? = nil
) -> AVPlayerItem? {
    guard let videos = videos else {
        return nil
    }

    let mixComposition = AVMutableComposition()
    var layerInstructions = [AVMutableVideoCompositionLayerInstruction]()

    do {
        // Determine the video aspect ratio
        var aspectRatio: CGFloat = 0.00001
        for video in videos {
            guard let url = video.videoUrl else {
                continue
            }
            if let naturalSize = self.resolutionForLocalVideo(url: url) {
                let naturalAspectRatio = naturalSize.height == 0 ? 1 : naturalSize.width / naturalSize.height
                if aspectRatio < naturalAspectRatio {
                    aspectRatio = naturalAspectRatio
                }
            }
        }

        // The render size must have even dimensions
        var frameWidth = UIScreen.main.bounds.width
        var frameHeight = round(UIScreen.main.bounds.width / aspectRatio)
        if frameWidth.truncatingRemainder(dividingBy: 2) > 0 {
            frameWidth = frameWidth - 1
        }
        if frameHeight.truncatingRemainder(dividingBy: 2) > 0 {
            frameHeight = frameHeight - 1
        }
        let frameSize = CGSize(width: frameWidth, height: frameHeight)

        // Video composition
        var currentDuration = mixComposition.duration
        for video in videos {
            guard let url = video.videoUrl else {
                continue
            }
            let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
            let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            let avAsset = AVAsset(url: url)
            if let layerInstruction = self.videoCompositionInstruction(videoTrack, asset: avAsset, frameSize: frameSize) {
                layerInstructions.append(layerInstruction)
            }
            let streamRange = CMTimeRangeMake(start: CMTime.zero, duration: CMTimeAdd(avAsset.duration, CMTimeMakeWithSeconds(video.gap, preferredTimescale: 1)))
            if let streamVideoTrack = avAsset.tracks(withMediaType: .video).first {
                try videoTrack?.insertTimeRange(streamRange, of: streamVideoTrack, at: currentDuration)
                videoTrack?.preferredTransform = streamVideoTrack.preferredTransform
            }
            if let streamAudioTrack = avAsset.tracks(withMediaType: .audio).first {
                try audioTrack?.insertTimeRange(streamRange, of: streamAudioTrack, at: currentDuration)
            }
            currentDuration = mixComposition.duration
        }

        // Audio composition
        if let audios = audios {
            for audio in audios {
                guard let url = audio.audioUrl else {
                    continue
                }
                let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
                let avAsset = AVAsset(url: url)
                let streamRange = CMTimeRangeMake(start: CMTime.zero, duration: avAsset.duration)
                if let streamAudioTrack = avAsset.tracks(withMediaType: .audio).first {
                    try audioTrack?.insertTimeRange(streamRange, of: streamAudioTrack, at: CMTimeMakeWithSeconds(audio.startTime, preferredTimescale: 1))
                }
            }
        }

        // AVVideoComposition and layout
        let videoComposition = AVMutableVideoComposition()
        videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
        videoComposition.renderSize = CGSize(width: frameSize.width, height: frameSize.height)

        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRange(start: .zero, duration: mixComposition.duration)
        mainInstruction.backgroundColor = UIColor.black.cgColor
        mainInstruction.layerInstructions = layerInstructions
        videoComposition.instructions = [mainInstruction]

        let assetKeys = [
            "playable",
            "hasProtectedContent"
        ]
        let avPlayItem = AVPlayerItem(asset: mixComposition, automaticallyLoadedAssetKeys: assetKeys)
        avPlayItem.videoComposition = videoComposition
        return avPlayItem
    } catch {
        print("Could not create mixComposition \(error.localizedDescription)")
        return nil
    }
}
func PlayVideo() {
    guard let avPlayerItem = VideoComposition.mergeClips(videos: self.projectRender?.videoItems) else {
        return
    }
    self.avPlayer = AVPlayer(playerItem: avPlayerItem)
    self.avPlayerLayer = AVPlayerLayer(player: self.avPlayer)
    self.avPlayerLayer?.needsDisplayOnBoundsChange = true
    if let avPlayerLayer = self.avPlayerLayer {
        self.videoContainer?.layer.addSublayer(avPlayerLayer)
    }
    self.avPlayer?.play()
}
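
Since the question itself notes that a single mutable track plays fine, here is a sketch of that variant for the clip loop. This is an assumption about the fix, not code tested against this project; it would sit inside the same do/catch and replace the per-video track creation, with videoCompositionInstruction then stamping per-clip transforms onto one shared layer instruction (per-clip gap handling is omitted for brevity):

// Sketch: one shared video track and one shared audio track for all clips.
let videoTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                preferredTrackID: kCMPersistentTrackID_Invalid)
let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio,
                                                preferredTrackID: kCMPersistentTrackID_Invalid)
var cursor = CMTime.zero
for video in videos {
    guard let url = video.videoUrl else { continue }
    let avAsset = AVAsset(url: url)
    let range = CMTimeRange(start: .zero, duration: avAsset.duration)
    if let streamVideoTrack = avAsset.tracks(withMediaType: .video).first {
        try videoTrack?.insertTimeRange(range, of: streamVideoTrack, at: cursor)
    }
    if let streamAudioTrack = avAsset.tracks(withMediaType: .audio).first {
        try audioTrack?.insertTimeRange(range, of: streamAudioTrack, at: cursor)
    }
    cursor = CMTimeAdd(cursor, avAsset.duration)
}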

Change volume of audio track within AVMutableComposition

I'm trying to merge a pre-existing video with a newly recorded audio voiceover track. The user can set the relative sound volume for the two audio tracks (the audio belonging to the video, and the new audio).
This code works in terms of merging everything into one new video file; however, I can't figure out how to adjust the track volume. I tried some code (commented out below) but don't understand how to use AVMutableAudioMixInputParameters with the code I already have.
static func mergeFilesWithUrl(videoUrl: URL, videoVolume: Float, audioUrl: URL, audioVolume: Float, completion: @escaping (URL?, Error?) -> Void) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioOfVideoTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    let aVideoAsset: AVAsset = AVAsset(url: videoUrl)
    let aAudioAsset: AVAsset = AVAsset(url: audioUrl)

    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)!)
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)!)
    mutableCompositionAudioOfVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)!)

    let aAudioOfVideoTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaType.audio)[0]
    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaType.video)[0]
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaType.audio)[0]

    do {
        try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioOfVideoTrack, at: CMTime.zero)
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
    } catch {
    }

    //TODO: how to set audio track volume
    // let audioMixInputParams = AVMutableAudioMixInputParameters()
    // audioMixInputParams.trackID = aAudioAssetTrack.trackID
    // audioMixInputParams.setVolume(0.0, at: CMTime.zero)
    // aAudioAssetTrack.inputParameters.append(audioMixInputParams)

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)

    let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mutableVideoComposition.renderSize = CGSize(width: 720, height: 1280)

    // Find your video at this URL
    let savePathUrl: NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
    do { // delete the old video
        try FileManager.default.removeItem(at: savePathUrl as URL)
    } catch {
        print(error.localizedDescription)
    }

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileType.mp4
    assetExport.outputURL = savePathUrl as URL
    assetExport.shouldOptimizeForNetworkUse = true
    assetExport.exportAsynchronously {
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            print("success")
            completion(assetExport.outputURL, nil)
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
            completion(nil, assetExport.error)
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
            completion(nil, assetExport.error)
        default:
            print("complete")
        }
    }
}
Here is the code I used to change the volume of a track:
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []

let assetAudioFromVideo: AVAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.audio)[0]

let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetAudioFromVideo)
videoParam.trackID = videoAudioTrack!.trackID
videoParam.setVolume(inputs.levels.videoVolume, at: CMTime.zero)
audioMixParam.append(videoParam)

audioMix.inputParameters = audioMixParam

//...

let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = savePathUrl as URL
assetExport.shouldOptimizeForNetworkUse = true
assetExport.audioMix = audioMix
assetExport.videoComposition = videoComposition
assetExport.exportAsynchronously { //...
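
Mapped onto the question's own variables, the missing piece could look like the sketch below. The key detail is that the input parameters should target the composition tracks built in mergeFilesWithUrl, not the source asset tracks (the commented-out attempt used aAudioAssetTrack.trackID, which does not match any track in the composition):

// Sketch: build the mix against the composition tracks created above.
let audioMix = AVMutableAudioMix()

let videoSoundParams = AVMutableAudioMixInputParameters(track: mutableCompositionAudioOfVideoTrack[0])
videoSoundParams.setVolume(videoVolume, at: CMTime.zero)   // the video's own audio

let voiceoverParams = AVMutableAudioMixInputParameters(track: mutableCompositionAudioTrack[0])
voiceoverParams.setVolume(audioVolume, at: CMTime.zero)    // the recorded voiceover

audioMix.inputParameters = [videoSoundParams, voiceoverParams]
assetExport.audioMix = audioMix   // assign before calling exportAsynchronously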

Merge Videos Array in Swift

I am trying to merge the videos from an array of AVAssets, but I only get the first and last videos; the videos in between show a black area.
Check the code I am using.
func mergeVideoArray() {
    let mixComposition = AVMutableComposition()
    for videoAsset in videoURLArray {
        let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                        preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try videoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                            of: videoAsset.tracks(withMediaType: AVMediaType.video).first!,
                                            at: totalTime)
            videoSize = (videoTrack?.naturalSize)!
        } catch let error as NSError {
            print("error: \(error)")
        }
        let trackArray = videoAsset.tracks(withMediaType: .audio)
        if trackArray.count > 0 {
            let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                            preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            do {
                try audioTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                                of: videoAsset.tracks(withMediaType: AVMediaType.audio).first!,
                                                at: audioTime)
                audioTime = audioTime + videoAsset.duration
            } catch {
            }
        }
        totalTime = totalTime + videoAsset.duration
        let videoInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack!)
        if videoAsset != videoURLArray.last {
            videoInstruction.setOpacity(0.0, at: videoAsset.duration)
        }
        layerInstructionsArray.append(videoInstruction)
    }

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime)
    mainInstruction.layerInstructions = layerInstructionsArray

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: videoSize.width, height: videoSize.height)

    let url = "merge_video".outputURL // "outputURL" is a custom String helper from the asker's project
    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter!.outputURL = url
    exporter!.outputFileType = AVFileType.mov
    exporter!.shouldOptimizeForNetworkUse = false
    exporter!.videoComposition = mainComposition
    exporter!.exportAsynchronously {
        let video = AVAsset(url: url)
        let playerItem = AVPlayerItem(asset: video)
        let player = AVPlayer(playerItem: playerItem)
        self.playerViewController.player = player
        self.present(self.playerViewController, animated: true) {
            self.playerViewController.player!.play()
        }
    }
}
Please help me resolve this issue. Thanks in advance.
Note: I am able to create a video from the array, but only the first and last items show as video; for the rest of the values only a blank screen shows.
I just solved my own question: only one line in the code needs updating. Please have a look at the code.
if videoAsset != videoURLArray.last {
    videoInstruction.setOpacity(0.0, at: totalTime)
}
Note: the at: time just needs to be the start position of the next video, i.e. the running total, for every value of the array.
The right way to do it is to change
if videoAsset != videoURLArray.last {
    videoInstruction.setOpacity(0.0, at: videoAsset.duration)
}
to:
videoInstruction.setOpacity(0.0, at: totalTime)
I want to emphasize that adding
totalTime = totalTime + videoAsset.duration
before setting the opacity on the layer instruction makes all the difference. Otherwise the videos are a black screen.
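
Putting both remarks together, a sketch of the corrected loop body using the question's variables:

for videoAsset in videoURLArray {
    // ... insert the video/audio time ranges as before ...
    totalTime = totalTime + videoAsset.duration   // advance the running total first
    let videoInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack!)
    if videoAsset != videoURLArray.last {
        // hide this clip exactly where the next clip starts
        videoInstruction.setOpacity(0.0, at: totalTime)
    }
    layerInstructionsArray.append(videoInstruction)
}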

Composing Video and Audio - Video's audio is gone

My question is: I am using the function below to compose a video and an audio file. I want to keep the video's original sound, but it goes away somehow, and I do not have any clue why.
I got this function from this answer.
I tried to change the volumes right after appending the AVMutableCompositionTracks, but it did not work.
For instance:
mutableVideoCompositionTrack.preferredVolume = 1.0
mutableAudioCompositionTrack.preferredVolume = 0.05
But still, all you can hear is the audio file.
The function:
private func mergeAudioAndVideo(audioUrl: URL, videoUrl: URL, completion: @escaping (Bool) -> Void) {
    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack: [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    let videoAsset = AVAsset(url: videoUrl)
    let audioAsset = AVAsset(url: audioUrl)

    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack[0].preferredVolume = 0.05
    mutableCompositionVideoTrack[0].preferredVolume = 1.0

    let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: kCMTimeZero)
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration), of: audioAssetTrack, at: kCMTimeZero)
    } catch {
        print("ERROR#1")
    }

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration)

    let mutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
    mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)

    // Exporting
    savePathUrl = try! FileManager.default.url(for: FileManager.SearchPathDirectory.documentDirectory, in: FileManager.SearchPathDomainMask.userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("merged").appendingPathExtension("mov")

    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    do {
        try FileManager.default.removeItem(at: savePathUrl)
    } catch {
        print(error)
    }

    assetExport.exportAsynchronously {
        switch assetExport.status {
        case .completed:
            print("completed")
            completion(true)
        default:
            print("failed \(assetExport.error!)")
            completion(false)
        }
    }
}
You can adjust the volume for the video and audio separately, @Faruk. Here is a little bit of code for that:
// Extract audio from the video and the music
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []

let assetVideoTrack: AVAssetTrack = assetVideo.tracksWithMediaType(AVMediaTypeAudio)[0]
let assetMusicTrack: AVAssetTrack = assetMusic.tracksWithMediaType(AVMediaTypeAudio)[0]

let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = compositionAudioVideo.trackID
let musicParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetMusicTrack)
musicParam.trackID = compositionAudioMusic.trackID

// Set the final volume of the audio recording and the music
videoParam.setVolume(volumeVideo, atTime: kCMTimeZero)
musicParam.setVolume(volumeAudio, atTime: kCMTimeZero)

// Add the settings
audioMixParam.append(musicParam)
audioMixParam.append(videoParam)

// Add audio to the final recording
// First: the audio of the recording, and second: the music
do {
    try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideoTrack, atTime: kCMTimeZero)
} catch _ {
    assertionFailure()
}
do {
    try compositionAudioMusic.insertTimeRange(CMTimeRangeMake(CMTimeMake(Int64(startAudioTime * 10000), 10000), assetVideo.duration), ofTrack: assetMusicTrack, atTime: kCMTimeZero)
} catch _ {
    assertionFailure()
}

// Add the parameters
audioMix.inputParameters = audioMixParam

let completeMovie = "\(docsDir)/\(randomString(5)).mp4"
let completeMovieUrl = NSURL(fileURLWithPath: completeMovie)
let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
exporter.outputURL = completeMovieUrl
exporter.outputFileType = AVFileTypeMPEG4
exporter.audioMix = audioMix
exporter.exportAsynchronouslyWithCompletionHandler({
    switch exporter.status {
    case AVAssetExportSessionStatus.Completed:
        print("success with output url \(completeMovieUrl)")
    case AVAssetExportSessionStatus.Failed:
        print("failed \(String(exporter.error))")
    case AVAssetExportSessionStatus.Cancelled:
        print("cancelled \(String(exporter.error))")
    default:
        print("complete")
    }
})
Here's the combined code from all the answers. It took me a while to decode them, so I decided to add it here for any future readers:
Swift 5:
enum MixError: Error {
    case TimeRangeFailure
    case ExportFailure
}

var selectedVideoLevel: Float = 1.0
var selectedMusicLevel: Float = 1.0

func mix(videoUrl: URL, musicUrl: URL, completion: ((Result<URL, Error>) -> Void)?) {
    let videoAsset = AVAsset(url: videoUrl)
    let musicAsset = AVAsset(url: musicUrl)

    let audioVideoComposition = AVMutableComposition()
    let audioMix = AVMutableAudioMix()
    var mixParameters = [AVMutableAudioMixInputParameters]()

    let videoCompositionTrack = audioVideoComposition
        .addMutableTrack(withMediaType: .video, preferredTrackID: .init())!
    let audioCompositionTrack = audioVideoComposition
        .addMutableTrack(withMediaType: .audio, preferredTrackID: .init())!
    let musicCompositionTrack = audioVideoComposition
        .addMutableTrack(withMediaType: .audio, preferredTrackID: .init())!

    let videoAssetTrack = videoAsset.tracks(withMediaType: .video)[0]
    let audioAssetTrack = videoAsset.tracks(withMediaType: .audio).first
    let musicAssetTrack = musicAsset.tracks(withMediaType: .audio)[0]

    let audioParameters = AVMutableAudioMixInputParameters(track: audioAssetTrack)
    audioParameters.trackID = audioCompositionTrack.trackID
    let musicParameters = AVMutableAudioMixInputParameters(track: musicAssetTrack)
    musicParameters.trackID = musicCompositionTrack.trackID

    audioParameters.setVolume(selectedVideoLevel, at: .zero)
    musicParameters.setVolume(selectedMusicLevel, at: .zero)
    mixParameters.append(audioParameters)
    mixParameters.append(musicParameters)
    audioMix.inputParameters = mixParameters

    /// prevents the video from unnecessary rotations
    videoCompositionTrack.preferredTransform = videoAssetTrack.preferredTransform

    do {
        let timeRange = CMTimeRange(start: .zero, duration: videoAsset.duration)
        try videoCompositionTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: .zero)
        if let audioAssetTrack = audioAssetTrack {
            try audioCompositionTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
        }
        try musicCompositionTrack.insertTimeRange(timeRange, of: musicAssetTrack, at: .zero)
    } catch {
        completion?(.failure(MixError.TimeRangeFailure))
        return
    }

    let exportUrl = FileManager.default
        .urls(for: .applicationSupportDirectory, in: .userDomainMask).first?
        .appendingPathComponent("\(Date().timeIntervalSince1970)-video.mp4")

    let exportSession = AVAssetExportSession(
        asset: audioVideoComposition,
        presetName: AVAssetExportPresetHighestQuality
    )
    exportSession?.audioMix = audioMix
    exportSession?.outputFileType = .mp4 // must match the .mp4 file extension above
    exportSession?.outputURL = exportUrl
    exportSession?.exportAsynchronously(completionHandler: {
        guard let status = exportSession?.status else { return }
        switch status {
        case .completed:
            completion?(.success(exportUrl!))
        case .failed:
            completion?(.failure(MixError.ExportFailure))
        default:
            print(status)
        }
    })
}
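
A minimal call site for mix(videoUrl:musicUrl:completion:) might look like this; videoURL and musicURL are placeholders for your own file URLs:

// Hypothetical usage; videoURL and musicURL stand in for your own file URLs.
mix(videoUrl: videoURL, musicUrl: musicURL) { result in
    switch result {
    case .success(let url):
        print("mixed file written to \(url)")
    case .failure(let error):
        print("mix failed: \(error)")
    }
}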
I figured it out. It seems an AVAsset that loads a video holds the audio and the video separately. So you can reach them by writing:
videoAsset.tracks(withMediaType: AVMediaTypeAudio)[0] // audio of a video
videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0] // video of a video (without sound)
So I added these lines to the code and it worked!
var mutableCompositionBackTrack: [AVMutableCompositionTrack] = []
mutableCompositionBackTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
try mutableCompositionBackTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration), of: backAssetTrack, at: kCMTimeZero)
There is still a missing point that I do not know how to handle: setting the volumes of these audio assets. I will update this answer as soon as I figure it out.
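
For that missing volume step, a sketch that follows the AVMutableAudioMix approach from the earlier answer, pointed at this answer's composition tracks (an assumption, not the author's final code):

// Sketch (assumption): an AVMutableAudioMix keyed to the composition tracks,
// so the separate audio file plays quietly under the video's own sound.
let audioMix = AVMutableAudioMix()

let videoSoundParams = AVMutableAudioMixInputParameters(track: mutableCompositionBackTrack[0])
videoSoundParams.setVolume(1.0, at: kCMTimeZero)    // the video's own sound

let musicParams = AVMutableAudioMixInputParameters(track: mutableCompositionAudioTrack[0])
musicParams.setVolume(0.05, at: kCMTimeZero)        // the separate audio file

audioMix.inputParameters = [videoSoundParams, musicParams]
assetExport.audioMix = audioMix   // assign before calling exportAsynchronously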