AVAssetWriter recorded audio no sound - swift

My app records the audio from the chat and saves it to a file. I recorded some music on the app screen, but when I play back the audio.m4a file there is no sound. The file shows as "Apple MPEG-4 audio" and is only 12 KB. Did I configure the settings wrong? Thanks in advance.
Edit: I added the stop-recording function.
var assetWriter: AVAssetWriter?
var input: AVAssetWriterInput?
var channelLayout = AudioChannelLayout()

func record() {
    guard let docURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
        return
    }
    let inputURL = docURL.appendingPathComponent("audio.m4a")
    do {
        try assetWriter = AVAssetWriter(outputURL: inputURL, fileType: .m4a)
    } catch {
        print("error: \(error)")
        assetWriter = nil
        return
    }
    guard let assetWriter = assetWriter else {
        return
    }
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
    let audioSettings: [String: Any] = [
        AVNumberOfChannelsKey: 6,
        AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
        AVSampleRateKey: 44100,
        AVEncoderBitRateKey: 128000,
        AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
    ]
    input = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
    guard let audioInput = input else {
        print("Failed to find input.")
        return
    }
    audioInput.expectsMediaDataInRealTime = true
    if assetWriter.canAdd(audioInput) {
        assetWriter.add(audioInput)
    }
    RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
        guard error == nil else {
            print("Failed to capture with error: \(String(describing: error))")
            return
        }
        if bufferType == .audioApp {
            if assetWriter.status == .unknown {
                if assetWriter.startWriting() {
                    assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
                }
            }
            if assetWriter.status == .writing {
                if audioInput.isReadyForMoreMediaData {
                    if !audioInput.append(sample) {
                    }
                }
            }
        }
    })
}

func stopRecord() {
    RPScreenRecorder.shared().stopCapture { error in
        self.input?.markAsFinished()
        if error == nil {
            self.assetWriter?.finishWriting {
                print("finish writing")
            }
        } else {
            print(error as Any)
        }
    }
}

In light of your comments, you definitely don't need six-channel audio. Try these simpler mono audio settings:
let audioSettings: [String: Any] = [
    AVNumberOfChannelsKey: 1,
    AVFormatIDKey: kAudioFormatMPEG4AAC,
    AVSampleRateKey: 44100,
    AVEncoderBitRateKey: 128000,
]
You don't say whether this is on iOS or macOS. You have a problem on macOS, because as of 11.2.1 no .audioApp buffers are captured. If you still want the microphone, you can enable that:
let recorder = RPScreenRecorder.shared()
recorder.isMicrophoneEnabled = true

recorder.startCapture(handler: { (sample, bufferType, error) in
    if bufferType == .audioMic {
        // etc
    }
})
Don't bother checking the writer status; just append buffers when you can:
if audioInput.isReadyForMoreMediaData {
    if !audioInput.append(sample) {
        // do something
    }
}
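Putting it together, a minimal sketch of the whole recording flow with those changes folded in (iOS assumed; the class wrapper and any names not in your snippet are mine, and the upfront removeItem is a precaution because AVAssetWriter won't overwrite an existing file):

import AVFoundation
import ReplayKit

final class AppAudioRecorder {
    private var assetWriter: AVAssetWriter?
    private var audioInput: AVAssetWriterInput?

    func record() throws {
        let docURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let outputURL = docURL.appendingPathComponent("audio.m4a")
        try? FileManager.default.removeItem(at: outputURL) // the writer fails on an existing file

        let writer = try AVAssetWriter(outputURL: outputURL, fileType: .m4a)
        let audioSettings: [String: Any] = [
            AVNumberOfChannelsKey: 1,
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVSampleRateKey: 44100,
            AVEncoderBitRateKey: 128000,
        ]
        let input = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
        input.expectsMediaDataInRealTime = true
        if writer.canAdd(input) {
            writer.add(input)
        }
        assetWriter = writer
        audioInput = input

        RPScreenRecorder.shared().startCapture(handler: { sample, bufferType, error in
            guard error == nil, bufferType == .audioApp else { return }
            // Start the session at the first buffer's own timestamp.
            if writer.status == .unknown {
                writer.startWriting()
                writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
            }
            if input.isReadyForMoreMediaData {
                input.append(sample)
            }
        }, completionHandler: { error in
            if let error = error { print("startCapture failed: \(error)") }
        })
    }

    func stopRecord() {
        RPScreenRecorder.shared().stopCapture { error in
            self.audioInput?.markAsFinished()
            // finishWriting is what actually produces a playable file.
            self.assetWriter?.finishWriting {
                print("finished writing")
            }
            if let error = error { print(error) }
        }
    }
}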
Previously:
You need to call assetWriter.finishWriting at some point.
It's interesting that you have 6 channel input. Are you using a special device or some kind of virtual device?

Related

copyNextSampleBuffer hanging for AVAssetReaderTrackOutput

I am trying to create a video compression tool which simply takes in a video file URL, compresses it, and returns the new URL, in Swift 5 for iOS 16.
I have been following a few tutorials online, but they all use functions that have since been deprecated, so I have put together this code, refactored from various sources:
import Foundation
import AVFoundation

class VideoCompressorModel: ObservableObject {
    let bitrate = 2_500_000 // bits per second (2.5 Mbps)

    func compressFile(urlToCompress: URL, outputURL: URL, completionHandler: @escaping (URL?) -> Void) async {
        let asset = AVAsset(url: urlToCompress)

        // create asset reader
        var assetReader: AVAssetReader?
        do {
            assetReader = try AVAssetReader(asset: asset)
        } catch {
            assetReader = nil
        }
        guard let reader = assetReader else {
            completionHandler(nil)
            return
        }

        var videoTrack: AVAssetTrack?
        var audioTrack: AVAssetTrack?
        do {
            videoTrack = try await asset.loadTracks(withMediaType: AVMediaType.video).first
            audioTrack = try await asset.loadTracks(withMediaType: AVMediaType.audio).first
        } catch {
            completionHandler(nil)
            return
        }
        guard let videoTrack, let audioTrack else {
            completionHandler(nil)
            return
        }

        let videoReaderSettings: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB]

        // ADJUST BIT RATE OF VIDEO HERE
        var videoSettings: [String: Any]?
        do {
            videoSettings = await [
                AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey: self.bitrate],
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoHeightKey: try videoTrack.load(.naturalSize).height,
                AVVideoWidthKey: try videoTrack.load(.naturalSize).width
            ]
        } catch {
            completionHandler(nil)
            return
        }
        guard let videoSettings else {
            completionHandler(nil)
            return
        }

        let audioSettings = [
            AVSampleRateKey: 44100,
            AVFormatIDKey: kAudioFormatLinearPCM
        ]

        let assetReaderVideoOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
        let assetReaderAudioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings)

        if reader.canAdd(assetReaderVideoOutput) {
            reader.add(assetReaderVideoOutput)
        } else {
            completionHandler(nil)
            return
        }
        if reader.canAdd(assetReaderAudioOutput) {
            reader.add(assetReaderAudioOutput)
        } else {
            completionHandler(nil)
            return
        }

        let audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
        let videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
        do {
            videoInput.transform = try await videoTrack.load(.preferredTransform)
        } catch {
            completionHandler(nil)
            return
        }

        // we need to add samples to the video input
        let videoInputQueue = DispatchQueue(label: "videoQueue")
        let audioInputQueue = DispatchQueue(label: "audioQueue")

        var assetWriter: AVAssetWriter?
        do {
            assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
        } catch {
            assetWriter = nil
        }
        guard let assetWriter else {
            completionHandler(nil)
            return
        }
        assetWriter.shouldOptimizeForNetworkUse = true
        assetWriter.add(videoInput)
        assetWriter.add(audioInput)

        assetWriter.startWriting()
        reader.startReading()
        assetWriter.startSession(atSourceTime: CMTime.zero)

        let closeWriterBothDone: () -> Void = {
            assetWriter.finishWriting(completionHandler: {
                completionHandler(assetWriter.outputURL)
            })
            reader.cancelReading()
        }

        print("STARTING")

        let closeWriterAudioDone: () -> Void = {
            print("FINISHED AUDIO")
            videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
                while videoInput.isReadyForMoreMediaData {
                    let sample = assetReaderVideoOutput.copyNextSampleBuffer()
                    if sample != nil {
                        videoInput.append(sample!)
                    } else {
                        videoInput.markAsFinished()
                        DispatchQueue.main.async {
                            closeWriterBothDone()
                        }
                        break
                    }
                }
            }
        }

        audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
            while audioInput.isReadyForMoreMediaData {
                let sample = assetReaderAudioOutput.copyNextSampleBuffer()
                print("hi")
                if sample != nil {
                    audioInput.append(sample!)
                } else {
                    audioInput.markAsFinished()
                    DispatchQueue.main.async {
                        closeWriterAudioDone()
                    }
                    break
                }
            }
        }
    }
}
The issue is that copyNextSampleBuffer for the audio track output hangs after a few calls. I test this simply by having a print("hi"), which only gets called a handful of times before the code blocks.
I have tried changing the audio options provided to AVAssetReaderTrackOutput, which I originally had simply as nil, and this only had the effect of increasing the number of times copyNextSampleBuffer gets called before it freezes.
Perhaps there is a better way overall to compress video in newer versions of Swift, as this seems somewhat inelegant? If not, are there any bugs in my code causing it to hang?
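One plausible cause, for what it's worth: an AVAssetReader reads the file sequentially and has to buffer samples for any attached output you are not consuming, so draining the audio track to completion before touching the video track (which is what closeWriterAudioDone arranges) can block once those internal buffers fill. The usual pattern is to drain both outputs concurrently and finish the writer once both inputs are done. A sketch reusing the question's names, with error handling elided:

let group = DispatchGroup()

group.enter()
videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
    while videoInput.isReadyForMoreMediaData {
        guard let sample = assetReaderVideoOutput.copyNextSampleBuffer() else {
            videoInput.markAsFinished() // no further callbacks after this
            group.leave()
            return
        }
        videoInput.append(sample)
    }
}

group.enter()
audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
    while audioInput.isReadyForMoreMediaData {
        guard let sample = assetReaderAudioOutput.copyNextSampleBuffer() else {
            audioInput.markAsFinished()
            group.leave()
            return
        }
        audioInput.append(sample)
    }
}

// Finish writing only once both tracks have been fully consumed.
group.notify(queue: .main) {
    assetWriter.finishWriting {
        completionHandler(assetWriter.outputURL)
    }
}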

Unable to play video after app rebuild - swift

The video does not play after the app is rebuilt (I save the file path URL into Core Data), using AVCaptureSession.
The file path is the same before and after the rebuild:
file:///private/var/mobile/Containers/Data/Application/3DA93FBC-9A20-40B4-A017-B3D5C7768301/tmp/63F6CEED-3202-4F5F-999B-5F138D73635D.mp4
I have tried everything, but nothing works.
Here is my code for recording the video:
func setupPreview() {
    // Configure previewLayer
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer?.frame = shapeLayer.bounds
    previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    shapeLayer.layer.addSublayer(previewLayer!)
}

func setupSession() -> Bool {
    captureSession.sessionPreset = AVCaptureSession.Preset.high
    // Setup Camera
    let camera = AVCaptureDevice.default(for: AVMediaType.video)!
    do {
        let input = try AVCaptureDeviceInput(device: camera)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
            activeInput = input
        }
    } catch {
        print("Error setting device video input: \(error)")
        return false
    }
    // Setup Microphone
    let microphone = AVCaptureDevice.default(for: AVMediaType.audio)!
    do {
        let micInput = try AVCaptureDeviceInput(device: microphone)
        if captureSession.canAddInput(micInput) {
            captureSession.addInput(micInput)
        }
    } catch {
        print("Error setting device audio input: \(error)")
        return false
    }
    // Movie output
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
    }
    return true
}

func startSession() {
    if !captureSession.isRunning {
        videoQueue().async {
            self.captureSession.startRunning()
        }
    }
}

func stopSession() {
    if captureSession.isRunning {
        videoQueue().async {
            self.captureSession.stopRunning()
        }
    }
}

func videoQueue() -> DispatchQueue {
    return DispatchQueue.main
}

func currentVideoOrientation() -> AVCaptureVideoOrientation {
    var orientation: AVCaptureVideoOrientation
    switch UIDevice.current.orientation {
    case .portrait:
        orientation = AVCaptureVideoOrientation.portrait
    case .landscapeRight:
        orientation = AVCaptureVideoOrientation.landscapeLeft
    case .portraitUpsideDown:
        orientation = AVCaptureVideoOrientation.portraitUpsideDown
    default:
        orientation = AVCaptureVideoOrientation.landscapeRight
    }
    return orientation
}

func startRecording() {
    if movieOutput.isRecording == false {
        save.setTitle("stop", for: UIControl.State.normal)
        let connection = movieOutput.connection(with: AVMediaType.video)
        if (connection?.isVideoOrientationSupported)! {
            connection?.videoOrientation = currentVideoOrientation()
        }
        if (connection?.isVideoStabilizationSupported)! {
            connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
        }
        let device = activeInput.device
        if device.isSmoothAutoFocusSupported {
            do {
                try device.lockForConfiguration()
                device.isSmoothAutoFocusEnabled = false
                device.unlockForConfiguration()
            } catch {
                print("Error setting configuration: \(error)")
            }
        }
        // EDIT2: And I forgot this
        outputURL = tempURL()
        movieOutput.startRecording(to: outputURL, recordingDelegate: self)
    } else {
        stopRecording()
    }
}

func tempURL() -> URL? {
    let directory = NSTemporaryDirectory() as NSString
    let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
    path22 = path
    let directoryURL: URL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let folderPath: URL = directoryURL.appendingPathComponent("Downloads", isDirectory: true)
    let fileURL: URL = folderPath.appendingPathComponent(path)
    return URL(fileURLWithPath: path)
}

func stopRecording() {
    if movieOutput.isRecording == true {
        movieOutput.stopRecording()
    }
}
Here is where I save it into Core Data:
let managedObject = self.managedObjectContext
entity = NSEntityDescription.entity(forEntityName: "MediaData", in: managedObject!)
let personMO = NSManagedObject(entity: entity, insertInto: managedObject)
personMO.setValue("\(self.videoURL!)", forKey: "videosS")
personMO.setValue(dataImage, forKey: "thumbnails")
print(personMO)
do {
    try managedObject?.save()
    print("video saved")
} catch {
    print("Catch Error: Failed to save")
}
let appdel = UIApplication.shared.delegate as! AppDelegate
appdel.avplayer = AVPlayer(url: videoURL!)
print(videoURL!)
let playerLayer = AVPlayerLayer(player: appdel.avplayer)
playerLayer.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer)
appdel.avplayer?.play()
You must never save a full file path into Core Data or anywhere else. File paths are not persistent: your app is sandboxed, and the sandbox path can change at any time, especially between launches and between installations.
Instead, save only the file name and reconstruct the path each time you need it. Just as you call FileManager.default.urls(for: .documentDirectory...) to construct the file path initially, so you must call it every time you want to access the file.
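A sketch of both halves, reusing the names from the question (the videoURL(forFileName:) helper is illustrative, not an existing API):

// When saving: persist only the file name, not the absolute URL string.
personMO.setValue(self.videoURL!.lastPathComponent, forKey: "videosS")

// When loading: rebuild the full URL from the current sandbox location.
// Note this assumes the file itself lives in Documents; files written to
// NSTemporaryDirectory(), as in tempURL() above, can also be purged by the system.
func videoURL(forFileName fileName: String) -> URL {
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    return documents.appendingPathComponent(fileName)
}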

After compressing my audio file, why can I not play the file?

The audio file will not play after reducing it using AVAssetReader/AVAssetWriter.
At the moment the whole function executes fine, with no errors thrown.
But when I go into the simulator's document directory via the terminal, the audio file will not play through iTunes, and opening it with QuickTime gives the error "QuickTime Player can't open "test1.m4a"".
Does anyone specialise in this area and understand why this isn't working?
protocol FileConverterDelegate {
    func fileConversionCompleted()
}

class WKAudioTools: NSObject {
    var delegate: FileConverterDelegate?
    var url: URL?
    var assetReader: AVAssetReader?
    var assetWriter: AVAssetWriter?

    func convertAudio() {
        let documentDirectory = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
        let exportURL = documentDirectory.appendingPathComponent(Assets.soundName1).appendingPathExtension("m4a")
        url = Bundle.main.url(forResource: Assets.soundName1, withExtension: Assets.mp3)
        guard let assetURL = url else { return }
        let asset = AVAsset(url: assetURL)

        // reader
        do {
            assetReader = try AVAssetReader(asset: asset)
        } catch let error {
            print("Error with reading >> \(error.localizedDescription)")
        }

        let assetReaderOutput = AVAssetReaderAudioMixOutput(audioTracks: asset.tracks, audioSettings: nil)
        //let assetReaderOutput = AVAssetReaderTrackOutput(track: track!, outputSettings: nil)

        guard let assetReader = assetReader else {
            print("reader is nil")
            return
        }
        if assetReader.canAdd(assetReaderOutput) == false {
            print("Can't add output to the reader ☹️")
            return
        }
        assetReader.add(assetReaderOutput)

        // writer
        do {
            assetWriter = try AVAssetWriter(outputURL: exportURL, fileType: .m4a)
        } catch let error {
            print("Error with writing >> \(error.localizedDescription)")
        }

        var channelLayout = AudioChannelLayout()
        memset(&channelLayout, 0, MemoryLayout.size(ofValue: channelLayout))
        channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo

        // use different values to affect the downsampling/compression
        let outputSettings: [String: Any] = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 2,
            AVEncoderBitRateKey: 128000,
            AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout))
        ]

        let assetWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings)
        guard let assetWriter = assetWriter else { return }
        if assetWriter.canAdd(assetWriterInput) == false {
            print("Can't add asset writer input ☹️")
            return
        }
        assetWriter.add(assetWriterInput)
        assetWriterInput.expectsMediaDataInRealTime = false

        // MARK: - File conversion
        assetWriter.startWriting()
        assetReader.startReading()
        let audioTrack = asset.tracks[0]
        let startTime = CMTime(seconds: 0, preferredTimescale: audioTrack.naturalTimeScale)
        assetWriter.startSession(atSourceTime: startTime)

        // We need to do this on another thread, so let's set up a dispatch group...
        var convertedByteCount = 0
        let dispatchGroup = DispatchGroup()
        let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")

        // ... and go
        dispatchGroup.enter()
        assetWriterInput.requestMediaDataWhenReady(on: mediaInputQueue) {
            while assetWriterInput.isReadyForMoreMediaData {
                let nextBuffer = assetReaderOutput.copyNextSampleBuffer()
                if nextBuffer != nil {
                    assetWriterInput.append(nextBuffer!) // FIXME: Handle this safely
                    convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer!)
                } else {
                    // done!
                    assetWriterInput.markAsFinished()
                    assetReader.cancelReading()
                    dispatchGroup.leave()
                    DispatchQueue.main.async {
                        // Notify delegate that conversion is complete
                        self.delegate?.fileConversionCompleted()
                        print("Process complete 🎧")
                        if assetWriter.status == .failed {
                            print("Writing asset failed ☹️ Error: ", assetWriter.error)
                        }
                    }
                    break
                }
            }
        }
    }
}
You need to call finishWriting on your AVAssetWriter to get the output completely written:
assetWriter.finishWriting {
    DispatchQueue.main.async {
        // Notify delegate that conversion is complete
        self.delegate?.fileConversionCompleted()
        print("Process complete 🎧")
        if assetWriter.status == .failed {
            print("Writing asset failed ☹️ Error: ", assetWriter.error)
        }
    }
}
If exportURL exists before you start the conversion, you should remove it, otherwise the conversion will fail:
try? FileManager.default.removeItem(at: exportURL)
As @matt points out, why all the buffer work when you could do the conversion more simply with an AVAssetExportSession? And why convert one of your own assets at runtime when you could distribute it already in the desired format?
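For reference, a sketch of that simpler route, assuming the same asset and exportURL as in the question:

guard let export = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else { return }
try? FileManager.default.removeItem(at: exportURL) // the export also fails if the file exists
export.outputURL = exportURL
export.outputFileType = .m4a
export.exportAsynchronously {
    if export.status == .completed {
        print("Process complete 🎧")
    } else {
        print("Export failed:", export.error ?? "unknown error")
    }
}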

AVAssetWriter error: Cannot append media data after ending session

This error occurs when capturing video with AVAssetWriter. However, after calling AVAssetWriter's finishWriting inside of endVideoCapture, there isn't another call to start writing again, so why is this occurring?
As you can see in the delegate function, captureOutput, we check the recording state before trying to append to the asset writer. The recording state is set to false in endVideoCapture.
Optional(Error Domain=AVFoundationErrorDomain Code=-11862 "Cannot append media data after ending session" UserInfo={NSLocalizedFailureReason=The application encountered a programming error., NSLocalizedDescription=The operation is not allowed, NSDebugDesc
func startVideoCapture() {
    // Get capture resolution
    let resolution = getCaptureResolution()

    // Return if capture resolution not set
    if resolution.width == 0 || resolution.height == 0 {
        printError("Error starting capture because resolution invalid")
        return
    }

    // If here, start capture
    assetWriter = createAssetWriter(Int(resolution.width), outputHeight: Int(resolution.height))
    let recordingClock = captureSession.masterClock
    assetWriter!.startWriting()
    assetWriter!.startSession(atSourceTime: CMClockGetTime(recordingClock!))

    // Update time stamp
    startTime = CACurrentMediaTime()

    // Update <recording> flag & notify delegate
    recording = true
    delegate?.cameraDidStartVideoCapture()
}

func createAssetWriter(_ outputWidth: Int, outputHeight: Int) -> AVAssetWriter? {
    // Update <outputURL> with temp file to hold video
    let tempPath = gFile.getUniqueTempPath(gFile.MP4File)
    outputURL = URL(fileURLWithPath: tempPath)

    // Return new asset writer or nil
    do {
        // Create asset writer
        let newWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeMPEG4)

        // Define video settings
        let videoSettings: [String: AnyObject] = [
            AVVideoCodecKey: AVVideoCodecH264 as AnyObject,
            AVVideoWidthKey: outputWidth as AnyObject,
            AVVideoHeightKey: outputHeight as AnyObject,
        ]

        // Add video input to writer
        assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assetWriterVideoInput!.expectsMediaDataInRealTime = true
        newWriter.add(assetWriterVideoInput!)

        // Define audio settings
        let audioSettings: [String: AnyObject] = [
            AVFormatIDKey: NSInteger(kAudioFormatMPEG4AAC) as AnyObject,
            AVNumberOfChannelsKey: 2 as AnyObject,
            AVSampleRateKey: NSNumber(value: 44100.0 as Double)
        ]

        // Add audio input to writer
        assetWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
        assetWriterAudioInput!.expectsMediaDataInRealTime = true
        newWriter.add(assetWriterAudioInput!)

        // Return writer
        print("Created asset writer for \(outputWidth)x\(outputHeight) video")
        return newWriter
    } catch {
        printError("Error creating asset writer: \(error)")
        return nil
    }
}

func endVideoCapture() {
    // Update flag to stop data capture
    recording = false

    // Return if asset writer undefined
    if assetWriter == nil {
        return
    }

    // If here, end capture
    // -- Mark inputs as done
    assetWriterVideoInput!.markAsFinished()
    assetWriterAudioInput!.markAsFinished()

    // -- Finish writing
    assetWriter!.finishWriting() {
        self.assetWriterDidFinish()
    }
}

func assetWriterDidFinish() {
    print("Asset writer finished with status: \(getAssetWriterStatus())")

    // Return early on error & tell delegate
    if assetWriter!.error != nil {
        printError("Error finishing asset writer: \(assetWriter!.error)")
        delegate?.panabeeCameraDidEndVideoCapture(videoURL: nil, videoDur: 0, error: assetWriter!.error)
        logEvent("Asset Writer Finish Error", userData: ["Error": assetWriter!.error.debugDescription])
        return
    }

    // If here, no error so extract video properties & tell delegate
    let videoAsset = AVURLAsset(url: outputURL, options: nil)
    let videoDur = CMTimeGetSeconds(videoAsset.duration)
    let videoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    print("Camera created video. Duration: \(videoDur). Size: \(videoTrack.naturalSize). Transform: \(videoTrack.preferredTransform). URL: \(outputURL).")

    // Tell delegate
    delegate?.cameraDidEndVideoCapture(videoURL: outputURL.path, videoDur: videoDur, error: assetWriter!.error)

    // Reset <assetWriter> to nil
    assetWriter = nil
}

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    // Return if not recording
    if !recording {
        return
    }

    // If here, capture data
    // Write video data?
    if captureOutput == videoOutput && assetWriterVideoInput!.isReadyForMoreMediaData {
        assetWriterVideoQueue!.async {
            self.assetWriterVideoInput!.append(sampleBuffer)
        }
    }

    // No, write audio data?
    if captureOutput == audioOutput && assetWriterAudioInput!.isReadyForMoreMediaData {
        assetWriterAudioQueue!.async {
            self.assetWriterAudioInput!.append(sampleBuffer)
        }
    }
}
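One plausible explanation, given the code shown: the appends are dispatched asynchronously onto the writer queues, while the recording flag is checked on the capture callback's thread. Any buffers already sitting on those queues when endVideoCapture() runs will still be appended after markAsFinished and finishWriting, which produces exactly this error. A sketch of re-checking state on the queue itself (same names as above; the audio path would mirror it):

if captureOutput == videoOutput {
    assetWriterVideoQueue!.async {
        // Re-check on the writer queue so buffers queued before
        // endVideoCapture() ran are dropped, not appended late.
        guard self.recording,
              let input = self.assetWriterVideoInput,
              input.isReadyForMoreMediaData else { return }
        input.append(sampleBuffer)
    }
}

Funnelling the markAsFinished/finishWriting calls through the same queues as the appends would close the race completely, since everything then executes in queue order.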

Media type of sample buffer must match receiver's media type ("soun")

Based on this answer https://stackoverflow.com/a/16035330/1615183 I created the following code in Swift for compressing a video:
var videoWriter: AVAssetWriter!
var videoWriterInput: AVAssetWriterInput!
var processingQueue: dispatch_queue_t = dispatch_queue_create("processingQueue1", nil)
var processingQueue2: dispatch_queue_t = dispatch_queue_create("processingQueue2", nil)
var audioWriterInput: AVAssetWriterInput!

func encode() {
    NSFileManager.defaultManager().removeItemAtURL(self.outputFile, error: nil)

    let videoCleanApertureSettings = [
        AVVideoCleanApertureHeightKey: 720,
        AVVideoCleanApertureWidthKey: 1280,
        AVVideoCleanApertureHorizontalOffsetKey: 2,
        AVVideoCleanApertureVerticalOffsetKey: 2
    ]
    let codecSettings = [
        AVVideoAverageBitRateKey: 1024000,
        AVVideoCleanApertureKey: videoCleanApertureSettings
    ]
    let videoSettings = [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoCompressionPropertiesKey: codecSettings,
        AVVideoHeightKey: 720,
        AVVideoWidthKey: 1280
    ]

    // setup video writer
    var error: NSError?
    let asset = AVURLAsset(URL: self.inputFile, options: nil)
    let videoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
    let videoSize: CGSize = videoTrack.naturalSize
    videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    videoWriterInput.expectsMediaDataInRealTime = false
    videoWriterInput.transform = videoTrack.preferredTransform
    videoWriter = AVAssetWriter(URL: self.outputFile, fileType: AVFileTypeQuickTimeMovie, error: &error)
    if videoWriter.canAddInput(videoWriterInput) {
        videoWriter.addInput(videoWriterInput)
    } else {
        println("cant add video writer input")
        return
    }

    // setup video reader
    let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
    let videoReaderOutput: AVAssetReaderTrackOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings) // should it be videoReaderSettings?
    let videoReader: AVAssetReader = AVAssetReader(asset: asset, error: &error)
    if videoReader.canAddOutput(videoReaderOutput) {
        videoReader.addOutput(videoReaderOutput)
    }

    // setup audio writer
    audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil)
    audioWriterInput.expectsMediaDataInRealTime = false
    if videoWriter.canAddInput(audioWriterInput) {
        videoWriter.addInput(audioWriterInput)
    }

    // setup audio reader
    let audioTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
    let audioReaderOutput: AVAssetReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
    let audioReader: AVAssetReader = AVAssetReader(asset: asset, error: &error)
    if audioReader.canAddOutput(audioReaderOutput) {
        audioReader.addOutput(audioReaderOutput)
    } else {
        println("cant add audio reader")
        return
    }

    videoWriter.startWriting()
    videoReader.startReading()
    videoWriter.startSessionAtSourceTime(kCMTimeZero)
    videoWriterInput.requestMediaDataWhenReadyOnQueue(processingQueue) {
        while self.videoWriterInput.readyForMoreMediaData {
            println("First loop")
            var sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
            if videoReader.status == .Reading && sampleBuffer != nil {
                println("Appending")
                self.videoWriterInput.appendSampleBuffer(sampleBuffer)
            } else {
                self.videoWriterInput.markAsFinished()
                if videoReader.status == .Completed {
                    audioReader.startReading()
                    self.videoWriter.startSessionAtSourceTime(kCMTimeZero)
                    self.audioWriterInput.requestMediaDataWhenReadyOnQueue(self.processingQueue2) {
                        while self.audioWriterInput.readyForMoreMediaData {
                            println("Second loop")
                            var sampleBuffer2: CMSampleBufferRef? = audioReaderOutput.copyNextSampleBuffer()
                            if audioReader.status == .Reading && sampleBuffer2 != nil {
                                self.audioWriterInput.appendSampleBuffer(sampleBuffer2)
                            } else {
                                self.audioWriterInput.markAsFinished()
                                println("Audio finish")
                                self.videoWriter.finishWritingWithCompletionHandler { println("Done") }
                            }
                        }
                    }
                } else {
                    println("Video Reader not completed")
                }
                println("Finished")
                break
            } // else videoSampleBuffer
        }
    }
}
However, if I remove the audio part, I get only an empty file. If I run it as is, the first time the second loop runs there is no problem, but on the second iteration it crashes with the following error:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] Media type of sample buffer must match receiver's media type ("soun")'
Has anyone had the same problem?
Change AVMediaTypeVideo to AVMediaTypeAudio when fetching the audio track:
let audioTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
should be
let audioTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeAudio)[0] as AVAssetTrack