I have recorded audio in .AAC format using the microphone and successfully saved it to the documents directory. When I try to play a particular recording, nothing plays even though the play methods are being called; I get no sound at all. Can anyone help me figure out this issue?
Recording sample code:
func record() {
    self.prepareToRecord()
    if let recorder = self.audioRecorder {
        recorder.record()
    }
}

func prepareToRecord() {
    let dirPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as String
    let currentDateTime = NSDate()
    let formatter = NSDateFormatter()
    formatter.dateFormat = "ddMMyyyy-HHmmss"
    str_Recordedname = formatter.stringFromDate(currentDateTime) + ".AAC"
    let pathArray = [dirPath, str_Recordedname]
    let filePath = NSURL.fileURLWithPathComponents(pathArray)
    print(filePath)

    let settings = [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 44100.0,
        AVNumberOfChannelsKey: 2 as NSNumber,
        AVEncoderAudioQualityKey: AVAudioQuality.High.rawValue
    ]

    do {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
        try session.setActive(true)
        // Pass the settings dictionary; the original passed an empty one,
        // so the AAC format settings were never applied.
        self.audioRecorder = try AVAudioRecorder(URL: filePath!, settings: settings)
        self.audioRecorder!.delegate = self        // set the delegate before recording starts
        self.audioRecorder!.meteringEnabled = true
        self.audioRecorder!.prepareToRecord()      // record() itself is started by the caller
        print("Recorded Audio is", filePath!)
    } catch let error as NSError {
        print(error.description)
    }
}
Playing sample code:
var filePath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
print("\nfilePath: \(filePath)")
filePath = filePath.stringByAppendingPathComponent(self.str_Recordedname)
print("\nfilePath: \(filePath)")
let filePathURL = NSURL.fileURLWithPath(filePath as String)
print("\nfilePathURL: \(filePathURL)")

// Note: nothing in this block throws, so the original do/catch was unreachable.
let asset = AVURLAsset(URL: filePathURL)
let playerItem = AVPlayerItem(asset: asset)
let player = AVPlayer(playerItem: playerItem)   // local variable; see the Edit below
player.play()
Thanks in advance.
Edit:
Moving the AVPlayer variable from a local to an instance-level (global) property fixed it: the local player was deallocated as soon as the method returned, before any audio could play.
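For reference, a minimal sketch of that fix (the class and method names here are illustrative, not from the original code):

import UIKit
import AVFoundation

class PlayerViewController: UIViewController {
    // A strong reference at instance scope keeps the player alive
    // after the method returns, so playback can actually be heard.
    var player: AVPlayer?

    func playRecording(filePathURL: NSURL) {
        let playerItem = AVPlayerItem(asset: AVURLAsset(URL: filePathURL))
        self.player = AVPlayer(playerItem: playerItem)
        self.player?.play()
    }
}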
Related
I have an AVMutableComposition containing only audio that I want to export to a .wav audio file.
The simplest solution I found for exporting audio uses AVAssetExportSession, as in this simplified example:
let composition = AVMutableComposition()
// add tracks...

let exportSession = AVAssetExportSession(asset: composition,
                                         presetName: AVAssetExportPresetAppleM4A)!
exportSession.outputFileType = .m4a
exportSession.outputURL = someOutUrl
exportSession.exportAsynchronously {
    // done
}
But it only works for .m4a.
This post mentions that exporting to other formats requires AVAssetReader and AVAssetWriter, but unfortunately it does not go into further detail.
I have tried to implement it but got stuck in the process.
This is what I have so far (again simplified):
let composition = AVMutableComposition()

let outputSettings: [String: Any] = [
    AVFormatIDKey: kAudioFormatLinearPCM,
    AVLinearPCMIsBigEndianKey: false,
    AVLinearPCMIsFloatKey: false,
    AVLinearPCMBitDepthKey: 32,
    AVLinearPCMIsNonInterleaved: false,
    AVSampleRateKey: 44100.0,
    AVChannelLayoutKey: NSData(),
]

let assetWriter = try! AVAssetWriter(outputURL: someOutUrl, fileType: .wav)
let input = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings)
assetWriter.add(input)
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: CMTime.zero)

input.requestMediaDataWhenReady(on: .main) {
    // as I understand, I need to bring in data from my
    // AVMutableComposition here...
    let sampleBuffer: CMSampleBuffer = ???
    input.append(sampleBuffer)
}

assetWriter.finishWriting {
    // done
}
It boils down to my question:
Can you provide a working example for exporting audio from an AVMutableComposition to a .wav file?
After some more research I came up with the following solution.
The missing piece was AVAssetReader.
(simplified code)
// composition
let composition = AVMutableComposition()
// add stuff to composition

// reader
guard let assetReader = try? AVAssetReader(asset: composition) else { return }
assetReader.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
let assetReaderAudioMixOutput = AVAssetReaderAudioMixOutput(audioTracks: composition.tracks(withMediaType: .audio), audioSettings: nil)
assetReader.add(assetReaderAudioMixOutput)
guard assetReader.startReading() else { return }

// writer
let outputSettings: [String: Any] = [
    AVFormatIDKey: kAudioFormatLinearPCM,
    AVLinearPCMIsBigEndianKey: false,
    AVLinearPCMIsFloatKey: false,
    AVLinearPCMBitDepthKey: 32,
    AVLinearPCMIsNonInterleaved: false,
    AVSampleRateKey: 44100.0,
    AVChannelLayoutKey: NSData(),
]
guard let assetWriter = try? AVAssetWriter(outputURL: someOutUrl, fileType: .wav) else { return }
let writerInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings)
assetWriter.add(writerInput)
guard assetWriter.startWriting() else { return }
assetWriter.startSession(atSourceTime: CMTime.zero)

let queue = DispatchQueue(label: "my.queue.id")
writerInput.requestMediaDataWhenReady(on: queue) {
    // capturing assetReader in the block prevents it from being released early
    let readerOutput = assetReader.outputs.first!
    while writerInput.isReadyForMoreMediaData {
        if let nextSampleBuffer = readerOutput.copyNextSampleBuffer() {
            writerInput.append(nextSampleBuffer)
        } else {
            writerInput.markAsFinished()
            assetWriter.endSession(atSourceTime: composition.duration)
            assetWriter.finishWriting {
                DispatchQueue.main.async {
                    // done, call my completion
                }
            }
            break
        }
    }
}
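One side note on the output settings: 32-bit integer PCM (AVLinearPCMBitDepthKey: 32 with AVLinearPCMIsFloatKey: false) is an unusual wav flavor. If a downstream tool struggles with the resulting file, a more conventional choice is 16-bit PCM; this variant is my suggestion, not part of the original answer:

let pcm16OutputSettings: [String: Any] = [
    AVFormatIDKey: kAudioFormatLinearPCM,
    AVLinearPCMIsBigEndianKey: false,   // wav is little-endian
    AVLinearPCMIsFloatKey: false,       // integer samples
    AVLinearPCMBitDepthKey: 16,         // the most widely supported bit depth
    AVLinearPCMIsNonInterleaved: false,
    AVSampleRateKey: 44100.0,
    AVNumberOfChannelsKey: 2,
]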
As you mention, you are creating the .m4a successfully, so why not turn the .m4a file into a .wav file with just a few lines of code?
I simply changed the extension of the file to .wav, removed the .m4a file, and it worked.
func getDirectory() -> URL {
    let path = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
    let documentDirectory = path[0]
    return documentDirectory
}

let date = Date().timeIntervalSince1970
fileName = getDirectory().appendingPathComponent("\(date).m4a")
wavFileName = getDirectory().appendingPathComponent("\(date).wav")
try! FileManager.default.copyItem(at: fileName, to: wavFileName)
try! FileManager.default.removeItem(at: fileName)
I even played the .wav file and it's working fine.
audioPlayer = try! AVAudioPlayer(contentsOf: wavFileName)
audioPlayer.play()
Try the example above and check whether this extension swap adds any noticeable load or time in your app. (Note that it only renames the file: the audio data inside is still AAC in an MPEG-4 container, which AVAudioPlayer handles because it inspects the content rather than the extension.)
Is there any sample code or a tutorial about this? I've found that AVAudioRecorder has been supported since watchOS 4.0 (https://developer.apple.com/documentation/avfoundation/avaudiorecorder), but when I try to use it, it records 1 second with no actual sound (just noise).
Here is my code:
let audioURL = self.getRecordedFileURL()
print(audioURL.absoluteString)

let settings = [
    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
    AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]

do {
    recorder = try AVAudioRecorder(url: audioURL, settings: settings)
    recorder?.delegate = self
    recorder?.record()
} catch {
    finishRecording(success: false)
}
Also, should I use an AVAudioSession here? If so, is requestRecordPermission required, and how do I deal with it? Thank you for your help!
This one works:
let recordingName = "audio.m4a"
let dirPath = getDirectory()
let pathArray = [dirPath, recordingName]
guard let filePath = URL(string: pathArray.joined(separator: "/")) else { return }

let settings = [
    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
    AVSampleRateKey: 12000,
    AVNumberOfChannelsKey: 1,
    AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]

// start recording
do {
    audioRecorder = try AVAudioRecorder(url: filePath, settings: settings)
    audioRecorder.delegate = self
    audioRecorder.record()
} catch {
    print("Recording Failed")
}

func getDirectory() -> String {
    let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    return dirPath
}
Don't forget to add NSMicrophoneUsageDescription to the Info.plist of your phone companion app.
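Regarding the AVAudioSession part of the question: yes, activating the session and asking for record permission before recording is a good idea. A minimal sketch, assuming the standard AVAudioSession API (the NSMicrophoneUsageDescription key above is what backs the permission prompt):

let session = AVAudioSession.sharedInstance()
do {
    try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
    try session.setActive(true)
} catch {
    print("Audio session setup failed: \(error)")
}

session.requestRecordPermission { granted in
    guard granted else {
        print("Record permission denied")
        return
    }
    // Safe to create and start the AVAudioRecorder here.
}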
I am trying to save a video with this code on an Apple TV:
func playerItemDidReachEnd(_ notification: Notification) {
    if notification.object as? AVPlayerItem == player.currentItem {
        player.seek(to: kCMTimeZero)

        let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
        let filename = "video.mp4"
        let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).last!
        let outputURL = documentsDirectory.appendingPathComponent(filename)
        exporter?.outputURL = outputURL
        exporter?.outputFileType = AVFileTypeMPEG4

        // Export only if the file does not already exist.
        if !FileManager.default.fileExists(atPath: outputURL.path) {
            exporter?.exportAsynchronously(completionHandler: {
                print(exporter?.status.rawValue)
                print(exporter?.error)
            })
        }
    }
}
I am also using this as the asset and do not want to change the asset:
lazy var asset: AVURLAsset = {
    var asset: AVURLAsset = AVURLAsset(url: self.url)
    asset.resourceLoader.setDelegate(self as? AVAssetResourceLoaderDelegate, queue: DispatchQueue.main)
    return asset
}()
But I am getting this error. What does it mean, and how can I fix it? Thanks :)
Optional(Error Domain=AVFoundationErrorDomain Code=-11838 "Operation Stopped" UserInfo={NSUnderlyingError=0x608000443300 {Error Domain=NSOSStatusErrorDomain Code=-12109 "(null)"}, NSLocalizedFailureReason=The operation is not supported for this media., NSLocalizedDescription=Operation Stopped})
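Error -11838 means the export operation is not supported for this particular asset; streamed sources in particular (for example HLS URLs) cannot be exported this way. As a first diagnostic step (my suggestion, not from the question), you can ask AVFoundation which presets are actually compatible with the asset before exporting:

// List the export presets AVFoundation supports for this asset.
let compatiblePresets = AVAssetExportSession.exportPresets(compatibleWith: asset)
print("Compatible presets: \(compatiblePresets)")

// Check a specific preset/file-type pairing before exporting.
AVAssetExportSession.determineCompatibility(ofExportPreset: AVAssetExportPresetHighestQuality,
                                            with: asset,
                                            outputFileType: AVFileTypeMPEG4) { supported in
    print("HighestQuality -> MPEG4 supported: \(supported)")
}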
I'm new to Swift, coming from Objective-C, and I have hit a wall. I need to extract the audio from a .mov file so I can then run it through speech-to-text processing. The code I have so far creates a file, but it contains no audio. This is what I have:
private var audioFilePath: URL!

private func createAudioFile() {
    let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
    let url = NSURL(fileURLWithPath: path)
    self.audioFilePath = url.appendingPathComponent("myAudio.m4a")

    let theAsset = AVAsset(url: self.outputFilePath)
    let exporter = AVAssetExportSession(asset: theAsset, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = self.audioFilePath
    exporter?.outputFileType = AVFileTypeMPEG4
    print("file path = ", self.audioFilePath)

    exporter?.exportAsynchronously(completionHandler: {
        print("exporter?.status \(exporter?.status)")
    })
}
Thanks for your help!
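One thing worth trying (a sketch of a common approach, not a guaranteed fix): for extracting audio only, the AVAssetExportPresetAppleM4A preset paired with the m4a file type is the usual combination, rather than AVAssetExportPresetHighestQuality with an MPEG-4 type:

let theAsset = AVAsset(url: self.outputFilePath)
// The AppleM4A preset exports only the audio tracks, into an .m4a container.
if let exporter = AVAssetExportSession(asset: theAsset, presetName: AVAssetExportPresetAppleM4A) {
    exporter.outputURL = self.audioFilePath
    exporter.outputFileType = AVFileTypeAppleM4A
    exporter.exportAsynchronously {
        print("status: \(exporter.status.rawValue), error: \(String(describing: exporter.error))")
    }
}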
How can I edit or trim a video, setting the start and end points to particular parts of the video?
I also want to use a slider to choose the trim start and end points.
func trimVideo(sourceURL: NSURL, destinationURL: NSURL, trimPoints: TrimPoints, completion: TrimCompletion?) {
    assert(sourceURL.fileURL)
    assert(destinationURL.fileURL)

    let options = [AVURLAssetPreferPreciseDurationAndTimingKey: true]
    let asset = AVURLAsset(URL: sourceURL, options: options)
    let preferredPreset = AVAssetExportPresetPassthrough
    if verifyPresetForAsset(preferredPreset, asset: asset) {
        let composition = AVMutableComposition()
        let videoCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
        let audioCompTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

        let assetVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo).first as! AVAssetTrack
        let assetAudioTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first as! AVAssetTrack

        var compError: NSError?
        var accumulatedTime = kCMTimeZero
        for (startTimeForCurrentSlice, endTimeForCurrentSlice) in trimPoints {
            let durationOfCurrentSlice = CMTimeSubtract(endTimeForCurrentSlice, startTimeForCurrentSlice)
            let timeRangeForCurrentSlice = CMTimeRangeMake(startTimeForCurrentSlice, durationOfCurrentSlice)

            videoCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetVideoTrack, atTime: accumulatedTime, error: &compError)
            audioCompTrack.insertTimeRange(timeRangeForCurrentSlice, ofTrack: assetAudioTrack, atTime: accumulatedTime, error: &compError)
            if compError != nil {
                NSLog("error during composition: \(compError)")
                if let completion = completion {
                    completion(compError)
                }
            }

            accumulatedTime = CMTimeAdd(accumulatedTime, durationOfCurrentSlice)
        }

        let exportSession = AVAssetExportSession(asset: composition, presetName: preferredPreset)
        exportSession.outputURL = destinationURL
        exportSession.outputFileType = AVFileTypeAppleM4V
        exportSession.shouldOptimizeForNetworkUse = true

        removeFileAtURLIfExists(destinationURL)
        exportSession.exportAsynchronouslyWithCompletionHandler { () -> Void in
            if let completion = completion {
                completion(exportSession.error)
            }
        }
    } else {
        NSLog("Could not find a suitable export preset for the input video")
        let error = NSError(domain: "org.linuxguy.VideoLab", code: -1, userInfo: nil)
        if let completion = completion {
            completion(error)
        }
    }
}
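A hedged usage sketch: assuming TrimPoints is a typealias for [(CMTime, CMTime)] and TrimCompletion for (NSError?) -> Void (neither definition is shown above), and that startSlider/endSlider are hypothetical UISliders holding seconds, a single slice might be trimmed like this:

// Assumed definitions, not shown in the original code:
// typealias TrimPoints = [(CMTime, CMTime)]
// typealias TrimCompletion = (NSError?) -> Void

let start = CMTimeMakeWithSeconds(Float64(startSlider.value), 600)  // slider-driven start, 600 ticks/sec
let end = CMTimeMakeWithSeconds(Float64(endSlider.value), 600)      // slider-driven end

trimVideo(sourceURL, destinationURL: destinationURL, trimPoints: [(start, end)]) { error in
    if let error = error {
        NSLog("trim failed: \(error)")
    } else {
        NSLog("trimmed video written to \(destinationURL)")
    }
}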