Play an audio file using Swift for MacOS - swift

I'm trying to simply play a file (in the main bundle or on the disk) using AVAudioFile, AVAudioEngine and AVAudioPlayerNode.
Here is what I'm doing:
import Foundation
import AppKit
import AudioToolbox
import AVFoundation
struct readFile {
static var arrayFloatValues:[Float] = []
static var points:[CGFloat] = []
}
class AudioAnalisys : NSObject {
class func open_audiofile() {
let audioEngine: AVAudioEngine = AVAudioEngine()
let audioPlayer: AVAudioPlayerNode = AVAudioPlayerNode()
//get where the file is
let url = Bundle.main.url(forResource: "TeamPlaylist", withExtension: "mp3")
//put it in an AVAudioFile
let audioFile = try! AVAudioFile(forReading: url!)
//Get the audio file format
//let format = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: file.fileFormat.sampleRate, channels: file.fileFormat.channelCount, interleaved: false)
let audioFormat = audioFile.processingFormat
let audioFrameCount = UInt32(audioFile.length)
//how many channels?
print(audioFile.fileFormat.channelCount)
print(audioFrameCount)
//Setup the buffer for audio data
let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(audioFile.length))
//put audio data in the buffer
try! audioFile.read(into: audioFileBuffer!)
//readFile.arrayFloatValues = Array(UnsafeBufferPointer(start: audioFileBuffer!.floatChannelData?[0], count:Int(audioFileBuffer!.frameLength)))
//Init engine and player
let mainMixer = audioEngine.mainMixerNode
audioEngine.attach(audioPlayer)
audioEngine.connect(audioPlayer, to:mainMixer, format: audioFileBuffer!.format)
audioPlayer.scheduleBuffer(audioFileBuffer!, completionHandler: nil)
audioEngine.prepare()
do {
try audioEngine.start()
print("engine started")
} catch let error {
print(error.localizedDescription)
}
audioPlayer.play()
}
}
I can see the channel count, the FrameCount.
I can't hear anything. What am I doing wrong?
Here is what I get in the console:
2
17414784
Optional(0x00006080000006c0)
2018-10-09 21:21:02.161593+0200 spectrum[1668:327525] [AudioHAL_Client] AudioHardware.cpp:666:AudioObjectGetPropertyData: AudioObjectGetPropertyData: no object with given ID 0
engine started
2018-10-09 21:21:02.594136+0200 spectrum[1668:327593] MessageTracer: Falling back to default whitelist

here is the answer:
Can't play file from documents in AVAudioPlayer
which leads to:
import Foundation
import AppKit
import AudioToolbox
import AVFoundation
struct readFile {
static var arrayFloatValues:[Float] = []
static var points:[CGFloat] = []
}
let audioEngine: AVAudioEngine = AVAudioEngine()
let audioPlayer: AVAudioPlayerNode = AVAudioPlayerNode()
class AudioAnalisys : NSObject {
class func open_audiofile() {
//get where the file is
let url = Bundle.main.url(forResource: "TeamPlaylist", withExtension: "mp3")
//put it in an AVAudioFile
let audioFile = try! AVAudioFile(forReading: url!)
//Get the audio file format
//let format = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: file.fileFormat.sampleRate, channels: file.fileFormat.channelCount, interleaved: false)
let audioFormat = audioFile.processingFormat
let audioFrameCount = UInt32(audioFile.length)
//how many channels?
print(audioFile.fileFormat.channelCount)
print(audioFrameCount)
//Setup the buffer for audio data
let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(audioFile.length))
//put audio data in the buffer
try! audioFile.read(into: audioFileBuffer!)
//readFile.arrayFloatValues = Array(UnsafeBufferPointer(start: audioFileBuffer!.floatChannelData?[0], count:Int(audioFileBuffer!.frameLength)))
//Init engine and player
let mainMixer = audioEngine.mainMixerNode
audioEngine.attach(audioPlayer)
audioEngine.connect(audioPlayer, to:mainMixer, format: audioFileBuffer!.format)
audioPlayer.scheduleBuffer(audioFileBuffer!, completionHandler: nil)
audioEngine.prepare()
do {
try audioEngine.start()
print("engine started")
} catch let error {
print(error.localizedDescription)
}
audioPlayer.play()
}
}

Related

[Swift]I want to instantly save the sound with AVAudioEngine's effect as a file

I'm creating a process to read an existing audio file, add an effect using AVAudioEngine, and then save it as another audio file.
However, with the following method using an AVAudioPlayerNode, the save process must wait until the end of playback.
import UIKit
import AVFoundation
class ViewController: UIViewController {
let engine = AVAudioEngine()
let playerNode = AVAudioPlayerNode()
let reverbNode = AVAudioUnitReverb()
override func viewDidLoad() {
super.viewDidLoad()
do {
let url = URL(fileURLWithPath: Bundle.main.path(forResource: "original", ofType: "mp3")!)
let file = try AVAudioFile(forReading: url)
// playerNode
engine.attach(playerNode)
// reverbNode
reverbNode.loadFactoryPreset(.largeChamber)
reverbNode.wetDryMix = 5.0
engine.attach(reverbNode)
engine.connect(playerNode, to: reverbNode, format: file.processingFormat)
engine.connect(reverbNode, to: engine.mainMixerNode, format: file.processingFormat)
playerNode.scheduleFile(file, at: nil, completionCallbackType: .dataPlayedBack){ [self] _ in
reverbNode.removeTap(onBus: 0)
}
// start
try engine.start()
playerNode.play()
let url2 = URL(fileURLWithPath: fileInDocumentsDirectory(filename: "changed.wav"))
let outputFile = try! AVAudioFile(forWriting: url2, settings: playerNode.outputFormat(forBus: 0).settings)
reverbNode.installTap(onBus: 0, bufferSize: AVAudioFrameCount(reverbNode.outputFormat(forBus: 0).sampleRate), format: reverbNode.outputFormat(forBus: 0)) { (buffer, when) in
do {
try outputFile.write(from: buffer)
} catch let error {
print(error)
}
}
} catch {
print(error.localizedDescription)
}
}
func getDocumentsURL() -> NSURL {
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as NSURL
return documentsURL
}
func fileInDocumentsDirectory(filename: String) -> String {
let fileURL = getDocumentsURL().appendingPathComponent(filename)
return fileURL!.path
}
}
Is there a way to complete the writing without waiting for the playback to complete? My ideal is to complete the write in the time required by CPU and storage performance.
It seems that
reverbNode.installTap(...) { (buffer, when) in ...}
in the code is processed in parallel with the current playback position. But I would like to dramatically improve the processing speed.
Best regards.

Change BPM in real time with AVAudioEngine using Swift

Hello I am trying to implement simple audio app using AVAudioEngine, which plays short wav audio files in a loop at some bpm, that can be changed in real time (by slider or something).
Current solution logic:
set bpm=60
create audioFile from sample.wav
calculate bufferSize: AVAudioFrameCount(audioFile.processingFormat.sampleRate * 60 / Double(bpm))
set bufferSize to audioBuffer
load file audioFile into audioBuffer.
schedule audioBuffer to play
This solution works, but the issue is - if I want to change bpm I need to recreate buffer with different bufferSize, so it will not be in real time, since I need to stop player and reschedule buffer with different bufferSize.
Any thoughts how it can be done ?
Thanks in advance !
Code (main part):
var bpm:Float = 30
let engine = AVAudioEngine()
var player = AVAudioPlayerNode()
var audioBuffer: AVAudioPCMBuffer?
var audioFile: AVAudioFile?
override func viewDidLoad() {
super.viewDidLoad()
audioFile = loadfile(from: "sound.wav")
audioBuffer = tickBuffer(audioFile: audioFile!)
engine.attach(player)
engine.connect(player, to: engine.mainMixerNode, format: audioFile?.processingFormat)
do {
engine.prepare()
try engine.start()
} catch {
print(error)
}
}
private func loadfile(from fileName: String) -> AVAudioFile? {
let path = Bundle.main.path(forResource: fileName, ofType: nil)!
let url = URL(fileURLWithPath: path)
do {
let audioFile = try AVAudioFile(forReading: url)
return audioFile
} catch {
print("Error loading buffer1 \(error)")
}
return nil
}
func tickBuffer(audioFile: AVAudioFile) -> AVAudioPCMBuffer {
let periodLength = AVAudioFrameCount(audioFile.processingFormat.sampleRate * 60 / Double(bpm))
let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat, frameCapacity: periodLength)!
try! audioFile.read(into: buffer)
buffer.frameLength = periodLength
return buffer
}
func play() {
player.scheduleBuffer(audioBuffer, at: nil, options: .loops, completionHandler: nil)
player.play()
}
func stop() {
player.stop()
}

AVAudioEngine doesn't playback a sound

I am trying to play with AVAudioEngine to playback the wav file. I tried to do it in a few different ways, but nothing work.
Try 1
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()
func Play1() {
guard let filePath = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
print("file not found")
return
}
print("\(filePath)")
guard let audioFile = try? AVAudioFile(forReading: filePath) else{ return }
let audioFormat = audioFile.processingFormat
let audioFrameCount = UInt32(audioFile.length)
guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else{ return }
do{
try audioFile.read(into: audioFileBuffer)
} catch{
print("over")
}
let mainMixer = audioEngine.mainMixerNode
audioEngine.attach(audioFilePlayer)
audioEngine.connect(audioFilePlayer, to:mainMixer, format: audioFileBuffer.format)
audioEngine.connect(mainMixer, to:audioEngine.outputNode, format: audioFileBuffer.format)
try? audioEngine.start()
audioFilePlayer.play()
audioFilePlayer.scheduleBuffer(audioFileBuffer, at: nil, options:AVAudioPlayerNodeBufferOptions.loops)
}
...
Try 2
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()
func Play2() {
DispatchQueue.global(qos: .background).async {
self.audioEngine.attach(self.mixer)
self.audioEngine.connect(self.mixer, to: self.audioEngine.outputNode, format: nil)
// !important - start the engine *before* setting up the player nodes
try! self.audioEngine.start()
let audioPlayer = AVAudioPlayerNode()
self.audioEngine.attach(audioPlayer)
// Notice the output is the mixer in this case
self.audioEngine.connect(audioPlayer, to: self.mixer, format: nil)
guard let fileUrl = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
// guard let url = Bundle.main.url(forResource: "audiotest", withExtension: "mp3", subdirectory: "res") else {
print("mp3 not found")
return
}
do {
let file = try AVAudioFile(forReading: fileUrl)
audioPlayer.scheduleFile(file, at: nil, completionHandler: nil)
audioPlayer.play(at: nil)
} catch let error {
print(error.localizedDescription)
}
}
}
...
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()
func Play3() {
DispatchQueue.global(qos: .background).async {
self.audioEngine = AVAudioEngine()
_ = self.audioEngine.mainMixerNode
self.audioEngine.prepare()
do {
try self.audioEngine.start()
} catch {
print(error)
}
guard let url = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
// guard let url = Bundle.main.url(forResource: "audiotest", withExtension: "mp3", subdirectory: "res") else {
print("mp3 not found")
return
}
let player = AVAudioPlayerNode()
player.volume = 1.0
do {
let audioFile = try AVAudioFile(forReading: url)
let format = audioFile.processingFormat
print(format)
self.audioEngine.attach(player)
self.audioEngine.connect(player, to: self.audioEngine.mainMixerNode, format: format)
player.scheduleFile(audioFile, at: nil, completionHandler: nil)
} catch let error {
print(error.localizedDescription)
}
player.play()
}
}
...
Also should be mentioned that there are no errors, while debugging I see that all the methods are executed and everything is ok, but I don't hear sound playback...
What am I doing wrong?
Try to activate your audio session with the following method:
func setActive(_ active: Bool, options: AVAudioSession.SetActiveOptions = []) throws.
Please note that if another active audio session has higher priority than yours (for example, a phone call), and neither audio session allows mixing, attempting to activate your audio session fails. Deactivating an audio session that has running audio objects stops them, deactivates the session, and return an AVAudioSession.ErrorCode.isBusy error.

Swift5 Play aac file AvAudioPlayer

I currently play a wav file with the code below. But now i have compressed the files to aac-files and i can't figure out how to play them? I tried to change the withExtension to "aac" instead but no sound. Any ideas?
guard let url = Bundle.main.url(forResource: fileName, withExtension: "aac") else { return }
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
let audioPlayer = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.ac3.rawValue)
audioPlayer.volume = currentVolume
newAudioPlayer.audioPlayer.play()
} catch let error {
print(error.localizedDescription)
}
If you need to play AAC files you can use AVAudioEngine audio player node:
import UIKit
import AVFoundation
class ViewController: UIViewController {
let audioEngine = AVAudioEngine()
let player = AVAudioPlayerNode()
override func viewDidLoad() {
super.viewDidLoad()
let url = Bundle.main.url(forResource: "audio_name", withExtension: "aac")!
do {
let audioFile = try AVAudioFile(forReading: url)
guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat, frameCapacity: .init(audioFile.length)) else { return }
try audioFile.read(into: buffer)
audioEngine.attach(player)
audioEngine.connect(player, to: audioEngine.mainMixerNode, format: buffer.format)
try audioEngine.start()
player.play()
player.scheduleBuffer(buffer, at: nil, options: .loops)
} catch {
print(error)
}
}
}

How to Play Audio Using AVAudioEngine and AVAudioPlayerNode in Swift?

I'm trying to play audio using AVAudioEngine and AVAudioPlayerNode. However, in "engine.connect...", I get this:
"PlaySound[21867:535102] [AudioHAL_Client] AudioHardware.cpp:666:AudioObjectGetPropertyData: AudioObjectGetPropertyData: no object with given ID 0"
The code after "engine.connect..." seems to run because it prints "buffer?.format", but there's no sound.
Could someone let me know what I'm missing? Here's my testing code.
let audioNode = AVAudioPlayerNode()
let path = Bundle.main.path(forResource: "Sounds/Test.mp3", ofType: nil)
let url = URL(fileURLWithPath: path!)
let file = try! AVAudioFile(forReading: url)
let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity: AVAudioFrameCount(file.length))
try! file.read(into:buffer!)
let engine = AVAudioEngine()
engine.attach(audioNode)
engine.connect(audioNode, to: engine.mainMixerNode, format: buffer?.format)
// PlaySound[21867:535102] [AudioHAL_Client] AudioHardware.cpp:666:AudioObjectGetPropertyData: AudioObjectGetPropertyData: no object with given ID 0
engine.prepare()
try! engine.start()
audioNode.play()
audioNode.scheduleBuffer(buffer!, at: nil, options: .loops, completionHandler: nil)
debugPrint(buffer?.format)
// Optional(<AVAudioFormat 0x60000212d1d0: 1 ch, 44100 Hz, Float32>)
In engine.connect(), I also tried engine.outputNode.outputFormat(forBus: 0) instead of buffer?.format. I got no luck.
It works if I put engine outside like this:
class ViewController: NSViewController {
let engine = AVAudioEngine()
override func viewDidLoad() {