I currently play a WAV file with the code below. Now I have compressed the files to AAC and I can't figure out how to play them. I tried changing the withExtension parameter to "aac", but there is no sound. Any ideas?
guard let url = Bundle.main.url(forResource: fileName, withExtension: "aac") else { return }
do {
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
    try AVAudioSession.sharedInstance().setActive(true)
    let audioPlayer = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.ac3.rawValue)
    audioPlayer.volume = currentVolume
    audioPlayer.play()
} catch let error {
    print(error.localizedDescription)
}
If you need to play AAC files, you can use an AVAudioEngine audio player node:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    let audioEngine = AVAudioEngine()
    let player = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()

        let url = Bundle.main.url(forResource: "audio_name", withExtension: "aac")!
        do {
            let audioFile = try AVAudioFile(forReading: url)
            guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat, frameCapacity: .init(audioFile.length)) else { return }
            try audioFile.read(into: buffer)

            audioEngine.attach(player)
            audioEngine.connect(player, to: audioEngine.mainMixerNode, format: buffer.format)
            try audioEngine.start()

            player.play()
            player.scheduleBuffer(buffer, at: nil, options: .loops)
        } catch {
            print(error)
        }
    }
}
I'm creating a process to read an existing audio file, add an effect using AVAudioEngine, and then save it as another audio file.
However, with the following method using an AVAudioPlayerNode, the save process must wait until the end of playback.
import UIKit
import AVFoundation

class ViewController: UIViewController {

    let engine = AVAudioEngine()
    let playerNode = AVAudioPlayerNode()
    let reverbNode = AVAudioUnitReverb()

    override func viewDidLoad() {
        super.viewDidLoad()
        do {
            let url = URL(fileURLWithPath: Bundle.main.path(forResource: "original", ofType: "mp3")!)
            let file = try AVAudioFile(forReading: url)

            // playerNode
            engine.attach(playerNode)

            // reverbNode
            reverbNode.loadFactoryPreset(.largeChamber)
            reverbNode.wetDryMix = 5.0
            engine.attach(reverbNode)

            engine.connect(playerNode, to: reverbNode, format: file.processingFormat)
            engine.connect(reverbNode, to: engine.mainMixerNode, format: file.processingFormat)

            playerNode.scheduleFile(file, at: nil, completionCallbackType: .dataPlayedBack) { [self] _ in
                reverbNode.removeTap(onBus: 0)
            }

            // start
            try engine.start()
            playerNode.play()

            let url2 = URL(fileURLWithPath: fileInDocumentsDirectory(filename: "changed.wav"))
            let outputFile = try! AVAudioFile(forWriting: url2, settings: playerNode.outputFormat(forBus: 0).settings)

            reverbNode.installTap(onBus: 0, bufferSize: AVAudioFrameCount(reverbNode.outputFormat(forBus: 0).sampleRate), format: reverbNode.outputFormat(forBus: 0)) { (buffer, when) in
                do {
                    try outputFile.write(from: buffer)
                } catch let error {
                    print(error)
                }
            }
        } catch {
            print(error.localizedDescription)
        }
    }

    func getDocumentsURL() -> NSURL {
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as NSURL
        return documentsURL
    }

    func fileInDocumentsDirectory(filename: String) -> String {
        let fileURL = getDocumentsURL().appendingPathComponent(filename)
        return fileURL!.path
    }
}
Is there a way to complete the writing without waiting for playback to finish? Ideally, the write should take only as long as the CPU and storage performance require.
It seems that
reverbNode.installTap(...) { (buffer, when) in ... }
in the code runs in parallel with the current playback position, but I would like to dramatically improve the processing speed.
Best regards.
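If you don't need real-time playback at all, one approach is AVAudioEngine's offline manual rendering mode, which pulls audio through the graph as fast as the CPU and disk allow instead of at playback speed. Below is a minimal sketch, not the original project's code: the "original" resource name, the "changed.caf" output path, and the renderWithReverbOffline function name are assumptions for illustration.
import AVFoundation

func renderWithReverbOffline() throws {
    let engine = AVAudioEngine()
    let playerNode = AVAudioPlayerNode()
    let reverbNode = AVAudioUnitReverb()

    // Source file and graph, mirroring the question (resource name assumed).
    let sourceURL = Bundle.main.url(forResource: "original", withExtension: "mp3")!
    let sourceFile = try AVAudioFile(forReading: sourceURL)
    let format = sourceFile.processingFormat

    engine.attach(playerNode)
    reverbNode.loadFactoryPreset(.largeChamber)
    reverbNode.wetDryMix = 5.0
    engine.attach(reverbNode)
    engine.connect(playerNode, to: reverbNode, format: format)
    engine.connect(reverbNode, to: engine.mainMixerNode, format: format)

    // Switch to offline manual rendering *before* starting the engine.
    let maxFrames: AVAudioFrameCount = 4096
    try engine.enableManualRenderingMode(.offline, format: format, maximumFrameCount: maxFrames)
    try engine.start()

    playerNode.scheduleFile(sourceFile, at: nil)
    playerNode.play()

    // Destination file in Documents (path and container are assumptions).
    let outputURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        .appendingPathComponent("changed.caf")
    let outputFile = try AVAudioFile(forWriting: outputURL, settings: format.settings)

    // Pull rendered audio out of the engine as fast as possible and write it to disk.
    let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat,
                                  frameCapacity: engine.manualRenderingMaximumFrameCount)!
    while engine.manualRenderingSampleTime < sourceFile.length {
        let framesLeft = sourceFile.length - engine.manualRenderingSampleTime
        let framesToRender = min(buffer.frameCapacity, AVAudioFrameCount(framesLeft))
        let status = try engine.renderOffline(framesToRender, to: buffer)
        switch status {
        case .success:
            try outputFile.write(from: buffer)
        case .insufficientDataFromInputNode, .cannotDoInCurrentContext:
            break // nothing to render right now; try again
        case .error:
            throw NSError(domain: "OfflineRender", code: -1)
        @unknown default:
            break
        }
    }

    playerNode.stop()
    engine.stop()
}
The whole file is rendered in the while loop, so the write finishes in roughly the time the processing itself takes, independent of the file's playback duration.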
I am trying to use AVAudioEngine to play back a WAV file. I tried to do it in a few different ways, but nothing works.
Try 1
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

func Play1() {
    guard let filePath = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
        print("file not found")
        return
    }
    print("\(filePath)")

    guard let audioFile = try? AVAudioFile(forReading: filePath) else { return }
    let audioFormat = audioFile.processingFormat
    let audioFrameCount = UInt32(audioFile.length)
    guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else { return }
    do {
        try audioFile.read(into: audioFileBuffer)
    } catch {
        print("over")
    }

    let mainMixer = audioEngine.mainMixerNode
    audioEngine.attach(audioFilePlayer)
    audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
    audioEngine.connect(mainMixer, to: audioEngine.outputNode, format: audioFileBuffer.format)

    try? audioEngine.start()

    audioFilePlayer.play()
    audioFilePlayer.scheduleBuffer(audioFileBuffer, at: nil, options: AVAudioPlayerNodeBufferOptions.loops)
}
...
Try 2
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

func Play2() {
    DispatchQueue.global(qos: .background).async {
        self.audioEngine.attach(self.mixer)
        self.audioEngine.connect(self.mixer, to: self.audioEngine.outputNode, format: nil)
        // !important - start the engine *before* setting up the player nodes
        try! self.audioEngine.start()

        let audioPlayer = AVAudioPlayerNode()
        self.audioEngine.attach(audioPlayer)
        // Notice the output is the mixer in this case
        self.audioEngine.connect(audioPlayer, to: self.mixer, format: nil)

        guard let fileUrl = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
            // guard let url = Bundle.main.url(forResource: "audiotest", withExtension: "mp3", subdirectory: "res") else {
            print("mp3 not found")
            return
        }

        do {
            let file = try AVAudioFile(forReading: fileUrl)
            audioPlayer.scheduleFile(file, at: nil, completionHandler: nil)
            audioPlayer.play(at: nil)
        } catch let error {
            print(error.localizedDescription)
        }
    }
}
...
Try 3
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

func Play3() {
    DispatchQueue.global(qos: .background).async {
        self.audioEngine = AVAudioEngine()
        _ = self.audioEngine.mainMixerNode

        self.audioEngine.prepare()
        do {
            try self.audioEngine.start()
        } catch {
            print(error)
        }

        guard let url = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
            // guard let url = Bundle.main.url(forResource: "audiotest", withExtension: "mp3", subdirectory: "res") else {
            print("mp3 not found")
            return
        }

        let player = AVAudioPlayerNode()
        player.volume = 1.0

        do {
            let audioFile = try AVAudioFile(forReading: url)
            let format = audioFile.processingFormat
            print(format)
            self.audioEngine.attach(player)
            self.audioEngine.connect(player, to: self.audioEngine.mainMixerNode, format: format)
            player.scheduleFile(audioFile, at: nil, completionHandler: nil)
        } catch let error {
            print(error.localizedDescription)
        }

        player.play()
    }
}
...
It should also be mentioned that there are no errors; while debugging I can see that all the methods are executed and everything looks fine, but I don't hear any sound.
What am I doing wrong?
Try to activate your audio session with the following method:
func setActive(_ active: Bool, options: AVAudioSession.SetActiveOptions = []) throws.
Please note that if another active audio session has higher priority than yours (for example, a phone call) and neither audio session allows mixing, attempting to activate your audio session fails. Deactivating an audio session that has running audio objects stops them, deactivates the session, and returns an AVAudioSession.ErrorCode.isBusy error.
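For example, a minimal sketch of activating a playback session before starting the engine (the category and mode here are just one reasonable choice, not something from the question):
import AVFoundation

do {
    // Configure and activate the shared audio session before playback starts.
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print("Failed to activate the audio session: \(error)")
}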
I want to play a sound after each network request is successful. I have some objects that I want to send to the server using a for loop. In each iteration I'm sending a request to the server and after each request is complete, I want to play a sound.
I've tried this:
for object in objects {
    sendObjectToServer(object: object)
}

func playSound() {
    let url = Bundle.main.url(forResource: "sound", withExtension: "mp3")!
    let player = try! AVAudioPlayer(contentsOf: url)
    player.play()
}

func sendObjectToServer(object: Object) {
    URLSession.shared.dataTask(with: object.url) { (data, response, error) in
        DispatchQueue.main.async {
            playSound() // doesn't play sound synchronously
        }
    }.resume()
}
Why are you using the main thread for background-related operations? Only update your UI inside the DispatchQueue.main.async block:
Try this on a physical device (tested and worked)
import AVFoundation
for object in objects {
    DispatchQueue.global(qos: .background).async {
        sendObjectToServer(object: object)
    }
}
var player: AVAudioPlayer?

func playSound() {
    guard let url = Bundle.main.url(forResource: "sound", withExtension: "mp3") else { return }
    do {
        try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
        try AVAudioSession.sharedInstance().setActive(true)

        /* The following line is required for the player to work on iOS 11. Change the file type accordingly. */
        player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.mp3.rawValue)

        /* iOS 10 and earlier require the following line:
        player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileTypeMPEGLayer3) */

        guard let player = player else { return }
        player.play()
    } catch let error {
        print(error.localizedDescription)
    }
}
func sendObjectToServer(object: Object) {
    URLSession.shared.dataTask(with: object.url) { (data, response, error) in
        // Custom Logic
        playSound()
    }.resume()
}
I need to download an audio file and then play it. I wrote the code for downloading, but I don't know how to pass the downloaded file to AVAudioPlayer.
@IBAction func downloadButton(_ sender: Any) {
    if let audioUrl = URL(string: "https://www.dropbox.com/s/qwqxsde1yo5m1mz/Track04.mp3?dl=0") {

        // then lets create your document folder url
        let documentsDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!

        // lets create your destination file url
        let destinationUrl = documentsDirectoryURL.appendingPathComponent(audioUrl.lastPathComponent)
        print(destinationUrl)

        // to check if it exists before downloading it
        if FileManager.default.fileExists(atPath: destinationUrl.path) {
            print("The file already exists at path")

        // if the file doesn't exist
        } else {
            let task = URLSession.shared.downloadTask(with: audioUrl) { (location, response, error) in
                guard let location = location else { return }
                do {
                    try FileManager.default.moveItem(at: location, to: destinationUrl)
                    print("File moved to documents folder")
                } catch {
                    print("error")
                }
            }
            task.resume()
        }
    }
}

@IBAction func playAudio(_ sender: Any) {
    ????????
}
Just create an AVAudioPlayer instance with the local file URL, as below:
if FileManager.default.fileExists(atPath: destinationUrl.path) {
    do {
        self.player = try AVAudioPlayer(contentsOf: destinationUrl)
        player.prepareToPlay()
        player.volume = 1
        player.play()
    } catch {
        print(error)
    }
}
Use your destinationUrl to get the music file from the documents directory and then hand it to the player object.
func playSongUsing(_ url: URL) {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
        try AVAudioSession.sharedInstance().setActive(true)

        // For iOS 11
        objPlayer = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.mp3.rawValue)

        // For iOS versions < 11, use instead:
        // objPlayer = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileTypeMPEGLayer3)

        guard let aPlayer = objPlayer else { return }
        aPlayer.play()
    } catch let error {
        print(error.localizedDescription)
    }
}
How do I create a function from this code in Swift 3?
I have a button which, when pressed, plays the sound "push".
How can it be simplified when there are lots of buttons? I don't want to add all of this code to every button.
var myAudio = AVAudioPlayer()

// Add sound
do {
    try myAudio = AVAudioPlayer(contentsOf: NSURL(fileURLWithPath:
        Bundle.main.path(forResource: "push", ofType: "mp3")!) as URL)
} catch {
    NSLog("No file!")
}

// call the sound
myAudio.play()
I made this change
func play(name: String) {
    do {
        try myAudio = AVAudioPlayer(contentsOf: NSURL(fileURLWithPath:
            Bundle.main.path(forResource: name, ofType: "mp3")!) as URL)
    } catch {
        NSLog("No file!")
    }
}

@IBAction func buttonFirst(_ sender: UIButton) {
    play(name: "push")
    myAudio.play()
}

@IBAction func buttonSecond(_ sender: UIButton) {
    play(name: "second")
    myAudio.play()
}
I got this output:
2017-07-25 16:13:23.270349+0100 sound[1728:933024] [aqme] 254: AQDefaultDevice (173): skipping input stream 0 0 0x0
Is that a problem?
I think you forgot to call prepareToPlay():
var audioPlayer = AVAudioPlayer()
let sound = URL(fileURLWithPath: Bundle.main.path(forResource: "sound", ofType: "mp3")!)
try! AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
try! AVAudioSession.sharedInstance().setActive(true)
try! audioPlayer = AVAudioPlayer(contentsOf: sound)
audioPlayer.prepareToPlay()
audioPlayer.play()
You can use it in the following way
var myAudio: AVAudioPlayer?

func playSound() {
    let path = Bundle.main.path(forResource: "push", ofType: "mp3")!
    let url = URL(fileURLWithPath: path)
    do {
        let sound = try AVAudioPlayer(contentsOf: url)
        self.myAudio = sound
        sound.numberOfLoops = 1
        sound.prepareToPlay()
        sound.play()
    } catch {
        print("error loading file")
        // couldn't load file :(
    }
}
Furthermore, you can use SwiftySound, which lets you play sounds easily in Swift 3. For example:
Sound.play(file: "push.mp3")