Playing a sound on macOS - swift

I'd like to play a sound in my app. I found a lot of examples and could also compile and run them with Swift 3. But they are always for iOS. When implementing this code in my app, AVAudioSession stays undefined. What do I have to do? Is there something different for macOS?
import AVFoundation
...
// NOTE(review): AVAudioSession is iOS/tvOS/watchOS-only; it does not exist in
// the macOS SDK, which is exactly why the compiler reports it as undefined
// there. On macOS, drop both AVAudioSession calls (or use NSSound) —
// AVAudioPlayer itself IS available on macOS.
var audioPlayer = AVAudioPlayer()
func PlaySound( ) {
// Force-unwraps the bundle path: crashes if "Sound.mp3" is missing from the target.
let alertSound = URL(fileURLWithPath: Bundle.main.path(forResource: "Sound", ofType: "mp3")!)
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
} catch _ {
// error silently ignored
}
do {
try AVAudioSession.sharedInstance().setActive(true)
} catch _ {
// error silently ignored
}
do {
audioPlayer = try AVAudioPlayer(contentsOf: alertSound)
} catch _{
// error silently ignored; audioPlayer keeps its previous value
}
audioPlayer.prepareToPlay()
audioPlayer.play()
}

Swift 3 / 4 / 4.2 / 5 - Easiest solution:
NSSound(named: "customSound")?.play()

I've used the following function in conjunction with Swift 3 and a Cocoa project.
import AVFoundation
// Retains the most recent player. A purely local AVAudioPlayer can be
// released by ARC as soon as the function returns, cutting playback short.
private var activePlayer: AVAudioPlayer?

/// Plays a bundled audio resource, e.g. playSound(file: "sound", ext: "caf").
/// - Parameters:
///   - file: resource name without extension
///   - ext: the file extension (e.g. "caf", "mp3")
func playSound(file:String, ext:String) -> Void {
    // Previously force-unwrapped: a missing resource crashed the app.
    guard let url = Bundle.main.url(forResource: file, withExtension: ext) else {
        print("Audio resource \(file).\(ext) not found in the main bundle.")
        return
    }
    do {
        let player = try AVAudioPlayer(contentsOf: url)
        activePlayer = player // keep a strong reference while the sound plays
        player.prepareToPlay()
        player.play()
    } catch let error {
        print(error.localizedDescription)
    }
}
// usage //
playSound(file: "sound", ext: "caf") // where the file name is sound.caf.
Make sure that the target membership checkbox is on when you select an audio file in your Xcode project.

A modified version of El Tomato's answer,
here's the swift 4.2 class that I use quite often for sound:
Usage:
/// EXAMPLE 1
//calling this will create a "bop.m4a" player in memory, and, play it immediately
MakeSound.shared.playSound("bop.m4a")
//calling it a second time has a lot less overhead because the player is already in memory
MakeSound.shared.playSound("bop.m4a")
///EXAMPLE 2
//simply calls the above many times
MakeSound.shared.playSound(["bop.m4a", "beep.m4a", "bong.m4a"])
// EXAMPLE 3
//registers a shorthand, for quick calling
MakeSound.shared.registerSound(fileName: "bop", fileExtension: "m4a", shortHand: "b")
//plays using the shorthand, mostly handy because you define the actual file once,
//and can change the one line of code without having to change every reference
MakeSound.shared.playSound("b")
and the class:
/// Lightweight sound helper that caches AVAudioPlayer instances by a
/// user-chosen shorthand so repeated playback avoids re-reading the file.
class MakeSound {

    /// Cached players, keyed by shorthand (or "name.ext" when auto-registered).
    private var players = [String : AVAudioPlayer]()
    static var shared = MakeSound()

    /// Registers (if needed) and plays "fileName.fileExtension".
    func playSound(fileName: String, fileExtension: String) {
        if registerSound(fileName: fileName, fileExtension: fileExtension, shortHand: fileName + "." + fileExtension) {
            playSound(fileName + "." + fileExtension)
        }
    }

    /// Plays several shorthands in order.
    func playSound(_ shortHands: [String]) {
        for sh in shortHands {
            playSound(sh)
        }
    }

    /// playSound("shorthand") OR playSound("mySound.mp3")
    func playSound(_ shortHand: String) {
        if let player = players[shortHand] {
            player.prepareToPlay()
            player.play()
        } else if shortHand.contains(".") {
            // Treat the argument as "name.ext": everything before the LAST
            // dot is the resource name (names may themselves contain dots).
            var comp = shortHand.components(separatedBy: ".")
            let ext = comp.last!
            comp.removeLast()
            if registerSound(fileName: comp.joined(separator: "."), fileExtension: ext, shortHand: shortHand) {
                playSound(shortHand)
            }
        } else {
            print("Sound request sent to makesound, no such shorthand, not a valid filename either.")
        }
    }

    /// Registers a sound under `shortHand`; returns false when the resource
    /// is missing or the player cannot be created.
    /// NOTE: the original post had "#discardableResult" — a transcription
    /// error; the Swift attribute is spelled with "@".
    @discardableResult
    func registerSound(fileName: String, fileExtension: String, shortHand: String) -> Bool {
        guard let url = Bundle.main.url(forResource: fileName, withExtension: fileExtension) else {
            print("Unable to register sound: \(shortHand). Filepath not valid.")
            return false
        }
        do {
            let player = try AVAudioPlayer(contentsOf: url)
            players[shortHand] = player
        } catch let error {
            print("Audioplayer \"\(shortHand)\" unable to initialize:\n" + error.localizedDescription)
            return false
        }
        return true
    }
}
Now I will admit that NSSound is by far the easiest and most straightforward, yet some of us prefer AVFoundation because we have a bit more control. (And such control is ironically removed in my class.)

Even though it is little old-fashioned, you can simply enter the filename in the Attributes Inspector :
All you need to do first is to integrate a sound file into your project:

Related

Recording speech synthesis to a saved file

Below is the code I've put together to attempt to take a phrase, save it to a file, then play that saved file. Not sure what area isn't working (not correct file name, not saving the file, not finding the file). Any help would be appreciated. (The speakPhrase is just a helper function to let me know that the speech synthesizer actually works, which it does).
import AVFoundation
import Foundation
// Bridges AVSpeechSynthesizer output to a file and plays it back.
// NOTE(review): saveAVSpeechUtteranceToFile below creates a brand-new
// AVAudioFile on EVERY buffer callback, so only the last buffer survives —
// this is the bug that produces a tiny, near-silent file (the fix that
// follows in the thread creates the file once and appends to it).
class Coordinator {
let synthesizer: AVSpeechSynthesizer
var player: AVAudioPlayer?
init() {
let synthesizer = AVSpeechSynthesizer()
self.synthesizer = synthesizer
}
// Destination for the rendered speech inside the app's Documents directory.
var recordingPath: URL {
let soundName = "Finally.caf"
// I've tried numerous file extensions. .caf was in an answer somewhere else. I would think it would be
// .pcm, but that doesn't work either.
// Local Directory
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent(soundName)
}
// Sanity check that speech synthesis itself works (plays aloud, no file).
func speakPhrase(phrase: String) {
let utterance = AVSpeechUtterance(string: phrase)
utterance.voice = AVSpeechSynthesisVoice(language: "en")
synthesizer.speak(utterance)
}
// Attempts to play back whatever was written to recordingPath.
func playFile() {
print("Trying to play the file")
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
player = try AVAudioPlayer(contentsOf: recordingPath, fileTypeHint: AVFileType.caf.rawValue)
guard let player = player else {return}
player.play()
} catch {
print("Error playing file.")
}
}
func saveAVSpeechUtteranceToFile() {
let utterance = AVSpeechUtterance(string: "This is speech to record")
utterance.voice = AVSpeechSynthesisVoice(language: "en-US")
utterance.rate = 0.50
// This closure fires once per synthesized chunk, not once per utterance.
synthesizer.write(utterance) { [self] (buffer: AVAudioBuffer) in
guard let pcmBuffer = buffer as? AVAudioPCMBuffer else {
fatalError("unknown buffer type: \(buffer)")
}
if pcmBuffer.frameLength == 0 {
// Done
} else {
// append buffer to file
// BUG: a fresh AVAudioFile is opened per callback, discarding
// everything written by the previous callbacks.
do {
let audioFile = try AVAudioFile(forWriting: recordingPath, settings: pcmBuffer.format.settings, commonFormat: .pcmFormatInt16, interleaved: false)
try audioFile.write(from: pcmBuffer)
} catch {
print(error.localizedDescription)
}
}
}
}
}
Did you notice that the bufferCallback in the function below is called multiple times?
func write(_ utterance: AVSpeechUtterance, toBufferCallback bufferCallback: @escaping AVSpeechSynthesizer.BufferCallback)
So the root cause is pretty simple: the AVSpeechUtterance's audio is divided into multiple parts. On my iPhone, the callback is called about 20 times.
So if you create a new audio file in the closure every time, you will only keep the last part — a very tiny audio file (on my iPhone it was a 6 KB audio file). That audio is barely noticeable if you play it.
So replace the function to
/// Renders a fixed utterance to `recordingPath`, appending every buffer the
/// synthesizer delivers into a single AVAudioFile.
func saveAVSpeechUtteranceToFile() {
    let utterance = AVSpeechUtterance(string: "This is speech to record")
    utterance.voice = AVSpeechSynthesisVoice(language: "en-US")
    utterance.rate = 0.50

    // The write callback fires many times per utterance; the file handle is
    // created lazily on the first non-empty buffer and reused afterwards.
    var audioFile: AVAudioFile?
    synthesizer.write(utterance) { [self] (buffer: AVAudioBuffer) in
        guard let pcmBuffer = buffer as? AVAudioPCMBuffer else {
            fatalError("unknown buffer type: \(buffer)")
        }
        // A zero-length buffer marks the end of synthesis.
        guard pcmBuffer.frameLength > 0 else { return }
        do {
            if audioFile == nil {
                audioFile = try AVAudioFile(
                    forWriting: recordingPath,
                    settings: pcmBuffer.format.settings,
                    commonFormat: .pcmFormatInt16,
                    interleaved: false)
            }
            try audioFile?.write(from: pcmBuffer)
        } catch {
            print(error.localizedDescription)
        }
    }
}
BTW, I uploaded Github Demo here.
Finally, tell you how to inspect the file contents on an iOS device.
Xcode Window Menu -> Device and Simulators, do like below to copy out your app's content.

How to play sound after each network request is successful?

I want to play a sound after each network request is successful. I have some objects that I want to send to the server using a for loop. In each iteration I'm sending a request to the server and after each request is complete, I want to play a sound.
I've tried this:
// NOTE(review): as written this cannot work, for several reasons —
//  1. the data task below is never resume()d, so its completion never runs;
//  2. `player` is local to playSound(), so ARC can release it before audio starts;
//  3. `dataTask(with url:` is not a valid argument label (should be `with:`),
//     and `AVAudioPlayer(url)` is not a valid initializer (should be
//     `contentsOf:`) — presumably paste errors in the question.
for object in objects {
sendObjectToServer(object)
}
func playSound() {
let url = Bundle.main.url(forResource: "sound", withExtension: "mp3")!
let player = try! AVAudioPlayer(url)
player.play()
}
func sendObjectToServer(object: Object) {
URLSession.shared.dataTask(with url: object.url) {(data,response,error) in
DispatchQueue.main.async {
playSound() // doesn't play sound synchronously
}
}
}
Why are you using the main thread for background-related operations? Only update your UI in the DispatchQueue.main.async code:
Try this on a physical device (tested and worked)
import AVFoundation
// Dispatch each upload from a background queue so the network work never
// blocks the main thread.
for object in objects {
DispatchQueue.global(qos: .background).async {
sendObjectToServer(object)
}
}
// Kept at file scope so the player outlives the function call.
var player: AVAudioPlayer?

/// Configures the shared audio session and plays the bundled "sound.mp3".
func playSound() {
    guard let url = Bundle.main.url(forResource: "sound", withExtension: "mp3") else { return }
    do {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(.playback, mode: .default)
        try session.setActive(true)
        // The explicit fileTypeHint is what makes playback work on iOS 11;
        // iOS 10 and earlier used the AVFileTypeMPEGLayer3 string instead.
        player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.mp3.rawValue)
        guard let player = player else { return }
        player.play()
    } catch let error {
        print(error.localizedDescription)
    }
}
/// Uploads `object` and plays the success sound when the response arrives.
/// Fixes two defects in the posted snippet: the argument label must be
/// `with:` (not `with url:`), and a URLSession task does nothing until
/// `resume()` is called — which is why no sound was ever heard.
func sendObjectToServer(object: Object) {
    URLSession.shared.dataTask(with: object.url) { (data, response, error) in
        // Custom Logic
        playSound()
    }.resume()
}

Playing sound in Xcode 9.2

I am having some issues with playing sound in Xcode 9.2.
I tried watching few different tutorials and guides, but with no luck.
Either the code is outdated, my app crashes, the sound doesn't work, or it starts and cuts out after a few milliseconds...
func updateDiceImages() {
// This is where the actual sound is attempted to play,
// it should be triggered every time the button is pressed. The rest of the app works fine.
var soundEffect: AVAudioPlayer?
let path = Bundle.main.path(forResource: "rollingDice.wav", ofType:nil)!
let url = URL(fileURLWithPath: path)
do {
soundEffect = try AVAudioPlayer(contentsOf: url)
soundEffect?.play()
} catch {
print(error)
}
...
I am just not sure what I am doing wrong, tried tons of variations.
I am completely open to giving any information that I can provide. I didn't paste the whole code so it doesn't bother or swamp the people that try to read it.
Thank you so much for the help! =)
This is a taken piece of code from one of my applications, it works completely.
import AVFoundation
/// Singleton wrapper around AVAudioPlayer for app sound effects.
/// NOTE: the original declared its methods `open`, which the compiler
/// rejects on members of a `final` (and non-open) class — only overridable
/// class members can be `open` — so the default internal access is used.
final class AudioManager {

    private init() { }
    static let shared = AudioManager()

    /// Retained so playback isn't cut short by ARC releasing the player.
    private var player: AVAudioPlayer?

    enum SoundType {
        case incomingCall
    }

    /// Plays the sound associated with `type`.
    func playSound(_ type: SoundType) {
        switch type {
        case .incomingCall:
            playIncomingCall()
        }
    }

    // MARK: - Sounds

    /// Looks up "incoming.wav" in the main bundle and loops it until stop().
    private func playIncomingCall() {
        guard let url = Bundle.main.url(forResource: "incoming", withExtension: "wav") else { return }
        playSound(url)
    }

    /// Activates the ambient audio session and starts looping `url`.
    private func playSound(_ url: URL) {
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryAmbient)
            try AVAudioSession.sharedInstance().setActive(true)
            player = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.wav.rawValue)
            guard let player = player else { return }
            player.numberOfLoops = -1 // infinity loop
            player.prepareToPlay()
            player.play()
            debugPrint("playSound play sound")
        } catch {
            debugPrint(error.localizedDescription)
        }
    }

    /// Stops whatever is currently playing.
    func stop() {
        player?.stop()
    }
}

Swift Sound Effects // Why Won't My Code Work?

I have been trying to make a game to teach myself Swift, and can't seem to get this code to work. I am extremely new to this and can't figure out why it won't work... Xcode doesn't flag any problems, the build succeeds, and the debugger even prints "Got to Stage 1" & "Got to Stage 2"... Any help?
I Imported AVFoundation..
class GAME {
class func SuperStartGame(playerwhowon1: SKSpriteNode) {
var player = AVAudioPlayer()
func PlaySound() {
guard let URL = Bundle.main.url(forResource: "PowerUp", withExtension: "mp3")
else {
print("Didn't Find URL")
return
}
do {
print("Got to Stage 1")
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
try AVAudioSession.sharedInstance().setActive(true)
player = try AVAudioPlayer(contentsOf: URL, fileTypeHint: "mp3")
player.prepareToPlay()
player.play()
print("Got to Stage 2")
} catch let error as NSError {
print("error: \(error.localizedDescription)")
}
RoundNumber += 1
Round.text = "Round \(RoundNumber)"
if playerwhowon1 == Mine {
MyScore.run(addscoreM) {
PlaySound()
Round.run(NewRoundForRound) {
...
Code keeps going... that's the only part that is relevant to the sound. I added the sound file to Xcode, and made sure it was added to the project target... it is in my main bundle.
Make sure that your device isn't muted
mp3 file is copied to bundle
Example VC playing sound:
class ViewController: UIViewController {
    var game = Game()

    /// Wired to the button in the storyboard. `#IBAction` in the original
    /// post is a transcription error — the Swift attribute is `@IBAction`.
    @IBAction func playAction(_ sender: UIButton) {
        game.playSound()
    }
}
/// Owns a retained AVAudioPlayer so the sound isn't released mid-playback.
class Game {
    var player: AVAudioPlayer?

    /// Plays the bundled "SampleAudio.mp3" under the playback session category.
    func playSound() {
        guard let soundURL = Bundle.main.url(forResource: "SampleAudio", withExtension: "mp3") else {
            print("Didn't Find URL")
            return
        }
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(AVAudioSessionCategoryPlayback)
            try session.setActive(true)
            let newPlayer = try AVAudioPlayer(contentsOf: soundURL, fileTypeHint: "mp3")
            player = newPlayer
            newPlayer.prepareToPlay()
            newPlayer.play()
        } catch let error as NSError {
            print("\(error.localizedDescription)")
        }
    }
}

Swift - Press Button (completion handler issue)

I want a user to press a button, it changes background color (to yellow), a WAV is played and on completion of the WAV the button reverts to its original color (to red). So have a completion handler around the sound. Have tried various combinations of the code below but the WAV plays and the button doesn't appear to change color.
Is this the wrong approach or am I doing something wrong? Don't want to have to put completion handlers around the color changes as that, I presume, is overkill.
Many thanks.
typealias CompletionHandler = (success:Bool) -> Void

// NOTE: `#IBAction` in the original post is a transcription error for `@IBAction`.
@IBAction func fireButton(sender: AnyObject) {
    playLaser( { (success)-> Void in
        if success {
            self.shots -= 1
            self.labelShotsLeft.text = String(self.shots)
        } else {
        }
    })
}

// NOTE(review): this is the flawed version under discussion — play() returns
// immediately, so the color reset and completionHandler run as soon as
// playback STARTS, not when it finishes. Kept as posted; the delegate-based
// answer in this thread shows the fix.
func playLaser(completionHandler: CompletionHandler) {
    fireButton.layer.backgroundColor = UIColor.yellowColor().CGColor
    let url = NSBundle.mainBundle().URLForResource("laser", withExtension: "wav")!
    do {
        player = try AVAudioPlayer(contentsOfURL: url)
        guard let player = player else { return }
        player.prepareToPlay()
        player.play()
    } catch let error as NSError {
        print(error.description)
    }
    self.fireButton.layer.backgroundColor = UIColor.redColor().CGColor
    completionHandler(success: true)
}
To detect AVAudioPlayer finish playing, you need to use AVAudioPlayerDelegate.
You may need to write something like this:
// Delegate-based fix: store the completion and fire it only when
// AVAudioPlayer reports that playback has actually finished.
func playLaser(completionHandler: CompletionHandler) {
fireButton.layer.backgroundColor = UIColor.yellowColor().CGColor
let url = NSBundle.mainBundle().URLForResource("laser", withExtension: "wav")!
do {
player = try AVAudioPlayer(contentsOfURL: url)
guard let player = player else { return }
player.delegate = self //<- Sorry, this was missing in my first post
player.play()
} catch let error as NSError {
print(error.description)
}
// Held until audioPlayerDidFinishPlaying runs.
audioPlayerCompletionHandler = completionHandler
}
var audioPlayerCompletionHandler: CompletionHandler?
// AVAudioPlayerDelegate callback — playback is genuinely over at this point.
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
self.fireButton.layer.backgroundColor = UIColor.redColor().CGColor
audioPlayerCompletionHandler?(success: true)
}
(You need to add conformance to AVAudioPlayerDelegate to your ViewController's declaration header.)
Code does not magically pause and wait just because you say player.play() — that would be horrible! Thus, your so-called completion handler is not a completion handler at all. It runs immediately — that is, as soon as you start playing. Your code does nothing about obtaining information as to when the audio player has finished playing.
For that, you need to configure a delegate and receive the delegate message that audio player emits when it finishes playing.
This is one of those questions which is a little more subtle than meets the eye. I tried putting three completion handlers around each task: change colour to yellow, play sound, change colour back to red. The code was being executed in the correct sequence as I NSLogged it but the button never changed colour due to screen updating controls. Here is the code that works that I hope other readers might find useful:
Swift 2.0
// NOTE: `#IBAction` in the original post is a transcription error for `@IBAction`.
@IBAction func fireButton(sender: AnyObject) {
    // Hop to a background queue so the main queue can repaint the button
    // before and after the sound plays.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
        dispatch_sync(dispatch_get_main_queue()) {
            self.fireButton.layer.backgroundColor = UIColor.yellowColor().CGColor
        }
        self.playLaser( { (success)-> Void in
            if success {
                self.shots -= 1
            } else {
            }
        })
        dispatch_sync(dispatch_get_main_queue()) {
            self.labelShotsLeft.text = String(self.shots)
            self.fireButton.layer.backgroundColor = UIColor.redColor().CGColor
        }
    }
}
/// Plays the laser WAV and reports the outcome.
/// Fix: the original `guard ... else { return }` bailed out WITHOUT calling
/// the completion, leaving the caller waiting forever; every exit path now
/// reports a result. (The unused `error` binding in the catch is gone too.)
func playLaser(completion: (success: Bool) -> ()) {
    let url = NSBundle.mainBundle().URLForResource("laser", withExtension: "wav")!
    do {
        player = try AVAudioPlayer(contentsOfURL: url)
        guard let player = player else {
            completion(success: false)
            return
        }
        player.play()
        completion(success: true)
    } catch {
        completion(success: false)
    }
}
Swift 3.0
// NOTE: `#IBAction` in the original post is a transcription error for `@IBAction`.
@IBAction func fireButton(_ sender: AnyObject) {
    // A private serial queue drives the disable → play → re-enable sequence
    // so each UI update is flushed before the next step runs.
    let fireQueue = DispatchQueue(label: "queueFirebutton")
    fireQueue.async {
        DispatchQueue.main.sync {
            self.fireButtonDisabled()
        }
        DispatchQueue.main.sync {
            self.playLaser()
            self.shots -= 1
            if self.shots <= 0 {
                self.shots = 0
            }
        }
        DispatchQueue.main.sync {
            if self.shots < 0 { self.shots = 0 }
            self.labelShotsLeft.text = String(self.shots)
            // NOTE(review): sleep(1) runs on the MAIN queue here and freezes
            // the UI for a second; DispatchQueue.main.asyncAfter would
            // re-enable the button without blocking. Kept for behavior parity.
            sleep(1)
            self.fireButtonEnabled()
        }
    }
}
/// Plays the bundled laser sound.
/// Fix: the original empty `catch` silently discarded initialization
/// failures; the error is now logged so a bad or missing file is visible.
func playLaser() {
    let url = Bundle.main.url(forResource: "laser", withExtension: "wav")!
    do {
        player = try AVAudioPlayer(contentsOf: url)
        guard let player = player else { return }
        player.play()
    } catch {
        print("playLaser failed: \(error.localizedDescription)")
    }
}