Here’s the problem I have. My app loads sounds from the web and plays them back. It also has a feature to record voice and play it back. I use two different view controllers for these two features, and I’ve created a singleton to manage the audio.
When I launch the app and go directly to the Record View Controller first, everything works fine: I can record a sound, play it, then go to my second view controller and play my downloaded sounds without any problems.
However, if I launch the app and play the sounds FIRST (using my second view controller) and only then go to the Record View Controller and try to record something, I get a crash with this message: Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: '[[busArray objectAtIndexedSubscript:(NSUInteger)element] setFormat:format error:&nsErr]: returned false, error Error Domain=NSOSStatusErrorDomain Code=-10865 "(null)"'
The crash happens when I tap recordButton, on the line try recorder.record():
func startRecord() {
if AKSettings.headPhonesPlugged {
micBooster.gain = 1 // monitor through the headphones
} else {
micBooster.gain = 0 // keep the mic out of the speaker mix to avoid feedback
}
do {
try recorder.record()
} catch {
print("Can't record because: \(error)")
}
}
This crash happens if I set the session category to .playAndRecord:
try audioSession.setCategory(.playAndRecord, mode: .default, options: .mixWithOthers)
My singleton class:
class AudioKitSingleton {
var mic: AKMicrophone!
var micMixer: AKMixer!
var recorder: AKNodeRecorder!
var tape: AKAudioFile!
var player: AKPlayer!
var micBooster: AKBooster!
var mainMixer: AKMixer!
var url: URL?
var name: String?
var artist: String?
var uploader: String?
var performer: String?
var load: Bool = false
var isPlay: Bool = false
var categoryID: Int? = nil
static let shared = AudioKitSingleton()
func initMic() {
stopAudioKitEngine()
AKAudioFile.cleanTempDirectory()
AKSettings.bufferLength = .medium
AKSettings.defaultToSpeaker = true
setupRecordSession()
mic = AKMicrophone()
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)
micBooster.gain = 0
recorder = try? AKNodeRecorder(node: micMixer)
if let file = recorder.audioFile {
player = AKPlayer(audioFile: file)
}
player.isLooping = false
mainMixer = AKMixer(player, micBooster)
AudioKit.output = mainMixer
startAudioKitEngine()
}
func deinitMic() {
stopAudioKitEngine()
mic = nil
micMixer = nil
recorder = nil
tape = nil
player = nil
micBooster = nil
mainMixer = nil
}
func startRecord() {
if AKSettings.headPhonesPlugged {
micBooster.gain = 1 // monitor through the headphones
} else {
micBooster.gain = 0 // keep the mic out of the speaker mix to avoid feedback
}
do {
try recorder.record()
} catch {
print("Can't record because: \(error)")
}
}
func stopRecord() {
micBooster.gain = 0
tape = recorder.audioFile!
player.load(audioFile: tape)
player.prepare()
if let _ = player.audioFile?.duration {
recorder.stop()
}
}
func resetRecorder() {
do {
try recorder.reset()
} catch {
print("Can't reset recorder because: \(error)")
}
}
func playerPlayRecord() {
player.play()
}
func playerStopRecord() {
player.stop()
}
func setupRecordSession() {
do {
try audioSession.setCategory(.record, mode: .default, options: .mixWithOthers)
} catch {
print(error)
}
}
func startAudioKitEngine() {
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start because: \(error)")
}
}
func stopAudioKitEngine() {
if AudioKit.engine.isRunning {
do {
try AudioKit.stop()
} catch {
AKLog("AudioKit did not start because: \(error)")
}
}
}
func setupPlayer(url: URL) {
if AudioKit.engine.isRunning {
stopAudioKitEngine()
}
player = try? AKPlayer(url: url)
AudioKit.output = player
startAudioKitEngine()
}
func setupPlayer(mixloop: AVAudioFile) {
if AudioKit.engine.isRunning {
stopAudioKitEngine()
}
player = AKPlayer(audioFile: mixloop)
AudioKit.output = player
startAudioKitEngine()
}
func play() {
player?.play()
}
func resume() {
player?.resume()
}
func stop() {
player?.stop()
}
func pause() {
player?.pause()
}
func remove() {
if AudioKit.engine.isRunning {
try? AudioKit.stop()
player?.stop()
player = nil
recorder = nil
AudioKit.output = nil
url = nil
name = nil
artist = nil
uploader = nil
load = false
isPlay = false
}
}
}
This crash happens on all iPhones except an iPhone 5s running iOS 12. Need help.
I'm pretty sure this will be solved in today's AudioKit 4.5.2 release, which is uploading now.
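In the meantime, one workaround worth trying is to configure a single shared audio session, with a category that covers both playback and recording, once before the engine ever starts, instead of switching between .record and playback categories per screen. A minimal sketch, assuming AudioKit 4.x (audioSession stands for AVAudioSession.sharedInstance()), not a confirmed fix:
import AVFoundation

func configureSharedSession() {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // One category for both features, set once before AudioKit.start().
        try audioSession.setCategory(.playAndRecord,
                                     mode: .default,
                                     options: [.defaultToSpeaker, .mixWithOthers])
        try audioSession.setActive(true)
    } catch {
        print("Audio session setup failed: \(error)")
    }
}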
I have a function that downloads an mp3 file from a URL, passes it to AVAudioPlayer, and then plays it in PlayerView. I want to implement a caching feature: when an mp3 is downloaded, it should be cached in the app's files so that if I open it later it isn't downloaded again. I have seen tutorials on how to do this with images, but not with mp3s. How can this be done?
// Audio Manager itself
import Foundation
import AVFoundation
import AVFAudio
final class AudioManager: ObservableObject {
// static let shared = AudioManager()
var player: AVAudioPlayer?
@Published private(set) var isPlaying: Bool = false {
didSet {
print(isPlaying, "isPlaying")
}
}
func startPlayer(track: String) {
guard let fileURL = URL(string: track) else { return }
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
let soundData = try Data(contentsOf: fileURL)
self.player = try AVAudioPlayer(data: soundData)
guard let player = player else { return }
player.prepareToPlay()
player.play()
isPlaying = true
}
catch {
print(error)
}
}
func playPause() {
guard let player = player else {
print("Audio player not found")
return
}
if player.isPlaying {
player.pause()
isPlaying = false
} else {
player.play()
isPlaying = true
}
}
func stop() {
guard let player = player else {
print("Audio player not found")
return
}
if player.isPlaying {
player.stop()
isPlaying = false
}
}
}
// Main thing in my PlayerView. Passes the track to the audioManager
.onAppear {
// AudioManager.shared.startPlayer(track: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
DispatchQueue.main.async {
audioManager.startPlayer(track: track ?? "")
}
}
A simple way to do this would just be to write the Data that you download straight to a file. The next time you try to play that track, check if a file for it exists and load that local file instead.
Here's a (fairly naive) example:
final class AudioManager: ObservableObject {
// static let shared = AudioManager()
var player: AVAudioPlayer?
@Published private(set) var isDownloading = false
@Published private(set) var isPlaying: Bool = false
// MainActor so it always runs on the main queue
@MainActor func startPlayer(track: String) async {
guard let url = URL(string: track) else { return }
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
let songName = url.lastPathComponent
var soundData: Data
let tracksFolderUrl = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).last!.appendingPathComponent("tracks")
let trackUrl = tracksFolderUrl.appendingPathComponent(songName)
if FileManager.default.fileExists(atPath: trackUrl.path) {
// Load local data if it exists
print("Loading data from \(trackUrl)")
soundData = try Data(contentsOf: trackUrl)
} else {
//… otherwise load from network
isDownloading = true
print("Downloading data from \(url)")
(soundData, _) = try await URLSession.shared.data(from: url)
//… then save to disk
try FileManager.default.createDirectory(at: tracksFolderUrl, withIntermediateDirectories: true)
print("Saving data to \(trackUrl)")
try soundData.write(to: trackUrl)
isDownloading = false
}
self.player = try AVAudioPlayer(data: soundData)
guard let player = player else { return }
player.prepareToPlay()
player.play()
isPlaying = true
}
catch {
print(error)
}
}
}
struct ContentView: View {
@StateObject var audioManager = AudioManager()
var body: some View {
ZStack {
if audioManager.isDownloading {
VStack {
Text("Downloading")
ProgressView()
}
} else {
Text("Playing")
}
}
.task {
await audioManager.startPlayer(track: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
}
}
}
Note that I've made the startPlayer func async so it doesn't block the main thread, and used a different method to download the data:
try await URLSession.shared.data(from: url)
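That async data(from:) API requires iOS 15 or later. If you need to support earlier systems (an assumption on my part; the question doesn't state a deployment target), you could wrap the older completion-handler API in a continuation to get the same await-able call:
// Hypothetical helper for pre-iOS-15 targets: bridges URLSession's
// completion-handler API into async/await.
func downloadData(from url: URL) async throws -> Data {
    try await withCheckedThrowingContinuation { (continuation: CheckedContinuation<Data, Error>) in
        URLSession.shared.dataTask(with: url) { data, _, error in
            if let data = data {
                continuation.resume(returning: data)
            } else {
                continuation.resume(throwing: error ?? URLError(.badServerResponse))
            }
        }.resume()
    }
}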
After configuring my video layer as follows:
public class VideoLayerView: UIView {
override public class var layerClass: Swift.AnyClass {
return AVCaptureVideoPreviewLayer.self
}
public override func awakeFromNib() {
super.awakeFromNib()
self.clipsToBounds = true
}
public func configureCaptureLayer(session: AVCaptureSession?) {
guard let captureLayer = self.layer as? AVCaptureVideoPreviewLayer else { return }
captureLayer.session = session
captureLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
}
}
And setting up the session:
func initializeCamera() {
guard !captureSession.isRunning else {
print("Capture session already running")
return
}
guard Permissions.shared.isCameraAuthorized else {
print("Requesting Camera Permission")
Permissions.shared.requestCamera { _ in
DispatchQueue.main.async {
self.initializeCamera()
}
}
return
}
captureSession.beginConfiguration()
captureSession.sessionPreset = .photo
if let captureDevice = self.captureDevice {
if let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice), captureSession.canAddInput(captureDeviceInput) {
captureSession.addInput(captureDeviceInput)
} else {
print("Failed to add capture device input.")
}
}
photoOutput.maxPhotoQualityPrioritization = .quality
if captureSession.canAddOutput(photoOutput) {
photoOutput.isHighResolutionCaptureEnabled = true
captureSession.addOutput(photoOutput)
} else {
print("Failed to add photo output")
}
captureSession.commitConfiguration()
if let connection = photoOutput.connection(with: .video) {
connection.preferredVideoStabilizationMode = .standard
}
videolayerView.configureCaptureLayer(session: captureSession)
sessionQueue.async { [weak self] in
self?.captureSession.startRunning()
}
}
the preview layer displays black on my iPhone X. However, it continues to work on my other test devices. Removing:
if captureSession.canAddOutput(photoOutput) {
photoOutput.isHighResolutionCaptureEnabled = true
captureSession.addOutput(photoOutput)
} else {
print("Failed to add photo output")
}
resolves the issue, but this code is necessary to capture photos. Why is this not working?
This is due to a camera state bug in iOS, left over from code written earlier (and since removed) that incorrectly configured the camera. Restart your device and the camera will resume functioning.
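Independently of the restart, it can help to observe the session's runtime-error notification so a black preview at least leaves a trace in the logs rather than failing silently (a general diagnostic suggestion, not part of the original fix):
// Log capture-session failures; captureSession is the session configured above.
NotificationCenter.default.addObserver(
    forName: .AVCaptureSessionRuntimeError,
    object: captureSession,
    queue: .main
) { notification in
    let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError
    print("Capture session runtime error: \(String(describing: error))")
}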
I have tried many code samples for recording and playing voice, but most of them are not in Swift 3 and don't work in my app.
This code works, but I want to move the recording and playing into a class separate from the view controller. Also, the GitHub code I mentioned is complex, and I'm looking for something simpler.
Update:
After recording, when I check for the existence of the recorded file, the file doesn't exist, and an EXC_BAD_ACCESS error is raised in the AppDelegate.
What's wrong?
Any suggestions would be appreciated.
Try recording audio with this line:
let isRec = AudioManager.shared.record(fileName: "rec")
If isRec returns true, recording is in progress; otherwise it is not.
To finish recording, use: let recordedURL = AudioManager.shared.finishRecording()
To play the recorded file, pass that URL to the setupPlayer() function in the manager class (a consolidated call-site example follows the code below).
Don't forget to include the extension snippets given below the class; they are the delegate functions of AVAudioRecorder and AVAudioPlayer.
import Foundation
import AVFoundation
class AudioManager: NSObject {
static let shared = AudioManager()
var recordingSession: AVAudioSession?
var recorder: AVAudioRecorder?
var meterTimer: Timer?
var recorderApc0: Float = 0
var recorderPeak0: Float = 0
//PLayer
var player: AVAudioPlayer?
var savedFileURL: URL?
func setup() {
recordingSession = AVAudioSession.sharedInstance()
do {
try recordingSession?.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
try recordingSession?.setActive(true)
recordingSession?.requestRecordPermission({ (allowed) in
if allowed {
print("Mic Authorised")
} else {
print("Mic not Authorised")
}
})
} catch {
print("Failed to set Category", error.localizedDescription)
}
}
func record(fileName: String) -> Bool {
setup()
let url = getUserPath().appendingPathComponent(fileName + ".m4a")
let audioURL = URL.init(fileURLWithPath: url.path)
let recordSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
AVNumberOfChannelsKey: 2,
AVSampleRateKey: 44100.0]
do {
recorder = try AVAudioRecorder.init(url: audioURL, settings: recordSettings)
recorder?.delegate = self
recorder?.isMeteringEnabled = true
recorder?.prepareToRecord()
recorder?.record()
self.meterTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true, block: { (timer: Timer) in
//Update Recording Meter Values so we can track voice loudness
if let recorder = self.recorder {
recorder.updateMeters()
self.recorderApc0 = recorder.averagePower(forChannel: 0)
self.recorderPeak0 = recorder.peakPower(forChannel: 0)
}
})
savedFileURL = url
print("Recording")
return true
} catch {
print("Error Handling", error.localizedDescription)
return false
}
}
func getUserPath() -> URL {
return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
}
func finishRecording() -> String {
recorder?.stop()
self.meterTimer?.invalidate()
var fileURL: String?
if let url: URL = recorder?.url {
fileURL = String(describing: url)
}
return fileURL ?? ""
}
//Player
func setupPlayer(_ url: URL) {
do {
try player = AVAudioPlayer.init(contentsOf: url)
} catch {
print("Error1", error.localizedDescription)
}
player?.prepareToPlay()
player?.play()
player?.volume = 1.0
player?.delegate = self
}
}
//MARK:- Audio Recorder Delegate
extension AudioManager: AVAudioRecorderDelegate {
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
print("AudioManager Finish Recording")
}
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
print("Encoding Error", /error?.localizedDescription)
}
}
//MARK:- Audio Player Delegates
extension AudioManager: AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer,
successfully flag: Bool) {
player.stop()
print("Finish Playing")
}
func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer,
error: Error?) {
print(error?.localizedDescription ?? "")
}
}
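Putting it together, a call site might look like this (a sketch of mine; finishRecording() returns the file URL as a String, so it is converted back to a URL before playback):
// Start recording; record(fileName:) returns false if setup failed.
let isRec = AudioManager.shared.record(fileName: "rec")
print(isRec ? "Recording…" : "Could not start recording")

// Later: stop recording and play the result back.
let recordedPath = AudioManager.shared.finishRecording()
if let url = URL(string: recordedPath) {
    AudioManager.shared.setupPlayer(url)
}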
I have an app that plays an AAC audio stream. Everything works fine, but when I disconnect the stream and reconnect after one second, the audio stops playing after half a minute. When I don't reconnect, I get an error after one to two minutes. To reconnect, I must stop AVPlayer and start it again. I want to reconnect the stream, or show a message, immediately after the player stops playing music. How can I do that? Moreover, I have another question: I converted my code to Swift 3 and have a problem with one line:
fileprivate var playerItem = AVPlayerItem?()
error: cannot invoke initializer without argument
How can I fix that? Maybe this is the problem?
My Radio Player class:
import Foundation
import AVFoundation
import UIKit
protocol errorMessageDelegate {
func errorMessageChanged(_ newVal: String)
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(_ newVal: Bool)
}
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(self.sharedInstanceBool)
}
}
}
fileprivate var player = AVPlayer(url: URL(string: Globals.radioURL)!)
// fileprivate var playerItem = AVPlayerItem?()
fileprivate var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: URL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NotificationCenter.default.addObserver(
forName: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object: nil,
queue: nil,
using: { notification in
print("Status: Failed to continue")
self.errorMessage = NSLocalizedString("STREAM_INTERUPT", comment:"Stream was interrupted")
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: URL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
// playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> TimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.readyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges as NSArray
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = (timeRangeArray.object(at: 0) as AnyObject).timeRangeValue
// let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (TimeInterval)(loadedDuration);
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
I will be grateful for help ;)
For the second issue, fileprivate var playerItem = AVPlayerItem?(),
write this instead and it should work: fileprivate var playerItem: AVPlayerItem?
For the first issue
when I disconnect stream and connect again after one second audio stop
playing after half minute. When i don't reconnect i have error after
one- two minutes. To reconnect i must stop AVPlayer and start again. I
want to reconnect stream or show message immediately after player
stops play music. How can I do that?
I don't get what's wrong. You pause the player by pressing the button, then you press it again, and after one to two minutes it stops by itself?
I have tested the same class today and it works just fine, even after the connection to the server is lost (when the connection resumes you can press the play button and it will play).
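If you want to react the moment playback stops, one option (my suggestion; the class below doesn't do this) is to observe AVPlayerItemPlaybackStalled alongside the failed-to-play notification it already registers:
// React immediately when the stream stalls, e.g. reconnect or show a message.
NotificationCenter.default.addObserver(
    forName: .AVPlayerItemPlaybackStalled,
    object: nil,
    queue: .main
) { _ in
    print("Status: Playback stalled")
    // call resetPlayer() or set errorMessage here
}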
I'll leave you my code here, give it a try
import Foundation
import AVFoundation
import UIKit
protocol errorMessageDelegate {
func errorMessageChanged(newVal: String)
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(newVal: Bool)
}
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(newVal: self.sharedInstanceBool)
}
}
}
private var player = AVPlayer(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL)
private var playerItem: AVPlayerItem?
private var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(newVal: self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NotificationCenter.default.addObserver(
forName: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object: nil,
queue: nil,
using: { notification in
print("Status: Failed to continue")
self.errorMessage = "Stream was interrupted"
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
//playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> TimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.readyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges as NSArray
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = (timeRangeArray.object(at: 0) as AnyObject).timeRangeValue
//let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (TimeInterval)(loadedDuration)
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
I want the user to press a button, have it change background color (to yellow), play a WAV, and have the button revert to its original color (red) when the WAV completes. So I need a completion handler around the sound. I have tried various combinations of the code below, but the WAV plays and the button never appears to change color.
Is this the wrong approach, or am I doing something wrong? I don't want to put completion handlers around the color changes as that, I presume, is overkill.
Many thanks.
typealias CompletionHandler = (success:Bool) -> Void
@IBAction func fireButton(sender: AnyObject) {
playLaser( { (success)-> Void in
if success {
self.shots -= 1
self.labelShotsLeft.text = String(self.shots)
} else {
}
})
}
func playLaser(completionHandler: CompletionHandler) {
fireButton.layer.backgroundColor = UIColor.yellowColor().CGColor
let url = NSBundle.mainBundle().URLForResource("laser", withExtension: "wav")!
do {
player = try AVAudioPlayer(contentsOfURL: url)
guard let player = player else { return }
player.prepareToPlay()
player.play()
} catch let error as NSError {
print(error.description)
}
self.fireButton.layer.backgroundColor = UIColor.redColor().CGColor
completionHandler(success: true)
}
To detect when an AVAudioPlayer finishes playing, you need to use AVAudioPlayerDelegate.
You may need to write something like this:
func playLaser(completionHandler: CompletionHandler) {
fireButton.layer.backgroundColor = UIColor.yellowColor().CGColor
let url = NSBundle.mainBundle().URLForResource("laser", withExtension: "wav")!
do {
player = try AVAudioPlayer(contentsOfURL: url)
guard let player = player else { return }
player.delegate = self //<- Sorry, this was missing in my first post
player.play()
} catch let error as NSError {
print(error.description)
}
audioPlayerCompletionHandler = completionHandler
}
var audioPlayerCompletionHandler: CompletionHandler?
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
self.fireButton.layer.backgroundColor = UIColor.redColor().CGColor
audioPlayerCompletionHandler?(success: true)
}
(You need to add conformance to AVAudioPlayerDelegate to your ViewController's declaration header.)
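For example (assuming the controller is named ViewController):
class ViewController: UIViewController, AVAudioPlayerDelegate {
    // properties and the methods shown above go here
}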
Code does not magically pause and wait just because you say player.play(); that would be horrible! Thus, your so-called completion handler is not a completion handler at all. It runs immediately, that is, as soon as you start playing. Your code does nothing to find out when the audio player has finished playing.
For that, you need to configure a delegate and receive the delegate message that the audio player emits when it finishes playing.
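If you'd rather not make the view controller itself the delegate, you can wrap the delegate callback in a small helper object so the call site reads like a completion handler. This is a sketch of my own in current Swift syntax, not from either answer; note that you must keep a strong reference to the helper, because AVAudioPlayer does not retain its delegate:
import AVFoundation

// Hypothetical helper: forwards the finish callback into a closure.
final class AudioPlayerCompletion: NSObject, AVAudioPlayerDelegate {
    private let onFinish: (Bool) -> Void
    init(onFinish: @escaping (Bool) -> Void) { self.onFinish = onFinish }
    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        onFinish(flag)
    }
}
// Usage: keep the helper alive while the sound plays, and assign it to
// player.delegate before calling player.play().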
This is one of those questions that is a little more subtle than meets the eye. I tried putting three completion handlers around the three tasks: change the color to yellow, play the sound, change the color back to red. The code executed in the correct sequence (I NSLogged it), but the button never changed color, because the screen doesn't redraw controls until control returns to the main run loop. Here is the code that works, which I hope other readers might find useful:
Swift 2.0
@IBAction func fireButton(sender: AnyObject) {
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
dispatch_sync(dispatch_get_main_queue()) {
self.fireButton.layer.backgroundColor = UIColor.yellowColor().CGColor
}
self.playLaser( { (success)-> Void in
if success {
self.shots -= 1
} else {
}
})
dispatch_sync(dispatch_get_main_queue()) {
self.labelShotsLeft.text = String(self.shots)
self.fireButton.layer.backgroundColor = UIColor.redColor().CGColor
}
}
}
func playLaser(completion: (success: Bool) -> ()) {
let url = NSBundle.mainBundle().URLForResource("laser", withExtension: "wav")!
do {
player = try AVAudioPlayer(contentsOfURL: url)
guard let player = player else { return }
player.play()
completion(success: true)
} catch {
completion(success: false)
}
}
Swift 3.0
@IBAction func fireButton(_ sender: AnyObject) {
let fireQueue = DispatchQueue(label: "queueFirebutton")
fireQueue.async {
DispatchQueue.main.sync {
self.fireButtonDisabled()
}
DispatchQueue.main.sync {
self.playLaser()
self.shots -= 1
if self.shots <= 0 {
self.shots = 0
}
}
DispatchQueue.main.sync {
if self.shots < 0 { self.shots = 0}
self.labelShotsLeft.text = String(self.shots)
sleep(1) // keeps the button disabled for a second; note this blocks the main thread
self.fireButtonEnabled()
}
}
}
func playLaser() {
let url = Bundle.main.url(forResource: "laser", withExtension: "wav")!
do {
player = try AVAudioPlayer(contentsOf: url)
guard let player = player else { return }
player.play()
} catch {
}
}