Is it possible to maintain playback in AudioKit after a route change? - swift

Is it possible to continue playing sound after a route change (such as switching speakers or plugging in/unplugging headphones) without reloading the sample again in Sampler?
This is the only approach I have found that works:
@objc dynamic private func audioRouteChangeListener(notification: NSNotification) {
    guard let audioRouteChangeReason = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt else { return }
    switch audioRouteChangeReason {
    case AVAudioSession.RouteChangeReason.newDeviceAvailable.rawValue:
        print("headphone plugged in")
        DispatchQueue.global().async { [weak self] in
            // Note the reload here
            self?.conductor?.loadSamples(byIndex: 2)
        }
    case AVAudioSession.RouteChangeReason.oldDeviceUnavailable.rawValue:
        print("headphone pulled out")
        DispatchQueue.global().async { [weak self] in
            // Note the reload here
            self?.conductor?.loadSamples(byIndex: 2)
        }
    default:
        break
    }
}
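For context, a handler like this is registered for the system's route-change notification. A minimal sketch of the registration, assuming the listener above lives in the same class:

NotificationCenter.default.addObserver(
    self,
    selector: #selector(audioRouteChangeListener(notification:)),
    name: AVAudioSession.routeChangeNotification,
    object: nil)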
And in the Conductor class, where I load the .wv samples referenced by the .sfz files:
func loadSamples(byIndex: Int) {
    sampler1.unloadAllSamples()
    let sfzFiles = ["City Piano.sfz", "ShortPiano.sfz", "LongPiano.sfz"]
    guard byIndex >= 0 && byIndex < sfzFiles.count else { return }
    let info = ProcessInfo.processInfo
    let begin = info.systemUptime
    sampler1.loadSfzWithEmbeddedSpacesInSampleNames(
        folderPath: Bundle.main.bundleURL.appendingPathComponent("Sounds/sfz").path,
        sfzFileName: sfzFiles[byIndex])
    let elapsedTime = info.systemUptime - begin
    AKLog("Time to load samples: \(elapsedTime) seconds")
}

Related

How to exit from `RunLoop`

I'm subclassing InputStream from the iOS Foundation SDK for my needs. I need to implement functionality so that a worker thread can sleep until data appears in the stream. The test I'm using to cover the functionality is below:
func testStreamWithRunLoop() {
    let inputStream = BLEInputStream() // custom input stream subclass
    inputStream.delegate = self
    let len = Int.random(in: 0..<100)
    let randomData = randData(length: len) // random data generation
    let tenSeconds = Double(10)
    let oneSecond = TimeInterval(1)
    runOnBackgroundQueueAfter(oneSecond) {
        inputStream.accept(randomData) // input stream receives the data
    }
    let dateInFuture = Date(timeIntervalSinceNow: tenSeconds) // 10 s from now
    inputStream.schedule(in: .current, forMode: RunLoop.Mode.default)
    RunLoop.current.run(until: dateInFuture) // wait for data to appear in the input stream
    XCTAssertTrue(dateInFuture.timeIntervalSinceNow > 0, "Timeout. RunLoop didn't exit after the data arrived (~1 s).")
}
Here are the overridden methods of InputStream:
public override func schedule(in aRunLoop: RunLoop, forMode mode: RunLoop.Mode) {
    self.runLoop = aRunLoop // save the RunLoop object
    var context = CFRunLoopSourceContext() // make a context (note: no perform callback)
    self.runLoopSource = CFRunLoopSourceCreate(nil, 0, &context) // make a source
    let cfLoopMode = CFRunLoopMode(mode.rawValue as CFString)
    self.runLoopMode = cfLoopMode // save the mode; accept(_:) needs it later
    CFRunLoopAddSource(aRunLoop.getCFRunLoop(), self.runLoopSource!, cfLoopMode)
}

public func accept(_ data: Data) {
    guard data.count > 0 else { return }
    self.data += data
    delegate?.stream?(self, handle: .hasBytesAvailable)
    if let runLoopSource {
        CFRunLoopSourceSignal(runLoopSource)
    }
    if let runLoop {
        CFRunLoopWakeUp(runLoop.getCFRunLoop())
    }
}
But calling CFRunLoopSourceSignal(runLoopSource) and CFRunLoopWakeUp(runLoop.getCFRunLoop()) does not make the run loop exit.
Does anybody know where my mistake is?
Thanks all!
PS: Here is the Xcode project on GitHub.
Finally I figured out some issues with my code.
First of all, I needed to remove the CFRunLoopSource object from the run loop with CFRunLoopRemoveSource(). According to the documentation, if a run loop has no input sources attached, it exits immediately.
public func accept(_ data: Data) {
    guard data.count > 0 else { return }
    self.data += data
    delegate?.stream?(self, handle: .hasBytesAvailable)
    if let runLoopSource, let runLoop, let runLoopMode {
        CFRunLoopRemoveSource(runLoop.getCFRunLoop(), runLoopSource, runLoopMode)
    }
    if let runLoop {
        CFRunLoopWakeUp(runLoop.getCFRunLoop())
    }
}
The second issue is that I was using the XCTest environment, whose run loop didn't exit for some reason (I'm asking the community for help with that one).
So I used a real application environment and created a Thread subclass to check my implementation. By default the thread's run loop has no input sources attached to it. I added the input stream to it, and used the main thread to emulate the stream receiving data.
Here is the custom Thread subclass that runs and sleeps until it receives a signal from BLEInputStream:
class StreamThread: Thread, StreamDelegate {
    let stream: BLEInputStream

    init(stream: BLEInputStream) {
        self.stream = stream
    }

    override func main() {
        stream.delegate = self
        stream.schedule(in: .current, forMode: RunLoop.Mode.default)
        print("start()")
        let tenSeconds = Double(10)
        let dateInFuture = Date(timeIntervalSinceNow: tenSeconds)
        RunLoop.current.run(until: dateInFuture)
        print("after 10 seconds")
    }

    override func start() {
        super.start()
    }

    func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
        if eventCode == .errorOccurred {
            print("eventCode == .errorOccurred")
        } else if eventCode == .hasBytesAvailable {
            print("eventCode == .hasBytesAvailable")
        }
    }
}
Here are the UIViewController methods that run on the main thread:
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    let baseDate = Date.now
    let thread = StreamThread(stream: stream) // init(stream:) as defined above
    thread.start()
    print("main thread pauses at \(Date.now.timeIntervalSince(baseDate))")
    Thread.sleep(forTimeInterval: 2)
    print("stream accepts Data \(Date.now.timeIntervalSince(baseDate))")
    stream.accept(Data([1, 2, 3]))
}
Here is the result: everything works as expected. The thread sleeps until the input stream receives data, and no processor resources are consumed while it waits.
Although subclassing InputStream is allowed, the documentation gives no good explanation of how to implement a custom InputStream correctly.

AVAudioRecorder stops the record at the end of an AVAudioSession.interruptionNotification

I am implementing a recorder in my application using an AVAudioRecorder, but I'm encountering strange behavior when an interruption is triggered by the system.
When an interruption is caught via AVAudioSession.interruptionNotification, I call the following function:
@objc private func handleInterruption(notification: Foundation.Notification) {
    guard let interruptionTypeValue = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt,
          let interruptionType = AVAudioSession.InterruptionType(rawValue: interruptionTypeValue)
    else { return }
    switch interruptionType {
    case .began:
        pause()
    case .ended:
        guard let optionsValue = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
        let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
        if options.contains(.shouldResume) {
            resume()
        } else {
            // TODO:
        }
    @unknown default:
        break
    }
}
At the beginning of the interruption I pause the recorder, and at the end of it I resume the recorder if needed.
However, when I resume the recorder after the interruption, it restarts the recording and deletes the previously created file: I receive a callback to audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool).
How to get around this problem?
Thanks
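One thing worth trying (a sketch of an idea, not a verified fix): reactivate the audio session before resuming, since the system deactivates it during the interruption. Here recorder stands for the AVAudioRecorder instance, a name assumed for illustration:

private func resume() {
    do {
        // Reactivate the session that the interruption deactivated...
        try AVAudioSession.sharedInstance().setActive(true)
        // ...then resume. Per the AVAudioRecorder docs, record() continues
        // a paused recording as long as stop() was never called.
        recorder.record()
    } catch {
        print("Could not reactivate the audio session: \(error)")
    }
}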

Timing issues: Metronome using AVAudioEngine scheduleBuffer's completion handler

I want to build a simple metronome app using AVAudioEngine with these features:
Solid timing (I know, I know, I should be using Audio Units, but I'm still struggling with Core Audio stuff / Obj-C wrappers etc.)
Two different sounds on the "1" and on beats "2"/"3"/"4" of the bar.
Some kind of visual feedback (at least a display of the current beat) which needs to be in sync with audio.
So I have created two short click sounds (26 ms / 1150 samples @ 16 bit / 44.1 kHz / stereo wav files) and load them into 2 buffers. Their lengths will be set to represent one period.
My UI setup is simple: A button to toggle start / pause and a label to display the current beat (my "counter" variable).
When using scheduleBuffer's loop property the timing is okay, but since I need two different sounds and a way to sync/update my UI while looping the clicks, I cannot use it. I figured I'd use the completionHandler instead, which restarts my playClickLoop() function (see my code attached below).
Unfortunately, while implementing this I didn't really measure the accuracy of the timing. As it now turns out, when setting bpm to 120 it plays the loop at only about 117.5 bpm, quite steadily but still way too slow. When bpm is set to 180, my app plays at about 172.3 bpm.
What's going on here? Is this delay introduced by using the completionHandler? Is there any way to improve the timing? Or is my whole approach wrong?
Thanks in advance!
Alex
import UIKit
import AVFoundation

class ViewController: UIViewController {

    private let engine = AVAudioEngine()
    private let player = AVAudioPlayerNode()

    private let fileName1 = "sound1.wav"
    private let fileName2 = "sound2.wav"
    private var file1: AVAudioFile! = nil
    private var file2: AVAudioFile! = nil
    private var buffer1: AVAudioPCMBuffer! = nil
    private var buffer2: AVAudioPCMBuffer! = nil

    private let sampleRate: Double = 44100
    private var bpm: Double = 180.0
    private var periodLengthInSamples: Double { 60.0 / bpm * sampleRate }
    private var counter: Int = 0

    private enum MetronomeState { case run, stop }
    private var state: MetronomeState = .stop

    @IBOutlet weak var label: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()
        //
        // MARK: Loading buffer1
        //
        let path1 = Bundle.main.path(forResource: fileName1, ofType: nil)!
        let url1 = URL(fileURLWithPath: path1)
        do {
            file1 = try AVAudioFile(forReading: url1)
            buffer1 = AVAudioPCMBuffer(
                pcmFormat: file1.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file1.read(into: buffer1!)
            buffer1.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer1 \(error)") }
        //
        // MARK: Loading buffer2
        //
        let path2 = Bundle.main.path(forResource: fileName2, ofType: nil)!
        let url2 = URL(fileURLWithPath: path2)
        do {
            file2 = try AVAudioFile(forReading: url2)
            buffer2 = AVAudioPCMBuffer(
                pcmFormat: file2.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file2.read(into: buffer2!)
            buffer2.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer2 \(error)") }
        //
        // MARK: Configure + start engine
        //
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: file1.processingFormat)
        engine.prepare()
        do { try engine.start() } catch { print(error) }
    }

    //
    // MARK: Play / Pause toggle action
    //
    @IBAction func buttonPressed(_ sender: UIButton) {
        sender.isSelected = !sender.isSelected
        if player.isPlaying {
            state = .stop
        } else {
            state = .run
            try! engine.start()
            player.play()
            playClickLoop()
        }
    }

    private func playClickLoop() {
        //
        // MARK: Completion handler
        //
        let scheduleBufferCompletionHandler = { [unowned self] /*(_: AVAudioPlayerNodeCompletionCallbackType)*/ in
            DispatchQueue.main.async {
                switch self.state {
                case .run:
                    self.playClickLoop()
                case .stop:
                    self.engine.stop()
                    self.player.stop()
                    self.counter = 0
                }
            }
        }
        //
        // MARK: Schedule buffer + play
        //
        if engine.isRunning {
            counter += 1; if counter > 4 { counter = 1 } // Counting from 1 to 4 only
            if counter == 1 {
                //
                // MARK: Playing sound1 on beat 1
                //
                player.scheduleBuffer(buffer1,
                                      at: nil,
                                      options: [.interruptsAtLoop],
                                      //completionCallbackType: .dataPlayedBack,
                                      completionHandler: scheduleBufferCompletionHandler)
            } else {
                //
                // MARK: Playing sound2 on beats 2, 3 & 4
                //
                player.scheduleBuffer(buffer2,
                                      at: nil,
                                      options: [.interruptsAtLoop],
                                      //completionCallbackType: .dataRendered,
                                      completionHandler: scheduleBufferCompletionHandler)
            }
            //
            // MARK: Display current beat on UILabel + to console
            //
            DispatchQueue.main.async {
                self.label.text = String(self.counter)
                print(self.counter)
            }
        }
    }
}
As Phil Freihofner suggested above, here's the solution to my own problem:
The most important lesson I learned: the completionHandler callback provided by the scheduleBuffer command is not called early enough to trigger re-scheduling of another buffer while the first one is still playing. This results in (inaudible) gaps between the sounds and messes up the timing. There must already be another buffer "in reserve", i.e. one that was scheduled before the current one finishes playing.
Using the completionCallbackType parameter of scheduleBuffer didn't change much regarding the timing of the completion callback: when set to .dataRendered or .dataConsumed, the callback was already too late to re-schedule another buffer. Using .dataPlayedBack only made things worse :-)
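For completeness, another route (not the one I took) is to bypass completion handlers entirely and schedule every buffer at an explicit sample time. A minimal sketch, assuming player, sampleRate and periodLengthInSamples as defined in my code, with the player already playing:

// Sample-accurate scheduling via explicit AVAudioTime
// instead of re-scheduling from a completion handler.
var nextBeatSampleTime: AVAudioFramePosition = 0

func scheduleClick(_ buffer: AVAudioPCMBuffer) {
    let when = AVAudioTime(sampleTime: nextBeatSampleTime, atRate: sampleRate)
    player.scheduleBuffer(buffer, at: when, options: [], completionHandler: nil)
    nextBeatSampleTime += AVAudioFramePosition(periodLengthInSamples)
}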
So, to achieve seamless playback (with correct timing!) I simply activated a timer that fires twice per period. All odd-numbered timer events re-schedule another buffer.
Sometimes the solution is so easy it's embarrassing... But sometimes you have to try almost every wrong approach first to find it ;-)
My complete working solution (including the two sound files and the UI) can be found here on GitHub:
https://github.com/Alexander-Nagel/Metronome-using-AVAudioEngine
import UIKit
import AVFoundation

private let DEBUGGING_OUTPUT = true

class ViewController: UIViewController {

    private var engine = AVAudioEngine()
    private var player = AVAudioPlayerNode()
    private var mixer = AVAudioMixerNode()

    private let fileName1 = "sound1.wav"
    private let fileName2 = "sound2.wav"
    private var file1: AVAudioFile! = nil
    private var file2: AVAudioFile! = nil
    private var buffer1: AVAudioPCMBuffer! = nil
    private var buffer2: AVAudioPCMBuffer! = nil

    private let sampleRate: Double = 44100
    private var bpm: Double = 133.33
    private var periodLengthInSamples: Double {
        60.0 / bpm * sampleRate
    }
    private var timerEventCounter: Int = 1
    private var currentBeat: Int = 1
    private var timer: Timer! = nil

    private enum MetronomeState { case running, stopped }
    private var state: MetronomeState = .stopped

    @IBOutlet weak var beatLabel: UILabel!
    @IBOutlet weak var bpmLabel: UILabel!
    @IBOutlet weak var playPauseButton: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
        bpmLabel.text = "\(bpm) BPM"
        setupAudio()
    }

    private func setupAudio() {
        //
        // MARK: Loading buffer1
        //
        let path1 = Bundle.main.path(forResource: fileName1, ofType: nil)!
        let url1 = URL(fileURLWithPath: path1)
        do {
            file1 = try AVAudioFile(forReading: url1)
            buffer1 = AVAudioPCMBuffer(
                pcmFormat: file1.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file1.read(into: buffer1!)
            buffer1.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer1 \(error)") }
        //
        // MARK: Loading buffer2
        //
        let path2 = Bundle.main.path(forResource: fileName2, ofType: nil)!
        let url2 = URL(fileURLWithPath: path2)
        do {
            file2 = try AVAudioFile(forReading: url2)
            buffer2 = AVAudioPCMBuffer(
                pcmFormat: file2.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file2.read(into: buffer2!)
            buffer2.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer2 \(error)") }
        //
        // MARK: Configure + start engine
        //
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: file1.processingFormat)
        engine.prepare()
        do { try engine.start() } catch { print(error) }
    }

    //
    // MARK: Play / Pause toggle action
    //
    @IBAction func buttonPressed(_ sender: UIButton) {
        sender.isSelected = !sender.isSelected
        if state == .running {
            //
            // PAUSE: Stop timer and reset counters
            //
            state = .stopped
            timer.invalidate()
            timerEventCounter = 1
            currentBeat = 1
        } else {
            //
            // START: Pre-load first sound and start timer
            //
            state = .running
            scheduleFirstBuffer()
            startTimer()
        }
    }

    private func startTimer() {
        if DEBUGGING_OUTPUT {
            print("# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # ")
            print()
        }
        //
        // Compute interval for 2 events per period and set up timer
        //
        let timerInterval = 0.5 * self.periodLengthInSamples / sampleRate // in seconds
        timer = Timer.scheduledTimer(withTimeInterval: timerInterval, repeats: true) { timer in
            //
            // Only for debugging: print counter values at start of timer event
            //
            if DEBUGGING_OUTPUT {
                print("timerEvent #\(self.timerEventCounter) at \(self.bpm) BPM")
                print("Entering \ttimerEventCounter: \(self.timerEventCounter) \tcurrentBeat: \(self.currentBeat) ")
            }
            //
            // Schedule next buffer at 1st, 3rd, 5th & 7th timerEvent
            //
            var bufferScheduled: String = "" // only needed for debugging / console output
            switch self.timerEventCounter {
            case 7:
                //
                // Schedule main sound
                //
                self.player.scheduleBuffer(self.buffer1, at: nil, options: [], completionHandler: nil)
                bufferScheduled = "buffer1"
            case 1, 3, 5:
                //
                // Schedule subdivision sound
                //
                self.player.scheduleBuffer(self.buffer2, at: nil, options: [], completionHandler: nil)
                bufferScheduled = "buffer2"
            default:
                bufferScheduled = ""
            }
            //
            // Display current beat & increase currentBeat (1...4) at 2nd, 4th, 6th & 8th timerEvent
            //
            if self.timerEventCounter % 2 == 0 {
                DispatchQueue.main.async {
                    self.beatLabel.text = String(self.currentBeat)
                }
                self.currentBeat += 1; if self.currentBeat > 4 { self.currentBeat = 1 }
            }
            //
            // Increase timerEventCounter, two events per beat.
            //
            self.timerEventCounter += 1; if self.timerEventCounter > 8 { self.timerEventCounter = 1 }
            //
            // Only for debugging: print counter values at end of timer event
            //
            if DEBUGGING_OUTPUT {
                print("Exiting \ttimerEventCounter: \(self.timerEventCounter) \tcurrentBeat: \(self.currentBeat) \tscheduling: \(bufferScheduled)")
                print()
            }
        }
    }

    private func scheduleFirstBuffer() {
        player.stop()
        //
        // Pre-load accented main sound (for beat "1") before the timer starts
        //
        player.scheduleBuffer(buffer1, at: nil, options: [], completionHandler: nil)
        player.play()
        beatLabel.text = String(currentBeat)
    }
}
Thanks so much for your help everyone! This is a wonderful community.
Alex
How accurate is the tool or process you are using to get your measurements?
I can't tell for sure that your files have the correct number of PCM frames, as I am not a C programmer. It looks like data from the wav header is included when you load the files. This makes me wonder whether some latency is incurred in the playbacks while the header information is processed repeatedly at the start of each play or loop.
I had good luck building a metronome in Java using a plan of continuously outputting an endless stream derived from reading PCM frames. Timing is achieved by counting PCM frames and routing in either silence (PCM data point = 0) or the click's PCM data, based on the period of the chosen metronome setting and the length of the click in PCM frames.
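The same idea translates to AVFoundation: make each scheduled buffer exactly one period long, click first, zeros after. A minimal sketch, where clickFile stands for an AVAudioFile like file1 in the question:

// One-period buffer = click PCM data followed by silence.
let periodFrames = AVAudioFrameCount(60.0 / bpm * sampleRate)
let buffer = AVAudioPCMBuffer(pcmFormat: clickFile.processingFormat,
                              frameCapacity: periodFrames)!
try clickFile.read(into: buffer)  // click samples fill the start
buffer.frameLength = periodFrames // remaining frames stay zero (silence)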

Continuous listen the user voice and detect end of speech silence in SpeechKit framework

I am working on an application where we need to open certain screens based on voice commands, e.g. if the user says "Open Setting" it should open the Settings screen. So far I have used the SpeechKit framework, but I am not able to detect the end-of-speech silence the way Siri does. I want to detect when the user has finished their sentence/phrase.
Please find below the code for both ways in which I have integrated the SpeechKit framework.
A) Via closure: recognitionTask(with request: SFSpeechRecognitionRequest, resultHandler: @escaping (SFSpeechRecognitionResult?, Error?) -> Swift.Void) -> SFSpeechRecognitionTask
let audioEngine = AVAudioEngine()
let speechRecognizer = SFSpeechRecognizer()
let request = SFSpeechAudioBufferRecognitionRequest()
var recognitionTask: SFSpeechRecognitionTask?

func startRecording() throws {
    let node = audioEngine.inputNode
    let recordingFormat = node.outputFormat(forBus: 0)
    node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [unowned self] (buffer, _) in
        self.request.append(buffer)
    }
    audioEngine.prepare()
    try audioEngine.start()
    weak var weakSelf = self
    recognitionTask = speechRecognizer?.recognitionTask(with: request) { (result, error) in
        if result != nil {
            if let transcription = result?.bestTranscription {
                weakSelf?.identifyVoiceCommand(transcription)
            }
        }
    }
}
But when I say a word/sentence like "Open Setting", the resultHandler closure of recognitionTask(with:) is called multiple times, and since I call identifyVoiceCommand inside the closure, it is also called multiple times. How can I restrict it to a single call?
I also reviewed the Timer logic I found while googling (SFSpeechRecognizer - detect end of utterance), but it does not work in my scenario because I never stop the audio engine; it continuously listens to the user's voice, like Siri does.
B) Via delegate(SFSpeechRecognitionTaskDelegate)
speechRecognizer.recognitionTask(with: self.request, delegate: self)
func speechRecognitionTaskWasCancelled(_ task: SFSpeechRecognitionTask) {
}

func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishSuccessfully successfully: Bool) {
}

I found that the delegate method that should be called when the end of speech occurs is not called reliably; it sometimes fires only after a long delay.
I had the same issue until now.
I checked your question and I suppose the code below helps you achieve the same thing I did:
recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest,
                                                    resultHandler: { (result, error) in
    var isFinal = false
    if result != nil {
        self.inputTextView.text = result?.bestTranscription.formattedString
        isFinal = (result?.isFinal)!
    }
    if let timer = self.detectionTimer, timer.isValid {
        if isFinal {
            self.inputTextView.text = ""
            self.textViewDidChange(self.inputTextView)
            self.detectionTimer?.invalidate()
        }
    } else {
        self.detectionTimer = Timer.scheduledTimer(withTimeInterval: 1.5, repeats: false, block: { (timer) in
            self.handleSend()
            isFinal = true
            timer.invalidate()
        })
    }
})
This checks if input wasn't received for 1.5 seconds
To your speech recogniser class add:
private var timer : Timer?
And modify code here:
recognitionTask = speechRecognizer.recognitionTask(with: request) { (result, error) in
    // Restart the pause-detection timer on every partial result.
    self.timer?.invalidate()
    self.timer = Timer.scheduledTimer(withTimeInterval: 1.5, repeats: false) { _ in
        self.timer = nil
        // do here what you want to do when a pause longer than 1.5 sec is detected
    }
    if result != nil {
        // handle the (partial) transcription here
    }
}

How to resume audio after interruption in Swift?

I am following the instructions here, and I've put together this test project to handle interruptions to audio playback. Specifically, I'm using the alarm from the default iPhone Clock app as the interruption. It appears that the interruption handler is getting called, but it is not getting past the let interruptionType = ... line, as "wrong type" showed up twice.
import UIKit
import AVFoundation

class ViewController: UIViewController {

    var player = AVAudioPlayer()
    let audioPath = NSBundle.mainBundle().pathForResource("rachmaninov-romance-sixhands-alianello", ofType: "mp3")!

    func handleInterruption(notification: NSNotification) {
        guard let interruptionType = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? AVAudioSessionInterruptionType else { print("wrong type"); return }
        switch interruptionType {
        case .Began:
            print("began")
            // player is paused and session is inactive. need to update UI
            player.pause()
            print("audio paused")
        default:
            print("ended")
            /**/
            if let option = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? AVAudioSessionInterruptionOptions where option == .ShouldResume {
                // ok to resume playing, reactivate session and resume playing
                // need to update UI
                player.play()
                print("audio resumed")
            }
            /**/
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        do {
            try player = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath: audioPath))
            player.numberOfLoops = -1 // play indefinitely
            player.prepareToPlay()
            //player.delegate = player
        } catch {
            // process error here
        }

        // enable play in background https://stackoverflow.com/a/30280699/1827488 but this audio still gets interrupted by alerts
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
            print("AVAudioSession Category Playback OK")
            do {
                try AVAudioSession.sharedInstance().setActive(true)
                print("AVAudioSession is Active")
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        } catch let error as NSError {
            print(error.localizedDescription)
        }

        // add observer to handle audio interruptions
        // using 'object: nil' does not have a noticeable effect
        let theSession = AVAudioSession.sharedInstance()
        NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(ViewController.handleInterruption(_:)), name: AVAudioSessionInterruptionNotification, object: theSession)

        // start playing audio
        player.play()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Furthermore, following an idea here, I have modified the handler to
func handleInterruption(notification: NSNotification) {
    //guard let interruptionType = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? AVAudioSessionInterruptionType else { print("wrong type"); return }
    if notification.name != AVAudioSessionInterruptionNotification
        || notification.userInfo == nil {
        return
    }
    var info = notification.userInfo!
    var intValue: UInt = 0
    (info[AVAudioSessionInterruptionTypeKey] as! NSValue).getValue(&intValue)
    if let interruptionType = AVAudioSessionInterruptionType(rawValue: intValue) {
        switch interruptionType {
        case .Began:
            print("began")
            // player is paused and session is inactive. need to update UI
            player.pause()
            print("audio paused")
        default:
            print("ended")
            /** /
            if let option = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? AVAudioSessionInterruptionOptions where option == .ShouldResume {
                // ok to resume playing, reactivate session and resume playing
                // need to update UI
                player.play()
                print("audio resumed")
            }
            / **/
            player.play()
            print("audio resumed")
        }
    }
}
The results are that "began", "audio paused", "ended" and "audio resumed" all show up in the console, but audio play is not actually resumed.
Note: I moved the player.play() outside of the commented-out where option == .ShouldResume if statement because that condition is not true when the .Ended interruption occurs.
(Posted on behalf of the question author, after it was posted in the question).
Solution found! Following discussion here, inserted this in viewDidLoad()
do {
    try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, withOptions: AVAudioSessionCategoryOptions.MixWithOthers)
} catch {
}
After clicking "ok" on the alarm interruption, the audio play continued. Unlike previously noted, the solution does NOT require an interruption handler (which @Leo Dabus has since removed).
However, if you are using an interruption handler, .play() must NOT be invoked within handleInterruption(), as doing so does NOT guarantee that play resumes and seems to prevent audioPlayerEndInterruption() from being called (see docs). Instead, .play() must be invoked within audioPlayerEndInterruption() (any of its 3 versions) to guarantee resumption.
Furthermore, AVAudioSession must be given the option .MixWithOthers noted by @Simon Newstead if you want your app to resume play after an interruption while the app is in the background. If a user wants the app to continue playing when it goes into the background, it is logical to assume the user also wants the app to resume playing after an interruption while in the background. Indeed, that is the behaviour exhibited by the Apple Music app.
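For newer Swift versions, the equivalent category setup looks roughly like this (a sketch using the modern AVAudioSession API):

do {
    try AVAudioSession.sharedInstance().setCategory(.playback, options: [.mixWithOthers])
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print("Failed to configure the audio session: \(error)")
}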
@rockhammers' suggestion worked for me. Here:
before the class:
let theSession = AVAudioSession.sharedInstance()
in viewDidLoad:
NotificationCenter.default.addObserver(self, selector: #selector(ViewController.handleInterruption(notification:)), name: NSNotification.Name.AVAudioSessionInterruption, object: theSession)
And then the function:
@objc func handleInterruption(notification: NSNotification) {
    print("handleInterruption")
    guard let value = (notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? NSNumber)?.uintValue,
          let interruptionType = AVAudioSessionInterruptionType(rawValue: value)
    else {
        print("notification.userInfo?[AVAudioSessionInterruptionTypeKey]", notification.userInfo?[AVAudioSessionInterruptionTypeKey])
        return
    }
    switch interruptionType {
    case .began:
        print("began")
        vox.pause()
        music.pause()
        print("audioPlayer.playing", vox.isPlaying)
        /**/
        do {
            try theSession.setActive(false)
            print("AVAudioSession is inactive")
        } catch let error as NSError {
            print(error.localizedDescription)
        }
        pause()
    default:
        print("ended")
        if let optionValue = (notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? NSNumber)?.uintValue, AVAudioSessionInterruptionOptions(rawValue: optionValue) == .shouldResume {
            print("should resume")
            // ok to resume playing, reactivate session and resume playing
            /**/
            do {
                try theSession.setActive(true)
                print("AVAudioSession is Active again")
                vox.play()
                music.play()
            } catch let error as NSError {
                print(error.localizedDescription)
            }
            play()
        }
    }
}
For some reason interruptionNotification was not working correctly on iOS 12.x, so I added silenceSecondaryAudioHintNotification.
When an alarm notification comes in, you can try using silenceSecondaryAudioHintNotification:
@objc func handleSecondaryAudioSilence(notification: NSNotification) {
    guard let userInfo = notification.userInfo,
          let typeValue = userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey] as? UInt,
          let type = AVAudioSession.SilenceSecondaryAudioHintType(rawValue: typeValue) else {
        return
    }
    if type == .end {
        // Other app audio stopped playing - restart secondary audio.
        reconnectAVPlayer()
    }
}
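For reference, the handler above still needs to be registered; a minimal sketch of the registration:

NotificationCenter.default.addObserver(
    self,
    selector: #selector(handleSecondaryAudioSilence(notification:)),
    name: AVAudioSession.silenceSecondaryAudioHintNotification,
    object: AVAudioSession.sharedInstance())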