I am trying to catch the play/stop/next/prev user actions from the lock screen while the player is active and playing, but somehow it's not working.
Inside class MusicPlayerViewController: BaseViewController I have:
override func viewDidLoad() {
    super.viewDidLoad()
    do {
        UIApplication.shared.beginReceivingRemoteControlEvents()
        print("bb> Receiving remote control events\n")
    } catch {
        print("bb> Audio Session error.\n")
    }
    let commandCenter = MPRemoteCommandCenter.shared()
    commandCenter.nextTrackCommand.isEnabled = true
    commandCenter.nextTrackCommand.addTarget(self, action: #selector(MusicPlayerViewController.nextTrackCommandSelector))
}
func nextTrackCommandSelector() {
    print("omg")
}
In the log I can see only
bb> Receiving remote control events
AppDelegate.swift also has:
override func remoteControlReceived(with event: UIEvent?) {
    print("remote::")
    guard let event = event else {
        print("no event\n")
        return
    }
    guard event.type == UIEventType.remoteControl else {
        print("received other event type\n")
        return
    }
    switch event.subtype {
    case UIEventSubtype.remoteControlPlay:
        print("received remote play\n")
    case UIEventSubtype.remoteControlPause:
        print("received remote pause\n")
    case UIEventSubtype.remoteControlTogglePlayPause:
        print("received toggle\n")
    case UIEventSubtype.remoteControlNextTrack:
        print("clicked next \n")
    case UIEventSubtype.remoteControlPreviousTrack:
        print("clicked Prev \n")
    default:
        print("received \(event.subtype) which we did not process\n")
    }
}
and the Audio background mode is enabled under Capabilities.
What did I miss?
A few things:
You're using both the delegate style and the MPRemoteCommandCenter style of remote event handling. Pick one rather than both, in case they're conflicting. Apple recommends the MPRemoteCommandCenter style, but if you're supporting older iOS versions you may need to stick with the delegate style.
If you do elect to use the delegate style, my recollection is that you must also become the first responder in order to begin receiving remote control events.
Regardless of which style of event handling you choose, you must play audio in your app to let the system know to route events to you. The lock screen (or audio control center) should have your app listed in the "Now Playing" area.
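For illustration, here's a minimal sketch of the MPRemoteCommandCenter-only approach, assuming a playback audio session and the Swift 3 era APIs used in the question (the player wiring itself is up to you):

import AVFoundation
import MediaPlayer

// Minimal sketch: activate a playback session so the system routes events to
// us, then register a handler for the next-track command.
func setupRemoteCommands() {
    try? AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
    try? AVAudioSession.sharedInstance().setActive(true)

    let commandCenter = MPRemoteCommandCenter.shared()
    commandCenter.nextTrackCommand.isEnabled = true
    commandCenter.nextTrackCommand.addTarget { _ in
        print("next track requested from lock screen")
        return .success
    }
}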
I found the solution.
For Swift 3 I had to add this:
try! AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, with: [])
try! AVAudioSession.sharedInstance().setActive(true)
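Note that try! crashes if the session can't be configured; a safer variant of the same two calls, wrapped in do/catch, would be:

do {
    try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, with: [])
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print("Audio session setup failed: \(error)")
}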
Related
I have an AVQueuePlayer that gets songs from Firebase Storage via their URLs and plays them in sequence.
static func playQueue() {
    for song in songs {
        guard let url = song.url else { return }
        lofiSongs.append(AVPlayerItem(url: url))
    }
    if queuePlayer == nil {
        queuePlayer = AVQueuePlayer(items: lofiSongs)
    } else {
        queuePlayer?.removeAllItems()
        lofiSongs.forEach { queuePlayer?.insert($0, after: nil) }
    }
    queuePlayer?.seek(to: .zero) // In case we added items back in
    queuePlayer?.play()
}
And this works great.
I can also make the lock screen controls appear and use the play and pause buttons like this:
private static func setRemoteControlActions() {
    let commandCenter = MPRemoteCommandCenter.shared()
    // Add handler for Play Command
    commandCenter.playCommand.addTarget { [self] event in
        queuePlayer?.play()
        return .success
    }
    // Add handler for Pause Command
    commandCenter.pauseCommand.addTarget { [self] event in
        if queuePlayer?.rate == 1.0 {
            queuePlayer?.pause()
            return .success
        }
        return .commandFailed
    }
}
The problem comes with setting the metadata of the player (name, image, etc).
I know it can be done once by setting MPMediaItemPropertyTitle and MPMediaItemArtwork, but how would I change it when the next track loads?
I'm not sure if my approach works for AVQueuePlayer, but for playing live streams with AVPlayer you can "listen" for metadata as it arrives.
extension ViewController: AVPlayerItemMetadataOutputPushDelegate {
    func metadataOutput(_ output: AVPlayerItemMetadataOutput, didOutputTimedMetadataGroups groups: [AVTimedMetadataGroup], from track: AVPlayerItemTrack?) {
        // look for metadata in groups
    }
}
I added the AVPlayerItemMetadataOutputPushDelegate via an extension to my ViewController.
I also found this post.
I hope this gives you a lead to a solution. As said, I'm not sure how this works with AVQueuePlayer.
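Another option, sketched here under the assumption that your AVPlayerItems map back to your own song model (the title closure below is a hypothetical lookup), is to observe the queue player's currentItem with KVO and rewrite the now-playing info on each track change:

import AVFoundation
import MediaPlayer

// Sketch: keep lock screen metadata in sync with an AVQueuePlayer by observing
// currentItem. `title` is a hypothetical AVPlayerItem-to-song-title lookup.
final class NowPlayingUpdater {
    private var observation: NSKeyValueObservation?

    func observe(_ player: AVQueuePlayer, title: @escaping (AVPlayerItem) -> String) {
        observation = player.observe(\.currentItem, options: [.new]) { _, change in
            guard let item = change.newValue ?? nil else { return }
            MPNowPlayingInfoCenter.default().nowPlayingInfo = [
                MPMediaItemPropertyTitle: title(item)
            ]
        }
    }
}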
I am trying to use Apple's Speech framework to do speech recognition on macOS 10.15.1. Before macOS 10.15, speech recognition was only available on iOS, but according to the documentation and this talk, should now be available on macOS as well.
However, all my attempts to use it have resulted in the SFSpeechRecognizer's isAvailable property being set to false. Per that talk and the documentation, I've enabled Siri and made sure that my app has the "Privacy - Speech Recognition Usage Description" key set to a string value in Info.plist.
I've also tried enabling code signing (which this question suggests might be necessary) and enabling Dictation under Keyboard > Dictation in System Preferences.
Here's some example code, although the specifics probably aren't important; I've tried it using a Storyboard instead of SwiftUI, putting the instantiation of the SFSpeechRecognizer inside and outside the requestAuthorization callback, leaving the locale unspecified, etc. Nothing seems to have any effect:
import SwiftUI
import Speech

struct ContentView: View {
    func tryAuth() {
        SFSpeechRecognizer.requestAuthorization { authStatus in
            switch authStatus {
            case .authorized:
                print("authorized")
            case .denied:
                print("denied")
            case .restricted:
                print("restricted")
            case .notDetermined:
                print("notDetermined")
            @unknown default:
                print("unanticipated auth status encountered")
            }
        }
    }

    func speechTest() {
        guard let recognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US")) else {
            // Not supported for device's locale
            print("couldnt get recognizer")
            return
        }
        if !recognizer.isAvailable {
            print("not available")
            return
        }
        print("Success")
    }

    var body: some View {
        VStack {
            Button("Try auth") {
                self.tryAuth()
            }
            Button("Test") {
                self.speechTest()
            }
        }
    }
}
What's especially odd is that if I run the app and then click the "Try auth" button, the authStatus returned from the callback is always .authorized. However, I've never been presented with a dialog asking me to authorize the app, and the app doesn't show up in the list of authorized apps under System Preferences > Security and Privacy > Privacy > Speech Recognition.
Nonetheless, clicking the "Test" button afterwards results in printing not available.
It seems like there's some hole in my understanding of the macOS privacy/permissions system, but I'm not sure how to debug further. I also think it should be possible to get this working, because I've seen other questions on StackOverflow suggesting that people have done so, for example here and here.
EDIT: At the suggestion of a comment, I tried simply ignoring the fact that isAvailable is false by replacing my check for it with code to actually try to transcribe a file, e.g.:
let request = SFSpeechURLRecognitionRequest(url: URL(fileURLWithPath: "/Users/james/Downloads/test.wav"))
recognizer.recognitionTask(with: request) { (result, error) in
    guard let result = result else {
        print("There was an error transcribing that file")
        print("print \(error!.localizedDescription)")
        return
    }
    if result.isFinal {
        print(result.bestTranscription.formattedString)
    }
}
Then it fails, printing: The operation couldn’t be completed. (kAFAssistantErrorDomain error 1700.). So it seems like it really is necessary to check for isAvailable, and my question remains: how to get it to be true?
I had similar problems with SFSpeechRecognizer... Perhaps you can set the delegate of SFSpeechRecognizer before requesting authorization, as shown here.
For example:
class ViewController: NSViewController {
    var speechRecognizer: SFSpeechRecognizer!

    override func viewDidLoad() {
        super.viewDidLoad()
        speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))
        speechRecognizer.delegate = self
    }

    override func viewWillAppear() {
        SFSpeechRecognizer.requestAuthorization { authStatus in
            ...
        }
        if !speechRecognizer.isAvailable {
            print("Not available!")
        }
        let url = Bundle.main.url(forResource: "sample", withExtension: "mp3")!
        let request = SFSpeechURLRecognitionRequest(url: url)
        // will now ask for authorisation
        speechRecognizer.recognitionTask(with: request) { (result, error) in
            ...
        }
    }
}

extension ViewController: SFSpeechRecognizerDelegate {
}
Then the authorisation dialog will be properly shown.
In addition, it seems that the user is only asked to give permission when there is a call to recognitionTask; calling requestAuthorization alone does not have any effect.
I have followed this answer to make my app detect when the crown and side button are pressed at the same time. This works for me, but only when the app is in the foreground. My app uses an HKWorkoutSession to stay running in the background, but it does not detect the press of the two buttons. I need the app to detect the pressing of both buttons in the background for my app to work properly.
My code for setting up the workout session is:
func configureWorkout() {
    configuration.activityType = .walking
    configuration.locationType = .unknown
    do {
        session = try HKWorkoutSession(healthStore: store, configuration: configuration)
        session!.delegate = self
        session!.startActivity(with: dateManager.getDate())
    } catch let error as NSError {
        fatalError("*** Unable to create the workout session: \(error.localizedDescription) ***")
    }
}
The code I have so far to detect the buttons being pressed is:
func workoutSession(_ workoutSession: HKWorkoutSession, didGenerate event: HKWorkoutEvent) {
    if event.type == HKWorkoutEventType.pauseOrResumeRequest {
        print("Detected Press")
    }
}
Thanks!
In my app, I have a switch that allows the user to put certain events in their agenda. I handle that as such:
@IBAction func putInAgenda(_ sender: UISwitch) {
    let store = manager.store
    if sender.isOn {
        store.requestAccess(to: EKEntityType.event, completion: {
            (accessGranted: Bool, error: Error?) in
            if accessGranted == true {
                self.eventsHandler.importEventsInAgenda(id)
            } else {
                DispatchQueue.main.async {
                    sender.isOn = false
                }
            }
        })
    } else {
        //
    }
    shared?.set(sender.isOn, forKey: "putInAgenda")
    shared?.synchronize()
}
However, against my expectation, store.requestAccess not only requests access but also permanently records the user's answer.
As a result, when the user CANCELS the dialog, the switch flips back (expected), but any subsequent attempt to move the switch to the ON position is answered with an OFF position, without a new dialog.
What should I do?
A privacy request is only ever asked once. If you detect that it is currently denied, you could either update the UI or prompt the user to go to Settings and turn it on. You can use UIApplication openSettingsURLString and UIApplication openURL to take the user to your app's settings page in the Settings app.
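For illustration, a minimal sketch of that check (iOS 10+ APIs, Swift 4.2 naming; the switch is the one from the question, and the helper name is hypothetical):

import EventKit
import UIKit

// Sketch: if Calendar access was already denied, flip the switch back off and
// send the user to this app's page in the Settings app.
func handleDeniedCalendarAccess(for sender: UISwitch) {
    switch EKEventStore.authorizationStatus(for: .event) {
    case .denied, .restricted:
        sender.isOn = false
        if let url = URL(string: UIApplication.openSettingsURLString) {
            UIApplication.shared.open(url)
        }
    default:
        break // .notDetermined: requestAccess will present the system dialog
    }
}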
I am following the instructions here; I've put together this test project to handle interruptions to audio play. Specifically, I'm using the alarm from the default iPhone Clock app as the interruption. It appears that the interruption handler is getting called, but it is not getting past the guard let interruptionType line, as "wrong type" showed up twice.
import UIKit
import AVFoundation

class ViewController: UIViewController {
    var player = AVAudioPlayer()
    let audioPath = NSBundle.mainBundle().pathForResource("rachmaninov-romance-sixhands-alianello", ofType: "mp3")!

    func handleInterruption(notification: NSNotification) {
        guard let interruptionType = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? AVAudioSessionInterruptionType else { print("wrong type"); return }
        switch interruptionType {
        case .Began:
            print("began")
            // player is paused and session is inactive. need to update UI
            player.pause()
            print("audio paused")
        default:
            print("ended")
            /**/
            if let option = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? AVAudioSessionInterruptionOptions where option == .ShouldResume {
                // ok to resume playing, re activate session and resume playing
                // need to update UI
                player.play()
                print("audio resumed")
            }
            /**/
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        do {
            try player = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath: audioPath))
            player.numberOfLoops = -1 // play indefinitely
            player.prepareToPlay()
            //player.delegate = player
        } catch {
            // process error here
        }
        // enable play in background https://stackoverflow.com/a/30280699/1827488 but this audio still gets interrupted by alerts
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
            print("AVAudioSession Category Playback OK")
            do {
                try AVAudioSession.sharedInstance().setActive(true)
                print("AVAudioSession is Active")
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        } catch let error as NSError {
            print(error.localizedDescription)
        }
        // add observer to handle audio interruptions
        // using 'object: nil' does not have a noticeable effect
        let theSession = AVAudioSession.sharedInstance()
        NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(ViewController.handleInterruption(_:)), name: AVAudioSessionInterruptionNotification, object: theSession)
        // start playing audio
        player.play()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Furthermore, following an idea here, I modified the handler to:
func handleInterruption(notification: NSNotification) {
    //guard let interruptionType = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? AVAudioSessionInterruptionType else { print("wrong type"); return }
    if notification.name != AVAudioSessionInterruptionNotification || notification.userInfo == nil {
        return
    }
    var info = notification.userInfo!
    var intValue: UInt = 0
    (info[AVAudioSessionInterruptionTypeKey] as! NSValue).getValue(&intValue)
    if let interruptionType = AVAudioSessionInterruptionType(rawValue: intValue) {
        switch interruptionType {
        case .Began:
            print("began")
            // player is paused and session is inactive. need to update UI
            player.pause()
            print("audio paused")
        default:
            print("ended")
            /** /
            if let option = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? AVAudioSessionInterruptionOptions where option == .ShouldResume {
                // ok to resume playing, re activate session and resume playing
                // need to update UI
                player.play()
                print("audio resumed")
            }
            / **/
            player.play()
            print("audio resumed")
        }
    }
}
The result is that "began", "audio paused", "ended" and "audio resumed" all show up in the console, but audio play is not actually resumed.
Note: I moved the player.play() outside of the commented out where option == .ShouldResume if statement because that if condition is not true when the .Ended interruption occurs.
(Posted on behalf of the question author, after it was posted in the question).
Solution found! Following discussion here, inserted this in viewDidLoad()
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, withOptions: AVAudioSessionCategoryOptions.MixWithOthers)
} catch {
}
After clicking "OK" on the alarm interruption, the audio play continued. Unlike previously noted, the solution does NOT require an interruption handler (which @Leo Dabus has since removed).
However, if you are using an interruption handler, .play() must NOT be invoked within handleInterruption(), as doing so does NOT guarantee that play resumes, and it seems to prevent audioPlayerEndInterruption() from being called (see docs). Instead, .play() must be invoked within audioPlayerEndInterruption() (any of its 3 versions) to guarantee resumption.
Furthermore, AVAudioSession must be given the .MixWithOthers option noted by @Simon Newstead if you want your app to resume play after an interruption while your app is in the background. It seems that if a user wants the app to continue playing when it goes into the background, it is logical to assume the user also wants the app to resume playing after an interruption while the app is in the background. Indeed, that is the behaviour exhibited by the Apple Music app.
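For reference, a sketch of what that delegate-based resumption can look like, using the same Swift 2 era API as the question's code (this assumes the view controller was set as player.delegate, e.g. player.delegate = self in viewDidLoad):

// Sketch: resume in the AVAudioPlayerDelegate callbacks, not in handleInterruption().
extension ViewController: AVAudioPlayerDelegate {
    func audioPlayerBeginInterruption(player: AVAudioPlayer) {
        print("interruption began")
    }

    func audioPlayerEndInterruption(player: AVAudioPlayer, withOptions flags: Int) {
        // Only resume if the system says it is appropriate to do so.
        if AVAudioSessionInterruptionOptions(rawValue: UInt(flags)).contains(.ShouldResume) {
            player.play()
            print("audio resumed")
        }
    }
}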
@rockhammers' suggestion worked for me. Here it is.
Before the class:
let theSession = AVAudioSession.sharedInstance()
In viewDidLoad:
NotificationCenter.default.addObserver(self, selector: #selector(ViewController.handleInterruption(notification:)), name: NSNotification.Name.AVAudioSessionInterruption, object: theSession)
And then the function:
func handleInterruption(notification: NSNotification) {
    print("handleInterruption")
    guard let value = (notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? NSNumber)?.uintValue,
        let interruptionType = AVAudioSessionInterruptionType(rawValue: value)
        else {
            print("notification.userInfo?[AVAudioSessionInterruptionTypeKey]", notification.userInfo?[AVAudioSessionInterruptionTypeKey])
            return
    }
    switch interruptionType {
    case .began:
        print("began")
        vox.pause()
        music.pause()
        print("audioPlayer.playing", vox.isPlaying)
        do {
            try theSession.setActive(false)
            print("AVAudioSession is inactive")
        } catch let error as NSError {
            print(error.localizedDescription)
        }
        pause()
    default:
        print("ended")
        if let optionValue = (notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? NSNumber)?.uintValue, AVAudioSessionInterruptionOptions(rawValue: optionValue) == .shouldResume {
            print("should resume")
            // ok to resume playing, reactivate session and resume playing
            do {
                try theSession.setActive(true)
                print("AVAudioSession is Active again")
                vox.play()
                music.play()
            } catch let error as NSError {
                print(error.localizedDescription)
            }
            play()
        }
    }
}
For some reason, the interruption notification is not working correctly on iOS 12.x, so I added silenceSecondaryAudioHintNotification.
With an alarm notification incoming, you can try using silenceSecondaryAudioHintNotification:
@objc func handleSecondaryAudioSilence(notification: NSNotification) {
    guard let userInfo = notification.userInfo,
        let typeValue = userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey] as? UInt,
        let type = AVAudioSession.SilenceSecondaryAudioHintType(rawValue: typeValue) else {
            return
    }
    if type == .end {
        // Other app audio stopped playing - restart secondary audio.
        reconnectAVPlayer()
    }
}
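This answer shows only the handler; the matching observer registration (a sketch, using the same Swift 4.2 naming the handler above already uses) would go in your setup code, for example:

// Sketch: register for the silence-secondary-audio hint, e.g. in viewDidLoad.
NotificationCenter.default.addObserver(self,
                                       selector: #selector(handleSecondaryAudioSilence(notification:)),
                                       name: AVAudioSession.silenceSecondaryAudioHintNotification,
                                       object: AVAudioSession.sharedInstance())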