Swift WebRTC cannot force audio to speaker

Before iOS 16, my WebRTC calls worked fine, but after upgrading to iOS 16 I can't hear anything and the other party can't hear me. Here is my code:
func speakerOn() {
    audioQueue.async { [weak self] in
        guard let self = self else { return }
        self.rtcAudioSession.lockForConfiguration()
        do {
            try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue, with: [.mixWithOthers])
            try self.rtcAudioSession.setMode(AVAudioSession.Mode.voiceChat.rawValue)
            try self.rtcAudioSession.overrideOutputAudioPort(.speaker)
            try self.rtcAudioSession.setActive(true)
        } catch let error {
            debugPrint("Couldn't force audio to speaker: \(error)")
        }
        self.rtcAudioSession.unlockForConfiguration()
    }
}
Here is the error I get:
"Couldn't force audio to speaker: Error Domain=NSOSStatusErrorDomain Code=1701737535 "Session activation failed" UserInfo={NSLocalizedDescription=Session activation failed}"

Related

SNAudioStreamAnalyzer not stopping sound classification request

I'm a student studying iOS development currently working on a simple AI project that utilizes SNAudioStreamAnalyzer to classify an incoming audio stream from the device's microphone. I can start the stream and analyze audio no problem, but I've noticed I can't seem to get my app to stop analyzing and close the audio input stream when I'm done. At the beginning, I initialize the audio engine and create the classification request like so:
private func startAudioEngine() {
    do {
        // start the stream of audio data
        try audioEngine.start()
        let snoreClassifier = try? SnoringClassifier2_0().model
        let classifySoundRequest = try audioAnalyzer.makeRequest(snoreClassifier)
        try streamAnalyzer.add(classifySoundRequest,
                               withObserver: self.audioAnalyzer)
    } catch {
        print("Unable to start AVAudioEngine: \(error.localizedDescription)")
    }
}
After I'm done classifying my audio stream, I attempt to stop the audio engine and close the stream like so:
private func terminateNight() {
    streamAnalyzer.removeAllRequests()
    audioEngine.stop()
    stopAndSaveNight()
    do {
        let session = AVAudioSession.sharedInstance()
        try session.setActive(false)
    } catch {
        print("unable to terminate audio session")
    }
    nightSummary = true
}
However, after I call the terminateNight() function my app will continue using the microphone and classifying the incoming audio. Here's my SNResultsObserving implementation:
class AudioAnalyzer: NSObject, SNResultsObserving {
    var prediction: String?
    var confidence: Double?
    let snoringEventManager: SnoringEventManager

    internal init(prediction: String? = nil, confidence: Double? = nil, snoringEventManager: SnoringEventManager) {
        self.prediction = prediction
        self.confidence = confidence
        self.snoringEventManager = snoringEventManager
    }

    func makeRequest(_ customModel: MLModel? = nil) throws -> SNClassifySoundRequest {
        if let model = customModel {
            let customRequest = try SNClassifySoundRequest(mlModel: model)
            return customRequest
        } else {
            throw AudioAnalysisErrors.ModelInterpretationError
        }
    }

    func request(_ request: SNRequest, didProduce: SNResult) {
        guard let classificationResult = didProduce as? SNClassificationResult else { return }
        let topClassification = classificationResult.classifications.first
        let timeRange = classificationResult.timeRange
        self.prediction = topClassification?.identifier
        self.confidence = topClassification?.confidence
        if self.prediction! == "snoring" {
            self.snoringEventManager.snoringDetected()
        } else {
            self.snoringEventManager.nonSnoringDetected()
        }
    }

    func request(_ request: SNRequest, didFailWithError: Error) {
        print("ended with error \(didFailWithError)")
    }

    func requestDidComplete(_ request: SNRequest) {
        print("request finished")
    }
}
It was my understanding that upon calling streamAnalyzer.removeAllRequests() and audioEngine.stop() the app would stop streaming from the microphone and call the requestDidComplete function, but this isn't the behavior I'm getting. Any help is appreciated!
From the OP's edit:
So I've realized it was a SwiftUI problem. I was calling the startAudioEngine() function in the initializer of the view it was declared on. I thought this would be fine, but since this view was embedded in a parent view, when SwiftUI updated the parent it re-initialized my view and as such called startAudioEngine() again. The solution was to call this function in an onAppear block so that it activates the audio engine only when the view appears, not when SwiftUI initializes it.
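A minimal sketch of that fix (the view and model names here are assumptions, not from the original post):
import SwiftUI

struct NightView: View {
    // Hypothetical observable object that owns the AVAudioEngine and SNAudioStreamAnalyzer.
    @StateObject private var analyzer = AudioAnalysisModel()

    var body: some View {
        Text("Analyzing…")
            .onAppear {
                // Runs when the view actually appears on screen, not every time
                // SwiftUI re-initializes the struct while updating the parent view.
                analyzer.startAudioEngine()
            }
    }
}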
I don't believe you should expect to receive requestDidComplete due to removing a request. You'd expect to receive that when you call completeAnalysis.
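So a teardown along these lines should also end the analysis cleanly (a sketch based on that comment, not from the original answer):
// Finish analysis explicitly before tearing the engine down.
streamAnalyzer.completeAnalysis()   // should trigger requestDidComplete on the observer
streamAnalyzer.removeAllRequests()
audioEngine.stop()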

I can detect a workout started in the background with Apple Watch, but how can I detect when the workout finishes?

I can detect a workout being started in the background on Apple Watch with the code below:
let workoutevent = HKObjectType.workoutType()
if store.authorizationStatus(for: workoutevent) != HKAuthorizationStatus.notDetermined {
    store.enableBackgroundDelivery(for: workoutevent, frequency: .immediate, withCompletion: { (worked, error) in
        print(worked)
        print(error)
        print("workoutevent enableBackgroundDelivery")
        guard worked else {
            self.logger.error("Unable to set up background delivery from HealthKit: \(error!.localizedDescription)")
            print("workoutevent unable to set up background ")
            fatalError()
        }
        if error != nil {
            print("workoutevent error is ")
            print(error)
        }
    })
    backgroundObserver3 =
        HKObserverQuery(sampleType: workoutevent,
                        predicate: nil,
                        updateHandler: processUpdate3(query:completionHandler3:error:))
    if let queryworkout = backgroundObserver3 {
        print("Starting workoutevent333 the background observer query.\(queryworkout)")
        store.execute(queryworkout)
    }
} else {
    print("not determined....")
}
Whenever I start a workout on the Apple Watch, processUpdate3 is called as expected, but what I need to know is when the user finishes the workout. How can I detect that?
func processUpdate3(query: HKObserverQuery,
                    completionHandler3: @escaping () -> Void,
                    error: Error?) {
    print("come here when work out started ")
    ...........
}
I don't see it in your code, but somewhere you must have an HKWorkoutSession. My app is set up to track running, and I configure the session like so:
let configuration = HKWorkoutConfiguration()
configuration.activityType = .running
do {
    // HKWorkoutSession is set up here.
    session = try HKWorkoutSession(healthStore: healthStore, configuration: configuration)
    workoutBuilder = session.associatedWorkoutBuilder()
} catch {
    // handle errors
}
When the user taps the end-workout button, I call session.end().
Here is a link to the documentation.
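For completeness, a rough sketch of the end-of-workout path on the watch side (the endWorkoutTapped name and the endCollection/finishWorkout steps are my additions, not from the original answer):
func endWorkoutTapped() {
    // End the HKWorkoutSession when the user taps the end-workout button.
    session.end()
    workoutBuilder.endCollection(withEnd: Date()) { _, error in
        if let error = error {
            print("endCollection failed: \(error.localizedDescription)")
            return
        }
        // finishWorkout saves the HKWorkout, which the paired iPhone's observer query can then pick up.
        workoutBuilder.finishWorkout { _, error in
            if let error = error {
                print("finishWorkout failed: \(error.localizedDescription)")
            }
        }
    }
}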

How can I gracefully handle denied authorization in AVCaptureDeviceInput.init?

The documentation for AVCaptureDeviceInput.init(device:) documents its parameters as:
device
The device from which to capture input.
outError
If an error occurs during initialization, upon return contains an NSError object describing the problem.
This outError out-parameter is, in Swift, represented as a thrown Error. I can catch and display this like so:
do {
    let deviceInput = try AVCaptureDeviceInput(device: device)
    // ...
}
catch {
    print("Error: \(error)")
}
There is one specific case I want to handle gracefully: when the user has denied authorization for the application to use the camera. In this case, I get the following output:
Error: Error Domain=AVFoundationErrorDomain Code=-11852 "Cannot use FaceTime HD Camera (Built-in)" UserInfo={NSLocalizedFailureReason=This app is not authorized to use FaceTime HD Camera (Built-in)., AVErrorDeviceKey=<AVCaptureDALDevice: 0x100520a60 [FaceTime HD Camera (Built-in)][0x8020000005ac8514]>, NSLocalizedDescription=Cannot use FaceTime HD Camera (Built-in)}
I need to distinguish this error type from other unexpected errors, like so:
do {
    let deviceInput = try AVCaptureDeviceInput(device: device)
    // ...
}
catch AVError.Code.applicationIsNotAuthorizedToUseDevice {
    // Expected error, handle gracefully
    errorMessageBox(errorText: "You have denied authorization to access your camera. Fix this in System Preferences > Security & Privacy.")
}
catch {
    // Unexpected errors
    errorMessageBox("Error: \(error)")
}
This is pseudocode and does not compile. I know that the error code -11852 is AVError.Code.applicationIsNotAuthorizedToUseDevice. However, I don't know how to get the error code out of the opaque error object in order to test it.
What is the specific type of the error thrown by AVCaptureDeviceInput.init(device:)? How do I extract the AVError.Code from it in order to handle this specific error?
There are two possible approaches. One is to check before you even attempt, e.g.
if AVCaptureDevice.authorizationStatus(for: .video) == .denied {
    offerToOpenSettings()
    return
}
The other approach is to catch the not authorized error:
let input: AVCaptureDeviceInput
do {
    input = try AVCaptureDeviceInput(device: camera)
} catch AVError.applicationIsNotAuthorizedToUseDevice {
    offerToOpenSettings()
    return
} catch {
    print("some other error", error)
    return
}
Note, that’s catching AVError.applicationIsNotAuthorizedToUseDevice, not AVError.Code.applicationIsNotAuthorizedToUseDevice.
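If you do need the AVError.Code itself out of a caught error, as the question asks, casting to AVError also works; this is a sketch rather than part of the original answer:
do {
    let deviceInput = try AVCaptureDeviceInput(device: camera)
    // ...
} catch let error as AVError {
    if error.code == .applicationIsNotAuthorizedToUseDevice {
        offerToOpenSettings()
    } else {
        // Some other AVFoundation error; the code is available for inspection.
        print("other AVError:", error.code.rawValue, error.localizedDescription)
    }
} catch {
    print("some other error", error)
}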
If, for example, this was an iOS app, you could have a function to offer to redirect the user to the Settings app:
func offerToOpenSettings() {
    guard
        let settings = URL(string: UIApplication.openSettingsURLString),
        UIApplication.shared.canOpenURL(settings)
    else { return }

    let alert = UIAlertController(title: nil, message: "Would you like to open Settings to enable permission to use the camera?", preferredStyle: .alert)
    alert.addAction(UIAlertAction(title: "Settings", style: .default) { _ in
        UIApplication.shared.open(settings)
    })
    alert.addAction(UIAlertAction(title: "Cancel", style: .cancel))
    present(alert, animated: true)
}
Note, since this is potentially presenting an alert, you don’t want to trigger this in viewDidLoad (which is too early in the process), but rather viewDidAppear.
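For example, combining it with the authorization check from the first approach (a sketch assuming a UIViewController subclass that owns the offerToOpenSettings() helper above):
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    // The view is on screen now, so presenting an alert here is safe.
    if AVCaptureDevice.authorizationStatus(for: .video) == .denied {
        offerToOpenSettings()
    }
}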
Or, on macOS, maybe something like:
func offerToOpenSettings() {
    let preferences = URL(string: "x-apple.systempreferences:com.apple.preference.security?Privacy_Camera")!

    let alert = NSAlert()
    alert.messageText = #"The camera is disabled. Please go to the “Camera” section in Security System Preferences, and enable this app."#
    alert.addButton(withTitle: "System Preferences")
    alert.addButton(withTitle: "Cancel")
    if alert.runModal() == .alertFirstButtonReturn {
        NSWorkspace.shared.open(preferences)
    }
}

Microphone not working with CallKit VoIP calls on iOS 13

I have implemented WebRTC and it's working perfectly. The problem is that when the application is not open (or is in a terminated state) and I receive a call, I am not able to pass my voice to the other user, although I am able to listen. I configure the AVAudioSession before answering the call in the CXAnswerCallAction delegate.
func configureAudioSession() {
    let sharedSession = AVAudioSession.sharedInstance()
    do {
        try sharedSession.setCategory(AVAudioSession.Category.playAndRecord)
        try sharedSession.setMode(AVAudioSession.Mode.voiceChat)
        try sharedSession.setPreferredIOBufferDuration(TimeInterval(0.005))
        try sharedSession.setPreferredSampleRate(44100.0)
    } catch {
        debugPrint("Failed to configure `AVAudioSession`")
    }
}

func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
    guard let call = ProviderDelegate.callManager.callWithUUID(action.callUUID) else {
        action.fail()
        return
    }
    configureAudioSession()
    call.answer { error in
        if let error = error {
            print("ERROR: failed to answer: \(error.localizedDescription)")
        }
        action.fulfill()
    }
}

How to resume audio after interruption in Swift?

I am following the instructions here, and I've put together this test project to handle interruptions to audio playback. Specifically, I'm using the alarm from the default iPhone Clock app as the interruption. It appears that the interruption handler is getting called but is not getting past the let interruptionType = line, as "wrong type" showed up twice.
import UIKit
import AVFoundation

class ViewController: UIViewController {

    var player = AVAudioPlayer()
    let audioPath = NSBundle.mainBundle().pathForResource("rachmaninov-romance-sixhands-alianello", ofType: "mp3")!

    func handleInterruption(notification: NSNotification) {
        guard let interruptionType = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? AVAudioSessionInterruptionType else { print("wrong type"); return }
        switch interruptionType {
        case .Began:
            print("began")
            // player is paused and session is inactive. need to update UI)
            player.pause()
            print("audio paused")
        default:
            print("ended")
            /**/
            if let option = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? AVAudioSessionInterruptionOptions where option == .ShouldResume {
                // ok to resume playing, re activate session and resume playing
                // need to update UI
                player.play()
                print("audio resumed")
            }
            /**/
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        do {
            try player = AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath: audioPath))
            player.numberOfLoops = -1 // play indefinitely
            player.prepareToPlay()
            //player.delegate = player
        } catch {
            // process error here
        }

        // enable play in background https://stackoverflow.com/a/30280699/1827488 but this audio still gets interrupted by alerts
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
            print("AVAudioSession Category Playback OK")
            do {
                try AVAudioSession.sharedInstance().setActive(true)
                print("AVAudioSession is Active")
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        } catch let error as NSError {
            print(error.localizedDescription)
        }

        // add observer to handle audio interruptions
        // using 'object: nil' does not have a noticeable effect
        let theSession = AVAudioSession.sharedInstance()
        NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(ViewController.handleInterruption(_:)), name: AVAudioSessionInterruptionNotification, object: theSession)

        // start playing audio
        player.play()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Furthermore, following an idea here, I have modified the handler to
func handleInterruption(notification: NSNotification) {
    //guard let interruptionType = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? AVAudioSessionInterruptionType else { print("wrong type"); return }
    if notification.name != AVAudioSessionInterruptionNotification
        || notification.userInfo == nil {
        return
    }
    var info = notification.userInfo!
    var intValue: UInt = 0
    (info[AVAudioSessionInterruptionTypeKey] as! NSValue).getValue(&intValue)
    if let interruptionType = AVAudioSessionInterruptionType(rawValue: intValue) {
        switch interruptionType {
        case .Began:
            print("began")
            // player is paused and session is inactive. need to update UI)
            player.pause()
            print("audio paused")
        default:
            print("ended")
            /** /
            if let option = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? AVAudioSessionInterruptionOptions where option == .ShouldResume {
                // ok to resume playing, re activate session and resume playing
                // need to update UI
                player.play()
                print("audio resumed")
            }
            / **/
            player.play()
            print("audio resumed")
        }
    }
}
The result is that "began", "audio paused", "ended" and "audio resumed" all show up in the console, but playback does not actually resume.
Note: I moved the player.play() outside of the commented-out where option == .ShouldResume if statement because that condition is not true when the .Ended interruption occurs.
(Posted on behalf of the question author, after it was posted in the question).
Solution found! Following the discussion here, I inserted this in viewDidLoad():
do {
    try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, withOptions: AVAudioSessionCategoryOptions.MixWithOthers)
} catch {
}
After clicking "ok" on the alarm interruption, the audio play continued. Unlike previously noted, the solution does NOT require an interruption handler (which #Leo Dabus has since removed).
However, if you are using an interruption handler, .play() must NOT be invoked within handleInterruption(), as doing so does NOT guarantee that playback resumes and seems to prevent audioPlayerEndInterruption() from being called (see docs). Instead, .play() must be invoked within audioPlayerEndInterruption() (any of its 3 versions) to guarantee resumption.
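A rough sketch of that delegate route (this assumes player.delegate is set to an object conforming to AVAudioPlayerDelegate; note these end-interruption delegate methods are deprecated in newer SDKs):
// One of the three AVAudioPlayerDelegate end-interruption variants mentioned above.
func audioPlayerEndInterruption(_ player: AVAudioPlayer, withOptions flags: Int) {
    // Resume here rather than in the interruption notification handler.
    player.play()
}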
Furthermore, AVAudioSession must be given the option .MixWithOthers, as noted by @Simon Newstead, if you want your app to resume play after an interruption while your app is in the background. It seems that if a user wants the app to continue playing when it goes into the background, it is logical to assume the user also wants the app to resume playing after an interruption while the app is in the background. Indeed, that is the behaviour exhibited by the Apple Music app.
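For reference, the equivalent of that setCategory call with the current AVAudioSession Swift API would be roughly the following (an assumption based on today's SDK, not part of the original answer):
do {
    // .playback keeps audio running in the background; .mixWithOthers allows resuming after interruptions.
    try AVAudioSession.sharedInstance().setCategory(.playback, options: [.mixWithOthers])
} catch {
    print("Failed to set audio session category: \(error)")
}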
@rockhammers' suggestion worked for me. Here it is.
Before the class:
let theSession = AVAudioSession.sharedInstance()
In viewDidLoad:
NotificationCenter.default.addObserver(self, selector: #selector(ViewController.handleInterruption(notification:)), name: NSNotification.Name.AVAudioSessionInterruption, object: theSession)
And then the function:
func handleInterruption(notification: NSNotification) {
    print("handleInterruption")
    guard let value = (notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? NSNumber)?.uintValue,
        let interruptionType = AVAudioSessionInterruptionType(rawValue: value)
        else {
            print("notification.userInfo?[AVAudioSessionInterruptionTypeKey]", notification.userInfo?[AVAudioSessionInterruptionTypeKey])
            return
    }
    switch interruptionType {
    case .began:
        print("began")
        vox.pause()
        music.pause()
        print("audioPlayer.playing", vox.isPlaying)
        /**/
        do {
            try theSession.setActive(false)
            print("AVAudioSession is inactive")
        } catch let error as NSError {
            print(error.localizedDescription)
        }
        pause()
    default:
        print("ended")
        if let optionValue = (notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? NSNumber)?.uintValue, AVAudioSessionInterruptionOptions(rawValue: optionValue) == .shouldResume {
            print("should resume")
            // ok to resume playing, re activate session and resume playing
            /**/
            do {
                try theSession.setActive(true)
                print("AVAudioSession is Active again")
                vox.play()
                music.play()
            } catch let error as NSError {
                print(error.localizedDescription)
            }
            play()
        }
    }
}
For some reason, interruptionNotification is not working correctly on iOS 12.x, so I added silenceSecondaryAudioHintNotification.
When an alarm notification comes in, you can try using silenceSecondaryAudioHintNotification.
@objc func handleSecondaryAudioSilence(notification: NSNotification) {
    guard let userInfo = notification.userInfo,
        let typeValue = userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey] as? UInt,
        let type = AVAudioSession.SilenceSecondaryAudioHintType(rawValue: typeValue) else {
            return
    }
    if type == .end {
        // Other app audio stopped playing - restart secondary audio.
        reconnectAVPlayer()
    }
}
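For that handler to fire, the observer also needs to be registered somewhere, for example (a sketch; reconnectAVPlayer() above is the answerer's own function):
// Register for the secondary-audio-silence hint on the shared audio session.
NotificationCenter.default.addObserver(self,
                                       selector: #selector(handleSecondaryAudioSilence(notification:)),
                                       name: AVAudioSession.silenceSecondaryAudioHintNotification,
                                       object: AVAudioSession.sharedInstance())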