Bluetooth headphones (Airpods) and AVAudioSession overriding - swift

I want to be able to use Airpods in my app, and I have some solution that works (almost).
// Plays the given speech text. The output route is configured first by
// selectDevice(audioSession:) on the shared audio session.
func playSound(soundSpeech: String) {
let audioSession = AVAudioSession.sharedInstance()
selectDevice(audioSession: audioSession)
// (playback code elided in the question)
...
}
/// Inspects the current output route of the shared audio session and
/// configures the category/options so playback goes to Bluetooth (AirPods)
/// when connected, otherwise to the built-in speaker, then ACTIVATES the
/// session so the new route takes effect immediately.
/// - Parameter audioSession: The shared `AVAudioSession` to configure.
func selectDevice(audioSession: AVAudioSession) {
    print("select device")
    var headphonesExist = false
    var bluetoothExist = false
    var speakerExist = false
    for output in audioSession.currentRoute.outputs {
        print(output)
        if output.portType == AVAudioSessionPortHeadphones || output.portType == AVAudioSessionPortHeadsetMic {
            headphonesExist = true
        }
        if output.portType == AVAudioSessionPortBluetoothA2DP || output.portType == AVAudioSessionPortBluetoothHFP {
            bluetoothExist = true
            print("bluetooth is enabled")
        }
        if output.portType == AVAudioSessionPortBuiltInSpeaker {
            speakerExist = true
        }
    }
    print("headphones: \(headphonesExist) bluetooth: \(bluetoothExist) speakerExist: \(speakerExist)")
    do {
        if bluetoothExist {
            // .allowBluetooth covers HFP routes as well, since the loop above
            // also detects AVAudioSessionPortBluetoothHFP.
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord,
                                         with: [.allowBluetoothA2DP, .allowBluetooth])
        } else {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord,
                                         with: .defaultToSpeaker)
        }
        // BUG FIX: the original never activated the session, so the category
        // change only took effect after another app (e.g. YouTube) activated
        // audio and control returned here — exactly the symptom described.
        try audioSession.setActive(true)
    } catch {
        print("error with audiosession: \(error)")
    }
}
The problem is that it only works if (or after) I switch to another app that plays sound (e.g. the YouTube app) and back. After that it works like a charm, but I believe it should work from the start.

Related

SwiftUI: stop speech recognition recording session upon navigating in TabView

A recording session for speech recognition starts upon opening and navigating to a tab in the TabView, but it also needs to keep recording until either 'back' or 'next' was recognised, after which it can stop the session. Now, if those words weren't recognised and the user navigates within the tabview, the session isn't stopped correctly and throws an error, which then prevents a new recording session from being started.
Error thrown:
[Utility] +[AFAggregator logDictationFailedWithError:] Error Domain=kAFAssistantErrorDomain Code=203 "Corrupt" UserInfo={NSLocalizedDescription=Corrupt, NSUnderlyingError=0x281281aa0 {Error Domain=SiriSpeechErrorDomain Code=102 "(null)"}}
I have tried implementing stopRecording() in the TabView's selection set method, right before a session is started (startRecording()), but that does not seem to work. Is there a way to stop the recording session upon navigating through the TabView? I want to eventually be able to navigate through the tabs using voice.
Content view:
/// Three-page tab view that (re)starts voice recognition whenever the
/// selected tab changes.
struct ContentView: View {
    // BUG FIX: was `#State`, which does not compile — the property wrapper
    // is `@State` (almost certainly a copy/paste garble).
    @State private var selectedTab = 1
    static let voiceRecogniser = VoiceRecogniser()
    var body: some View {
        VStack {
            TabView(
                selection: Binding(
                    get: { selectedTab },
                    set: {
                        selectedTab = $0
                        // Tear down the in-flight recognition session before
                        // starting a new one; starting on top of a live session
                        // is what triggers the kAFAssistantErrorDomain 203
                        // "Corrupt" error described in the question.
                        ContentView.voiceRecogniser.stopRecording()
                        ContentView.voiceRecogniser.startRecording()
                    })
            ) {
                Text("Tab 1")
                    .tag(1)
                Text("Tab 2")
                    .tag(2)
                Text("Tab 3")
                    .tag(3)
            }
            .tabViewStyle(PageTabViewStyle())
        }
        .onAppear {
            ContentView.voiceRecogniser.startRecording()
        }
    }
}
VoiceRecogniser class:
/// Listens to the microphone via AVAudioEngine and runs speech recognition,
/// looking for the voice commands "next" and "back".
class VoiceRecogniser {
    // Force-unwrap is intentional: en-GB is a supported locale, so a nil
    // recogniser would be a programmer error.
    private let speechRecogniser = SFSpeechRecognizer(locale: Locale(identifier: "en-GB"))!
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    private var recognitionTask: SFSpeechRecognitionTask?
    private let audioEngine = AVAudioEngine()

    /// Starts a new recognition session. Any session still in flight is
    /// cancelled first. No-ops when the recogniser is unavailable or the
    /// user has not authorised speech recognition.
    func startRecording() {
        guard speechRecogniser.isAvailable else {
            return
        }
        guard SFSpeechRecognizer.authorizationStatus() == .authorized else {
            SFSpeechRecognizer.requestAuthorization({ (status) in
            })
            return
        }
        // Cancel BEFORE dropping the reference (order matters — see stopRecording).
        recognitionTask?.cancel()
        self.recognitionTask = nil
        let audioSession = AVAudioSession.sharedInstance()
        do {
            // Ensure session can play audio as well as record. The mode is
            // supplied here, so the original's extra setMode(.measurement)
            // call was redundant and has been removed.
            try audioSession.setCategory(AVAudioSession.Category.playAndRecord, mode: .measurement, options: .duckOthers)
            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            print("Error with audio session")
        }
        let inputNode = audioEngine.inputNode
        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        guard let recognitionRequest = recognitionRequest else { fatalError("Unable to create a SFSpeechAudioBufferRecognitionRequest object") }
        recognitionRequest.shouldReportPartialResults = true
        self.recognitionTask = speechRecogniser.recognitionTask(with: recognitionRequest) { result, error in
            var isFinal = false
            if let result = result {
                let spokenText = result.bestTranscription.formattedString
                let voiceCommands = ["Next", "Back"]
                let string = spokenText.lowercased()
                for command in voiceCommands {
                    if (string.contains(command.lowercased())) {
                        switch command {
                        case "Next":
                            print("Go next")
                        case "Back":
                            print("Go back")
                        default:
                            print("Default")
                        }
                        isFinal = true // stop listening once a voice command was recognised
                    }
                }
                //isFinal = true // stop listening after saying anything
            }
            if error != nil || isFinal {
                self.stopRecording()
            }
        }
        let recordingFormat = inputNode.outputFormat(forBus: 0)
        // Remove any tap left over from a previous session before installing
        // a new one (installing twice on the same bus raises an exception).
        inputNode.removeTap(onBus: 0)
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
            self.recognitionRequest?.append(buffer)
        }
        audioEngine.prepare()
        do {
            try audioEngine.start()
        } catch {
            // (typo "whith" fixed)
            print("Error with starting audio engine")
        }
    }

    /// Stops the engine, removes the mic tap, and tears down the request/task.
    func stopRecording() {
        self.audioEngine.stop()
        self.audioEngine.inputNode.removeTap(onBus: 0)
        self.recognitionRequest?.endAudio()
        // BUG FIX: the original set `recognitionTask = nil` BEFORE calling
        // cancel(), so cancel() always ran on nil and the task kept going —
        // which is why the next startRecording() failed with the
        // kAFAssistantErrorDomain 203 "Corrupt" error.
        self.recognitionTask?.cancel()
        self.recognitionTask = nil
        self.recognitionRequest = nil
    }
}

AudioKit Conflict between Midi Instrument and Mic behavior

I am trying to make my app produce MIDI notes while simultaneously listening to the input from the mic:
// AudioKit engine and node graph (question snippet; these live on an
// enclosing view controller whose class header is not shown).
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
// Pass-through faders chained off the mic (configured in viewDidLoad).
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
// Final fader in the chain; given gain 0 in viewDidLoad so mic input is
// tracked but not audibly monitored.
var silence: Fader!
// Pitch/amplitude tracker attached to the mic.
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")
// Starts sounding `note` on the sampler (velocity 90, channel 0).
func noteOn(note: MIDINoteNumber) {
instrument.play(noteNumber: note, velocity: 90, channel: 0)
}
// Stops sounding `note` on channel 0.
func noteOff(note: MIDINoteNumber) {
instrument.stop(noteNumber: note, channel: 0)
}
override func viewDidLoad() {
super.viewDidLoad()
print("init started ")
// Fail fast if the engine exposes no input node / device.
guard let input = engine.input else { fatalError() }
guard let device = engine.inputDevice else { fatalError() }
print("input selected")
initialDevice = device
// NOTE(review): this assignment is immediately overwritten by
// `engine.output = silence` below, so the instrument never ends up in
// the output chain in this version.
engine.output = instrument
mic = input
// Build the mic -> fader chain, ending in a zero-gain fader so the mic
// is analysed but not heard.
tappableNodeA = Fader(mic)
tappableNodeB = Fader(tappableNodeA)
tappableNodeC = Fader(tappableNodeB)
silence = Fader(tappableNodeC, gain: 0)
engine.output = silence
print("objects init")
tracker = PitchTap(mic) { pitch, amp in
DispatchQueue.main.async {
self.update(pitch[0], amp[0])
}
}
// NOTE(review): start() loads the instrument AFTER the node graph was
// reconfigured; per the fix shown later in this question, the load must
// happen before the fader/tap setup or the sampler throws
// "required condition is false: _engine != nil".
start()
// other init that are not related
}
The start function is written below:
// Loads the bundled EXS sampler instrument, then starts the engine and
// the pitch tracker. (This combined version is what triggered the
// "_engine != nil" exception — see the split start1()/start() fix below.)
func start() {
do {
if let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") {
try instrument.loadInstrument(url: fileURL)
} else {
Log("Could not find file")
}
} catch {
Log("Could not load instrument")
}
do {
try engine.start()
tracker.start()
} catch let err {
print("caught error at start")
Log(err)
}
}
As soon as I make the first try call to set up the instrument, I get the following error:
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: _engine != nil
Why would the condition be false?
Ok, so the solution was to separate the calls into two functions, and position the first call before tapNode configuration:
// Same properties as above — working version of the snippet.
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
// Pass-through faders chained off the mic.
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
// Zero-gain tail of the chain: mic is analysed but not audibly monitored.
var silence: Fader!
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")
// Starts sounding `note` on the sampler (velocity 90, channel 0).
func noteOn(note: MIDINoteNumber) {
instrument.play(noteNumber: note, velocity: 90, channel: 0)
}
// Stops sounding `note` on channel 0.
func noteOff(note: MIDINoteNumber) {
instrument.stop(noteNumber: note, channel: 0)
}
override func viewDidLoad() {
super.viewDidLoad()
print("init started ")
guard let input = engine.input else { fatalError() }
guard let device = engine.inputDevice else { fatalError() }
print("input selected")
initialDevice = device
engine.output = instrument
// Fix from the question: load the sampler instrument BEFORE the
// fader/tap configuration below; this is what resolved the
// "required condition is false: _engine != nil" exception.
start1()
mic = input
tappableNodeA = Fader(mic)
tappableNodeB = Fader(tappableNodeA)
tappableNodeC = Fader(tappableNodeB)
// Zero-gain fader so the mic is tracked but not monitored audibly.
silence = Fader(tappableNodeC, gain: 0)
// NOTE(review): this replaces `engine.output = instrument` above, which
// may be why "there is still no sound being played" — the instrument is
// no longer connected to the output.
engine.output = silence
print("objects init")
tracker = PitchTap(mic) { pitch, amp in
DispatchQueue.main.async {
self.update(pitch[0], amp[0])
}
}
// Starts the engine and the pitch tracker (instrument already loaded).
start()
// other init that are not related
}
/// Loads the bundled "sawPiano1.exs" sampler instrument into `instrument`,
/// logging (not throwing) when the file is missing or fails to load.
func start1(){
    guard let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") else {
        Log("Could not find file")
        return
    }
    do {
        try instrument.loadInstrument(url: fileURL)
    } catch let err {
        Log("Could not load instrument")
        Log(err)
    }
}
// Starts the audio engine and then the pitch tracker; the instrument is
// expected to have been loaded already by start1().
func start() {
do {
try engine.start()
tracker.start()
} catch let err {
print("caught error at start")
Log(err)
}
}
Although the exception is now gone, there is still no sound being played for some reason.

Swift 5 if camera in use

How can I check whether the iPhone's back camera is in use with Swift 5+, and then turn on the flashlight while the camera is running? I have my function to turn on the flash, but I don't know how to access the camera's status.
/// Toggles the device torch (flashlight): off if currently on, on otherwise.
/// Silently returns when no video capture device with a torch is available.
func toggleTorch() {
    guard let device = AVCaptureDevice.default(for: AVMediaType.video),
          device.hasTorch
    else { return }
    do {
        // Torch mode may only be changed while the device is locked.
        try device.lockForConfiguration()
        device.torchMode = (device.torchMode == AVCaptureDevice.TorchMode.on) ? .off : .on
        device.unlockForConfiguration()
    } catch {
        print("Torch could not be used")
    }
}
And i have tried with this:
// Tracks which camera the app believes is active. Implicitly unwrapped;
// nil (never assigned) falls through to the final branch below.
var cameraPosition: AVCaptureDevice.Position!
/// Flips the tracked camera position; turns the torch on when switching
/// back to the rear camera.
func switchCamera() {
    if cameraPosition == .back {
        cameraPosition = AVCaptureDevice.Position.front
        print(cameraPosition)
    } else if cameraPosition == .front {
        cameraPosition = AVCaptureDevice.Position.back
        toggleTorch()
    } else {
        // Covers the unset (nil) case: default to the front camera.
        cameraPosition = AVCaptureDevice.Position.front
        print(cameraPosition)
    }
}

How to detect volume button press on tvOS remote

I'm trying to find a way to observe the player so that I can detect when a user increases or decreases the volume on the Apple TV. I have managed to get this to work on iOS by using:
// iOS approach from the question: string-based KVO on the audio session's
// outputVolume. NOTE(review): `audioSession` is declared but never assigned
// in this snippet, so the optional-chained addObserver call does nothing
// until it is set.
var audioSession: AVAudioSession?
audioSession?.addObserver(self, forKeyPath: "outputVolume", options: [.new], context: &videoPlayerViewControllerKVOContext)
// Fragment of observeValue(forKeyPath:of:change:context:) handling the change.
if keyPath == "outputVolume" {
guard let mute = (change?[NSKeyValueChangeKey.newKey] as? NSNumber)?.floatValue else {
return
}
var isMuted = false
// Volume reached zero while the player was not muted -> treat as muted.
if (mute == 0) && (!player.isMuted) {
isMuted = true
// NOTE(review): `mute.isZero` duplicates the `mute == 0` test above, so
// this branch only differs by the player's muted flag — verify intent.
} else if (mute.isZero) && (player.isMuted) {
isMuted = false
}
}
However this doesn't work for tvOS. Is there a way to do this on tvOS?
It is not clear what the rest of your code looks like, but you have to keep a reference to the created observer.
Here is possible solution (tested with Xcode 12.1)
// Block-based KVO solution (tested with Xcode 12.1). The returned
// NSKeyValueObservation MUST be kept in a stored property — if it is not
// retained, the observation is torn down immediately.
private var observer: NSKeyValueObservation?
// ... other code
self.observer = audioSession?.observe(\.outputVolume) { [weak self] (audioSession, _) in
// Re-bind self strongly for the duration of the callback.
guard let `self` = self else { return }
let mute = audioSession.outputVolume
var isMuted = false
if (mute == 0) && (!self.player.isMuted) {
isMuted = true
} else if (mute.isZero) && (self.player.isMuted) {
isMuted = false
}
// do what's needed here with `isMuted`
}

Swift: UIRefreshControl does not pull through when dragging down

I have a working UITableView in which I added a UIRefreshControl like this:
// Pull-to-refresh control for friendTableView (Swift 1.2/2-era snippet).
var refresher: UIRefreshControl!
...
// this is ViewDidLoad()
// pull to refresh
refresher = UIRefreshControl()
refresher.tintColor = globalClass.blue
// Empty title: only the spinner is shown.
refresher.attributedTitle = NSAttributedString(string: "")
// String selector (pre-#selector syntax): must name a zero-argument
// `loadFriends` method on self.
refresher.addTarget(self, action: "loadFriends", forControlEvents: UIControlEvents.ValueChanged)
friendTableView.addSubview(refresher)
This works well on other UITableViews, but not on this one for some reason. The spinner never really spins and just snaps to the top when the dragging stops. What could cause this?
EDIT: loadFriends function
//load friends
func loadFriends() {
globalClass.requestsIn = []
globalClass.requestsOut = []
globalClass.finalSections = []
globalClass.myFriends = []
finalSections = []
sectionsG1 = []
let queryIn1 = PFQuery(className:"Friendship")
queryIn1.whereKey("toUser", equalTo: PFUser.currentUser()!.username!)
let queryOut = PFQuery(className:"Friendship")
queryOut.whereKey("fromUser", equalTo: PFUser.currentUser()!.username!)
let query = PFQuery.orQueryWithSubqueries([queryIn1, queryOut])
query.orderByDescending("createdAt")
query.findObjectsInBackgroundWithBlock {
(objects: [AnyObject]?, error: NSError?) -> Void in
if error == nil {
if objects!.count == 0 {
self.friendtTableView.reloadData()
self.text()
} else {
for object in objects! {
let toUser:String = object["toUser"] as! String
let status:String = object["status"] as! String
if toUser == PFUser.currentUser()?.username {
if status == "pending" {
globalClass.requestsIn.append(object["fromUser"] as! String)
self.update()
} else if status == "approved" {
globalClass.myFriends.append(object["fromUser"] as! String)
globalClass.myFriends = globalClass.myFriends.sorted { $0.localizedCaseInsensitiveCompare($1) == NSComparisonResult.OrderedAscending }
self.update()
}
} else { if status == "pending" || status == "rejected" {
globalClass.requestsOut.append(object["toUser"] as! String)
self.update()
} else if status == "approved" {
globalClass.myFriends.append(object["toUser"] as! String)
globalClass.myFriends = globalClass.myFriends.sorted { $0.localizedCaseInsensitiveCompare($1) == NSComparisonResult.OrderedAscending }
self.update()
}
}
}
}
if self.tableSegment == 1 {
if globalClass.requestsIn.count == 0 {
self.friendTableView.reloadData()
self.text()
} else {
}
} else if self.tableSegment == 2 {
if globalClass.requestsOut.count == 0 {
self.friendTableView.reloadData()
self.text()
} else {
}
} else if self.tableSegment == 0 {
if globalClass.myFriends.count == 0 {
self.friendTableView.reloadData()
self.text()
} else {
}
}
self.refresher.endRefreshing()
} else {
}
}
}