Is it possible to detect that the user has an external headset plugged into the iPhone's 3.5mm connector or the 30-pin connector? I want to output audio only to an external audio device, and keep silent if nothing is connected.
The answer is very similar to the answer to this question, but you'll want to get the kAudioSessionProperty_AudioRoute property instead.
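If you can target iOS 6 or later, the same information is available from AVAudioSession without the deprecated C property. A minimal Swift sketch (which outputs count as "external" is my assumption, not part of the original answer):

import AVFoundation

// Sketch, assuming the AVAudioSession route API (iOS 6+) rather than the
// deprecated kAudioSessionProperty_AudioRoute C property. Returns true if
// any current output is something other than the built-in speaker/receiver.
func isExternalAudioOutputConnected() -> Bool {
    let outputs = AVAudioSession.sharedInstance().currentRoute.outputs
    return outputs.contains { output in
        output.portType == AVAudioSessionPortHeadphones ||   // 3.5mm jack
        output.portType == AVAudioSessionPortLineOut ||      // dock connector line out
        output.portType == AVAudioSessionPortBluetoothA2DP ||
        output.portType == AVAudioSessionPortBluetoothHFP
    }
}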
Call this method to find out whether a Bluetooth headset is connected or not.
First, import the framework: #import <AVFoundation/AVFoundation.h>
- (BOOL)isBluetoothHeadsetConnected
{
    AVAudioSession *session = [AVAudioSession sharedInstance];
    AVAudioSessionRouteDescription *routeDescription = [session currentRoute];
    NSLog(@"Current Routes : %@", routeDescription);
    if (routeDescription)
    {
        NSArray *outputs = [routeDescription outputs];
        if (outputs && [outputs count] > 0)
        {
            AVAudioSessionPortDescription *portDescription = [outputs objectAtIndex:0];
            NSString *portType = [portDescription portType];
            NSLog(@"portType : %@", portType);
            if (portType && [portType isEqualToString:@"BluetoothA2DPOutput"])
            {
                return YES;
            }
        }
    }
    return NO;
}
There is a nice article about this in Apple's documentation:
https://developer.apple.com/documentation/avfoundation/avaudiosession/responding_to_audio_session_route_changes
You only have to verify whether portType == AVAudioSessionPortBluetoothA2DP:
func setupNotifications() {
    let notificationCenter = NotificationCenter.default
    notificationCenter.addObserver(self,
                                   selector: #selector(handleRouteChange),
                                   name: .AVAudioSessionRouteChange,
                                   object: nil)
}

@objc func handleRouteChange(notification: Notification) {
    guard let userInfo = notification.userInfo,
        let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
        let reason = AVAudioSessionRouteChangeReason(rawValue: reasonValue) else {
            return
    }
    switch reason {
    case .newDeviceAvailable:
        let session = AVAudioSession.sharedInstance()
        for output in session.currentRoute.outputs where output.portType == AVAudioSessionPortBluetoothA2DP {
            headsetConnected = true
            break
        }
    case .oldDeviceUnavailable:
        if let previousRoute =
            userInfo[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription {
            for output in previousRoute.outputs where output.portType == AVAudioSessionPortBluetoothA2DP {
                headsetConnected = false
                break
            }
        }
    default: ()
    }
}

func isBluetoothHeadsetConnected() -> Bool {
    var result = false
    let session = AVAudioSession.sharedInstance()
    for output in session.currentRoute.outputs where output.portType == AVAudioSessionPortBluetoothA2DP {
        result = true
    }
    return result
}
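For completeness, here is how the pieces might be wired up in a view controller; the call sites below are my assumption, not part of the original answer:

override func viewDidLoad() {
    super.viewDidLoad()
    // Seed the initial state before any route-change notification arrives.
    headsetConnected = isBluetoothHeadsetConnected()
    setupNotifications()
}

deinit {
    NotificationCenter.default.removeObserver(self)
}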
Related
I want to be able to use AirPods in my app, and I have a solution that works (almost).
func playSound(soundSpeech: String) {
    let audioSession = AVAudioSession.sharedInstance()
    selectDevice(audioSession: audioSession)
    ...
}

func selectDevice(audioSession: AVAudioSession) {
    print("select device")
    var headphonesExist = false
    var bluetoothExist = false
    var speakerExist = false
    for output in audioSession.currentRoute.outputs {
        print(output)
        if output.portType == AVAudioSessionPortHeadphones || output.portType == AVAudioSessionPortHeadsetMic {
            headphonesExist = true
        }
        if output.portType == AVAudioSessionPortBluetoothA2DP || output.portType == AVAudioSessionPortBluetoothHFP {
            bluetoothExist = true
            print("bluetooth is enabled")
        }
        if output.portType == AVAudioSessionPortBuiltInSpeaker {
            speakerExist = true
        }
    }
    print("headphones: \(headphonesExist) bluetooth: \(bluetoothExist) speaker: \(speakerExist)")
    if bluetoothExist {
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .allowBluetoothA2DP)
        } catch {
            print("error with audio session: bluetooth")
        }
    } else {
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
        } catch {
            print("error with audio session: default speaker")
        }
    }
}
The problem is that it only works if (or after) I switch to another app that plays sound (e.g. the YouTube app) and back. After that it works like a charm, but I believe it should work from the start.
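For reference, a minimal sketch of configuring the session once at startup instead of inside playSound; that the late category setup is the cause is an assumption on my part, not a confirmed fix:

// Sketch: configure and activate the session once, e.g. from viewDidLoad,
// before the first playback. The assumption is that routing only settles
// after the category is set and the session has been activated.
func configureAudioSession() {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord,
                                     with: [.allowBluetoothA2DP, .defaultToSpeaker])
        try audioSession.setActive(true)
    } catch {
        print("error setting up audio session: \(error)")
    }
}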
I'm rebuilding the Google Mobile Vision "Googly Eyes" demo in Swift 3. I figured almost all of it out, but I'm stuck on translating a function from Objective-C to Swift.
The Objective-C function in the demo view controller is:
- (AVCaptureDeviceInput *)cameraForPosition:(AVCaptureDevicePosition)desiredPosition {
    BOOL hadError = NO;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([device position] == desiredPosition) {
            NSError *error = nil;
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                                error:&error];
            if (error) {
                hadError = YES;
                NSLog(@"Could not initialize for AVMediaTypeVideo for device %@", device);
            } else if ([self.session canAddInput:input]) {
                return input;
            }
        }
    }
    if (!hadError) {
        NSLog(@"No camera found for requested orientation");
    }
    return nil;
}
I've translated that into the following:
func camera(for desiredPosition: AVCaptureDevicePosition) -> AVCaptureDeviceInput {
    var hadError: Bool = false
    for device: AVCaptureDevice in AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) { // ERROR ON THIS LINE
        if device.position() == desiredPosition {
            var error: Error? = nil
            let input = try? AVCaptureDeviceInput(device: device)
            if error != nil {
                hadError = true
                print("Could not initialize for AVMediaTypeVideo for device \(device)")
            }
            else if session.canAdd(input!) {
                return input!
            }
        }
    }
    if !hadError {
        print("No camera found for requested orientation")
    }
}
The error I'm getting is on the 3rd line (for device: AVCaptureDevice in AVCaptureDevice.devices...): 'Any' is not convertible to 'AVCaptureDevice'. I'm not very familiar with Objective-C and have never used AVCaptureSession before, so I've been struggling to figure it out. Any suggestions on how I need to rewrite this "for device" statement?
I'm assuming you use the latest release version of Xcode, which is Xcode 8.3.3 at the time of this post. I'll show you code for some simplified cases.
Case 1
Continue using the deprecated method (not recommended):
func camera(for desiredPosition: AVCaptureDevicePosition) -> AVCaptureDeviceInput? {
    var hadError: Bool = false
    for device in AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice] { //### You need to cast
        if device.position == desiredPosition {
            do {
                let input = try AVCaptureDeviceInput(device: device)
                if session.canAddInput(input) { //###<-
                    return input
                }
            } catch {
                hadError = true
                print("Could not initialize for AVMediaTypeVideo for device \(device) with \(error)")
            }
        }
    }
    if !hadError {
        print("No camera found for requested orientation")
    }
    return nil
}
The return type of devices(withMediaType:) is [Any]!, so if you want to use each element as an AVCaptureDevice, you need to cast it. This is similar to your code, but I fixed some parts: canAdd became canAddInput, and the error handling now uses do-catch.
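As a side note, if you'd rather avoid the force cast, a conditional-cast sketch (my preference only, not required):

// Conditionally cast each element instead of force-casting the whole array.
let devices = (AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) ?? [])
    .flatMap { $0 as? AVCaptureDevice }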
Case 2
Use AVCaptureDeviceDiscoverySession, ignoring versions of iOS older than 10.0.
func camera(for desiredPosition: AVCaptureDevicePosition) -> AVCaptureDeviceInput? {
    var hadError: Bool = false
    for device in AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: desiredPosition).devices {
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if session.canAddInput(input) {
                return input
            }
        } catch {
            hadError = true
            print("Could not initialize for AVMediaTypeVideo for device \(device) with \(error)")
        }
    }
    if !hadError {
        print("No camera found for requested orientation")
    }
    return nil
}
This is a simple example of using AVCaptureDeviceDiscoverySession; .builtInWideAngleCamera matches the front camera, the normal back camera, and the wide-angle camera on dual-camera devices.
NOTE
Many methods in the code above have changed their signatures in Swift 4/Xcode 9, including the return types. And in the iOS 11 SDK, there's a nice method:
class func default(_ deviceType: AVCaptureDevice.DeviceType, for mediaType: AVMediaType?, position: AVCaptureDevice.Position) -> AVCaptureDevice?
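A minimal Swift 4 sketch using it (assuming the iOS 11 SDK; the helper name is mine):

// Pick the front wide-angle camera directly, without looping over a
// discovery session, and wrap it in a device input if the session accepts it.
func frontCameraInput(for session: AVCaptureSession) throws -> AVCaptureDeviceInput? {
    guard let device = AVCaptureDevice.default(.builtInWideAngleCamera,
                                               for: .video,
                                               position: .front) else {
        return nil // no matching camera on this device
    }
    let input = try AVCaptureDeviceInput(device: device)
    return session.canAddInput(input) ? input : nil
}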
I tried several different methods but they didn't help me. I want to change the video orientation in AVFoundation. How can I do it?
override func viewDidLoad() {
    super.viewDidLoad()
    self.definesPresentationContext = true
    // device capture for audio and video
    let captureVideo = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let captureAudio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    // input
    let audioInput = try! AVCaptureDeviceInput(device: captureAudio)
    let videoInput = try! AVCaptureDeviceInput(device: captureVideo)
    let capturePreview = AVCaptureVideoPreviewLayer(session: captureSession)
    capturePreview.frame = self.view.frame
    capturePreview.videoGravity = AVLayerVideoGravityResizeAspect
    self.view.layer.addSublayer(capturePreview)
    // setting of session
    captureSession.beginConfiguration()
    if captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
    }
    if captureSession.canAddInput(videoInput) {
        captureSession.addInput(videoInput)
    }
    // output
    movieOutput.movieFragmentInterval = kCMTimeInvalid
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
        print("added movie output")
    }
    captureSession.sessionPreset = AVCaptureSessionPresetHigh
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
@IBAction func startStopSession(sender: UIBarButtonItem) {
    if movieOutput.recording {
        movieOutput.stopRecording()
    } else {
        print("start recording")
        captureSession.beginConfiguration()
        for connection in movieOutput.connections as! [AVCaptureConnection] {
            for port in connection.inputPorts as! [AVCaptureInputPort] {
                print(port)
                if port.mediaType == AVMediaTypeVideo {
                    print(port)
                    self.captureConnection = AVCaptureConnection(inputPorts: [port], output: movieOutput)
                    if self.captureConnection.supportsVideoOrientation {
                        self.captureConnection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                        print("video orientation right")
                    }
                }
            }
        }
        if self.captureConnection.supportsVideoStabilization {
            captureConnection.preferredVideoStabilizationMode = .Cinematic
            print("true video stabilization")
        }
        let digit = returnFileDigit()
        let path = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).last!.path!.stringByAppendingString("/movie-\(digit).mp4")
        captureSession.commitConfiguration()
        let url = NSURL(fileURLWithPath: path)
        print(movieOutput.connections)
        movieOutput.startRecordingToOutputFileURL(url, recordingDelegate: self)
    }
}
I tried this to find the outputs, but I don't know how to change the orientation:
for output in captureSession.outputs as! [AVCaptureOutput] {
    output.connections.first?.mediaType
    for connection in output.connections {
        if connection.mediaType == AVMediaTypeVideo {
            print(connection.mediaType)
            connection.videoOrientation = .LandscapeRight
        }
    }
}
I changed my code and it works for me:
@IBAction func startStopSession(sender: UIBarButtonItem) {
    if movieOutput.recording {
        movieOutput.stopRecording()
    } else {
        print("start recording")
        movieOutput.connectionWithMediaType(AVMediaTypeVideo).videoOrientation = returnedOrientation()
        if movieOutput.connectionWithMediaType(AVMediaTypeVideo).supportsVideoStabilization {
            movieOutput.connectionWithMediaType(AVMediaTypeVideo).preferredVideoStabilizationMode = .Cinematic
        }
        let digit = returnFileDigit()
        let path = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).last!.path!.stringByAppendingString("/movie-\(digit).mp4")
        let url = NSURL(fileURLWithPath: path)
        movieOutput.startRecordingToOutputFileURL(url, recordingDelegate: self)
    }
}
func returnedOrientation() -> AVCaptureVideoOrientation {
    var videoOrientation: AVCaptureVideoOrientation!
    let orientation = UIDevice.currentDevice().orientation
    switch orientation {
    case .Portrait:
        videoOrientation = .Portrait
    case .PortraitUpsideDown:
        videoOrientation = .PortraitUpsideDown
    case .LandscapeLeft:
        // Device and capture orientations are mirrored in landscape: a device
        // turned to the left has its camera in the landscape-right position.
        videoOrientation = .LandscapeRight
    case .LandscapeRight:
        videoOrientation = .LandscapeLeft
    case .FaceDown, .FaceUp, .Unknown:
        // Device is flat: fall back to the last stored orientation.
        let digit = userDefault.integerForKey("CaptureVideoOrientation")
        videoOrientation = AVCaptureVideoOrientation(rawValue: digit) ?? .Portrait
    }
    // Store the enum's raw value (.Portrait is 1 ... .LandscapeLeft is 4) so
    // the fallback above always restores a valid orientation.
    userDefault.setInteger(videoOrientation.rawValue, forKey: "CaptureVideoOrientation")
    return videoOrientation
}
I've tried commenting out the line deviceInput = nil just for testing, but got other errors. This code is present in other examples online, so I'm not sure what's going on here.
private func captureSetup(position: AVCaptureDevicePosition) {
    var captureError: NSError?
    var captureDevice: AVCaptureDevice!
    for testedDevice in AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) {
        if (testedDevice.position == position) {
            captureDevice = testedDevice as! AVCaptureDevice
        }
    }
    if (captureDevice == nil) {
        captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    }
    var deviceInput: AVCaptureDeviceInput
    do {
        deviceInput = try AVCaptureDeviceInput(device: captureDevice)
    } catch let error as NSError {
        captureError = error
        deviceInput = nil // Nil cannot be assigned to type AVCaptureDeviceInput
        if captureError != nil { // new if
            print("error: \(captureError?.localizedDescription)")
        }
    }
    captureSession.sessionPreset = AVCaptureSessionPresetHigh
    if (captureError == nil) {
        if (captureSession.canAddInput(deviceInput)) {
            captureSession.addInput(deviceInput)
        }
        self.videoDataOutput = AVCaptureVideoDataOutput()
        self.videoDataOutput!.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        self.videoDataOutput!.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
        self.videoDataOutput!.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue!)
        if (captureSession.canAddOutput(self.videoDataOutput)) {
            captureSession.addOutput(self.videoDataOutput)
        }
    }
    visageCameraView.frame = UIScreen.mainScreen().bounds
    let previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession) as AVCaptureVideoPreviewLayer // changed to let
    previewLayer.frame = UIScreen.mainScreen().bounds
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    visageCameraView.layer.addSublayer(previewLayer)
}
It makes total sense to me. If you'd like to set it to nil, use optionals; that way you can set it to nil. Optionals are a good mechanism to avoid crashes and a ton of if statements for null checks. What I recommend (even for testing) is using optionals. This is how it would look for you:
var deviceInput: AVCaptureDeviceInput?
Now deviceInput does NOT have to hold a valid value, so you could write the following (although it defaults to nil):
deviceInput = nil
Now, say you want to use it elsewhere. You could use if let like this:
if let myNonNilValue = deviceInput {
    // myNonNilValue is for sure not nil
}
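And if you want to bail out early instead of nesting, a guard let sketch does the same thing:

guard let myNonNilValue = deviceInput else {
    print("deviceInput was nil")
    return
}
// myNonNilValue is for sure not nil from here on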