Swift 5: check if camera is in use - swift

How can I check if the iPhone back camera is in use with Swift 5+, and then turn on the flashlight while the camera is running? I got my function to turn on the flash, but I don't know how to access the camera status.
func toggleTorch() {
    guard
        let device = AVCaptureDevice.default(for: AVMediaType.video),
        device.hasTorch
    else { return }
    do {
        try device.lockForConfiguration()
        if device.torchMode == .on {
            device.torchMode = .off
        } else {
            device.torchMode = .on
        }
        device.unlockForConfiguration()
    } catch {
        print("Torch could not be used")
    }
}
And I have tried this:
var cameraPosition: AVCaptureDevice.Position!

func switchCamera() {
    switch cameraPosition {
    case .back:
        cameraPosition = .front
        print(cameraPosition)
    case .front:
        cameraPosition = .back
        toggleTorch()
    default:
        cameraPosition = .front
        print(cameraPosition)
    }
}
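For the original question, a minimal sketch: iOS has no public API that reports whether another app is using the camera, so the check has to be against your own capture session. Assuming the app owns the session (a hypothetical captureSession property below), its isRunning flag tells you whether the camera is live before toggling the torch.

func toggleTorchIfCameraRunning() {
    // `captureSession` is an assumed property holding this app's own
    // AVCaptureSession; iOS cannot see other apps' camera usage.
    guard captureSession.isRunning else { return } // camera not running, do nothing
    toggleTorch()
}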

Related

SwiftUI: stop speech recognition recording session upon navigating in TabView

A recording session for speech recognition starts upon opening and navigating to a tab in the TabView, but it also needs to keep recording until either 'back' or 'next' is recognised, after which it can stop the session. If those words aren't recognised and the user navigates within the TabView, the session isn't stopped correctly and throws an error, which then prevents a new recording session from being started.
Error thrown:
[Utility] +[AFAggregator logDictationFailedWithError:] Error Domain=kAFAssistantErrorDomain Code=203 "Corrupt" UserInfo={NSLocalizedDescription=Corrupt, NSUnderlyingError=0x281281aa0 {Error Domain=SiriSpeechErrorDomain Code=102 "(null)"}}
I have tried implementing stopRecording() in the TabView's selection set method, right before a session is started (startRecording()), but that does not seem to work. Is there a way to stop the recording session upon navigating through the TabView? I want to eventually be able to navigate through the tabs using voice.
Content view:
struct ContentView: View {
    @State private var selectedTab = 1
    static let voiceRecogniser = VoiceRecogniser()

    var body: some View {
        VStack {
            TabView(
                selection: Binding(
                    get: { selectedTab },
                    set: {
                        selectedTab = $0
                        ContentView.voiceRecogniser.startRecording()
                    })
            ) {
                Text("Tab 1")
                    .tag(1)
                Text("Tab 2")
                    .tag(2)
                Text("Tab 3")
                    .tag(3)
            }
            .tabViewStyle(PageTabViewStyle())
        }
        .onAppear {
            ContentView.voiceRecogniser.startRecording()
        }
    }
}
VoiceRecogniser class:
class VoiceRecogniser {
    private let speechRecogniser = SFSpeechRecognizer(locale: Locale(identifier: "en-GB"))!
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    private var recognitionTask: SFSpeechRecognitionTask?
    private let audioEngine = AVAudioEngine()

    func startRecording() {
        guard speechRecogniser.isAvailable else {
            return
        }
        guard SFSpeechRecognizer.authorizationStatus() == .authorized else {
            SFSpeechRecognizer.requestAuthorization({ (status) in
            })
            return
        }
        recognitionTask?.cancel()
        self.recognitionTask = nil

        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(AVAudioSession.Category.playAndRecord, mode: .measurement, options: .duckOthers) // Ensure session can play audio as well as record
            try audioSession.setMode(AVAudioSession.Mode.measurement)
            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            print("Error with audio session")
        }
        let inputNode = audioEngine.inputNode
        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        guard let recognitionRequest = recognitionRequest else { fatalError("Unable to create a SFSpeechAudioBufferRecognitionRequest object") }
        recognitionRequest.shouldReportPartialResults = true

        self.recognitionTask = speechRecogniser.recognitionTask(with: recognitionRequest) { result, error in
            var isFinal = false
            if let result = result {
                let spokenText = result.bestTranscription.formattedString
                let voiceCommands = ["Next", "Back"]
                let string = spokenText.lowercased()
                for command in voiceCommands {
                    if string.contains(command.lowercased()) {
                        switch command {
                        case "Next":
                            print("Go next")
                        case "Back":
                            print("Go back")
                        default:
                            print("Default")
                        }
                        isFinal = true // stop listening once a voice command was recognised
                    }
                }
                //isFinal = true // stop listening after saying anything
            }
            if error != nil || isFinal {
                self.stopRecording()
            }
        }

        let recordingFormat = inputNode.outputFormat(forBus: 0)
        inputNode.removeTap(onBus: 0)
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
            self.recognitionRequest?.append(buffer)
        }

        audioEngine.prepare()
        do {
            try audioEngine.start()
        } catch {
            print("Error with starting audio engine")
        }
    }
    func stopRecording() {
        self.audioEngine.stop()
        self.recognitionRequest?.endAudio()
        self.recognitionTask?.cancel() // cancel before discarding the reference, otherwise the cancel is a no-op
        self.recognitionRequest = nil
        self.recognitionTask = nil
    }
}
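One possible direction, as a sketch under the question's own names rather than a confirmed fix: end the previous session in the Binding's setter before starting a new one, so navigation never starts a second session on top of a live one.

set: {
    selectedTab = $0
    ContentView.voiceRecogniser.stopRecording()  // tear down the old session first
    ContentView.voiceRecogniser.startRecording() // then start a fresh one
}

A guard at the top of startRecording(), such as if audioEngine.isRunning { stopRecording() } (isRunning is a standard AVAudioEngine property), would cover the same race if the setter can fire while a session is active.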

Data not received with GameKit in Swift

I'm currently making a multiplayer game with GameKit. I want to show a waiting viewController while each player receives the array of players and what they selected for their character.
Here is my extension:
extension LoadingViewController: GKMatchDelegate {
    func sendData() {
        guard let match = match else { return }
        do {
            guard let data = gameModel.encode() else { return }
            try match.sendData(toAllPlayers: data, with: .reliable)
        } catch {
            print("Send data failed")
        }
    }

    func match(_ match: GKMatch, didReceive data: Data, fromRemotePlayer player: GKPlayer) {
        guard let model = GameModel.decode(data: data) else { return }
        gameModel = model
    }
}
My override, which waits until two players have filled the gameModel:
override func viewDidLoad() {
    super.viewDidLoad()
    Timer.scheduledTimer(withTimeInterval: 2, repeats: true) { timer in
        self.setupPlayers()
        if self.gameModel.players.count == 2 {
            if let view = self.view as! SKView? {
                // Load the SKScene from 'GameScene.sks'
                if let scene = SKScene(fileNamed: "GameScene") as? GameScene {
                    scene.match = self.match
                    scene.gameModel = self.gameModel
                    scene.localPlayer = self.localPlayer
                    scene.size = view.bounds.size
                    scene.scaleMode = .resizeFill
                    // Present the scene
                    view.presentScene(scene)
                    timer.invalidate()
                    view.ignoresSiblingOrder = true
                    view.showsFPS = true
                    view.showsNodeCount = true
                }
            }
        }
    }
}
And setupPlayers, which is called each time to try to add a player and set their preferences:
private func setupPlayers() {
    guard let player2Name = match?.players.first?.displayName else { return }
    let player1 = Player(displayName: GKLocalPlayer.local.displayName)
    let player2 = Player(displayName: player2Name)
    var players = [player1, player2]
    players.sort { (player1, player2) -> Bool in
        player1.displayName < player2.displayName
    }
    if players.first?.displayName == GKLocalPlayer.local.displayName {
        if gameModel.players.count == 0 {
            players[0].index = .one
            players[0].race = .orc
            gameModel.players.append(players[0])
            localPlayer = players[0]
            sendData()
        }
    } else {
        if gameModel.players.count == 1 {
            players[1].index = .two
            players[1].race = .human
            gameModel.players.append(players[1])
            localPlayer = players[1]
            sendData()
        }
    }
}
However, the scene does not appear when I run simulations. I tried to find the bug: when the first player goes into setupPlayers it works and gameModel.players.count becomes 1, but the second player never receives it and their own gameModel stays at one player.
Does anyone know why?
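One thing worth checking, offered as a sketch rather than a confirmed fix: GKMatch delivers incoming data only to its delegate, so match(_:didReceive:fromRemotePlayer:) never fires unless the delegate is assigned before the first sendData() call, for example as soon as the match is handed to the view controller:

// Assign the delegate as soon as the match is available (e.g. at the top of
// viewDidLoad()); without this, match(_:didReceive:fromRemotePlayer:) is
// never called on either device.
match?.delegate = self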

Swift CameraView Zoom In and Out Not working

In my scenario, I am trying to create a custom CameraView. Pinch to zoom in and out is not working. How do I fix this?
Below is my code:
@IBAction func pinchToZoom(_ sender: UIPinchGestureRecognizer) {
    guard let device = captureDevice else { return }

    func minMaxZoom(_ factor: CGFloat) -> CGFloat {
        return min(max(factor, 1.0), device.activeFormat.videoMaxZoomFactor)
    }

    func update(scale factor: CGFloat) {
        do {
            try device.lockForConfiguration()
            defer { device.unlockForConfiguration() }
            device.videoZoomFactor = factor
        } catch {
            debugPrint(error)
        }
    }

    // `zoomFactor` is a stored property that remembers the zoom level between gestures;
    // the gesture parameter is named `sender`, so `pinch.scale` would not compile
    let newScaleFactor = minMaxZoom(sender.scale * zoomFactor)
    switch sender.state {
    case .began, .changed:
        update(scale: newScaleFactor)
    case .ended:
        zoomFactor = minMaxZoom(newScaleFactor)
        update(scale: zoomFactor)
    default:
        break
    }
}
The answer below works fine for CameraView zoom in and zoom out:
@IBAction func pinchToZoom(_ sender: UIPinchGestureRecognizer) {
    let captureSession = AVCaptureSession()
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
    let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
    guard let device = captureDevice else { return }
    if sender.state == .changed {
        let maxZoomFactor = device.activeFormat.videoMaxZoomFactor
        let pinchVelocityDividerFactor: CGFloat = 5.0
        do {
            try device.lockForConfiguration()
            defer { device.unlockForConfiguration() }
            let desiredZoomFactor = device.videoZoomFactor + atan2(sender.velocity, pinchVelocityDividerFactor)
            device.videoZoomFactor = max(1.0, min(desiredZoomFactor, maxZoomFactor))
        } catch {
            print(error)
        }
    }
}
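As a design note, a sketch assuming the view controller already holds a configured captureDevice property as in the first snippet: the AVCaptureSession created inside the handler above is never started or used, so the handler can drop it and reuse the existing device.

@IBAction func pinchToZoom(_ sender: UIPinchGestureRecognizer) {
    // Reuse the already-configured device instead of building a throwaway session
    guard let device = captureDevice, sender.state == .changed else { return }
    do {
        try device.lockForConfiguration()
        defer { device.unlockForConfiguration() }
        let desired = device.videoZoomFactor + atan2(sender.velocity, 5.0)
        device.videoZoomFactor = max(1.0, min(desired, device.activeFormat.videoMaxZoomFactor))
    } catch {
        print(error)
    }
}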

Bluetooth headphones (AirPods) and AVAudioSession overriding

I want to be able to use AirPods in my app, and I have a solution that works (almost).
func playSound(soundSpeech: String) {
    let audioSession = AVAudioSession.sharedInstance()
    selectDevice(audioSession: audioSession)
    ...
}
func selectDevice(audioSession: AVAudioSession) {
    print("select device")
    var headphonesExist = false
    var bluetoothExist = false
    var speakerExist = false
    let currentRoute = AVAudioSession.sharedInstance().currentRoute
    for output in audioSession.currentRoute.outputs {
        print(output)
        if output.portType == AVAudioSessionPortHeadphones || output.portType == AVAudioSessionPortHeadsetMic {
            headphonesExist = true
        }
        if output.portType == AVAudioSessionPortBluetoothA2DP || output.portType == AVAudioSessionPortBluetoothHFP {
            bluetoothExist = true
            print("bluetooth is enabled")
        }
        if output.portType == AVAudioSessionPortBuiltInSpeaker {
            speakerExist = true
        }
    }
    print("headphones: \(headphonesExist) bluetooth: \(bluetoothExist) speakerExist: \(speakerExist)")
    if bluetoothExist == true {
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: AVAudioSessionCategoryOptions.allowBluetoothA2DP)
        } catch {
            print("error with audiosession: bluetooth")
        }
    } else {
        do {
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: AVAudioSessionCategoryOptions.defaultToSpeaker)
        } catch {
            print("error with audiosession: default speaker")
        }
    }
}
The problem is that it only works if (or after) I switch to another app with sound (e.g. the YouTube app) and back. After that it works like a charm, but I believe it should work from the start.
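A likely explanation, as a sketch using the same Swift 3-era API the question uses: the category is set with Bluetooth options but the session is never activated, so the route only updates once another app activates audio. Setting the category once up front and calling setActive(true) should make the route available immediately; the combined options array is an assumption merging the two branches from selectDevice above.

let session = AVAudioSession.sharedInstance()
do {
    // Configure once, before any playback, then activate the session
    try session.setCategory(AVAudioSessionCategoryPlayAndRecord,
                            with: [.allowBluetoothA2DP, .defaultToSpeaker])
    try session.setActive(true)
} catch {
    print("error configuring audio session: \(error)")
}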

How to change orientation for AVCaptureMovieFileOutput in Swift

I tried several different methods but they didn't help. I want to change the video orientation in AVFoundation. How can I do it?
override func viewDidLoad() {
    super.viewDidLoad()
    self.definesPresentationContext = true
    // device capture for audio and video
    let captureVideo = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let captureAudio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    // input
    let audioInput = try! AVCaptureDeviceInput(device: captureAudio)
    let videoInput = try! AVCaptureDeviceInput(device: captureVideo)
    let capturePreview = AVCaptureVideoPreviewLayer(session: captureSession)
    capturePreview.frame = self.view.frame
    capturePreview.videoGravity = AVLayerVideoGravityResizeAspect
    self.view.layer.addSublayer(capturePreview)
    // setting of session
    captureSession.beginConfiguration()
    if captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
    }
    if captureSession.canAddInput(videoInput) {
        captureSession.addInput(videoInput)
    }
    // output
    movieOutput.movieFragmentInterval = kCMTimeInvalid
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
        print("added movie output")
    }
    captureSession.sessionPreset = AVCaptureSessionPresetHigh
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
@IBAction func startStopSession(sender: UIBarButtonItem) {
    if movieOutput.recording {
        movieOutput.stopRecording()
    } else {
        print("start recording")
        captureSession.beginConfiguration()
        for connection in movieOutput.connections as! [AVCaptureConnection] {
            for port in connection.inputPorts as! [AVCaptureInputPort] {
                print(port)
                if port.mediaType == AVMediaTypeVideo {
                    print(port)
                    self.captureConnection = AVCaptureConnection(inputPorts: [port], output: movieOutput)
                    if self.captureConnection.supportsVideoOrientation {
                        self.captureConnection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                        print("video orientation right")
                    }
                }
            }
        }
        if self.captureConnection.supportsVideoStabilization {
            captureConnection.preferredVideoStabilizationMode = .Cinematic
            print("true video stabilization")
        }
        let digit = returnFileDigit()
        let path = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).last!.path!.stringByAppendingString("/movie-\(digit).mp4")
        captureSession.commitConfiguration()
        let url = NSURL(fileURLWithPath: path)
        print(movieOutput.connections)
        movieOutput.startRecordingToOutputFileURL(url, recordingDelegate: self)
    }
}
I tried this, where I can find the outputs, but I don't know how to change the orientation:
for output in captureSession.outputs as! [AVCaptureOutput] {
    output.connections.first?.mediaType
    for var connection in output.connections {
        if connection.mediaType == AVMediaTypeVideo {
            print(connection.mediaType)
            connection.videoOrientation = .LandscapeRight
        }
    }
}
I changed my code and it works for me:
@IBAction func startStopSession(sender: UIBarButtonItem) {
    if movieOutput.recording {
        movieOutput.stopRecording()
    } else {
        print("start recording")
        movieOutput.connectionWithMediaType(AVMediaTypeVideo).videoOrientation = returnedOrientation()
        if movieOutput.connectionWithMediaType(AVMediaTypeVideo).supportsVideoStabilization {
            movieOutput.connectionWithMediaType(AVMediaTypeVideo).preferredVideoStabilizationMode = .Cinematic
        }
        let digit = returnFileDigit()
        let path = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).last!.path!.stringByAppendingString("/movie-\(digit).mp4")
        let url = NSURL(fileURLWithPath: path)
        movieOutput.startRecordingToOutputFileURL(url, recordingDelegate: self)
    }
}
func returnedOrientation() -> AVCaptureVideoOrientation {
    var videoOrientation: AVCaptureVideoOrientation!
    let orientation = UIDevice.currentDevice().orientation
    switch orientation {
    // Store the AVCaptureVideoOrientation raw value itself, so the
    // FaceUp/FaceDown branch can restore it with init(rawValue:);
    // hard-coded 0-3 would not match the enum's raw values.
    case .Portrait:
        videoOrientation = .Portrait
        userDefault.setInteger(videoOrientation.rawValue, forKey: "CaptureVideoOrientation")
    case .PortraitUpsideDown:
        videoOrientation = .PortraitUpsideDown
        userDefault.setInteger(videoOrientation.rawValue, forKey: "CaptureVideoOrientation")
    case .LandscapeLeft:
        videoOrientation = .LandscapeRight
        userDefault.setInteger(videoOrientation.rawValue, forKey: "CaptureVideoOrientation")
    case .LandscapeRight:
        videoOrientation = .LandscapeLeft
        userDefault.setInteger(videoOrientation.rawValue, forKey: "CaptureVideoOrientation")
    case .FaceDown, .FaceUp, .Unknown:
        let digit = userDefault.integerForKey("CaptureVideoOrientation")
        videoOrientation = AVCaptureVideoOrientation(rawValue: digit) ?? .Portrait // fall back if nothing stored yet
    }
    return videoOrientation
}
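For reference, the same connection-level approach with current (Swift 4+) AVFoundation names, as a sketch assuming movieOutput is an AVCaptureMovieFileOutput already attached to the session:

if let connection = movieOutput.connection(with: .video),
   connection.isVideoOrientationSupported {
    connection.videoOrientation = .landscapeRight // set before startRecording(to:recordingDelegate:)
}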