Couple of issues with custom camera - Swift

I currently have a custom camera implemented in my application. I am running into two small issues.
1) When I switch between the camera's views (front and back), the audio input dies and it records only video.
2) My method for deciding which camera (front or back) is which is deprecated, and I don't know exactly how to go about resolving it. The deprecated part is the call that populates the devices variable. Xcode is telling me: "Use AVCaptureDeviceDiscoverySession instead." The code is as follows:
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
// Get the front and back-facing camera for taking photos
for device in devices {
    if device.position == AVCaptureDevicePosition.back {
        backFacingCamera = device
    } else if device.position == AVCaptureDevicePosition.front {
        frontFacingCamera = device
    }
}
currentDevice = backFacingCamera
guard let captureDeviceInput = try? AVCaptureDeviceInput(device: currentDevice) else {
    return
}
As for the general camera recording, here is the code:
My variables:
let captureSession = AVCaptureSession()
var currentDevice: AVCaptureDevice?
var backFacingCamera: AVCaptureDevice?
var frontFacingCamera: AVCaptureDevice?
var videoFileOutput: AVCaptureMovieFileOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
@IBOutlet weak var recordingView: UIView!
Switching cameras:
var device = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .back)
func switchCameras() {
    captureSession.beginConfiguration()
    // Change the device based on the current camera
    let newDevice = (currentDevice?.position == AVCaptureDevicePosition.back) ? frontFacingCamera : backFacingCamera
    // Remove all inputs from the session
    for input in captureSession.inputs {
        captureSession.removeInput(input as! AVCaptureDeviceInput)
    }
    // Change to the new input
    let cameraInput: AVCaptureDeviceInput
    do {
        cameraInput = try AVCaptureDeviceInput(device: newDevice)
    } catch {
        print(error)
        return
    }
    if captureSession.canAddInput(cameraInput) {
        captureSession.addInput(cameraInput)
    }
    currentDevice = newDevice
    captureSession.commitConfiguration()
    if currentDevice?.position == .front {
        flashButton.isHidden = true
        flashButton.isEnabled = false
    } else if currentDevice?.position == .back {
        flashButton.isHidden = false
        flashButton.isEnabled = true
    }
}
And in my viewWillAppear:
mediaViewCapture.frame = CGRect(x: 0, y: 0, width: self.view.frame.size.width, height: self.view.frame.size.height)
self.view.addSubview(mediaViewCapture)
captureSession.sessionPreset = AVCaptureSessionPresetHigh
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
// Get the front and back-facing camera for taking photos
for device in devices {
    if device.position == AVCaptureDevicePosition.back {
        backFacingCamera = device
    } else if device.position == AVCaptureDevicePosition.front {
        frontFacingCamera = device
    }
}
currentDevice = backFacingCamera
guard let captureDeviceInput = try? AVCaptureDeviceInput(device: currentDevice) else {
    return
}
let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
do {
    let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)
    // Add audio input
    if captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
    } else {
        NSLog("Can't Add Audio Input")
    }
} catch let error {
    NSLog("Error Getting Input Device: \(error)")
}
videoFileOutput = AVCaptureMovieFileOutput()
captureSession.addInput(captureDeviceInput)
captureSession.addOutput(videoFileOutput)
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
view.layer.addSublayer(cameraPreviewLayer!)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
cameraPreviewLayer?.frame = mediaViewCapture.layer.frame
captureSession.startRunning()
And finally, my capture delegate method:
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    if error == nil {
        turnFlashOff()
        let videoVC = VideoPreviewVC()
        videoVC.url = outputFileURL
        self.navigationController?.pushViewController(videoVC, animated: false)
    } else {
        print("Error saving the video \(error)")
    }
}

You can use AVCaptureDeviceDiscoverySession in place of the deprecated AVCaptureDevice call. The following is the code for it:
let deviceDiscovery = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back)
let devices = deviceDiscovery?.devices
for device in devices! {
    if device.hasMediaType(AVMediaTypeVideo) {
        captureDevice = device
    }
}
AVCaptureDeviceType has the following cases: builtInMicrophone, builtInWideAngleCamera, builtInTelephotoCamera, builtInDualCamera, and builtInDuoCamera.
As for the audio issue when the camera is switched: your switchCameras() removes every input from the session, including the audio input, but only re-adds the new camera input, which is why recordings made after a switch have no audio.
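A minimal sketch of one possible fix, assuming the same properties as in the question (captureSession, currentDevice, frontFacingCamera, backFacingCamera) but written against the current AVFoundation names (AVMediaType.video rather than the older AVMediaTypeVideo string): remove only the video input during the switch, so the audio input stays attached.
func switchCameras() {
    captureSession.beginConfiguration()
    let newDevice = (currentDevice?.position == .back) ? frontFacingCamera : backFacingCamera
    // Remove only the video device inputs; leave the audio input in place
    for case let input as AVCaptureDeviceInput in captureSession.inputs where input.device.hasMediaType(.video) {
        captureSession.removeInput(input)
    }
    // Attach the other camera
    if let newDevice = newDevice,
       let cameraInput = try? AVCaptureDeviceInput(device: newDevice),
       captureSession.canAddInput(cameraInput) {
        captureSession.addInput(cameraInput)
        currentDevice = newDevice
    }
    captureSession.commitConfiguration()
}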

Related

AVCapturePhotoOutput capturePhotoWithSettings:delegate: 'No active and enabled video connection'

Hi, I am fairly new to coding and I ran into this error with AVCapturePhotoOutput. I am using Swift 5 and Xcode 12.5.1. I'm running it on my actual device, and I have been stuck on this for a while. The error only occurs when I try to capture a photo. Any help would be greatly appreciated, thank you :)
private let output = AVCapturePhotoOutput()
private var captureSession: AVCaptureSession?
private let previewLayer = AVCaptureVideoPreviewLayer()
private let cameraView = UIView()
private func setUpCamera() {
    let captureSession = AVCaptureSession()
    if let device = AVCaptureDevice.default(for: .video) {
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
            }
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }
            // Layer
            previewLayer.session = captureSession
            previewLayer.videoGravity = .resizeAspectFill
            cameraView.layer.addSublayer(previewLayer)
            captureSession.startRunning()
            captureSession.sessionPreset = AVCaptureSession.Preset.high
            self.captureSession = captureSession
        } catch {
            print(error)
        }
    }
}
extension CameraViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else {
            return
        }
        captureSession?.stopRunning()
        showEditPhoto(image: image)
    }

    private func showEditPhoto(image: UIImage) {
        guard let resizedImage = image.sd_resizedImage(
            with: CGSize(width: 640, height: 640),
            scaleMode: .aspectFill
        ) else {
            return
        }
        let vc = PostEditViewController(image: resizedImage)
        if #available(iOS 14.0, *) {
            vc.navigationItem.backButtonDisplayMode = .minimal
        }
        navigationController?.pushViewController(vc, animated: false)
    }
}
Try using cgImageRepresentation() instead of fileDataRepresentation(), and init a UIImage from the given CGImage.
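A minimal sketch of that suggestion, assuming the same delegate as above. Note that on SDKs before iOS 15, cgImageRepresentation() returns an Unmanaged<CGImage> that needs takeUnretainedValue(); the CGImage also carries no orientation, so the .right orientation below is an assumption for portrait capture.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    // iOS 15+ signature; earlier SDKs return Unmanaged<CGImage>? instead
    guard let cgImage = photo.cgImageRepresentation() else {
        return
    }
    // Supply an orientation explicitly, since the raw CGImage has none
    let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
    captureSession?.stopRunning()
    showEditPhoto(image: image)
}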

Unable to play video when app is rebuilt - Swift

The video does not play after the app is rebuilt (I save the file path URL into Core Data) using AVCaptureSession.
The file path is not changing before and after the rebuild:
file:///private/var/mobile/Containers/Data/Application/3DA93FBC-9A20-40B4-A017-B3D5C7768301/tmp/63F6CEED-3202-4F5F-999B-5F138D73635D.mp4
I tried every way I could find; nothing works.
Here is my code for recording the video:
func setupPreview() {
    // Configure previewLayer
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer?.frame = shapeLayer.bounds
    previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    shapeLayer.layer.addSublayer(previewLayer!)
}
func setupSession() -> Bool {
    captureSession.sessionPreset = AVCaptureSession.Preset.high
    // Setup camera
    let camera = AVCaptureDevice.default(for: AVMediaType.video)!
    do {
        let input = try AVCaptureDeviceInput(device: camera)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
            activeInput = input
        }
    } catch {
        print("Error setting device video input: \(error)")
        return false
    }
    // Setup microphone
    let microphone = AVCaptureDevice.default(for: AVMediaType.audio)!
    do {
        let micInput = try AVCaptureDeviceInput(device: microphone)
        if captureSession.canAddInput(micInput) {
            captureSession.addInput(micInput)
        }
    } catch {
        print("Error setting device audio input: \(error)")
        return false
    }
    // Movie output
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
    }
    return true
}
func startSession() {
    if !captureSession.isRunning {
        videoQueue().async {
            self.captureSession.startRunning()
        }
    }
}
func stopSession() {
    if captureSession.isRunning {
        videoQueue().async {
            self.captureSession.stopRunning()
        }
    }
}
func videoQueue() -> DispatchQueue {
    return DispatchQueue.main
}
func currentVideoOrientation() -> AVCaptureVideoOrientation {
    var orientation: AVCaptureVideoOrientation
    switch UIDevice.current.orientation {
    case .portrait:
        orientation = AVCaptureVideoOrientation.portrait
    case .landscapeRight:
        orientation = AVCaptureVideoOrientation.landscapeLeft
    case .portraitUpsideDown:
        orientation = AVCaptureVideoOrientation.portraitUpsideDown
    default:
        orientation = AVCaptureVideoOrientation.landscapeRight
    }
    return orientation
}
func startRecording() {
    if movieOutput.isRecording == false {
        save.setTitle("stop", for: UIControl.State.normal)
        let connection = movieOutput.connection(with: AVMediaType.video)
        if (connection?.isVideoOrientationSupported)! {
            connection?.videoOrientation = currentVideoOrientation()
        }
        if (connection?.isVideoStabilizationSupported)! {
            connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
        }
        let device = activeInput.device
        if device.isSmoothAutoFocusSupported {
            do {
                try device.lockForConfiguration()
                device.isSmoothAutoFocusEnabled = false
                device.unlockForConfiguration()
            } catch {
                print("Error setting configuration: \(error)")
            }
        }
        //EDIT2: And I forgot this
        outputURL = tempURL()
        movieOutput.startRecording(to: outputURL, recordingDelegate: self)
    } else {
        stopRecording()
    }
}
func tempURL() -> URL? {
    let directory = NSTemporaryDirectory() as NSString
    let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
    path22 = path
    let directoryURL: URL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let folderPath: URL = directoryURL.appendingPathComponent("Downloads", isDirectory: true)
    let fileURL: URL = folderPath.appendingPathComponent(path)
    return URL(fileURLWithPath: path)
}
func stopRecording() {
    if movieOutput.isRecording == true {
        movieOutput.stopRecording()
    }
}
Here is where I save it into Core Data:
let managedObject = self.managedObjectContext
entity = NSEntityDescription.entity(forEntityName: "MediaData", in: managedObject!)
let personMO = NSManagedObject(entity: entity, insertInto: managedObject)
personMO.setValue("\(self.videoURL!)", forKey: "videosS")
personMO.setValue(dataImage, forKey: "thumbnails")
print(personMO)
do {
    try managedObject?.save()
    print("video saved")
} catch {
    print("Catch Error: Failed to save")
}
let appdel = UIApplication.shared.delegate as! AppDelegate
appdel.avplayer = AVPlayer(url: videoURL!)
print(videoURL!)
let playerLayer = AVPlayerLayer(player: appdel.avplayer)
playerLayer.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer)
appdel.avplayer?.play()
You must never save a full filepath into CoreData or anywhere else. File paths are not persistent. Your app is sandboxed. The sandbox path can change at any time, especially between launches and installations.
Instead, save the file name and reconstruct the path each time you need it. Just as you are calling FileManager.default.urls(for: .documentDirectory...) to construct the file path initially, so you must call it every time you want to access this file.
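A minimal sketch of that approach, reusing the question's "MediaData" entity but storing a hypothetical fileName attribute instead of the full URL string (recordedTempURL stands in for the URL the recording delegate hands back):
// Saving: write the movie somewhere stable (Documents, not tmp) and persist only the name
let fileName = UUID().uuidString + ".mp4"
let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let destination = documents.appendingPathComponent(fileName)
try? FileManager.default.moveItem(at: recordedTempURL, to: destination)
personMO.setValue(fileName, forKey: "fileName") // hypothetical attribute replacing "videosS"

// Loading: rebuild the URL from the *current* sandbox path every time
func videoURL(forFileName fileName: String) -> URL {
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    return documents.appendingPathComponent(fileName)
}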

AVFoundation PDF417 scanner doesn't always work

I am creating an app using Swift 4 and Xcode 9 that scans PDF417 barcodes using AVFoundation. The scanner works with some codes, but doesn't recognize the PDF417 barcode that you would find on the front of a CA Lottery scratchers ticket, for example.
Is there anything I am missing to make it work? Below is my code:
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaType.video, position: .back)
guard let captureDevice = deviceDiscoverySession.devices.first else {
    print("Failed to get the camera device")
    return
}
do {
    captureSession = AVCaptureSession()
    let input = try AVCaptureDeviceInput(device: captureDevice)
    captureSession!.addInput(input)
    let captureMetadataOutput = AVCaptureMetadataOutput()
    captureSession!.addOutput(captureMetadataOutput)
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.pdf417]
    videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
    videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    videoPreviewLayer?.frame = view.layer.bounds
    view.layer.addSublayer(videoPreviewLayer!)
    captureSession?.startRunning()
} catch {
    print(error)
    return
}

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    // Get the metadata object
    let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
    if scanType.contains(metadataObj.type) {
        let barCodeObj = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
        if metadataObj.stringValue != nil {
            callDelegate(metadataObj.stringValue)
            captureSession?.stopRunning()
            AudioServicesPlayAlertSound(SystemSoundID(kSystemSoundID_Vibrate))
            navigationController?.popViewController(animated: true)
        }
    }
}
Thanks!
Replace your initialization code for the scanner with the following, either in viewDidLoad or in whichever method you'd like it to be:
// Global vars used in init below
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!

func setupCaptureInputDevice() {
    let cameraMediaType = AVMediaType.video
    captureSession = AVCaptureSession()
    // Get the video capture device, which should be of type video
    guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else {
        // If there is an error then something is wrong, so dismiss
        dismiss(animated: true, completion: nil)
        return
    }
    let videoInput: AVCaptureDeviceInput
    // Create a capture input for the device created above
    do {
        videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
    } catch {
        return
    }
    // It is important to check whether we can add the input,
    // because adding it without the check could cause a crash
    if captureSession.canAddInput(videoInput) {
        captureSession.addInput(videoInput)
    } else {
        // Dismiss or display error
        return
    }
    // Get ready to capture output somewhere
    let metadataOutput = AVCaptureMetadataOutput()
    // Again, check that we can do this operation before doing it
    if captureSession.canAddOutput(metadataOutput) {
        captureSession.addOutput(metadataOutput)
        // Set the metadataOutput's delegate to self, running on the main thread
        metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        // Specify your code type
        metadataOutput.metadataObjectTypes = [.pdf417]
    } else {
        // Dismiss or display error
        return
    }
    // The preview layer now displays the capture session
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    // Just add it to the screen
    previewLayer.frame = view.layer.bounds
    previewLayer.videoGravity = .resizeAspectFill
    view.layer.addSublayer(previewLayer)
    // And begin input
    captureSession.startRunning()
}
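For completeness, a minimal delegate sketch to pair with the setup above. ScannerViewController and found(code:) are hypothetical names; the delegate method signature itself is the standard AVCaptureMetadataOutputObjectsDelegate one, and the question's own delegate shown earlier works just as well.
extension ScannerViewController: AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Take the first recognized machine-readable code, if any
        guard let metadataObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              let stringValue = metadataObject.stringValue else {
            return
        }
        captureSession.stopRunning()
        found(code: stringValue) // hypothetical handler for the scanned payload
    }
}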

AVCapturePhoto Why is my photo always nil?

I'm trying to implement a custom camera similar to Snapchat's. I cannot see why my image is always nil during the segue. Maybe a fresh pair of eyes will help, as I have been working on this for two days now...
When I tap the take-photo button (which runs livePhotoTapped), the app crashes with "fatal error: unexpectedly found nil while unwrapping an Optional value", referring to the image being nil.
Any help would be nice :)
@IBOutlet weak var cameraView: UIView!
//session to capture data
var captureSession = AVCaptureSession()
//which camera to use
var backFacingCamera: AVCaptureDevice?
var frontFacingCamera: AVCaptureDevice?
var currentDevice: AVCaptureDevice?
var stillImageOutput: AVCaptureStillImageOutput?
var stillImage: UIImage?
//camera preview layer
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
func setupCaptureSessionCamera() {
    // This makes sure to get the full resolution of the camera
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
    let devices = AVCaptureDevice.devices(for: .video)
    // Query the available devices
    for device in devices {
        if device.position == .front {
            frontFacingCamera = device
        } else if device.position == .back {
            backFacingCamera = device
        }
    } // End iteration
    // Set a default device
    currentDevice = backFacingCamera
    // Configure the session with an output for capturing a still image
    stillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecType.jpeg]
    do {
        // Capture the data from whichever camera we are using; think of it as a buffer that translates data into a real-world image
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice!)
        captureSession.addInput(captureDeviceInput)
        captureSession.addOutput(stillImageOutput!)
        // Set up the camera preview layer
        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        // Add the preview to our specified view in the UI
        view.layer.addSublayer(cameraPreviewLayer!)
        cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        cameraPreviewLayer?.frame = cameraView.frame
        captureSession.startRunning()
    } catch let error {
        print(error)
    } // End do
}
@IBAction func livePhotoTapped(_ sender: UIButton) {
    let videoConnection = stillImageOutput?.connection(with: .video)
    // Capture a still image asynchronously
    stillImageOutput?.captureStillImageAsynchronously(from: videoConnection!, completionHandler: { (imageDataBuffer, error) in
        if let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: imageDataBuffer!, previewPhotoSampleBuffer: imageDataBuffer!) {
            self.stillImage = UIImage(data: imageData)
            self.performSegue(withIdentifier: "toPreviewPhoto", sender: self)
        }
    })
}
Use it like this (for CMSampleBufferIsValid, check this link):
@IBAction func livePhotoTapped(_ sender: UIButton) {
    let videoConnection = stillImageOutput?.connection(with: .video)
    // Capture a still image asynchronously
    stillImageOutput?.captureStillImageAsynchronously(from: videoConnection!, completionHandler: { (imageDataBuffer, error) in
        if error != nil {
            print("error \(error)")
        } else {
            if let imageBuffer = imageDataBuffer, CMSampleBufferIsValid(imageBuffer) {
                if let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: imageDataBuffer!, previewPhotoSampleBuffer: imageDataBuffer!) {
                    self.stillImage = UIImage(data: imageData)
                    self.performSegue(withIdentifier: "toPreviewPhoto", sender: self)
                }
            } else {
                print("imageDataBuffer is nil or not valid")
            }
        }
    })
}
You are force unwrapping a nil object.
var currentDevice: AVCaptureDevice?
It looks like this method has been deprecated:
+ (NSArray<AVCaptureDevice *> *)devices NS_DEPRECATED(10_7, NA, 4_0, 10_0, "Use AVCaptureDeviceDiscoverySession instead.");
Have you tried "AVCaptureDeviceDiscoverySession"?
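A minimal sketch of that replacement, using the Swift 4 spelling (AVCaptureDevice.DiscoverySession) and the property names from the question:
let discoverySession = AVCaptureDevice.DiscoverySession(
    deviceTypes: [.builtInWideAngleCamera],
    mediaType: .video,
    position: .unspecified)

// Sort the discovered cameras into the question's front/back properties
for device in discoverySession.devices {
    if device.position == .front {
        frontFacingCamera = device
    } else if device.position == .back {
        backFacingCamera = device
    }
}
currentDevice = backFacingCamera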

Switching Camera with a button in Swift

This seems to work to switch the camera from the back to the front, but I'm trying to come up with an 'if' statement so that I can switch it back too. Any ideas or advice?
@IBAction func didTouchSwitchButton(sender: UIButton) {
    let camera = getDevice(.Front)
    let cameraBack = getDevice(.Back)
    do {
        input = try AVCaptureDeviceInput(device: camera)
    } catch let error as NSError {
        print(error)
        input = nil
    }
    if captureSession?.canAddInput(input) == true {
        captureSession?.addInput(input)
        stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession?.canAddOutput(stillImageOutput) == true {
            captureSession?.addOutput(stillImageOutput)
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            previewLayer?.frame = cameraView.bounds
            cameraView.layer.addSublayer(previewLayer!)
            captureSession?.startRunning()
        }
    }
}
func switchCamera(_ sender: UIButton) {
    guard let session = captureSession, let currentCameraInput = session.inputs.first else {
        return
    }
    session.removeInput(currentCameraInput)
    var newCamera: AVCaptureDevice
    newCamera = AVCaptureDevice.default(for: AVMediaType.video)!
    if (currentCameraInput as! AVCaptureDeviceInput).device.position == .back {
        UIView.transition(with: self.cameraView, duration: 0.5, options: .transitionFlipFromLeft, animations: {
            newCamera = self.cameraWithPosition(.front)!
        }, completion: nil)
    } else {
        UIView.transition(with: self.cameraView, duration: 0.5, options: .transitionFlipFromRight, animations: {
            newCamera = self.cameraWithPosition(.back)!
        }, completion: nil)
    }
    do {
        try session.addInput(AVCaptureDeviceInput(device: newCamera))
    } catch {
        print("error: \(error.localizedDescription)")
    }
}
func cameraWithPosition(_ position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
    for device in deviceDiscoverySession.devices {
        if device.position == position {
            return device
        }
    }
    return nil
}
First create an enum to track which camera is in use:
enum CameraDirection {
    case front
    case back
}
Then create a variable for the enum:
var currentDirection: CameraDirection = .front // or your initial direction
Then in your didTouchSwitchButton function:
if currentDirection == .front {
    currentDirection = .back
} else {
    currentDirection = .front
}
reload()
You can see that I called a function named reload. Create that:
func reload() {
    let camera = getDevice(.Front)
    let cameraBack = getDevice(.Back)
    do {
        if currentDirection == .front {
            input = try AVCaptureDeviceInput(device: camera)
        } else {
            input = try AVCaptureDeviceInput(device: cameraBack)
        }
    } catch let error as NSError {
        print(error)
        input = nil
    }
    //rest of code
}
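The elided "rest of code" still has to swap the session's video input rather than just create a new one; a minimal sketch under that assumption, reusing captureSession, input, currentDirection, and the question's getDevice helper:
func reload() {
    let camera = getDevice(.Front)
    let cameraBack = getDevice(.Back)

    captureSession?.beginConfiguration()

    // Remove the existing camera input first; a session rejects a second video input
    if let currentInput = captureSession?.inputs.first as? AVCaptureDeviceInput {
        captureSession?.removeInput(currentInput)
    }

    do {
        input = try AVCaptureDeviceInput(device: currentDirection == .front ? camera : cameraBack)
    } catch let error as NSError {
        print(error)
        input = nil
    }

    if let input = input, captureSession?.canAddInput(input) == true {
        captureSession?.addInput(input)
    }

    captureSession?.commitConfiguration()
}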