Cannot capture photo with custom camera - Swift

I am trying to display an image taken with my custom camera (in a Snapchat-like menu where you swipe right to open the camera on the left). I updated the code to Swift 3, but now it gives me this error:
'AVCapturePhotoOutput' has no member 'captureStillImageAsynchronouslyFromConnection', and I can't find a way to fix it.
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    captureSession = AVCaptureSession()
    captureSession?.sessionPreset = AVCaptureSession.Preset.hd1920x1080
    let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera!)
        if (captureSession?.canAddInput(input) != nil) {
            captureSession?.addInput(input)
            stillImageOutput = AVCapturePhotoOutput()
            captureSession?.addOutput(stillImageOutput!)
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
            previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspect
            cameraView.layer.addSublayer(previewLayer!)
            captureSession?.startRunning()
        }
    } catch {
        print("Something is wrong with the camera")
    }
}
func didPressTakePhoto() {
    if let videoConnection = stillImageOutput?.connection(with: AVMediaType.video) {
        videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
        stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
            (sampleBuffer, error) in
            if sampleBuffer != nil {
                var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                var dataProvider = CGDataProviderCreateWithCFData(imageData)
                var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, kCGRenderingIntentDefault)
                var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
                self.tempImageView.image = image
                self.tempImageView.isHidden = true
            }
        })
    }
}

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    didPressTakePhoto()
}
I noticed that AVCaptureStillImageOutput is deprecated and AVCapturePhotoOutput is used instead.
What is the proper way to write captureStillImageAsynchronously in Swift 3 (with AVCapturePhotoOutput)?
I saw the other answers for this question and none of them works for me.
All I want is to capture a photo on tap (action) that previews the photo (where I can accept it and do other logic once I accept it).

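With AVCapturePhotoOutput you no longer pull a still image from a connection; you request a capture with AVCapturePhotoSettings and receive the result through an AVCapturePhotoCaptureDelegate callback: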
let photoSettings = AVCapturePhotoSettings()
photoSettings.isAutoStillImageStabilizationEnabled = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.flashMode = .auto
stillImageOutput?.capturePhoto(with: photoSettings, delegate: self)

extension YourViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ captureOutput: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
                     previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
                     resolvedSettings: AVCaptureResolvedPhotoSettings,
                     bracketSettings: AVCaptureBracketedStillImageSettings?,
                     error: Error?) {
        guard error == nil, let photoSampleBuffer = photoSampleBuffer else { return }
        guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer) else { return }
        let capturedImage = UIImage(data: imageData, scale: 1.0)
        // ... show capturedImage in your preview image view
    }
}
Typed in by hand, please forgive any typos.
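On iOS 11 and later that buffer-based callback is itself deprecated in favor of AVCapturePhoto. A minimal sketch of the newer callback, assuming the same stillImageOutput and delegate wiring as above:

extension YourViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // fileDataRepresentation() packages the captured photo as JPEG/HEIF data.
        guard error == nil, let imageData = photo.fileDataRepresentation() else { return }
        let capturedImage = UIImage(data: imageData)
        // show capturedImage for the accept/reject preview
    }
}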

Related

AVCapturePhotoOutput capturePhotoWithSettings:delegate: 'No active and enabled video connection'

Hi, I am fairly new to coding and I ran into this error with AVCapturePhotoOutput. I am using Swift 5 and Xcode 12.5.1, and I'm running the app on an actual device. I have been stuck on this for a while; the error only occurs when I try to capture a photo. Any help would be greatly appreciated, thank you :)
private let output = AVCapturePhotoOutput()
private var captureSession: AVCaptureSession?
private let previewLayer = AVCaptureVideoPreviewLayer()
private let cameraView = UIView()
private func setUpCamera() {
    let captureSession = AVCaptureSession()
    if let device = AVCaptureDevice.default(for: .video) {
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
            }
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }
            // Layer
            previewLayer.session = captureSession
            previewLayer.videoGravity = .resizeAspectFill
            cameraView.layer.addSublayer(previewLayer)
            captureSession.startRunning()
            captureSession.sessionPreset = AVCaptureSession.Preset.high
            self.captureSession = captureSession
        }
        catch {
            print(error)
        }
    }
}
extension CameraViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else {
            return
        }
        captureSession?.stopRunning()
        showEditPhoto(image: image)
    }

    private func showEditPhoto(image: UIImage) {
        guard let resizedImage = image.sd_resizedImage(
            with: CGSize(width: 640, height: 640),
            scaleMode: .aspectFill
        ) else {
            return
        }
        let vc = PostEditViewController(image: resizedImage)
        if #available(iOS 14.0, *) {
            vc.navigationItem.backButtonDisplayMode = .minimal
        }
        navigationController?.pushViewController(vc, animated: false)
    }
}
Try using cgImageRepresentation() instead of fileDataRepresentation(), and initialize a UIImage from the resulting CGImage.
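A minimal sketch of that suggestion (note the return type of cgImageRepresentation() differs by SDK: recent SDKs return CGImage? directly, while older ones return Unmanaged<CGImage>?, which needs unwrapping):

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    // On Xcode 13+ SDKs cgImageRepresentation() returns CGImage?.
    guard error == nil, let cgImage = photo.cgImageRepresentation() else { return }
    // UIImage(cgImage:) does not apply capture orientation; adjust if needed.
    let image = UIImage(cgImage: cgImage)
    captureSession?.stopRunning()
    showEditPhoto(image: image)
}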

Define video URL as the UIView in your class

My Swift code should be able to take a snapshot of a video and then display that image in a UIImageView. Instead of using an online link, I just want the URL to be the UIView in my class. So the video URL should be previewView, not the https link that I have below. All the code below is in this class:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {
    @IBOutlet var previewView: UIView!
    @IBOutlet var captureImageView: UIImageView!
    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var videoPreviewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Setup your camera here...
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = .medium
        guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
        else {
            print("Unable to access back camera!")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            // Step 9
            stillImageOutput = AVCapturePhotoOutput()
            if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(input)
                captureSession.addOutput(stillImageOutput)
                setupLivePreview()
            }
        }
        catch let error {
            print("Error Unable to initialize back camera: \(error.localizedDescription)")
        }
    }
    func setupLivePreview() {
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspect
        videoPreviewLayer.connection?.videoOrientation = .portrait
        previewView.layer.addSublayer(videoPreviewLayer)
        // Step 12
        DispatchQueue.global(qos: .userInitiated).async { // [weak self] in
            self.captureSession.startRunning()
            // Step 13
            DispatchQueue.main.async {
                self.videoPreviewLayer.frame = self.previewView.bounds
            }
        }
    }
    @IBAction func startRecord(_ sender: Any) {
    }

    @IBAction func Save(_ sender: Any) {
        // what do I put in the 2 highlighted blocks
        let videoURL = "https://www.youtube.com/watch?v=Txt25dw-lIk"
        self.getThumbnailFromUrl(videoURL) { [weak self] (img) in
            guard let _ = self else { return }
            if let img = img {
                self?.captureImageView.image = img
            }
        }
    }

    func getThumbnailFromUrl(_ url: String?, _ completion: @escaping ((_ image: UIImage?) -> Void)) {
        guard let url = URL(string: url ?? "") else { return }
        DispatchQueue.main.async {
            let asset = AVAsset(url: url)
            let assetImgGenerate = AVAssetImageGenerator(asset: asset)
            assetImgGenerate.appliesPreferredTrackTransform = true
            let time = CMTimeMake(value: 2, timescale: 1)
            do {
                let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
                let thumbnail = UIImage(cgImage: img)
                completion(thumbnail)
            } catch {
                print("Error :: ", error.localizedDescription)
                completion(nil)
            }
        }
    }
    @IBAction func didTakePhoto(_ sender: Any) {
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        stillImageOutput.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation()
        else { return }
        let image = UIImage(data: imageData)
        captureImageView.image = image
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.captureSession.stopRunning()
    }
}
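Since this class already wires an AVCapturePhotoOutput to photoOutput(_:didFinishProcessingPhoto:error:), one way to snapshot what previewView is showing, rather than a remote URL, is to trigger the existing photo capture instead of AVAssetImageGenerator. A sketch, assuming the Save button should grab the current camera frame:

@IBAction func Save(_ sender: Any) {
    // Capture the current camera frame; the photoOutput delegate method
    // above receives the image and assigns it to captureImageView.
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    stillImageOutput.capturePhoto(with: settings, delegate: self)
}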

Several errors in my Swift code for a camera app

I am trying to create a camera app on Xcode 10.1 using Swift for a school project. I have been working on this for a while, and still have several errors.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice?
    var takePhoto = false

    override func viewDidLoad() {
        super.viewDidLoad()
        prepareCamera()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        if let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.back) {
            //if availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices {
            //let captureDevice = availableDevices
            beginSession()
        }
    }

    func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
        }
        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            captureSession.startRunning()
            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)] as [String: Any]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
            }
            captureSession.commitConfiguration()
            let queue = DispatchQueue(label: "com.sophiaradis.captureQueue")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
    }

    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSamplyBuffer(buffer: sampleBuffer) {
                let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoViewController
                photoVC.takenPhoto = image
                DispatchQueue.main.async {
                    self.present(photoVC, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }
        }
    }

    func getImageFromSamplyBuffer(buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()
            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
            }
        }
        return nil
    }

    func stopCaptureSession() {
        self.captureSession.stopRunning()
        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }
}
There is an error in these lines of code:
if let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes:[AVCaptureDevice.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.back)
This error says that type AVCaptureDevice has no member DiscoverySession, but when I looked online, it did.
There is a second error in the lines that follow: I cannot convert a value of type 'AVCaptureDevice?' to the expected argument type 'AVCaptureDevice'.
func beginSession() {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
I have no idea how to fix this one at all. My next error occurs directly below that one, in these following lines
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
    self.previewLayer = previewLayer
    self.view.layer.addSublayer(self.previewLayer)
    self.previewLayer.frame = self.view.layer.frame
    captureSession.startRunning()
And this is flagged with: Initializer for conditional binding must have Optional type, not 'AVCaptureVideoPreviewLayer'.
If you can fix any of these, or even offer advice on how to, it would mean a lot to me and really make my year.
1- Use the full nested type names in the discovery session:
let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.back)
2- Conditionally bind captureDevice with if let, or force-unwrap it:
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice!)
3- AVCaptureVideoPreviewLayer's initializer doesn't return an Optional, so replace
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
with
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
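Putting the three fixes together, prepareCamera and the start of beginSession would look roughly like this (a sketch; DiscoverySession is also non-Optional, so the if let around it goes away too):

func prepareCamera() {
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
    // DiscoverySession itself is non-Optional; take the first matching device.
    let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera],
                                                            mediaType: AVMediaType.video,
                                                            position: AVCaptureDevice.Position.back)
    captureDevice = discoverySession.devices.first
    if captureDevice != nil {
        beginSession()
    }
}

func beginSession() {
    guard let captureDevice = captureDevice else { return }
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
        captureSession.addInput(captureDeviceInput)
    } catch {
        print(error.localizedDescription)
    }
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.previewLayer = previewLayer
    // ... rest of the preview and data-output setup as before
}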

func captureOutput is never called

I'd like to add a filter to each frame I record in real time and display the filtered image in a UIImageView; if anyone could help, it would be nice.
But captureOutput is never called. Here is my code:
class Measurement: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    @IBOutlet weak var cameraPreview: UIView!
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCameraSession()
        toggleTorch(on: true)
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        view.layer.addSublayer(previewLayer)
        cameraSession.startRunning()
    }

    lazy var cameraSession: AVCaptureSession = {
        let s = AVCaptureSession()
        s.sessionPreset = AVCaptureSession.Preset.low
        return s
    }()

    lazy var previewLayer: AVCaptureVideoPreviewLayer = {
        let preview = AVCaptureVideoPreviewLayer(session: self.cameraSession)
        preview.position = CGPoint(x: 182, y: 485)
        preview.videoGravity = AVLayerVideoGravity.resizeAspectFill
        preview.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
        preview.bounds = imageView.bounds
        //preview.position = CGPoint(x: self.view.bounds.midX, y: self.view.bounds.midY)
        imageView.layer.addSublayer(preview)
        return preview
    }()

    func toggleTorch(on: Bool) {
        guard let device = AVCaptureDevice.default(for: .video) else { return }
        if device.hasTorch {
            do {
                try device.lockForConfiguration()
                if on == true {
                    device.torchMode = .on
                } else {
                    device.torchMode = .off
                }
                device.unlockForConfiguration()
            } catch {
                print("Torch could not be used")
            }
        } else {
            print("Torch is not available")
        }
    }

    func setupCameraSession() {
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
        do {
            let deviceInput = try AVCaptureDeviceInput(device: captureDevice!)
            cameraSession.beginConfiguration()
            if (cameraSession.canAddInput(deviceInput) == true) {
                cameraSession.addInput(deviceInput)
                print("Processing Data.")
            }
            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)] as [String: AnyObject]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            print("Processing Data.")
            if (cameraSession.canAddOutput(dataOutput) == true) {
                cameraSession.addOutput(dataOutput)
                print("Processing Data.")
            }
            cameraSession.commitConfiguration()
            let queue = DispatchQueue(label: "com.invasivecode.videoQueue")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
        catch let error as NSError {
            print("\(error), \(error.localizedDescription)")
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        print("Processing Data.")
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        //let chromaKeyFilter = colorCubeFilterForChromaKey(hueAngle: 120)
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        let context = CIContext()
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return }
        let image = UIImage(cgImage: cgImage)
        if let chromaKeyFilter = CIFilter(name: "CISepiaTone") {
            let beginImage = CIImage(image: image)
            chromaKeyFilter.setValue(beginImage, forKey: kCIInputImageKey)
            chromaKeyFilter.setValue(0.5, forKey: kCIInputIntensityKey)
            if let output = chromaKeyFilter.outputImage {
                if let cgimg = context.createCGImage(output, from: output.extent) {
                    let processedImage = UIImage(cgImage: cgimg)
                    // do something interesting with the processed image
                    imageView.image = processedImage
                }
            }
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Here you can count how many frames are dropped
    }

    func startCapture() {
        print("\(self.classForCoder)/" + #function)
        if cameraSession.isRunning {
            print("already running")
            return
        }
        cameraSession.startRunning()
        toggleTorch(on: true)
    }
}
The delegate is already set via dataOutput.setSampleBufferDelegate(self, queue: queue), so that is not the problem. captureOutput is never called because the method uses the old Swift 3 spelling (didOutputSampleBuffer), which no longer matches the AVCaptureVideoDataOutputSampleBufferDelegate requirement, so AVFoundation never dispatches to it.
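A sketch of the Swift 4+ signature (replace the didOutputSampleBuffer version above with this spelling and move the filtering code into it):

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // This signature matches the current protocol requirement,
    // so AVFoundation actually calls it for every frame.
    print("Processing Data.")
}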

How do I save a captured video directly to Firebase vs locally?

I built an app that uses a custom camera to record video and take photos. I'm successfully saving the photos to Firebase Storage, but I can't figure out how to access the video to save it there as well.
Here's my code:
recordViewController
func setupCaptureSession() {
    captureSession.sessionPreset = AVCaptureSession.Preset.high
}

func setupDevice() {
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
    let devices = deviceDiscoverySession.devices
    for device in devices {
        if device.position == AVCaptureDevice.Position.back {
            backCamera = device
        } else if device.position == AVCaptureDevice.Position.front {
            frontCamera = device
        }
    }
    currentCamera = backCamera
}

func setupInputOutput() {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
        captureSession.addInput(captureDeviceInput)
        videoFileOutput = AVCaptureMovieFileOutput()
        captureSession.addOutput(videoFileOutput!)
    } catch {
        print(error)
    }
}

func setupPreviewLayer() {
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
    cameraPreviewLayer?.frame = self.view.frame
    self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}

func startRunningCaptureSession() {
    captureSession.startRunning()
}

override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    if segue.identifier == "playVideo" {
        let videoPlayerViewController = segue.destination as! AVPlayerViewController
        let videoFileURL = sender as! URL
        videoPlayerViewController.player = AVPlayer(url: videoFileURL)
    }
}

// MARK: - AVCaptureFileOutputRecordingDelegate methods
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if error != nil {
        print(error)
        return
    }
    performSegue(withIdentifier: "playVideo", sender: outputFileURL)
}

// Actions
@IBAction func recordButtonTapped(_ sender: Any) {
    if !isRecording {
        isRecording = true
        let outputPath = NSTemporaryDirectory() + "output.mov"
        let outputFileURL = URL(fileURLWithPath: outputPath)
        videoFileOutput?.startRecording(to: outputFileURL, recordingDelegate: self)
        recordButton.isSelected = true
    } else {
        isRecording = false
        recordButton.isSelected = false
        recordButton.layer.removeAllAnimations()
        videoFileOutput?.stopRecording()
    }
}
Once the video has been recorded, it is currently saved locally to a temporary location. I'm not versed in how to find the current location path and change it to the Firebase Storage path.
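There's no need to change the recording path: AVCaptureMovieFileOutput must write to a local file first. Instead, upload that local file from the delegate callback. A sketch using the Firebase Storage SDK's putFile(from:metadata:completion:), where "videos/output.mov" is an assumed storage path:

import FirebaseStorage

func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if let error = error {
        print(error)
        return
    }
    // Upload the temporary local file to Firebase Storage.
    let storageRef = Storage.storage().reference().child("videos/output.mov") // assumed path
    storageRef.putFile(from: outputFileURL, metadata: nil) { metadata, error in
        if let error = error {
            print("Upload failed: \(error.localizedDescription)")
            return
        }
        // Optionally fetch a download URL to store in your database.
        storageRef.downloadURL { url, error in
            print("Download URL: \(String(describing: url))")
        }
    }
    performSegue(withIdentifier: "playVideo", sender: outputFileURL)
}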