How do I save a captured video directly to Firebase vs locally? - swift

I built an app that uses a custom camera to record video and take photos. I'm successfully saving the photos to Firebase Storage, but I can't figure out how to access the video to save it there as well.
Here's my code:
recordViewController
func setupCaptureSession() {
    // Use the highest-quality preset appropriate for movie capture.
    captureSession.sessionPreset = .high
}
func setupDevice() {
    // Enumerate the built-in wide-angle cameras and remember one per position.
    let discovery = AVCaptureDevice.DiscoverySession(
        deviceTypes: [.builtInWideAngleCamera],
        mediaType: .video,
        position: .unspecified)
    for camera in discovery.devices {
        switch camera.position {
        case .back:
            backCamera = camera
        case .front:
            frontCamera = camera
        default:
            // .unspecified devices are ignored, as in the original if/else chain.
            break
        }
    }
    // Start with the rear camera by default.
    currentCamera = backCamera
}
func setupInputOutput() {
    // Wire the selected camera into the session and attach a movie-file
    // output so video can be recorded to disk.
    // FIX: avoid force-unwrapping currentCamera, and guard addInput/addOutput
    // with canAdd checks — adding an unsupported input/output raises an
    // Objective-C exception rather than throwing a Swift error.
    guard let camera = currentCamera else {
        print("setupInputOutput: no capture device selected")
        return
    }
    do {
        let input = try AVCaptureDeviceInput(device: camera)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }
        let output = AVCaptureMovieFileOutput()
        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
            videoFileOutput = output
        }
    } catch {
        print("setupInputOutput failed: \(error)")
    }
}
func setupPreviewLayer() {
    // Build a portrait, aspect-filling preview layer covering the root view
    // and slot it underneath every other sublayer.
    let preview = AVCaptureVideoPreviewLayer(session: captureSession)
    preview.videoGravity = .resizeAspectFill
    preview.connection?.videoOrientation = .portrait
    preview.frame = self.view.frame
    self.view.layer.insertSublayer(preview, at: 0)
    cameraPreviewLayer = preview
}
func startRunningCaptureSession() {
    // FIX: startRunning() is a blocking call; Apple's documentation warns
    // against invoking it on the main thread. Dispatch to a background
    // queue (the same pattern this file already uses elsewhere).
    DispatchQueue.global(qos: .userInitiated).async { [weak self] in
        self?.captureSession.startRunning()
    }
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    // Hand the freshly recorded file URL to the playback screen.
    // FIX: conditional binding instead of `as!` force casts, which would
    // crash on any segue misconfiguration.
    guard segue.identifier == "playVideo",
          let playerVC = segue.destination as? AVPlayerViewController,
          let fileURL = sender as? URL else {
        return
    }
    playerVC.player = AVPlayer(url: fileURL)
}
// MARK: - AVCaptureFileOutputRecordingDelegate methods
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    // Called once the movie file has been finalized on disk.
    // FIX: bind the optional so the log shows the error itself rather
    // than "Optional(...)".
    if let error = error {
        print("Recording failed: \(error.localizedDescription)")
        return
    }
    // Pass the finished file's URL to prepare(for:sender:) for playback.
    performSegue(withIdentifier: "playVideo", sender: outputFileURL)
}
// Actions
// FIX: the attribute is `@IBAction`; `#IBAction` is not valid Swift.
@IBAction func recordButtonTapped(_ sender: Any) {
    if !isRecording {
        isRecording = true
        // FIX: record to a unique temp file. startRecording(to:) fails when
        // a file already exists at the destination, so the fixed
        // "output.mov" path broke every recording after the first.
        let fileName = UUID().uuidString + ".mov"
        let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(fileName)
        videoFileOutput?.startRecording(to: outputFileURL, recordingDelegate: self)
        recordButton.isSelected = true
    } else {
        isRecording = false
        recordButton.isSelected = false
        recordButton.layer.removeAllAnimations()
        // The delegate's didFinishRecordingTo callback fires after this.
        videoFileOutput?.stopRecording()
    }
}
Once the video has been recorded, it is currently saved locally to a temporary location. I'm not sure how to find the current file path, or how to upload the file from there to Firebase Storage instead.

Related

I want to add an EAN-13 barcode scanner to my application, but it won't work in Swift Playgrounds for iPad

I am trying to add a scanner to my application, but every time I run the application it says that there is a problem in my code and that I have to try again.
The code I have is from https://www.hackingwithswift.com/example-code/media/how-to-scan-a-barcode. I tried pasting it into a blank document and an Xcode playground document, but neither of them worked.
Import AVFoundation
import UIKit
/// Full-screen barcode scanner: camera input feeds an AVCaptureMetadataOutput
/// whose delegate receives decoded EAN-8/EAN-13/PDF417 payloads.
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = UIColor.black
        captureSession = AVCaptureSession()
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput
        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }
        if captureSession.canAddInput(videoInput) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }
        let metadataOutput = AVCaptureMetadataOutput()
        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)
            // Deliver decoded metadata on the main queue so UI work in the
            // delegate callback is safe.
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417]
        } else {
            failed()
            return
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)
        // FIX: startRunning() blocks the calling thread while the session
        // starts up, so run it off the main queue per Apple's documentation.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.captureSession.startRunning()
        }
    }

    /// Tear down the session and tell the user scanning is unavailable
    /// (no camera, or inputs/outputs could not be attached).
    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if captureSession?.isRunning == false {
            // FIX: same as viewDidLoad — restart off the main thread.
            DispatchQueue.global(qos: .userInitiated).async { [weak self] in
                self?.captureSession.startRunning()
            }
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if captureSession?.isRunning == true {
            captureSession.stopRunning()
        }
    }

    // MARK: - AVCaptureMetadataOutputObjectsDelegate
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Stop scanning immediately so a single code is reported once.
        captureSession.stopRunning()
        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            // Haptic-style feedback for a successful scan.
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
        }
        dismiss(animated: true)
    }

    /// Handle a successfully decoded barcode payload.
    func found(code: String) {
        print(code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}

define video url as the uiview in your class

My Swift code should be able to take a snapshot of a video and then take that image and display it in a UIImageView. Instead of using an online link, I just want the URL to be the UIView in my class. So the video URL should be previewView, not the https link that I have below. All the code below is in this class.
import UIKit;import AVFoundation
/// Live camera preview with still-photo capture and a video-thumbnail helper.
/// FIX: the original used "#IBOutlet"/"#IBAction"/"#escaping", which is not
/// valid Swift — attribute syntax uses "@".
class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {
    @IBOutlet var previewView : UIView!
    @IBOutlet var captureImageView : UIImageView!
    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var videoPreviewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Set up the camera: medium-quality session, back-camera input, and
        // a photo output for stills.
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = .medium
        guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video) else {
            print("Unable to access back camera!")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            // FIX: the output was constructed twice in the original; once is enough.
            stillImageOutput = AVCapturePhotoOutput()
            if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(input)
                captureSession.addOutput(stillImageOutput)
                setupLivePreview()
            }
        } catch let error {
            print("Error Unable to initialize back camera: \(error.localizedDescription)")
        }
    }

    /// Attach the preview layer and start the session off the main thread
    /// (startRunning() blocks); the layout-dependent frame assignment hops
    /// back to the main queue.
    func setupLivePreview() {
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspect
        videoPreviewLayer.connection?.videoOrientation = .portrait
        previewView.layer.addSublayer(videoPreviewLayer)
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.captureSession.startRunning()
            DispatchQueue.main.async {
                self?.videoPreviewLayer.frame = self?.previewView.bounds ?? .zero
            }
        }
    }

    @IBAction func startRecord(_ sender: Any) {
    }

    @IBAction func Save(_ sender: Any) {
        // Grab a frame from the referenced video and show it in the image view.
        let videoURL = "https://www.youtube.com/watch?v=Txt25dw-lIk"
        self.getThumbnailFromUrl(videoURL) { [weak self] (img) in
            guard let self = self else { return }
            if let img = img {
                self.captureImageView.image = img
            }
        }
    }

    /// Generate a thumbnail (the frame at t = 2 s) for the video at `url`.
    /// FIX: copyCGImage(at:) is synchronous and slow, so it now runs on a
    /// background queue; the completion is always delivered on main.
    func getThumbnailFromUrl(_ url: String?, _ completion: @escaping ((_ image: UIImage?) -> Void)) {
        guard let url = URL(string: url ?? "") else { return }
        DispatchQueue.global(qos: .userInitiated).async {
            let asset = AVAsset(url: url)
            let assetImgGenerate = AVAssetImageGenerator(asset: asset)
            assetImgGenerate.appliesPreferredTrackTransform = true
            let time = CMTimeMake(value: 2, timescale: 1)
            do {
                let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
                let thumbnail = UIImage(cgImage: img)
                DispatchQueue.main.async { completion(thumbnail) }
            } catch {
                print("Error :: ", error.localizedDescription)
                DispatchQueue.main.async { completion(nil) }
            }
        }
    }

    @IBAction func didTakePhoto(_ sender: Any) {
        // Request a JPEG still; the result arrives in photoOutput(_:didFinish...).
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        stillImageOutput.capturePhoto(with: settings, delegate: self)
    }

    // MARK: - AVCapturePhotoCaptureDelegate
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation() else { return }
        captureImageView.image = UIImage(data: imageData)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.captureSession.stopRunning()
    }
}

Cannot capture photo with custom camera

I am trying to display an image taken from my custom camera (from a snapchat-like menu where you swipe right to open the camera on the left). I updated the code to 3.0 but now it gives me this error:
'AVCapturePhotoOutput' has no member 'captureStillImageAsynchronouslyFromConnection', and I can't find a way to fix it.
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Build the capture pipeline: back camera -> session -> photo output,
    // with a preview layer on cameraView.
    captureSession = AVCaptureSession()
    captureSession?.sessionPreset = AVCaptureSession.Preset.hd1920x1080
    // FIX: guard the device lookup instead of force-unwrapping backCamera!.
    guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video) else {
        print("Something is wrong with the camera")
        return
    }
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        // FIX: canAddInput returns Bool; the original's "!= nil" comparison
        // was always true and never actually checked anything.
        if captureSession?.canAddInput(input) == true {
            captureSession?.addInput(input)
            stillImageOutput = AVCapturePhotoOutput()
            captureSession?.addOutput(stillImageOutput!)
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
            previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspect
            // FIX: a layer without a frame renders at zero size.
            previewLayer?.frame = cameraView.bounds
            cameraView.layer.addSublayer(previewLayer!)
            captureSession?.startRunning()
        }
    } catch {
        print("Something is wrong with the camera")
    }
}
func didPressTakePhoto() {
    // FIX: captureStillImageAsynchronouslyFromConnection does not exist on
    // AVCapturePhotoOutput (it was AVCaptureStillImageOutput API, removed in
    // iOS 10's photo pipeline), and the original body also had typos
    // (jsegStillImageNSDataRepresentation, dataProfider/dataProvider).
    // Modern flow: configure AVCapturePhotoSettings and let the
    // AVCapturePhotoCaptureDelegate receive the result.
    guard let output = stillImageOutput else { return }
    if let videoConnection = output.connection(with: .video) {
        videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
    }
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    // NOTE(review): this controller must conform to
    // AVCapturePhotoCaptureDelegate and implement
    // photoOutput(_:didFinishProcessingPhoto:error:) — that callback should
    // decode photo.fileDataRepresentation() and assign tempImageView.image,
    // replacing the inline completion handler the old API used.
    output.capturePhoto(with: settings, delegate: self)
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
// Any tap on the view triggers a photo capture.
didPressTakePhoto()
}
I noticed that stillImageOutput is deprecated and now AVCapturePhotoOutput is used instead.
What is the proper way to write captureStillImageAsynchronously in 3.0 (with AVCapturePhotoOutput)?
I saw the other answers for this question and none of them works for me.
All I want is to capture a photo on tap(action) which previews the photo (where I can accept it and do other logic once I accept it).
// Answer snippet: build a capture request (stabilization on, full-resolution
// output, automatic flash) and hand it to the photo output; the result is
// delivered to the AVCapturePhotoCaptureDelegate below.
let photoSettings = AVCapturePhotoSettings()
photoSettings.isAutoStillImageStabilizationEnabled = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.flashMode = .auto
stillImageOutput?.capturePhoto(with: photoSettings, delegate: self)
// NOTE(review): illustrative pseudo-code, not compilable as-is — the "..."
// placeholders are literal, and the CMSampleBuffer-based callback plus
// jpegPhotoDataRepresentation come from the deprecated iOS 10/11 API.
// Modern code implements photoOutput(_:didFinishProcessingPhoto:error:) and
// calls photo.fileDataRepresentation() instead.
extension YourViewController: AVCapturePhotoCaptureDelegate {
func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, ..., error: Error?) {
guard error == nil, let photoSampleBuffer = photoSampleBuffer else { return }
guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer) else { return }
let capturedImage = UIImage(data: imageData, scale: 1.0)
...
}
}
Typed in by hand, please forgive any typos.

How to save an m4a file in UserDefaults and present it in a table view — iOS Swift 3

I have developed a feature that records and plays back the user's voice; I need to save it in UserDefaults and then present all the recording data in a list view on the next screen.
my question is how to save m4a file? and how to present the data later on?
// Recorder/player pair plus the fixed on-disk file name for the take.
var soundRecorder : AVAudioRecorder!
var soundPlayer : AVAudioPlayer!
// NOTE(review): audioFileName is never referenced below — directoryURL()
// hardcodes "sound.m4a"; keep the two in sync if either changes.
var audioFileName = "sound.m4a"
let arrayOfRecords = [""]
//recording issues
// AAC, 44.1 kHz, mono, high quality — far smaller files than Apple Lossless.
let recordSettings = [AVSampleRateKey : NSNumber(value: Float(44100.0) as Float),
AVFormatIDKey : NSNumber(value: Int32(kAudioFormatMPEG4AAC) as Int32),//change to 850k vs 25Mb at (kAudioFormatAppleLossless)
AVNumberOfChannelsKey : NSNumber(value: 1 as Int32),
AVEncoderAudioQualityKey : NSNumber(value: Int32(AVAudioQuality.high.rawValue) as Int32)]
override func viewDidLoad() {
    super.viewDidLoad()
    setupRecorder()
    buttonPlay.isHidden = true
    readableText()
    // Route playback to the speaker instead of the earpiece.
    // FIX: replace try! with do/catch — an audio-session configuration
    // failure should not crash the app.
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: AVAudioSessionCategoryOptions.defaultToSpeaker)
    } catch {
        print("Failed to configure audio session: \(error)")
    }
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
// Keep the text view scrolled to the top after every layout pass.
textViewTest.setContentOffset(CGPoint.zero, animated: false)
}
/////recording
// Toggles recording. FIX: the attribute is `@IBAction` (`#IBAction` is not
// valid Swift), and the "Record" branch never actually started the recorder
// even though the "Stop" branch calls soundRecorder.stop().
@IBAction func ActionRecord(_ sender: AnyObject) {
    guard let button = sender as? UIButton else { return }
    if button.currentTitle == "Record" {
        buttonPlay.isHidden = false
        if isTimerRunning == false {
        }
        // BUG FIX: start writing audio to directoryURL().
        soundRecorder.record()
        button.setTitle("Stop", for: UIControlState())
        //stop recording + save
        // NOTE(review): UserDefaults only persists this string array — the
        // m4a data itself lives on disk in the Documents directory; store
        // the file path, not the audio, in defaults.
        UserDefaults.standard.set(arrayOfRecords, forKey: "userRecord")
        // defaults.object(forKey: "userRecord")//print
    } else {
        print("record")
        stopTimer()
        soundRecorder.stop()
        button.setTitle("Record", for: UIControlState())
        buttonPlay.isEnabled = true
    }
}
//play record
// Toggles playback of the saved recording.
// FIX: `@IBAction` attribute syntax, and a plain UIButton cast instead of
// the fragile `sender.titleLabel?!.text` dynamic-lookup chain.
@IBAction func ActionPlay(_ sender: AnyObject) {
    guard let button = sender as? UIButton else { return }
    if button.currentTitle == "Play" {
        butttonRecord.isEnabled = true
        button.setTitle("Stop", for: UIControlState())
        preparePlayer()
        soundPlayer.play()
    } else {
        soundPlayer.stop()
        button.setTitle("Play", for: UIControlState())
    }
}
// override func viewDidAppear(_ animated: Bool) {
// if let x = UserDefaults.standard.object(forKey: "userRecord") as? String{
// print("your record has been saved")
// }
// }
//HELPERS
func preparePlayer() {
    // Load the recorded m4a from Documents and prime it for playback.
    do {
        let player = try AVAudioPlayer(contentsOf: directoryURL()!)
        player.delegate = self
        player.prepareToPlay()
        player.volume = 1.0
        soundPlayer = player
    } catch {
        print("Error playing")
    }
}
func setupRecorder(){
// Ask for microphone permission, then configure an AVAudioRecorder that
// writes AAC audio to Documents/sound.m4a. The recorder is only created
// inside the (asynchronous) permission callback, so soundRecorder stays nil
// until the user grants access.
let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
//ask for permission
if (audioSession.responds(to: #selector(AVAudioSession.requestRecordPermission(_:)))) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("granted")
//set category and activate recorder session
do {
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try self.soundRecorder = AVAudioRecorder(url: self.directoryURL()!, settings: self.recordSettings)
// prepareToRecord creates the output file and primes the hardware.
self.soundRecorder.prepareToRecord()
} catch {
print("Error Recording");
}
}
})
}
}
func directoryURL() -> URL? {
    // Destination for the recording: <Documents>/sound.m4a.
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    return documents.appendingPathComponent("sound.m4a")
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
// The take is on disk — enable playback.
buttonPlay.isEnabled = true
}
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
// Playback ended — re-enable recording and reset the play button label.
butttonRecord.isEnabled = true
buttonPlay.setTitle("Play", for: UIControlState())
}
}

How to playback recorded video?

This is an example of the type of video file i'm trying to play:
file:///private/var/mobile/Containers/Data/Application/7725BCCA-B709-48FB-8FE3-DBC9F4B679C0/tmp/9AD6A48E-6A25-4114-88D3-474A0E1C762F.mp4
I think it's recording but when I try to play the recorded video it's just a blank screen.
func startRecording() {
    print("start")
    if movieOutput.isRecording == false {
        // FIX: bind the optional connection/device once instead of the
        // original's `(connection?...)!` force unwraps, which crash whenever
        // the video connection or device is nil.
        if let connection = movieOutput.connection(withMediaType: AVMediaTypeVideo) {
            if connection.isVideoOrientationSupported {
                connection.videoOrientation = currentVideoOrientation()
            }
            if connection.isVideoStabilizationSupported {
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
            }
        }
        print(AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo))
        // Disable smooth autofocus while recording (it hunts on video).
        if let device = activeInput.device, device.isSmoothAutoFocusSupported {
            do {
                try device.lockForConfiguration()
                device.isSmoothAutoFocusEnabled = false
                device.unlockForConfiguration()
            } catch {
                print("Error setting configuration: \(error)")
            }
        }
        outputURL = tempURL()
        movieOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
    } else {
        print("stop")
        stopRecording()
    }
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    // Route either the captured still or the recorded-video URL to the
    // preview screen. FIX: conditional casts instead of `as!`, which would
    // crash on a storyboard misconfiguration.
    if segue.identifier == "showPreview" {
        if let previewVC = segue.destination as? ProfilePhotoPreviewViewController {
            previewVC.image = self.image
        }
    } else if segue.identifier == "showVideoPreview" {
        if let vc = segue.destination as? ProfilePhotoPreviewViewController {
            vc.videoURL = videoRecorded
        }
    }
}
}
// MARK: - Recording lifecycle callbacks for the movie-file output.
extension TakePhotoViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        // Intentionally empty: nothing to do when the file starts.
    }
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        if (error != nil) {
            print("Error recording movie: \(error!.localizedDescription)")
        } else {
            // FIX: use the URL the delegate hands us rather than the stored
            // outputURL property — the parameter is authoritative and avoids
            // a force unwrap of mutable state.
            videoRecorded = outputFileURL
            print(videoRecorded)
            print("recorded")
            performSegue(withIdentifier: "showVideoPreview", sender: nil)
        }
    }
}
This code works right here, I had the same problem as you. And I had to add the "mov" file path extension at the end of my file path. See below:
func recordVideo() {
    let recordingDelegate: AVCaptureFileOutputRecordingDelegate! = self
    let videoFileOutput = AVCaptureMovieFileOutput()
    videoOutput = videoFileOutput
    // FIX: guard the add — calling addOutput(_:) when the session already
    // has a movie-file output (e.g. on a second tap) raises an exception.
    if self.captureSession.canAddOutput(videoFileOutput) {
        self.captureSession.addOutput(videoFileOutput)
    }
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    // The "mov" extension is required for the file to be playable.
    let filePath = documentsURL.appendingPathComponent("introVideo").appendingPathExtension("mov")
    // FIX: startRecording(to:) fails when the destination exists, so clear
    // any previous take at this fixed path first.
    try? FileManager.default.removeItem(at: filePath)
    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
}
And then I took that url and passed it to my VideoPlayerController as so:
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    // Push a player for the finished recording.
    // FIX: surface failures instead of silently ignoring them.
    guard error == nil else {
        print("Error recording movie: \(error!.localizedDescription)")
        return
    }
    let videoVC = PlayVideoController()
    videoVC.url = outputFileURL
    self.navigationController?.pushViewController(videoVC, animated: true)
}
and then here is my code on VideoPlayerController to play the video that was just recorded:
/// Minimal full-screen player for a just-recorded movie file.
class PlayVideoController: UIViewController {
    // URL of the movie to play; set by the presenting controller.
    var url: URL!
    var player: AVPlayer!
    var avPlayerLayer: AVPlayerLayer!

    override func viewWillAppear(_ animated: Bool) {
        // BUG FIX: the original override never called super.viewWillAppear,
        // which UIKit requires of view-lifecycle overrides.
        super.viewWillAppear(animated)
        guard let url = url else { return }
        print(url)
        player = AVPlayer(url: url)
        // FIX: keep the layer in the declared avPlayerLayer property — the
        // original stored it in a local, leaving the property forever nil.
        avPlayerLayer = AVPlayerLayer(player: player)
        self.view.layer.addSublayer(avPlayerLayer)
        avPlayerLayer.frame = self.view.frame
        player.play()
    }
}
Hope this helps!