How to play back a recorded video? - Swift

This is an example of the type of video file I'm trying to play:
file:///private/var/mobile/Containers/Data/Application/7725BCCA-B709-48FB-8FE3-DBC9F4B679C0/tmp/9AD6A48E-6A25-4114-88D3-474A0E1C762F.mp4
I think it's recording, but when I try to play the recorded video I just get a blank screen.
func startRecording() {
    print("start")
    if movieOutput.isRecording == false {
        let connection = movieOutput.connection(withMediaType: AVMediaTypeVideo)
        if (connection?.isVideoOrientationSupported)! {
            connection?.videoOrientation = currentVideoOrientation()
        }
        if (connection?.isVideoStabilizationSupported)! {
            connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
        }
        print(AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo))
        let device = activeInput.device
        if (device?.isSmoothAutoFocusSupported)! {
            do {
                try device?.lockForConfiguration()
                device?.isSmoothAutoFocusEnabled = false
                device?.unlockForConfiguration()
            } catch {
                print("Error setting configuration: \(error)")
            }
        }
        outputURL = tempURL()
        movieOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
    } else {
        print("stop")
        stopRecording()
    }
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    if segue.identifier == "showPreview" {
        let previewVC = segue.destination as! ProfilePhotoPreviewViewController
        previewVC.image = self.image
    } else if segue.identifier == "showVideoPreview" {
        let vc = segue.destination as! ProfilePhotoPreviewViewController
        vc.videoURL = videoRecorded
    }
}
}
extension TakePhotoViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        if (error != nil) {
            print("Error recording movie: \(error!.localizedDescription)")
        } else {
            videoRecorded = outputURL! as URL
            print(videoRecorded)
            print("recorded")
            performSegue(withIdentifier: "showVideoPreview", sender: nil)
        }
    }
}

This code works for me; I had the same problem as you. I had to add the "mov" path extension at the end of my file path. See below:
func recordVideo() {
    let recordingDelegate: AVCaptureFileOutputRecordingDelegate! = self
    let videoFileOutput = AVCaptureMovieFileOutput()
    videoOutput = videoFileOutput
    self.captureSession.addOutput(videoFileOutput)
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let filePath = documentsURL.appendingPathComponent("introVideo").appendingPathExtension("mov")
    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
}
Then I took that URL and passed it to my VideoPlayerController like so:
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    if error == nil {
        let videoVC = PlayVideoController()
        videoVC.url = outputFileURL!
        self.navigationController?.pushViewController(videoVC, animated: true)
    }
    return
}
And here is the code in my VideoPlayerController that plays the video that was just recorded:
class PlayVideoController: UIViewController {
    var url: URL!
    var player: AVPlayer!
    var avPlayerLayer: AVPlayerLayer!

    override func viewWillAppear(_ animated: Bool) {
        if url != nil {
            print(url)
            player = AVPlayer(url: url)
            let playerLayer = AVPlayerLayer(player: player)
            self.view.layer.addSublayer(playerLayer)
            playerLayer.frame = self.view.frame
            player.play()
        }
    }
}
Hope this helps!
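Applied to the original question's setup, the same idea would mean giving the temporary output file a "mov" extension before starting the recording. A minimal sketch, not the original poster's code, assuming the recording is written to the temporary directory via a tempURL() helper like the one the question calls:

// Minimal sketch: build the temporary output URL with a "mov" extension
// before calling startRecording(toOutputFileURL:recordingDelegate:).
func tempURL() -> URL? {
    let directory = NSTemporaryDirectory() as NSString
    if directory != "" {
        let path = directory.appendingPathComponent(UUID().uuidString + ".mov")
        return URL(fileURLWithPath: path)
    }
    return nil
}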

Related

Define the video URL as the UIView in your class

My Swift code should take a snapshot of a video and then display that image in a UIImageView. Instead of using an online link, I want the URL to be the UIView in my class. So the video URL should be previewView, not the https link I have below. All of the code below is in this class:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {
    @IBOutlet var previewView: UIView!
    @IBOutlet var captureImageView: UIImageView!
    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var videoPreviewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Setup your camera here...
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = .medium
        guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
        else {
            print("Unable to access back camera!")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            //Step 9
            stillImageOutput = AVCapturePhotoOutput()
            if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(input)
                captureSession.addOutput(stillImageOutput)
                setupLivePreview()
            }
        }
        catch let error {
            print("Error Unable to initialize back camera: \(error.localizedDescription)")
        }
    }
    func setupLivePreview() {
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspect
        videoPreviewLayer.connection?.videoOrientation = .portrait
        previewView.layer.addSublayer(videoPreviewLayer)
        //Step12
        DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
            self.captureSession.startRunning()
            //Step 13
            DispatchQueue.main.async {
                self.videoPreviewLayer.frame = self.previewView.bounds
            }
        }
    }
    @IBAction func startRecord(_ sender: Any) {
    }

    @IBAction func Save(_ sender: Any) {
        //what do I put in the 2 highlighted blocks
        let videoURL = "https://www.youtube.com/watch?v=Txt25dw-lIk"
        self.getThumbnailFromUrl(videoURL) { [weak self] (img) in
            guard let _ = self else { return }
            if let img = img {
                self?.captureImageView.image = img
            }
        }
    }

    func getThumbnailFromUrl(_ url: String?, _ completion: @escaping ((_ image: UIImage?) -> Void)) {
        guard let url = URL(string: url ?? "") else { return }
        DispatchQueue.main.async {
            let asset = AVAsset(url: url)
            let assetImgGenerate = AVAssetImageGenerator(asset: asset)
            assetImgGenerate.appliesPreferredTrackTransform = true
            let time = CMTimeMake(value: 2, timescale: 1)
            do {
                let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
                let thumbnail = UIImage(cgImage: img)
                completion(thumbnail)
            } catch {
                print("Error :: ", error.localizedDescription)
                completion(nil)
            }
        }
    }

    @IBAction func didTakePhoto(_ sender: Any) {
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        stillImageOutput.capturePhoto(with: settings, delegate: self)
    }
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation()
        else { return }
        let image = UIImage(data: imageData)
        captureImageView.image = image
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.captureSession.stopRunning()
    }
}
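For reference, AVAssetImageGenerator works on an AVAsset built from a media file or stream URL, not from a UIView, so the snapshot source would normally be the URL of a recorded movie file rather than previewView. A minimal sketch, assuming a recordedFileURL obtained elsewhere (for example from an AVCaptureMovieFileOutput delegate callback); the helper name is hypothetical:

// Hypothetical helper: recordedFileURL is assumed to come from a movie
// file output; previewView only hosts the live preview layer.
func showThumbnail(for recordedFileURL: URL) {
    getThumbnailFromUrl(recordedFileURL.absoluteString) { [weak self] image in
        self?.captureImageView.image = image
    }
}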

How do I save a captured video directly to Firebase vs locally?

I built an app that uses a custom camera to record video and take photos. I'm successfully saving the photos to Firebase Storage, but I can't figure out how to access the video to save it there as well.
Here's my code:
recordViewController
func setupCaptureSession() {
    captureSession.sessionPreset = AVCaptureSession.Preset.high
}

func setupDevice() {
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
    let devices = deviceDiscoverySession.devices
    for device in devices {
        if device.position == AVCaptureDevice.Position.back {
            backCamera = device
        } else if device.position == AVCaptureDevice.Position.front {
            frontCamera = device
        }
    }
    currentCamera = backCamera
}

func setupInputOutput() {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
        captureSession.addInput(captureDeviceInput)
        videoFileOutput = AVCaptureMovieFileOutput()
        captureSession.addOutput(videoFileOutput!)
    } catch {
        print(error)
    }
}

func setupPreviewLayer() {
    cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
    cameraPreviewLayer?.frame = self.view.frame
    self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}

func startRunningCaptureSession() {
    captureSession.startRunning()
}

override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    if segue.identifier == "playVideo" {
        let videoPlayerViewController = segue.destination as! AVPlayerViewController
        let videoFileURL = sender as! URL
        videoPlayerViewController.player = AVPlayer(url: videoFileURL)
    }
}
// MARK: - AVCaptureFileOutputRecordingDelegate methods
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if error != nil {
        print(error)
        return
    }
    performSegue(withIdentifier: "playVideo", sender: outputFileURL)
}

// Actions
@IBAction func recordButtonTapped(_ sender: Any) {
    if !isRecording {
        isRecording = true
        let outputPath = NSTemporaryDirectory() + "output.mov"
        let outputFileURL = URL(fileURLWithPath: outputPath)
        videoFileOutput?.startRecording(to: outputFileURL, recordingDelegate: self)
        recordButton.isSelected = true
    } else {
        isRecording = false
        recordButton.isSelected = false
        recordButton.layer.removeAllAnimations()
        videoFileOutput?.stopRecording()
    }
}
Once the video has been recorded, it is currently saved to a temporary local location. I'm not sure how to find that path, or how to change it to a Firebase Storage path.
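For reference, the outputFileURL delivered to fileOutput(_:didFinishRecordingTo:from:error:) is that local path, and Firebase Storage can upload straight from a file URL. A minimal sketch, assuming the FirebaseStorage SDK is configured in the app; the storage path and function name are illustrative:

import FirebaseStorage

// Minimal sketch: upload the locally recorded file to Firebase Storage.
// The "videos/..." path is illustrative, not from the original post.
func uploadVideoToFirebase(from outputFileURL: URL) {
    let videoRef = Storage.storage().reference().child("videos/\(UUID().uuidString).mov")
    videoRef.putFile(from: outputFileURL, metadata: nil) { metadata, error in
        if let error = error {
            print("Upload failed: \(error.localizedDescription)")
            return
        }
        // Optionally fetch the download URL to store in a database.
        videoRef.downloadURL { url, error in
            print("Uploaded video available at: \(String(describing: url))")
        }
    }
}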

How to save an m4a file in UserDefaults and present it in a table view - iOS Swift 3

I have built a feature that records and plays back the user's voice, and I need to save the recordings in UserDefaults and present them all in a list view on the next screen.
My question is: how do I save the m4a file, and how do I present the data later on?
var soundRecorder: AVAudioRecorder!
var soundPlayer: AVAudioPlayer!
var audioFileName = "sound.m4a"
let arrayOfRecords = [""]

//recording issues
let recordSettings = [AVSampleRateKey: NSNumber(value: Float(44100.0) as Float),
                      AVFormatIDKey: NSNumber(value: Int32(kAudioFormatMPEG4AAC) as Int32), //change to 850k vs 25Mb at (kAudioFormatAppleLossless)
                      AVNumberOfChannelsKey: NSNumber(value: 1 as Int32),
                      AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.high.rawValue) as Int32)]

override func viewDidLoad() {
    super.viewDidLoad()
    setupRecorder()
    buttonPlay.isHidden = true
    readableText()
    //audio speaker not mic option
    let session = AVAudioSession.sharedInstance()
    try! session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: AVAudioSessionCategoryOptions.defaultToSpeaker)
}

override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    textViewTest.setContentOffset(CGPoint.zero, animated: false)
}
/////recording
@IBAction func ActionRecord(_ sender: AnyObject) {
    if sender.titleLabel?!.text == "Record" {
        buttonPlay.isHidden = false
        if isTimerRunning == false {
        }
        sender.setTitle("Stop", for: UIControlState())
        //stop recording + save
        UserDefaults.standard.set(arrayOfRecords, forKey: "userRecord")
        // defaults.object(forKey: "userRecord")//print
    } else {
        print("record")
        stopTimer()
        soundRecorder.stop()
        sender.setTitle("Record", for: UIControlState())
        buttonPlay.isEnabled = true
    }
}

//play record
@IBAction func ActionPlay(_ sender: AnyObject) {
    if sender.titleLabel?!.text == "Play" {
        butttonRecord.isEnabled = true
        sender.setTitle("Stop", for: UIControlState())
        preparePlayer()
        soundPlayer.play()
    } else {
        soundPlayer.stop()
        sender.setTitle("Play", for: UIControlState())
    }
}

// override func viewDidAppear(_ animated: Bool) {
//     if let x = UserDefaults.standard.object(forKey: "userRecord") as? String {
//         print("your record has been saved")
//     }
// }
//HELPERS
func preparePlayer() {
    do {
        try soundPlayer = AVAudioPlayer(contentsOf: directoryURL()!)
        soundPlayer.delegate = self
        soundPlayer.prepareToPlay()
        soundPlayer.volume = 1.0
    } catch {
        print("Error playing")
    }
}

func setupRecorder() {
    let audioSession: AVAudioSession = AVAudioSession.sharedInstance()
    //ask for permission
    if (audioSession.responds(to: #selector(AVAudioSession.requestRecordPermission(_:)))) {
        AVAudioSession.sharedInstance().requestRecordPermission({ (granted: Bool) -> Void in
            if granted {
                print("granted")
                //set category and activate recorder session
                do {
                    try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
                    try self.soundRecorder = AVAudioRecorder(url: self.directoryURL()!, settings: self.recordSettings)
                    self.soundRecorder.prepareToRecord()
                } catch {
                    print("Error Recording")
                }
            }
        })
    }
}

func directoryURL() -> URL? {
    let fileManager = FileManager.default
    let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
    let documentDirectory = urls[0] as URL
    let soundURL = documentDirectory.appendingPathComponent("sound.m4a")
    return soundURL
}

func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
    buttonPlay.isEnabled = true
}

func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
    butttonRecord.isEnabled = true
    buttonPlay.setTitle("Play", for: UIControlState())
}
}
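UserDefaults is intended for small values, so a common approach is to leave the .m4a files in the Documents directory and persist only their file names in UserDefaults, then rebuild the URLs for the table view. A minimal sketch, not from the original code; the key and helper names are illustrative:

// Minimal sketch: persist recording file names in UserDefaults and
// rebuild their URLs later for a table view data source.
func saveRecordingName(_ fileName: String) {
    var names = UserDefaults.standard.stringArray(forKey: "userRecords") ?? []
    names.append(fileName)
    UserDefaults.standard.set(names, forKey: "userRecords")
}

func loadRecordingURLs() -> [URL] {
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let names = UserDefaults.standard.stringArray(forKey: "userRecords") ?? []
    return names.map { documents.appendingPathComponent($0) }
}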

Swift capture video to different paths

I'm trying to capture video using Swift. I want to capture the video into a new file every x seconds.
Here is my current code:
var i = 0

override func viewDidLoad() {
    super.viewDidLoad()
    if setupSession() {
        setupPreview()
        startSession()
    }
}

func setupSession() -> Bool {
    ...
    return true
}

func startSession() {
    ...
}

func tempURL() -> URL? {
    let directory = NSTemporaryDirectory() as NSString
    if directory != "" {
        i = i + 1
        old_cur_videoID = cur_videoID_system
        cur_videoID_system = "\(cur_videoID)_\(i).mp4"
        let path = directory.appendingPathComponent(cur_videoID_system)
        return URL(fileURLWithPath: path)
    }
    return nil
}

func startRecording() {
    let delayTime = DispatchTime.now() + 2
    DispatchQueue.main.asyncAfter(deadline: delayTime) {
        self.startRecording()
    }
    outputURL = tempURL()
    movieOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
}
And the two delegate functions:
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
}

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    if (error != nil) {
        print("Error recording movie: \(error!.localizedDescription)")
    } else {
        if (i > 1) {
            saveOnServer()
        }
    }
    outputURL = nil
}
If I run the code, I get the following error message:
Error recording movie: Movie recording cannot be started
Can somebody help me with my problem? If more code is necessary or some parts are unclear, please feel free to ask.
Thanks a lot!
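For reference, AVCaptureMovieFileOutput writes only one file at a time, so a new-file-every-x-seconds approach usually stops the current segment and starts the next one only after the previous recording has finished. A minimal sketch under that assumption, using the same Swift 3 era API as the question; segmentLength is an assumed property, not from the original code:

// Minimal sketch: stop the current segment on a timer; the
// didFinishRecording delegate can then call this again to begin the next one.
func startNextSegment() {
    outputURL = tempURL()
    movieOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
    DispatchQueue.main.asyncAfter(deadline: .now() + segmentLength) {
        if self.movieOutput.isRecording {
            self.movieOutput.stopRecording()
        }
    }
}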

AVPlayer is not working when I try to use it with YTVimeoExtractor

When I try to play the video through AVPlayer, the video loads for some time (the loading indicator appears at the top of the player), then it suddenly stops and a crossed-out play icon is shown. I don't know what is wrong. I can get the video information, but I can't get the video to play.
I will show what I have done. Could anyone answer my question? Help is much appreciated.
import UIKit
import AVKit
import AVFoundation

class ViewController: UIViewController {
    var playerVC: AVPlayerViewController!
    var playerItem: AVPlayerItem!
    var player: AVPlayer!
    var playerLayer: AVPlayerLayer!

    @IBOutlet weak var videoURL: UITextField!
    @IBOutlet weak var videoTitle: UILabel!

    @IBAction func playVideo(sender: AnyObject) {
        YTVimeoExtractor.sharedExtractor().fetchVideoWithVimeoURL(self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
            if video != nil {
                // https://vimeo.com/165891648
                self.videoTitle.text = video?.title
                print("hello: \(self.videoTitle.text)")
                let url = NSURL(string: self.videoURL.text!)
                // let url = NSURL.init(fileURLWithPath: self.videoURL.text!)
                self.playerItem = AVPlayerItem.init(URL: url!)
                self.player = AVPlayer.init(playerItem: self.playerItem)
                self.playerVC = AVPlayerViewController.init();
                self.playerVC.player = self.player;
                self.player.currentItem!.playbackLikelyToKeepUp
                self.presentViewController(self.playerVC, animated: true) { () -> Void in
                    self.playerVC.player?.play()
                }
            } else {
                let alert = UIAlertController(title: error!.localizedDescription, message: error!.localizedFailureReason, preferredStyle: UIAlertControllerStyle.Alert)
                alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: nil))
                self.presentViewController(alert, animated: true, completion: nil)
            }
        })
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func viewDidAppear(animated: Bool) {
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
This is what I get in the simulator.
I used https://github.com/lilfaf/YTVimeoExtractor to build this sample project. You can try it out and let me know.
I had the same issue and fixed it this way:
@IBAction func btnDownload_touchUpInside(_ sender: UIButton) {
    YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
        if video != nil {
            self.videoTitle.text = video?.title
            if let streamUrls = video?.streamURLs {
                var streamURL: String?
                var streams: [String: String] = [:]
                for (key, value) in streamUrls {
                    streams["\(key)"] = "\(value)"
                    print("\(key) || \(value)")
                }
                if let large = streams["720"] {
                    streamURL = large
                } else if let high = streams["480"] {
                    streamURL = high
                } else if let medium = streams["360"] {
                    streamURL = medium
                } else if let low = streams["270"] {
                    streamURL = low
                }
                if let url = streamURL {
                    Alamofire.download(url, to: { (temporaryURL, response) -> (destinationURL: URL, options: DownloadRequest.DownloadOptions) in
                        if let directoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as? NSURL {
                            let fileName = response.suggestedFilename!
                            let finalPath = directoryURL.appendingPathComponent(fileName)
                            self.downloadedVideoPath = finalPath?.absoluteString
                            return (finalPath!, DownloadRequest.DownloadOptions(rawValue: 2))
                        }
                        // Fallback so the destination closure returns on every path.
                        return (temporaryURL, [])
                    }).downloadProgress(closure: { (progress) in
                        print("Progress: \(progress.fractionCompleted)")
                    })
                }
            }
        }
    })
}
@IBAction func btnPlayOffLine_touchUpInside(_ sender: UIButton) {
    YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
        if video != nil {
            let videoURL = NSURL(string: self.downloadedVideoPath!)
            let player = AVPlayer(url: videoURL! as URL)
            let playerViewController = AVPlayerViewController()
            playerViewController.player = player
            self.present(playerViewController, animated: true) {
                playerViewController.player!.play()
            }
        }
    })
}
@IBAction func btnPlayOnLine_touchUpInside(_ sender: UIButton) {
    YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: { (video, error) -> Void in
        if video != nil {
            self.videoTitle.text = video?.title
            if let streamUrls = video?.streamURLs {
                var streamURL: String?
                var streams: [String: String] = [:]
                for (key, value) in streamUrls {
                    streams["\(key)"] = "\(value)"
                    print("\(key) || \(value)")
                }
                if let large = streams["720"] {
                    streamURL = large
                } else if let high = streams["480"] {
                    streamURL = high
                } else if let medium = streams["360"] {
                    streamURL = medium
                } else if let low = streams["270"] {
                    streamURL = low
                }
                if let url = streamURL {
                    let videoURL = NSURL(string: url)
                    let player = AVPlayer(url: videoURL! as URL)
                    let playerViewController = AVPlayerViewController()
                    playerViewController.player = player
                    self.present(playerViewController, animated: true) {
                        playerViewController.player!.play()
                    }
                }
            }
        }
    })
}
}