Define video URL as the UIView in your class - Swift

My Swift code should be able to take a snapshot of a video and then display that image in a UIImageView. Instead of using an online link, I want the URL to be the UIView in my class. So the video URL should be previewView, not the https link I have below. All the code below is in this class.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {
@IBOutlet var previewView : UIView!
@IBOutlet var captureImageView : UIImageView!
var captureSession: AVCaptureSession!
var stillImageOutput: AVCapturePhotoOutput!
var videoPreviewLayer: AVCaptureVideoPreviewLayer!
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
// Setup your camera here...
captureSession = AVCaptureSession()
captureSession.sessionPreset = .medium
guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
else {
print("Unable to access back camera!")
return
}
do {
let input = try AVCaptureDeviceInput(device: backCamera)
//Step 9
stillImageOutput = AVCapturePhotoOutput()
if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
captureSession.addInput(input)
captureSession.addOutput(stillImageOutput)
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
}
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer.videoGravity = .resizeAspect
videoPreviewLayer.connection?.videoOrientation = .portrait
previewView.layer.addSublayer(videoPreviewLayer)
//Step12
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
//Step 13
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.previewView.bounds
}
}
}
@IBAction func startRecord(_ sender: Any) {
}
@IBAction func Save(_ sender: Any) {
// I want this to use previewView instead of the remote link below
let videoURL = "https://www.youtube.com/watch?v=Txt25dw-lIk"
self.getThumbnailFromUrl(videoURL) { [weak self] (img) in
guard let self = self, let img = img else { return }
self.captureImageView.image = img
}
}
func getThumbnailFromUrl(_ url: String?, _ completion: @escaping ((_ image: UIImage?)->Void)) {
guard let url = URL(string: url ?? "") else { return }
// Generate off the main thread; copyCGImage(at:actualTime:) blocks while it decodes.
DispatchQueue.global(qos: .userInitiated).async {
let asset = AVAsset(url: url)
let assetImgGenerate = AVAssetImageGenerator(asset: asset)
assetImgGenerate.appliesPreferredTrackTransform = true
let time = CMTimeMake(value: 2, timescale: 1)
do {
let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
let thumbnail = UIImage(cgImage: img)
DispatchQueue.main.async { completion(thumbnail) }
} catch {
print("Error :: ", error.localizedDescription)
DispatchQueue.main.async { completion(nil) }
}
}
}
@IBAction func didTakePhoto(_ sender: Any) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
let image = UIImage(data: imageData)
captureImageView.image = image
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
self.captureSession.stopRunning()
}
}
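AVAssetImageGenerator only works on a media URL (a file or a stream), so there is no URL form of previewView; the live camera preview is not an asset. A minimal sketch of what Save could do instead, reusing the stillImageOutput this class already configures (the session must be running), is:

@IBAction func Save(_ sender: Any) {
    // Capture the current frame straight from the session instead of
    // generating a thumbnail from a remote URL.
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    stillImageOutput.capturePhoto(with: settings, delegate: self)
    // The photoOutput(_:didFinishProcessingPhoto:error:) method already in the
    // class then assigns the captured image to captureImageView.image.
}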

Related

Realm list data in Swift is saving but not loading properly. New to this and not sure what the problem is. Code below

Below is my main view controller. The user selects images of clothing, which are then categorized using CoreML and given a filename. The data is then saved to Realm. When I call loadClothing(), the array is empty even though items were added during detect(). Any help is much appreciated!
import UIKit
import PhotosUI
import RealmSwift
import CoreML
import Vision
class ViewController: UIViewController, PHPickerViewControllerDelegate {
@IBOutlet weak var shoesImageView: UIImageView!
@IBOutlet weak var shirtImageView: UIImageView!
@IBOutlet weak var pantsImageView: UIImageView!
var documentsUrl: URL {
return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
}
let realm = try! Realm()
var clothing: Results<Clothing>?
override func viewDidLoad() {
super.viewDidLoad()
loadClothing()
let clothingArray = Clothing()
print(clothingArray)
}
@IBAction func addClothesButton(_ sender: UIBarButtonItem) {
pickPhotos()
}
@IBAction func randomizeButton(_ sender: UIBarButtonItem) {
loadClothing()
let clothingArray = Clothing()
print(clothingArray)
shirtImageView.image = load(fileName: clothingArray.shirtImages.randomElement()!)
pantsImageView.image = load(fileName: clothingArray.pantsImages.randomElement()!)
shoesImageView.image = load(fileName: clothingArray.shoesImages.randomElement()!)
}
//MARK: - PHPickerViewController
@objc func pickPhotos() {
var config = PHPickerConfiguration()
config.selectionLimit = 25
config.filter = PHPickerFilter.images
let pickerViewController = PHPickerViewController(configuration: config)
pickerViewController.delegate = self
self.present(pickerViewController, animated: true, completion: nil)
}
// MARK: - PHPickerViewControllerDelegate
func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
picker.dismiss(animated: true, completion: nil)
for result in results {
result.itemProvider.loadObject(ofClass: UIImage.self) {(object, error) in
if let image = object as? UIImage {
DispatchQueue.main.async {
guard let fileName = result.itemProvider.suggestedName else {
fatalError("Could not retrieve file name.")
}
print(fileName)
guard let ciImage = CIImage(image: image) else {
fatalError("Could not convert to CI Image.")
}
self.detect(image: ciImage, fileName: fileName)
}
}
}
}
}
// MARK: - Core ML
func detect(image: CIImage, fileName: String) {
guard let model = try? VNCoreMLModel(for: ClothingClassifier(configuration: MLModelConfiguration()).model) else {
fatalError("Loading CoreML Model failed.")
}
let request = VNCoreMLRequest(model: model) { (request, error) in
guard let results = request.results as? [VNClassificationObservation] else {
fatalError("Model failed to process image.")
}
let newClothing = Clothing()
if let firstResult = results.first {
let uiImage = UIImage(ciImage: image)
if firstResult.identifier.contains("shirts") {
newClothing.shirtImages.append(fileName)
} else if firstResult.identifier.contains("pants"){
newClothing.pantsImages.append(fileName)
} else if firstResult.identifier.contains("shoes") {
newClothing.shoesImages.append(fileName)
}
self.save(clothing: newClothing)
print(newClothing)
}
}
let handler = VNImageRequestHandler(ciImage: image)
do {
try handler.perform([request])
}
catch {
print(error)
}
}
// MARK: - Data Manipulation Methods
func save(clothing: Clothing) {
do {
try realm.write {
realm.add(clothing)
}
} catch {
print("Error saving uploaded clothing. \(error)")
}
}
func loadClothing() {
clothing = realm.objects(Clothing.self)
print("loaded")
}
private func load(fileName: String) -> UIImage? {
let fileURL = documentsUrl.appendingPathComponent(fileName)
do {
let imageData = try Data(contentsOf: fileURL)
return UIImage(data: imageData)
} catch {
print("Error loading image : \(error)")
}
return nil
}
}
Clothing Class
import Foundation
import RealmSwift
class Clothing: Object {
let shirtImages = List<String>()
let pantsImages = List<String>()
let shoesImages = List<String>()
}
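The save path is probably fine; the problem is that both viewDidLoad and randomizeButton read from a brand-new Clothing() instance, which is always empty, instead of the clothing results that loadClothing() fetched. Note also that detect(image:fileName:) creates one Clothing object per classified photo, so each saved object holds a single filename. A sketch of randomizing from the persisted objects (same outlets and helpers as above) could look like this:

@IBAction func randomizeButton(_ sender: UIBarButtonItem) {
    loadClothing()
    // Flatten each list across every saved Clothing object, since detect()
    // saves a separate object per image.
    let shirts = clothing?.flatMap { $0.shirtImages } ?? []
    let pants = clothing?.flatMap { $0.pantsImages } ?? []
    let shoes = clothing?.flatMap { $0.shoesImages } ?? []
    if let shirt = shirts.randomElement() { shirtImageView.image = load(fileName: shirt) }
    if let pant = pants.randomElement() { pantsImageView.image = load(fileName: pant) }
    if let shoe = shoes.randomElement() { shoesImageView.image = load(fileName: shoe) }
}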

How to use a stored URL from Firebase Database as an image in a UIImageView

I'm new to coding and trying to build an iOS app. I am storing images uploaded by users in my Firebase Storage and then saving the URL as a string ("https://..."). I am able to get a snapshot to show up in the project terminal after I use print(snapshot); it prints: snap (profileImageUrl) https://firebasestorage.... How do I use this snapshot to make the image view show the most recently saved profile picture?
import UIKit
import Firebase
import SDWebImage
class EditProfileViewController: UIViewController {
@IBOutlet weak var ProfileImage: UIImageView!
var selectedImage: UIImage?
var ref:DatabaseReference?
var databaseHandle:DatabaseHandle = 0
var postProfileImage = [String]()
let dbref = Database.database().reference()
let uid = Auth.auth().currentUser?.uid
override func viewDidLoad() {
super.viewDidLoad()
self.ref?.child("users").child(Auth.auth().currentUser!.uid).child("profileImageUrl").observe(.value, with: { (snapshot) in
print(snapshot)
})
ProfileImage.layer.borderWidth = 3.0
ProfileImage.layer.masksToBounds = false
ProfileImage.layer.borderColor = UIColor.white.cgColor
ProfileImage.layer.cornerRadius = ProfileImage.frame.size.width / 2
ProfileImage.clipsToBounds = true
let tapGesture = UITapGestureRecognizer(target: self, action: #selector(EditProfileViewController.handleSelectProfileImageView))
ProfileImage.addGestureRecognizer(tapGesture)
ProfileImage.isUserInteractionEnabled = true
}
@objc func handleSelectProfileImageView() {
let pickerController = UIImagePickerController()
pickerController.delegate = self
present(pickerController, animated: true, completion: nil)
}
@IBAction func Cancel(_ sender: UIBarButtonItem) {
dismiss(animated: true, completion: nil)
}
let user = Auth.auth().currentUser
let fileData = NSData()
@IBAction func DoneButton(_ sender: UIBarButtonItem) {
guard let imageSelected = self.ProfileImage.image else {
print ("Avatar is nil")
return
}
var dict: Dictionary<String, Any> = [
"profileImageUrl": "",
]
guard let imageData = imageSelected.jpegData(compressionQuality: 0.4) else {
return
}
let storageRef = Storage.storage().reference(forURL: "<my storage URL here>")
let imageName = NSUUID().uuidString
let storageProfileRef = storageRef.child("Profile_Images").child(Auth.auth().currentUser!.uid).child("\(imageName).png")
let metadata = StorageMetadata()
metadata.contentType = "image/jpeg"
storageProfileRef.putData(imageData, metadata: metadata, completion:
{ (StorageMetadata, error) in
if (error != nil) {
return
}
storageProfileRef.downloadURL { (url, error) in
if let metaImageUrl = url?.absoluteString {
dict["profileImageUrl"] = metaImageUrl
Database.database().reference().child("users").child(Auth.auth().currentUser!.uid).updateChildValues(dict, withCompletionBlock: {
(error, ref) in
if error == nil {
print("Done")
}
}
)
}
}
})
dismiss(animated: true, completion: nil)
}
}
extension EditProfileViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
//print("did Finish Picking Media")
if let image = info[UIImagePickerController.InfoKey(rawValue: "UIImagePickerControllerOriginalImage")] as? UIImage{
selectedImage = image
ProfileImage.image = image
}
dismiss(animated: true, completion: nil)
}
}
I could really use some help!
You can add an extension to UIImageView as below:
extension UIImageView {
func load(url: URL, onLoadCompletion: ((_ isImageLoaded: Bool) -> Void)? = nil) {
self.image = nil
DispatchQueue.global().async { [weak self] in
if let data = try? Data(contentsOf: url), let image = UIImage(data: data) {
DispatchQueue.main.async {
self?.image = image
onLoadCompletion?(true)
}
} else {
// Call back on the main thread here too, since callers update UI.
DispatchQueue.main.async {
onLoadCompletion?(false)
}
}
}
}
}
Assuming your image view outlet is something like this:
@IBOutlet weak var imageView: UIImageView!
Below is the usage when adding a loader:
if let url = URL(string: "https://firebase-storage-url") {
// show a loader here if needed
imageView.load(url: url) { (imageLoaded) in
if imageLoaded {
// hide loader
} else {
// show a place holder image
// hide loader
}
}
} else {
// show a default image
}
Below is the usage without any extra work and just loading the image:
if let url = URL(string: "https://firebase-storage-url") {
imageView.load(url: url)
}
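Since the question already imports SDWebImage, another option is to let that library handle the download and caching. A sketch that wires it to the observed database value (assuming profileImageUrl stores the download URL string written by DoneButton) might look like:

ref = Database.database().reference()
ref?.child("users").child(Auth.auth().currentUser!.uid).child("profileImageUrl")
    .observe(.value) { [weak self] snapshot in
        // The snapshot's value is the URL string saved after the upload.
        guard let urlString = snapshot.value as? String,
              let url = URL(string: urlString) else { return }
        // SDWebImage fetches and caches the image, then sets it on the view.
        self?.ProfileImage.sd_setImage(with: url)
    }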

func captureOutput is never called

I'd like to add a filter to each frame I record in real time and display the filtered image in a UIImageView. If anyone could help, it would be nice.
But captureOutput is never called. Here is my code.
class Measurement: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate {
@IBOutlet weak var cameraPreview: UIView!
@IBOutlet weak var imageView: UIImageView!
override func viewDidLoad() {
super.viewDidLoad()
setupCameraSession()
toggleTorch(on: true)
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
view.layer.addSublayer(previewLayer)
cameraSession.startRunning()
}
lazy var cameraSession: AVCaptureSession = {
let s = AVCaptureSession()
s.sessionPreset = AVCaptureSession.Preset.low
return s
}()
lazy var previewLayer: AVCaptureVideoPreviewLayer = {
let preview = AVCaptureVideoPreviewLayer(session: self.cameraSession)
preview.position = CGPoint(x:182,y: 485)
preview.videoGravity = AVLayerVideoGravity.resizeAspectFill
preview.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
preview.bounds = imageView.bounds
//preview.position = CGPoint(x:self.view.bounds.midX,y: self.view.bounds.midY)
imageView.layer.addSublayer(preview)
return preview
}()
func toggleTorch(on: Bool) {
guard let device = AVCaptureDevice.default(for: .video) else { return }
if device.hasTorch {
do {
try device.lockForConfiguration()
if on == true {
device.torchMode = .on
} else {
device.torchMode = .off
}
device.unlockForConfiguration()
} catch {
print("Torch could not be used")
}
} else {
print("Torch is not available")
}
}
func setupCameraSession() {
let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
do {
let deviceInput = try AVCaptureDeviceInput(device: captureDevice!)
cameraSession.beginConfiguration()
if (cameraSession.canAddInput(deviceInput) == true) {
cameraSession.addInput(deviceInput)
print("Processing Data.")
}
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)] as [String : AnyObject]
dataOutput.alwaysDiscardsLateVideoFrames = true
print("Processing Data.")
if (cameraSession.canAddOutput(dataOutput) == true) {
cameraSession.addOutput(dataOutput)
print("Processing Data.")
}
cameraSession.commitConfiguration()
let queue = DispatchQueue(label: "com.invasivecode.videoQueue")
dataOutput.setSampleBufferDelegate(self, queue: queue)
}
catch let error as NSError {
print("\(error), \(error.localizedDescription)")
}
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
print("Processing Data.")
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
//let chromaKeyFilter = colorCubeFilterForChromaKey(hueAngle: 120)
let ciImage = CIImage(cvPixelBuffer: imageBuffer)
let context = CIContext()
guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return }
let image = UIImage(cgImage: cgImage)
if let chromaKeyFilter = CIFilter(name: "CISepiaTone") {
let beginImage = CIImage(image: image)
chromaKeyFilter.setValue(beginImage, forKey: kCIInputImageKey)
chromaKeyFilter.setValue(0.5, forKey: kCIInputIntensityKey)
if let output = chromaKeyFilter.outputImage {
if let cgimg = context.createCGImage(output, from: output.extent) {
let processedImage = UIImage(cgImage: cgimg)
// do something interesting with the processed image
imageView.image = processedImage
}
}
}
}
func captureOutput(_ captureOutput: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Here you can count how many frames are dropped
}
func startCapture() {
print("\(self.classForCoder)/" + #function)
if cameraSession.isRunning {
print("already running")
return
}
cameraSession.startRunning()
toggleTorch(on: true)
}
}
The delegate is already set: setupCameraSession() calls dataOutput.setSampleBufferDelegate(self, queue: queue), and sampleBufferDelegate is read-only, so assigning to it directly would not compile anyway. The reason the callback never fires is the method signature: captureOutput(_:didOutputSampleBuffer:from:) is the pre-Swift 4 name, so AVFoundation never matches it; it looks for captureOutput(_:didOutput:from:). A sketch of the signature it needs:
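// Swift 4+ delegate signature: `didOutput`, not `didOutputSampleBuffer`,
// and no implicitly unwrapped optionals in the parameter list.
func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    print("Processing Data.")
    // ...the existing filtering code goes here. Hop back to the main queue
    // before assigning to imageView, since this runs on videoQueue:
    // DispatchQueue.main.async { self.imageView.image = processedImage }
}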

Cannot capture photo with custom camera

I am trying to display an image taken from my custom camera (from a snapchat-like menu where you swipe right to open the camera on the left). I updated the code to Swift 3.0, but now it gives me this error:
'AVCapturePhotoOutput' has no member 'captureStillImageAsynchronouslyFromConnection', and I can't find a way to fix it.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
captureSession = AVCaptureSession()
captureSession?.sessionPreset = AVCaptureSession.Preset.hd1920x1080
let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
do {
let input = try AVCaptureDeviceInput(device: backCamera!)
if (captureSession?.canAddInput(input) != nil) {
captureSession?.addInput(input)
stillImageOutput = AVCapturePhotoOutput()
captureSession?.addOutput(stillImageOutput!)
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspect
cameraView.layer.addSublayer(previewLayer!)
captureSession?.startRunning()
}
} catch {
print("Something is wrong with the camera")
}
}
func didPressTakePhoto() {
if let videoConnection = stillImageOutput?.connection(with: AVMediaType.video) {
videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
(sampleBuffer, error) in
if sampleBuffer != nil {
var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
var dataProvider = CGDataProviderCreateWithCFData(imageData)
var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, kCGRenderingIntentDefault)
var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
self.tempImageView.image = image
self.tempImageView.isHidden = true
}
})
}
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
didPressTakePhoto()
}
I noticed that AVCaptureStillImageOutput is deprecated and AVCapturePhotoOutput is used instead.
What is the proper way to write captureStillImageAsynchronously in Swift 3 (with AVCapturePhotoOutput)?
I saw the other answers for this question and none of them works for me.
All I want is to capture a photo on tap(action) which previews the photo (where I can accept it and do other logic once I accept it).
let photoSettings = AVCapturePhotoSettings()
photoSettings.isAutoStillImageStabilizationEnabled = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.flashMode = .auto
stillImageOutput?.capturePhoto(with: photoSettings, delegate: self)
extension YourViewController: AVCapturePhotoCaptureDelegate {
func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketSettings?, error: Error?) {
guard error == nil, let photoSampleBuffer = photoSampleBuffer else { return }
guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer) else { return }
let capturedImage = UIImage(data: imageData, scale: 1.0)
...
}
}
Typed in by hand, please forgive any typos.
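On iOS 11 and later that callback is deprecated in favor of the AVCapturePhoto-based one. A sketch of the newer variant (using the same tempImageView as the question) would be:

func photoOutput(_ output: AVCapturePhotoOutput,
                 didFinishProcessingPhoto photo: AVCapturePhoto,
                 error: Error?) {
    guard error == nil, let imageData = photo.fileDataRepresentation() else { return }
    // Show the captured photo so the user can accept it or retake.
    tempImageView.image = UIImage(data: imageData)
    tempImageView.isHidden = false
}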

AVPlayer is not working when I try to use it with YTVimeoExtractor

When I try to play the video through the AVPlayer, the video loads for some time (the loading symbol appears at the top of the player), then it suddenly stops and the play icon with a cross through it is shown. I don't know what is wrong: I can get the video information, but I can't make the video play.
I will show what I have done. Could anyone answer my question? Help is much appreciated.
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController {
var playerVC : AVPlayerViewController!
var playerItem : AVPlayerItem!
var player : AVPlayer!
var playerLayer: AVPlayerLayer!
@IBOutlet weak var videoURL: UITextField!
@IBOutlet weak var videoTitle: UILabel!
@IBAction func playVideo(sender: AnyObject) {
YTVimeoExtractor.sharedExtractor().fetchVideoWithVimeoURL(self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
// https://vimeo.com/165891648
self.videoTitle.text = video?.title
print("hello: \(self.videoTitle.text)")
let url = NSURL(string: self.videoURL.text!)
// let url = NSURL.init(fileURLWithPath: self.videoURL.text!)
self.playerItem = AVPlayerItem.init(URL: url!)
self.player = AVPlayer.init(playerItem: self.playerItem)
self.playerVC = AVPlayerViewController.init();
self.playerVC.player = self.player;
self.player.currentItem!.playbackLikelyToKeepUp
self.presentViewController(self.playerVC, animated: true) { () -> Void in
self.playerVC.player?.play()
}
}else {
let alert = UIAlertController(title: error!.localizedDescription, message: error!.localizedFailureReason, preferredStyle: UIAlertControllerStyle.Alert)
alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: nil))
self.presentViewController(alert, animated: true, completion: nil)
}
})
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
}
override func viewDidAppear(animated: Bool) {
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
This is what I get on the simulator.
I used this https://github.com/lilfaf/YTVimeoExtractor one to do this sample project. You can try it out and let me know.
I had the same issue and fixed it this way:
@IBAction func btnDownload_touchUpInside(_ sender: UIButton) {
YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
self.videoTitle.text = video?.title
if let streamUrls = video?.streamURLs
{
var streamURL: String?
var streams : [String:String] = [:]
for (key,value) in streamUrls {
streams["\(key)"] = "\(value)"
print("\(key) || \(value)")
}
if let large = streams["720"]
{
streamURL = large
}
else if let high = streams["480"]
{
streamURL = high
}
else if let medium = streams["360"]
{
streamURL = medium
}
else if let low = streams["270"]
{
streamURL = low
}
if let url = streamURL
{
Alamofire.download(url, to: { (temporaryURL, response) -> (destinationURL: URL, options: DownloadRequest.DownloadOptions) in
// Return a destination on every path; the original only returned inside an if-let.
let directoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let finalPath = directoryURL.appendingPathComponent(response.suggestedFilename!)
self.downloadedVideoPath = finalPath.absoluteString
return (finalPath, DownloadRequest.DownloadOptions(rawValue: 2))
}).downloadProgress(closure: { (progress) in
print("Progress: \(progress.fractionCompleted)")
})
}
}
}
})
}
@IBAction func btnPlayOffLine_touchUpInside(_ sender: UIButton) {
YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
let videoURL = NSURL(string: self.downloadedVideoPath!)
let player = AVPlayer(url: videoURL! as URL)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
}
})
}
@IBAction func btnPlayOnLine_touchUpInside(_ sender: UIButton) {
YTVimeoExtractor.shared().fetchVideo(withVimeoURL: self.videoURL.text!, withReferer: nil, completionHandler: {(video, error) -> Void in
if video != nil {
self.videoTitle.text = video?.title
if let streamUrls = video?.streamURLs
{
var streamURL: String?
var streams : [String:String] = [:]
for (key,value) in streamUrls {
streams["\(key)"] = "\(value)"
print("\(key) || \(value)")
}
if let large = streams["720"]
{
streamURL = large
}
else if let high = streams["480"]
{
streamURL = high
}
else if let medium = streams["360"]
{
streamURL = medium
}
else if let low = streams["270"]
{
streamURL = low
}
if let url = streamURL
{
let videoURL = NSURL(string: url)
let player = AVPlayer(url: videoURL! as URL)
let playerViewController = AVPlayerViewController()
playerViewController.player = player
self.present(playerViewController, animated: true) {
playerViewController.player!.play()
}
}
}
}
})
}
}
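As a side note, the repeated if/else ladder over the quality keys can be collapsed. A small sketch that picks the highest available stream (assuming the same numeric-string keys such as "270" through "720") is:

// Pick the highest-quality stream available, e.g. "720" over "480" over "360".
func bestStreamURL(from streams: [String: String]) -> String? {
    return streams.keys
        .compactMap { Int($0) }           // keep only numeric quality keys
        .max()                            // take the highest quality
        .flatMap { streams[String($0)] }  // map back to that stream's URL
}

Then if let url = bestStreamURL(from: streams) { ... } replaces the four branches in both play actions.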