// MARK: - Setting up the player
fileprivate func setPlayRemoteUrl() {
    // Bail out if the URL string is empty or malformed instead of force-unwrapping.
    guard !playUrl.isEmpty, let url = URL(string: playUrl) else { return }
    let asset = AVAsset(url: url)
    playerItem = AVPlayerItem(asset: asset)
    player = AVPlayer(playerItem: playerItem)
    playerLayer = AVPlayerLayer(player: player)
    playerLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer?.contentsScale = UIScreen.main.scale
    layer.insertSublayer(playerLayer!, at: 0)
}
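One gap worth flagging: the snippet never sets the player layer's frame, so the video can end up invisible. A minimal sketch, assuming this code lives in a UIView subclass (which the call to layer.insertSublayer suggests) and uses the same playerLayer property as above:

// Keep the player layer matched to the view's bounds; layoutSubviews runs
// whenever the view is resized, including on rotation.
override func layoutSubviews() {
    super.layoutSubviews()
    playerLayer?.frame = bounds
}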
I have a problem playing an HLS stream with a rate > 2.0 on iOS 15.x.
I tried this code:
let strURL = "https://multiplatform-f.akamaihd.net/i/multi/will/bunny/big_buck_bunny_,640x360_400,640x360_700,640x360_1000,950x540_1500,.f4v.csmil/master.m3u8"
if let url = URL(string: strURL) {
    let asset = AVAsset(url: url)
    let playerItem = AVPlayerItem(asset: asset)
    player = AVPlayer(playerItem: playerItem)
    let layer = AVPlayerLayer(player: player)
    layer.frame = viewPlaying.bounds
    viewPlaying.layer.addSublayer(layer)
    player?.playImmediately(atRate: 4.0)
}
It doesn't play on iOS 15.x; if I change the rate to <= 2.0, it plays normally. How can I fix this issue?
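I can't say what changed in iOS 15, but one thing worth checking, as a sketch rather than a confirmed fix: whether the item actually reports fast-forward support before asking for 4.0. Here statusObservation is an assumed stored NSKeyValueObservation property used to keep the observer alive, the asset is the one built above, and the 2.0 fallback is my own choice:

let item = AVPlayerItem(asset: asset)
player = AVPlayer(playerItem: item)

// Wait until the item is ready, then pick a rate it claims to support.
// canPlayFastForward covers rates above 1.0; HLS streams without the
// right variants may not handle high rates at all.
statusObservation = item.observe(\.status, options: [.initial, .new]) { [weak self] item, _ in
    guard item.status == .readyToPlay else { return }
    let rate: Float = item.canPlayFastForward ? 4.0 : 2.0
    self?.player?.playImmediately(atRate: rate)
}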
I have an AVPlayer which I set up this way:
fileprivate func setUpPlayer(_ videoURL: String, completion: @escaping () -> Void) {
    guard let url = URL(string: videoURL) else { return }
    self.player = AVPlayer(url: url)
    self.playerLayer = AVPlayerLayer(player: player)
    let audioSession = AVAudioSession.sharedInstance()
    playerLayer.frame = view.bounds
    playerLayer.videoGravity = .resizeAspectFill
    view.layer.addSublayer(playerLayer)
    player.volume = 1
    player.play()
    player.actionAtItemEnd = .none
    // Notify the caller once playback has been set up.
    completion()
}
I want to put a UIView containing text views and drawings over it, but I don't know how to do it.
The ideal would be something similar to UIGraphicsGetImageFromCurrentImageContext() for images.
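One sketch of the on-screen part, under the assumption that the overlay only needs to appear above the video (overlayView is a hypothetical name): a view's subviews are always drawn above its layer's sublayers, so adding the overlay with addSubview(_:) is enough.

// Subviews render above view.layer's sublayers, including the player layer,
// so anything added here sits on top of the video.
let overlayView = UIView(frame: view.bounds)
overlayView.backgroundColor = .clear
view.addSubview(overlayView)
// overlayView.addSubview(...) for the text views, drawing views, etc.

Capturing the composited result into an exported video file is a separate problem; AVVideoCompositionCoreAnimationTool is the usual AVFoundation route for rendering layers into the output frames, but that goes beyond this sketch.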
Please help me with AVPlayer. I need to resize the video when the iPhone rotates to landscape orientation. Here is my code:
var player: AVPlayer?

override func viewDidLoad() {
    super.viewDidLoad()
    // Load the video from the app bundle.
    let videoURL: NSURL = NSBundle.mainBundle().URLForResource("bg_animate", withExtension: "3gp")!
    player = AVPlayer(URL: videoURL)
    player?.actionAtItemEnd = .None
    player?.muted = true
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.zPosition = -1
    playerLayer.frame = view.frame
    view.layer.addSublayer(playerLayer)
    player?.play()
    // Loop the video when it reaches the end.
    NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(ViewController.loopVideo), name: AVPlayerItemDidPlayToEndTimeNotification, object: nil)
}
But this line:
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
doesn't work.
I also set the value to AVLayerVideoGravityResize and AVLayerVideoGravityResizeAspect, but got similar results...
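For what it's worth, videoGravity only controls how the video fills the layer's current frame; it doesn't resize the layer itself on rotation, because a plain CALayer doesn't follow the view's autoresizing. A minimal sketch of one common fix, assuming playerLayer is kept in a property instead of a local constant:

var playerLayer: AVPlayerLayer?

override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Re-apply the frame whenever the view lays out, including after rotation.
    playerLayer?.frame = view.bounds
}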
I'm trying to enable a slow playback rate for a video I'm playing using AVPlayerLayer. The documentation states that rates in the range 0.0 - 1.0 are enabled if the AVPlayerItem returns true for canPlaySlowForward. This property is read-only, so you need to subclass AVPlayerItem and override it to accomplish this. I've done that, but the video still does not play at a slow rate; it always plays at rate 1.0. The canPlaySlowForward property is never even called. Why is this?
import AVFoundation

class SlowMoPlayerItem: AVPlayerItem {
    override var canPlaySlowForward: Bool {
        return true
    }
}
Playing the video:
let asset = AVAsset(URL: NSBundle.mainBundle().URLForResource("some-video", withExtension: "mp4")!)
let playerItem = SlowMoPlayerItem(asset: asset)
let player = AVPlayer(playerItem: playerItem)
player.rate = 0.5
player.muted = true
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.layer.bounds
self.view.layer.addSublayer(playerLayer)
player.play()
The only other thing I do related to video/audio is preventing it from stopping background audio, via:
do {
    try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryAmbient)
} catch {
    print("Failed to set the audio session category: \(error)")
}
The video file does not include an audio track, but this was still necessary.
Set the rate after calling play(); there is no need to subclass AVPlayerItem. Calling play() is equivalent to setting the rate to 1.0, so a rate set beforehand gets overwritten.
func playVideo() {
    let asset = AVAsset(URL: NSBundle.mainBundle().URLForResource("SampleVideo", withExtension: "mp4")!)
    let playerItem = AVPlayerItem(asset: asset)
    let player = AVPlayer(playerItem: playerItem)
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = self.view.bounds
    self.view.layer.addSublayer(playerLayer)
    player.play()
    player.rate = 0.5
}
I added an AVQueuePlayer to a UITableViewCell and added an array of videos to it as follows:
if let urls = delegate.arrayVideofeedItemsList[indexPath.row] as? NSArray {
    let player = AVQueuePlayer()
    for currentVideoObject in urls {
        let avAsset = AVURLAsset(URL: NSURL(fileURLWithPath: "\(currentVideoObject)"))
        avAsset.loadValuesAsynchronouslyForKeys(["playable", "tracks", "duration"], completionHandler: {
            dispatch_async(dispatch_get_main_queue(), {
                // self.enqueue(avAsset)
                let item = AVPlayerItem(asset: avAsset)
                player.insertItem(item, afterItem: nil)
            })
        })
    }
    let playerLayer = AVPlayerLayer(player: player)
    // playerLayer.backgroundColor = UIColor.greenColor().CGColor
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.frame = CGRectMake(0, 0, SCREEN_WIDTH, 250)
    cell.contentView.layer.addSublayer(playerLayer)
    player.play()
    player.actionAtItemEnd = AVPlayerActionAtItemEnd.Advance
    player.volume = 0.0
    NSNotificationCenter.defaultCenter().addObserver(self, selector: "playerItemDidReachEnd:", name: AVPlayerItemDidPlayToEndTimeNotification, object: player.currentItem)
}

func playerItemDidReachEnd(notification: NSNotification) {
    let p: AVPlayerItem = notification.object as! AVPlayerItem
    p.seekToTime(kCMTimeZero)
}
I have three videos in the AVQueuePlayer. The three videos play sequentially, but after they all finish, the first video does not start playing again. What do I need to add to this? Please help me complete it.
Try adding this line after initializing the player:
player.actionAtItemEnd = AVPlayerActionAtItemEnd.None
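If the goal is to loop the entire queue rather than a single item, here is a hedged, self-contained sketch of another approach: keep a reference to the last item you enqueue and rebuild the queue when it finishes. The names queuePlayer and assets are hypothetical and not from the question; assets would hold the AVURLAssets built in the loop above.

import AVFoundation

// Build the queue from `assets`, and when the last item finishes, rebuild
// the queue and start again so all videos loop, not just the current one.
func loopWholeQueue(_ queuePlayer: AVQueuePlayer, assets: [AVURLAsset]) {
    queuePlayer.removeAllItems()
    var lastItem: AVPlayerItem?
    for asset in assets {
        let item = AVPlayerItem(asset: asset)
        queuePlayer.insert(item, after: nil)
        lastItem = item
    }

    var token: NSObjectProtocol?
    token = NotificationCenter.default.addObserver(
        forName: .AVPlayerItemDidPlayToEndTime,
        object: lastItem,
        queue: .main
    ) { _ in
        // Clean up this observer, then start over with fresh items.
        if let token = token { NotificationCenter.default.removeObserver(token) }
        loopWholeQueue(queuePlayer, assets: assets)
    }

    queuePlayer.play()
}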