How can I replay a video automatically? For example, when the video finishes, I want it to start playing again automatically.
this is my code:
// Build the playback URL from the model; both the URL string and the
// resulting URL are force-unwrapped, so a missing/malformed URL crashes.
videoURL = URL(string: video.url!)!
self.player = AVPlayer(url: videoURL!)
// Host the player in a layer that fills (and may crop) the container view.
let videoLayer = AVPlayerLayer(player: self.player)
videoLayer.frame = view.bounds
videoLayer.videoGravity = .resizeAspectFill
self.videoSnap.layer.addSublayer(videoLayer)
// A second asset is created here solely to log the duration in seconds.
let asset = AVURLAsset.init(url: videoURL!)
let duration = asset.duration.seconds
print(duration)
self.player.play()
You just have to add an observer. Thanks to it, when the video is about to finish, you will be able to replay the video again and again.
To do that, I recommend you this answer.
Related
I'm trying to fit a video inside the view I've created on storyboard.
Here is how I added the view to my storyboard.
Here is how it is shown in the simulator. I want to fit the video inside the view I've created.
/// Embeds an AVPlayerViewController that plays `self.videoUrl` inside this
/// view controller.
/// - Note: Follows the UIKit containment contract: `addChild(_:)` first,
///   then insert the child's view, then `didMove(toParent:)`. The original
///   added the view before registering the child and never completed the
///   transition, which breaks appearance callbacks and rotation forwarding.
func createVideoView() {
    // NOTE(review): `videoUrl` is force-unwrapped; a nil value crashes here.
    if let url = URL(string: self.videoUrl!) {
        let player = AVPlayer(url: url)
        let avController = AVPlayerViewController()
        avController.player = player
        // Register the child controller BEFORE inserting its view.
        self.addChild(avController)
        // your desired frame
        avController.view.frame = self.videoView.frame
        self.view.addSubview(avController.view)
        // Complete the containment transition.
        avController.didMove(toParent: self)
    }
}
Also here is how I create the video. Notice that videoView variable is the view I want to show the video in.
AVPlayerViewController has a videoGravity attribute that controls how the video fills the view, and resizeAspectFill is the one you need to make the video fill the screen. Note that filling may crop the picture, so it will not display every video correctly.
/// Embeds an AVPlayerViewController playing a remote trailer, filling the
/// target frame (cropping as needed via `.resizeAspectFill`).
/// - Note: Uses the correct UIKit containment order — `addChild(_:)`,
///   insert view, `didMove(toParent:)` — which the original skipped.
func createVideoView() {
    if let url = URL(string: "https://jplayer.org/video/m4v/Big_Buck_Bunny_Trailer.m4v") {
        let player = AVPlayer(url: url)
        let avController = AVPlayerViewController()
        avController.player = player
        // Register the child controller BEFORE inserting its view.
        self.addChild(avController)
        // your desired frame
        avController.view.frame = self.videoView.frame
        avController.videoGravity = .resizeAspectFill // here
        self.view.addSubview(avController.view)
        // Complete the containment transition.
        avController.didMove(toParent: self)
    }
}
Besides, you should add the player controller's view as a subview of your videoView, which is easier to manage than adding it directly to your view controller's root view.
/// Embeds an AVPlayerViewController inside `videoView` (using `bounds`, so
/// the player aligns with the container's own coordinate space).
/// - Note: Adds the missing `didMove(toParent:)` call required by the
///   UIKit view-controller containment contract, and registers the child
///   before inserting its view.
func createVideoView() {
    if let url = URL(string: "https://jplayer.org/video/m4v/Big_Buck_Bunny_Trailer.m4v") {
        let player = AVPlayer(url: url)
        let avController = AVPlayerViewController()
        avController.player = player
        // Register the child controller BEFORE inserting its view.
        self.addChild(avController)
        // your desired frame
        avController.view.frame = self.videoView.bounds // change to bounds
        avController.videoGravity = .resizeAspectFill // here
        self.videoView.addSubview(avController.view) // add subview to videoView
        // Complete the containment transition.
        avController.didMove(toParent: self)
    }
}
I am just trying to play a network stream in AVPlayer. I can't share the link, but here is some information about it:
And the code :
// Wrap the stream URL in an asset/item pair so AVFoundation can inspect it.
let asset = AVAsset(url: inUrl)
let playerItem = AVPlayerItem(asset: asset)
let playern = AVPlayer(playerItem: playerItem)
// isReadable coming back false means AVFoundation cannot parse this
// container at all — playback below will silently do nothing.
print("playable = \(asset.isReadable)") // => false
playern.allowsExternalPlayback = true
playern.play()
Is there something that I'm missing ?
The step I was missing was to fragment my MPEG-TS stream into HLS, because AVPlayer can't read the raw stream directly.
I finally chose to use MobileVLCKit (https://code.videolan.org/videolan/VLCKit) which provide VLCMediaPlayer natively play my network stream.
My problem is that when I'm displaying video it's bigger than my screen size. Maybe the problem is not content mode but wrong frame bounds or something.
// Load the bundled clip; the force-unwrap below crashes if "videkas.mp4"
// is missing from the app bundle.
let theURL = Bundle.main.url(forResource:"videkas", withExtension: "mp4")
avPlayer = AVPlayer(url: theURL!)
avPlayerLayer = AVPlayerLayer(player: avPlayer)
// Fill the host view's bounds, cropping the video if aspect ratios differ —
// this is why the picture can look "bigger than the screen".
avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
avPlayer.volume = 0
// Hold the last frame when the item ends instead of tearing down, so the
// end-of-play observer below can restart playback seamlessly.
avPlayer.actionAtItemEnd = .none
// NOTE(review): the frame is captured once here; if featureView13 is laid
// out or resized later, the layer's frame goes stale — confirm this runs
// after Auto Layout has sized the view.
avPlayerLayer.frame = (featureView13?.bounds)!
featureView13?.backgroundColor = .clear
featureView13?.layer.insertSublayer(avPlayerLayer, at: 0)
// Observe end-of-item so playerItemDidReachEnd(notification:) can loop.
NotificationCenter.default.addObserver(self,
selector: #selector(playerItemDidReachEnd(notification:)),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: avPlayer.currentItem)
I also tried avPlayerLayer.frame = (featureView13?.layer.bounds)! but no luck.
This may work for you
https://gist.github.com/jkosoy/c835fea2c03e76720c77
Call this method when your video started buffering
// Recompute the player layer's height on the main queue so the video
// aspect-fits inside the view once buffering has started.
DispatchQueue.main.async {
    let fitted = CGSize.aspectFit(
        aspectRatio: self.avPlayerLayer.frame.size,
        boundingSize: self.view.frame.size
    )
    // Drive the height constraint from the fitted size, then force layout.
    self.avPlayerLayerHeightContraint.constant = fitted.height
    self.avPlayerLayer.layoutIfNeeded()
}
Hope you get
the desired result.
Change resizeAspectFill to resizeAspect. There is a big difference!
use AVLayerVideoGravity.resizeAspectFit to fit the video in the screen:
avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFit
Try using,
avPlayerLayer.frame = (featureView13?.bounds)!
avPlayerLayer.videoGravity = .resizeAspect
avPlayerLayer.contentsGravity = .resizeAspect
I have an AVPlayer, and am using it as a background in my UIView as an aesthetic. To initialize and instantiate it, I do
/// Installs a muted background video layer behind the view's content.
///
/// - Note: `viewDidAppear` can fire many times over a controller's life
///   (tab switches, modal dismissals, navigation pops). The original
///   created a new AVPlayer and stacked a fresh AVPlayerLayer on every
///   appearance; the name guard below makes the installation idempotent.
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    let layerName = "backgroundVideoLayer"
    // Already installed on a previous appearance — nothing to do.
    if self.view.layer.sublayers?.contains(where: { $0.name == layerName }) == true {
        return
    }
    guard let path = Bundle.main.path(forResource: "Background", ofType:"mp4") else {
        debugPrint("Background.mp4 not found")
        return
    }
    let player = AVPlayer(url: URL(fileURLWithPath: path))
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.name = layerName
    playerLayer.frame = self.view.bounds
    // Hold the last frame at the end of the item instead of tearing down.
    player.actionAtItemEnd = .none
    player.volume = 0.0
    // TODO: change content mode — set playerLayer.videoGravity
    // (e.g. .resizeAspectFill) to control how the video fills the bounds.
    self.view.layer.addSublayer(playerLayer)
    player.play()
}
When I run this, the video plays just as expected, however the AVPlayer sets the content mode to fitToWidth as a default. I searched through all the methods of player and playerLayer for some type of method on changing content type, but to no avail.
What I want is instead of fitToWidth I would like fitToHeight.
Any thoughts? Thanks for looking!
I can think of a really easy way to do this. When your video content is fetched, use the videoRect property of AVPlayerLayer to get the frame of the video image. Then resize your view so that the height goes to the extent you want and the width is scaled in proportion to the video's aspect ratio.
At the moment, this is how I'm playing a video on the subview of my UIViewController:
// NOTE(review): Swift 1.x-era code; MPMoviePlayerController was deprecated
// in iOS 9 in favor of AVPlayerViewController.
override func viewDidAppear(animated: Bool) {
// Force-unwrap crashes if "musicvideo.mp4" is not in the app bundle.
let filePath = NSBundle.mainBundle().pathForResource("musicvideo", ofType: "mp4")
self.moviePlayerController.contentURL = NSURL.fileURLWithPath(filePath)
self.moviePlayerController.play()
// Loop the clip indefinitely.
self.moviePlayerController.repeatMode = .One
self.moviePlayerController.view.frame = self.view.bounds
// Fill the view, cropping the video if the aspect ratios differ.
self.moviePlayerController.scalingMode = .AspectFill
// Hide the transport controls entirely.
self.moviePlayerController.controlStyle = .None
self.moviePlayerController.allowsAirPlay = false
self.view.addSubview(self.moviePlayerController.view)
}
I've read about ways to disable the audio by doing the following (none of which work at all). Keep in mind I'm trying to disable it so that it does not interrupt the music currently playing via the Music app, Spotify, etc.
// Playing media items with the applicationMusicPlayer will restore the user's Music state after the application quits.
// The current volume of playing music, in the range of 0.0 to 1.0.
// This property is deprecated -- use MPVolumeView for volume control instead.
1) MPMusicPlayerController.applicationMusicPlayer().volume = 0
2) MPVolumeView doesn't even have a setting for setting the actual volume? It's a control.
3) self.moviePlayerController.useApplicationAudioSession = false
So I found this answer.
This is my Swift code that I ended up going with. I then used an AVPlayerLayer to add to the view as a sublayer, which works perfectly.
Thanks to the OP who managed to get a hold of an Apple technician and provided the original Objective-C code.
The only problems I'm facing now is that it:
1) Interrupts current music playback, whether it's from Music, Spotify, etc.
2) Video stops playing if I close the app and open it up again.
// NOTE(review): Swift 1.x-era code (NSBundle, kCMTimeZero, .None cases).
// Plays a bundled video full-screen with ALL of its audio tracks muted via
// an AVAudioMix, rather than touching the device/output volume.
override func viewDidAppear(animated: Bool) {
// Force-unwrap of filePath (inside fileURLWithPath) crashes if the
// resource is missing from the bundle.
let filePath = NSBundle.mainBundle().pathForResource("musicvideo", ofType: "mp4")
var asset: AVURLAsset?
asset = AVURLAsset.URLAssetWithURL(NSURL.fileURLWithPath(filePath), options: nil)
var audioTracks = NSArray()
audioTracks = asset!.tracksWithMediaType(AVMediaTypeAudio)
// Mute all the audio tracks
let allAudioParams = NSMutableArray()
for track: AnyObject in audioTracks {
// AVAssetTrack
let audioInputParams = AVMutableAudioMixInputParameters()
// Zero the volume from the very start of the timeline for this track.
audioInputParams.setVolume(0.0, atTime: kCMTimeZero)
audioInputParams.trackID = track.trackID
allAudioParams.addObject(audioInputParams)
}
let audioZeroMix = AVMutableAudioMix()
audioZeroMix.inputParameters = allAudioParams
// Create a player item
let playerItem = AVPlayerItem(asset: asset)
playerItem.audioMix = audioZeroMix
// Create a new Player, and set the player to use the player item
// with the muted audio mix
let player = AVPlayer.playerWithPlayerItem(playerItem) as AVPlayer
player.play()
let layer = AVPlayerLayer(player: player)
// Hold the last frame when playback finishes instead of tearing down.
player.actionAtItemEnd = .None
layer.frame = self.view.bounds
layer.videoGravity = AVLayerVideoGravityResizeAspectFill
self.view.layer.addSublayer(layer)
// NOTE(review): the audio mix silences only THIS player; avoiding
// interruption of Music/Spotify additionally requires the app's
// AVAudioSession category to be ambient — confirm that is set elsewhere.
}