How to detect SKVideoNode playback completion - sprite-kit

I am using an SKVideoNode to display an animated splash scene with this code:
SKVideoNode *video = [SKVideoNode videoNodeWithVideoFileNamed:@"splash_video.mp4"];
video.position = CGPointMake(CGRectGetMidX(self.frame),
                             CGRectGetMidY(self.frame));
[self addChild:video];
[video play];
I wish to display the next scene once video playback is done. How can I achieve that?

Add this to your viewDidLoad or another suitable place:
let videoURL = NSURL(string: url)
let player = AVPlayer(URL: videoURL!)
player.actionAtItemEnd = .None
let videoSpriteKitNode = SKVideoNode(AVPlayer: player)
// Fires once the current item has played to its end
NSNotificationCenter.defaultCenter().addObserver(self, selector: "playerItemDidReachEnd:", name: AVPlayerItemDidPlayToEndTimeNotification, object: player.currentItem)
And add this function:
func playerItemDidReachEnd(notification: NSNotification) {
    if let playerItem = notification.object as? AVPlayerItem {
        // Start your next video here
    }
}
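If what you actually want is to jump to the next SKScene when the video ends, you could present it from inside that callback and drop the observer at the same time. A minimal sketch in the same Swift 2 style, assuming the code lives in your SKScene subclass and NextScene is a placeholder for your own scene class:
func playerItemDidReachEnd(notification: NSNotification) {
    NSNotificationCenter.defaultCenter().removeObserver(self, name: AVPlayerItemDidPlayToEndTimeNotification, object: notification.object)
    // NextScene is a placeholder for your own SKScene subclass
    let nextScene = NextScene(size: self.size)
    self.view?.presentScene(nextScene, transition: SKTransition.fadeWithDuration(0.5))
}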

Related

overlay UIView over AVPlayer - Swift - Programmatically

I have an AVPlayer which I set up this way:
fileprivate func setUpPlayer(_ videoURL: String, completion: @escaping () -> Void) {
    let url = URL(string: videoURL)
    self.player = AVPlayer(url: url!)
    self.playerLayer = AVPlayerLayer(player: player)
    let audioSession = AVAudioSession.sharedInstance()
    playerLayer.frame = view.bounds
    playerLayer.videoGravity = .resizeAspectFill
    view.layer.addSublayer(playerLayer)
    player.volume = 1
    player.play()
    player.actionAtItemEnd = .none
}
I want to put a UIView that contains text views and drawings over it, but I don't know how to do it.
Ideally it would be something similar to UIGraphicsGetImageFromCurrentImageContext() with images.
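No answer is attached here, but since the AVPlayerLayer is only a sublayer of view.layer, one common approach is to add the overlay as an ordinary subview after setting up the player, so UIKit draws it above the video. A minimal sketch, with the overlay content just as a placeholder; call it right after setUpPlayer:
fileprivate func addOverlay() {
    // Any subview added after the AVPlayerLayer sublayer is rendered above the video
    let overlayView = UIView(frame: view.bounds)
    overlayView.backgroundColor = .clear

    let textView = UITextView(frame: CGRect(x: 20, y: 60, width: 280, height: 120))
    textView.backgroundColor = .clear
    textView.textColor = .white
    textView.text = "Caption over the video"   // placeholder content
    overlayView.addSubview(textView)

    view.addSubview(overlayView)
}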

How to add CIFilters to a video at runtime

I have recorded a video using AVFoundation, and after tapping the video it starts to play in a new view controller. The problem is that I am not able to add filter effects to the playing video. Here is the code I have so far.
import UIKit
import AVFoundation

class VideoPlayback: UIViewController {

    let avPlayer = AVPlayer()
    var avPlayerLayer: AVPlayerLayer!
    var videoURL: URL!

    @IBOutlet weak var videoView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        NotificationCenter.default.addObserver(self, selector: #selector(finishVideo), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)

        avPlayerLayer = AVPlayerLayer(player: avPlayer)
        avPlayerLayer.frame = view.bounds
        avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoView.layer.insertSublayer(avPlayerLayer, at: 0)
        view.layoutIfNeeded()

        let playerItem = AVPlayerItem(url: videoURL as URL)
        avPlayer.replaceCurrentItem(with: playerItem)
        avPlayer.play()
    }

    @objc func finishVideo() {
        print("Video Finished")
        self.performSegue(withIdentifier: "unwindToFifteenSeconds", sender: self)
    }

    // Remove observer
    deinit {
        NotificationCenter.default.removeObserver(self)
    }
}
The code above only plays a video that was captured by the camera using AVFoundation. I just need to add CIFilters to this playing video.
You can apply a filter to a video like this:
let filter = CIFilter(name: "CIGaussianBlur")!
let asset = AVAsset(url: streamURL)
let item = AVPlayerItem(asset: asset)
item.videoComposition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
    // Clamp to avoid blurring transparent pixels at the image edges
    let source = request.sourceImage.clampingToExtent()
    filter.setValue(source, forKey: kCIInputImageKey)

    // Vary filter parameters based on video timing
    let seconds = CMTimeGetSeconds(request.compositionTime)
    filter.setValue(seconds * 10.0, forKey: kCIInputRadiusKey)

    // Crop the blurred output to the bounds of the original image
    let output = filter.outputImage!.cropping(to: request.sourceImage.extent)

    // Provide the filter output to the composition
    request.finish(with: output, context: nil)
})
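In the question's VideoPlayback controller this would slot into viewDidLoad where the player item is created. A minimal sketch reusing the existing videoURL and avPlayer; the CISepiaTone filter is only an example, substitute whatever CIFilter you need:
let asset = AVAsset(url: videoURL)
let playerItem = AVPlayerItem(asset: asset)

// Assumption: a simple static filter just for illustration
let filter = CIFilter(name: "CISepiaTone")!
filter.setValue(0.8, forKey: kCIInputIntensityKey)

playerItem.videoComposition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
    filter.setValue(request.sourceImage, forKey: kCIInputImageKey)
    request.finish(with: filter.outputImage!, context: nil)
})

avPlayer.replaceCurrentItem(with: playerItem)
avPlayer.play()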

CloudKit won't play CKAsset Song Swift 3

I have a song stored in CloudKit as a CKAsset. I successfully retrieve the song and get no error, but it just won't play. Any help would be appreciated, thanks.
My AVPlayer is lazily instantiated within the class.
// Create song player to play the song, lazily instantiated
lazy var songPlayer: AVPlayer = {
    let player = AVPlayer()
    return player
}()
func playPause() {
    // Unwrap song - if there is no song, then return...you can't play something that is not there.
    guard let song = selectedArtist.song else {
        return
    }

    // Create player item
    let playerItem = AVPlayerItem(url: song.fileURL)

    // Add observer so the app knows when the song reaches its end
    NotificationCenter.default.addObserver(
        self,
        selector: #selector(itemDidFinishPlaying(notification:)),
        name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
        object: playerItem)

    // Assign playerItem to AVPlayer
    songPlayer = AVPlayer(playerItem: playerItem)

    if isSongPlaying {
        print("Song at \(song.fileURL) stopped.")
        isSongPlaying = false
        songPlayer.pause()
        songPlayPauseImageView.image = UIImage(named: "play.png")
    } else {
        print("Song at \(song.fileURL) started.")
        isSongPlaying = true
        songPlayer.play()
        songPlayPauseImageView.image = UIImage(named: "pause.png")
    }
}
When I start playing the song I get the message (as expected):
Song at file:///private/var/mobile/Containers/Data/Application/E34BE227-86DD-4AD7-A3C1-6A0610CE8C39/Library/Caches/CloudKit/6e22c352bc4bdeb0b75d42a0d681d67c69eeaad4/Assets/6BD84822-D039-4228-A5E9-59CB281BB964.0182fa97976e7d7b941bb8d8790ea75c6d3b72c302 started.
Unfortunately, the song doesn't play.
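There is no accepted fix attached here, but one thing worth trying (a guess based on that logged path): a CKAsset's fileURL has no file extension, and AVPlayer can fail silently when it cannot work out the audio format. A common workaround is to copy the asset to a temporary file with an explicit extension and play that URL instead of song.fileURL. A minimal sketch, assuming the song was uploaded as AAC (.m4a); the helper name is made up:
func playableURL(from asset: CKAsset) -> URL? {
    // Assumption: the song is AAC audio; change the extension to match your file type
    let destination = FileManager.default.temporaryDirectory
        .appendingPathComponent(UUID().uuidString)
        .appendingPathExtension("m4a")
    do {
        try FileManager.default.copyItem(at: asset.fileURL, to: destination)
        return destination
    } catch {
        print("Could not copy CKAsset: \(error)")
        return nil
    }
}
It may also be worth confirming the AVAudioSession category allows playback before starting the player.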

AVPlayer videoGravity AVLayerVideoGravityResizeAspectFill doesn't work Swift2.2

Please help me with AVPlayer. I need to resize the video when the iPhone rotates (landscape orientation). Here is my code:
var player: AVPlayer?

override func viewDidLoad() {
    super.viewDidLoad()

    // Load the video from the app bundle.
    let videoURL: NSURL = NSBundle.mainBundle().URLForResource("bg_animate", withExtension: "3gp")!
    player = AVPlayer(URL: videoURL)
    player?.actionAtItemEnd = .None
    player?.muted = true

    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.zPosition = -1
    playerLayer.frame = view.frame
    view.layer.addSublayer(playerLayer)
    player?.play()

    // Loop video
    NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(ViewController.loopVideo), name: AVPlayerItemDidPlayToEndTimeNotification, object: nil)
}
But the line:
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
doesn't work.
I also tried setting the value to AVLayerVideoGravityResize and AVLayerVideoGravityResizeAspect, but got similar results...
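No answer is attached here, but a likely cause is that videoGravity only controls how the video fills the layer, while the layer's frame is set once in viewDidLoad and never follows the rotated view (CALayers do not autoresize with their superview). Keeping the layer in a property and updating its frame on every layout pass is one fix; a minimal sketch in the question's Swift 2.2 style, assuming the layer created in viewDidLoad is stored in this property:
var playerLayer: AVPlayerLayer?

override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Keep the layer glued to the view's current bounds after every rotation / layout pass
    playerLayer?.frame = view.bounds
}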

NSNotificationCenter to AVQueuePlayer in swift

I added an AVQueuePlayer to a UITableViewCell. I add an array of videos to the AVQueuePlayer as follows:
if let urls = delegate.arrayVideofeedItemsList[indexPath.row] as? NSArray {
    let player = AVQueuePlayer()
    for currentVideoObject in urls {
        let avAsset = AVURLAsset(URL: NSURL(fileURLWithPath: "\(currentVideoObject)"))
        avAsset.loadValuesAsynchronouslyForKeys(["playable", "tracks", "duration"], completionHandler: {
            dispatch_async(dispatch_get_main_queue(), {
                // self.enqueue(avAsset)
                let item = AVPlayerItem(asset: avAsset)
                player.insertItem(item, afterItem: nil)
            })
        })
    }

    let playerLayer = AVPlayerLayer(player: player)
    // playerLayer.backgroundColor = UIColor.greenColor().CGColor
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.frame = CGRectMake(0, 0, SCREEN_WIDTH, 250)
    cell.contentView.layer.addSublayer(playerLayer)

    player.play()
    player.actionAtItemEnd = AVPlayerActionAtItemEnd.Advance
    player.volume = 0.0

    NSNotificationCenter.defaultCenter().addObserver(self, selector: "playerItemDidReachEnd:", name: AVPlayerItemDidPlayToEndTimeNotification, object: player.currentItem)
}

func playerItemDidReachEnd(notification: NSNotification) {
    let p: AVPlayerItem = notification.object as! AVPlayerItem
    p.seekToTime(kCMTimeZero)
}
I have three videos in the AVQueuePlayer. The three videos play sequentially, but after they complete, the first video does not play again. What do I need to add? Please help me complete it.
Try adding this line after initializing the player:
player.actionAtItemEnd = AVPlayerActionAtItemEnd.None
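If the goal is to loop the whole queue, another option (my own suggestion, not part of the answer above) is to keep the .Advance behaviour, register the observer with object: nil so the selector fires for every item (in the posted code, player.currentItem is still nil when the observer is added because the items are inserted asynchronously), and re-enqueue each finished item at the back of the queue after rewinding it. A minimal sketch in the same Swift 2 style, assuming the queue player is stored in a property (queuePlayer is a placeholder name):
// Assumption: the AVQueuePlayer is kept in a property, e.g. var queuePlayer: AVQueuePlayer!
func playerItemDidReachEnd(notification: NSNotification) {
    guard let item = notification.object as? AVPlayerItem else { return }
    // Rewind the finished item and push it to the back of the queue so the videos cycle
    item.seekToTime(kCMTimeZero)
    queuePlayer.removeItem(item)
    if queuePlayer.canInsertItem(item, afterItem: nil) {
        queuePlayer.insertItem(item, afterItem: nil)
    }
}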