Pause video when reference image is not tracked - ARKit

I made this script with RealityKit & SwiftUI: when a reference image is detected, it overlays a video with the same filename on top of it.
class Coordinator: NSObject, ARSessionDelegate {
    var parent: ARViewContainer
    var videoPlayer = AVPlayer()

    init(parent: ARViewContainer) {
        self.parent = parent
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let validAnchor = anchors[0] as? ARImageAnchor else { return }
        let anchor = AnchorEntity(anchor: validAnchor)
        anchor.addChild(createdVideoPlayerNodeFor(validAnchor.referenceImage))
        parent.arView.scene.addAnchor(anchor)
    }

    func createdVideoPlayerNodeFor(_ target: ARReferenceImage) -> ModelEntity {
        var videoPlane = ModelEntity()
        //var videoPlayer = AVPlayer()
        if let targetName = target.name,
           let validURL = Bundle.main.url(forResource: targetName, withExtension: "mp4") {
            videoPlayer = AVPlayer(url: validURL)
            videoPlayer.play()
        }
        let videoMaterial = VideoMaterial(avPlayer: videoPlayer)
        videoPlane = ModelEntity(mesh: .generatePlane(width: Float(target.physicalSize.width),
                                                      depth: Float(target.physicalSize.height)),
                                 materials: [videoMaterial])
        print(target.name as Any)
        return videoPlane
    }
}
It works as intended, but I would like to pause the video when the reference image is no longer tracked (without pausing other videos that might be in the view at the same time). I experimented with the following, but it either pauses all videos or does nothing at all.
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    guard let validAnchor = anchors[0] as? ARImageAnchor else { return }
    if validAnchor.isTracked {
        videoPlayer.play()
        print("Played")
    } else {
        videoPlayer.pause()
        print("Paused")
    }
}
How do I target only the video whose reference image stops being tracked, please?
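One way to do this (an editor's sketch, untested against the asker's project; the videoPlayers dictionary and the makeVideoPlane(for:) helper are my additions, not part of the original code) is to keep one AVPlayer per image anchor, keyed by the anchor's identifier, and look that player up in session(_:didUpdate:):

import ARKit
import RealityKit
import AVFoundation

class Coordinator: NSObject, ARSessionDelegate {
    var parent: ARViewContainer
    // One player per image anchor, keyed by the anchor's identifier,
    // so each reference image controls only its own video.
    var videoPlayers: [UUID: AVPlayer] = [:]

    init(parent: ARViewContainer) {
        self.parent = parent
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        for case let imageAnchor as ARImageAnchor in anchors {
            let anchor = AnchorEntity(anchor: imageAnchor)
            anchor.addChild(makeVideoPlane(for: imageAnchor))
            parent.arView.scene.addAnchor(anchor)
        }
    }

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for case let imageAnchor as ARImageAnchor in anchors {
            // Only touch the player that belongs to this specific anchor.
            guard let player = videoPlayers[imageAnchor.identifier] else { continue }
            if imageAnchor.isTracked {
                if player.rate == 0 { player.play() }   // avoid restarting every frame
            } else {
                player.pause()
            }
        }
    }

    func makeVideoPlane(for imageAnchor: ARImageAnchor) -> ModelEntity {
        let target = imageAnchor.referenceImage
        let player = AVPlayer()
        if let name = target.name,
           let url = Bundle.main.url(forResource: name, withExtension: "mp4") {
            player.replaceCurrentItem(with: AVPlayerItem(url: url))
            player.play()
        }
        videoPlayers[imageAnchor.identifier] = player
        let material = VideoMaterial(avPlayer: player)
        return ModelEntity(mesh: .generatePlane(width: Float(target.physicalSize.width),
                                                depth: Float(target.physicalSize.height)),
                           materials: [material])
    }
}

Note that isTracked only changes when the session actually tracks images, e.g. with ARImageTrackingConfiguration, or ARWorldTrackingConfiguration with maximumNumberOfTrackedImages set above zero.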

Related

How to make a video loop in Xcode Swift 5?

I am trying to get a video background in my app and I have written the following code. It runs once and then stops. How do I make the video repeat forever? Thanks in advance.
@IBOutlet weak var videoLayer: UIView!

override func viewDidLoad() {
    super.viewDidLoad()
    playBackgroundVideo()
}

func playBackgroundVideo() {
    guard let path = Bundle.main.path(forResource: "City", ofType: "MOV") else {
        return
    }
    let player = AVPlayer(url: URL(fileURLWithPath: path))
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = self.view.bounds
    playerLayer.videoGravity = .resizeAspectFill
    self.videoLayer.layer.addSublayer(playerLayer)
    player.play()
}

@objc func playerItemDidReachEnd(notification: Notification) {
    let p: AVPlayerItem = notification.object as! AVPlayerItem
    p.seek(to: .zero)
}
}
Try adding a notification observer to trigger the playerItemDidReachEnd code.
You do this by calling NotificationCenter.default.addObserver in your setup.
I did not test your code, but you want a setup like this example:
@IBOutlet weak var videoLayer: UIView!
var player: AVPlayer!

override func viewDidLoad() {
    super.viewDidLoad()
    playBackgroundVideo()
}

func playBackgroundVideo() {
    guard let path = Bundle.main.path(forResource: "City", ofType: "MOV") else { return }
    player = AVPlayer(url: URL(fileURLWithPath: path))
    let playerLayer = AVPlayerLayer(player: player)
    // add a listener
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(playerItemDidReachEnd),
                                           name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                                           object: nil)
    playerLayer.frame = self.view.bounds
    playerLayer.videoGravity = .resizeAspectFill
    self.videoLayer.layer.addSublayer(playerLayer)
    player.play()
}

@objc func playerItemDidReachEnd(notification: Notification) {
    player.seek(to: CMTime.zero)
    player.play()
}
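As an alternative (an editor's note, not part of the original answer, and assuming the same videoLayer outlet): AVQueuePlayer plus AVPlayerLooper handles seamless looping without a notification observer. A sketch:

import UIKit
import AVFoundation

var looper: AVPlayerLooper?   // keep a strong reference, otherwise looping stops

func playBackgroundVideo() {
    guard let path = Bundle.main.path(forResource: "City", ofType: "MOV") else { return }
    let item = AVPlayerItem(url: URL(fileURLWithPath: path))
    let queuePlayer = AVQueuePlayer()
    // AVPlayerLooper re-enqueues copies of the template item as each one finishes.
    looper = AVPlayerLooper(player: queuePlayer, templateItem: item)
    let playerLayer = AVPlayerLayer(player: queuePlayer)
    playerLayer.frame = view.bounds
    playerLayer.videoGravity = .resizeAspectFill
    videoLayer.layer.addSublayer(playerLayer)
    queuePlayer.play()
}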

Define the video URL as the UIView in your class

My Swift code should take a snapshot of a video and then display that image in a UIImageView. Instead of using an online link, I want the URL to be the UIView in my class, so the video source should be previewView, not the https link I have below. All the code below is in this class.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {
    @IBOutlet var previewView: UIView!
    @IBOutlet var captureImageView: UIImageView!
    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var videoPreviewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Setup your camera here...
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = .medium
        guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video) else {
            print("Unable to access back camera!")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            //Step 9
            stillImageOutput = AVCapturePhotoOutput()
            if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(input)
                captureSession.addOutput(stillImageOutput)
                setupLivePreview()
            }
        } catch let error {
            print("Error: unable to initialize back camera: \(error.localizedDescription)")
        }
    }

    func setupLivePreview() {
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspect
        videoPreviewLayer.connection?.videoOrientation = .portrait
        previewView.layer.addSublayer(videoPreviewLayer)
        //Step 12
        DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
            self.captureSession.startRunning()
            //Step 13
            DispatchQueue.main.async {
                self.videoPreviewLayer.frame = self.previewView.bounds
            }
        }
    }

    @IBAction func startRecord(_ sender: Any) {
    }

    @IBAction func Save(_ sender: Any) {
        //what do I put in the 2 highlighted blocks
        let videoURL = "https://www.youtube.com/watch?v=Txt25dw-lIk"
        self.getThumbnailFromUrl(videoURL) { [weak self] (img) in
            guard let self = self else { return }
            if let img = img {
                self.captureImageView.image = img
            }
        }
    }

    func getThumbnailFromUrl(_ url: String?, _ completion: @escaping ((_ image: UIImage?) -> Void)) {
        guard let url = URL(string: url ?? "") else { return }
        DispatchQueue.main.async {
            let asset = AVAsset(url: url)
            let assetImgGenerate = AVAssetImageGenerator(asset: asset)
            assetImgGenerate.appliesPreferredTrackTransform = true
            let time = CMTimeMake(value: 2, timescale: 1)
            do {
                let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
                let thumbnail = UIImage(cgImage: img)
                completion(thumbnail)
            } catch {
                print("Error :: ", error.localizedDescription)
                completion(nil)
            }
        }
    }

    @IBAction func didTakePhoto(_ sender: Any) {
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        stillImageOutput.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation() else { return }
        let image = UIImage(data: imageData)
        captureImageView.image = image
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.captureSession.stopRunning()
    }
}
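An editor's note on the question itself (a sketch, not part of the original thread): a live camera preview has no URL, so AVAssetImageGenerator cannot be pointed at previewView. Since an AVCapturePhotoOutput is already configured, the usual approach is to capture a frame from the running session instead:

@IBAction func Save(_ sender: Any) {
    // The live preview has no URL; grab a frame from the running
    // capture session via the already-configured photo output instead.
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    stillImageOutput.capturePhoto(with: settings, delegate: self)
    // photoOutput(_:didFinishProcessingPhoto:error:) then puts the
    // image into captureImageView, as in didTakePhoto above.
}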

MPRemoteCommandCenter doesn't appear on iOS 13

I have a video player that shows an .m3u8 stream. I've tried to implement the remote controls, but the widget doesn't appear on the lock screen, in the Notification Center, or in the Command Center.
I only need play/pause, volume controls, and some static info such as Title, Artist, and Artwork.
This is my code:
AppDelegate.swift
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
    // Override point for customization after application launch.
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print(error.localizedDescription)
    }
    return true
}
ViewController.swift
import UIKit
import AVKit
import MediaPlayer

class ViewController: UIViewController {
    var player = AVPlayer()
    var playerViewcontroller = AVPlayerViewController()
    var playerItem: AVPlayerItem!
    var playerLayer = AVPlayerLayer()

    @IBAction func playVideo(_ sender: Any) {
        guard let url = URL(string: "https://video.m3u8") else {
            return
        }
        // Create an AVPlayer, passing it the HTTP Live Streaming URL.
        playerItem = AVPlayerItem(url: url)
        playerItem.preferredForwardBufferDuration = 8
        player = AVPlayer(playerItem: playerItem)
        // Create a new AVPlayerViewController and pass it a reference to the player.
        playerViewcontroller.player = player
        playerViewcontroller.showsPlaybackControls = true
        // Modally present the player and call the player's play() method when complete.
        present(playerViewcontroller, animated: true) {
            self.player.play()
        }
        setupNowPlaying()
    }

    override func viewDidLoad() {
        //super.viewDidLoad()
    }

    public func disconnectAVPlayer() {
        playerViewcontroller.player = nil
    }

    public func reconnectAVPlayer() {
        playerViewcontroller.player = player
    }

    func setupNowPlaying() {
        print("_________________________________setupPlaying")
        // Define Now Playing Info
        let nowPlayingInfoCenter = MPNowPlayingInfoCenter.default()
        var nowPlayingInfo = nowPlayingInfoCenter.nowPlayingInfo ?? [String: Any]()
        let title = "TV NAME"
        let album = "TV DESCRIPTION"
        let image = UIImage(named: "ICON") ?? UIImage()
        let artwork = MPMediaItemArtwork(boundsSize: image.size, requestHandler: { (_) -> UIImage in
            return image
        })
        nowPlayingInfo[MPMediaItemPropertyTitle] = title
        nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = album
        nowPlayingInfo[MPMediaItemPropertyArtwork] = artwork
        nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = NSNumber(value: 1.0)
        nowPlayingInfoCenter.nowPlayingInfo = nowPlayingInfo
    }
}
I hope this info is sufficient. Thanks.
Remote control works fine in iOS 13; you just didn't set it up first. Add this code after assigning your AVPlayer:
func commandCenterSetup() {
    UIApplication.shared.beginReceivingRemoteControlEvents()
    let commandCenter = MPRemoteCommandCenter.shared()
    setupNowPlaying()
    commandCenter.pauseCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
        print("PAUSE")
        self.playerViewcontroller.player?.pause()
        return .success
    }
    commandCenter.playCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
        print("PLAY")
        self.playerViewcontroller.player?.play()
        return .success
    }
}
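A usage sketch (an editor's illustration of "after assigning your AVPlayer"; the exact call site is an assumption):

@IBAction func playVideo(_ sender: Any) {
    // ... create playerItem and player as before ...
    player = AVPlayer(playerItem: playerItem)
    playerViewcontroller.player = player
    present(playerViewcontroller, animated: true) {
        self.player.play()
    }
    commandCenterSetup()   // registers the play/pause handlers and now-playing info
}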

RealityKit – How to add a Video Material to a ModelEntity?

I use this code to add a picture texture in RealityKit, and it works fine.
var material = SimpleMaterial()
material.baseColor = try! .texture(.load(named: "image.jpg"))
I tried to use this code to add a video file as a texture, but it crashes!
guard let url = Bundle.main.url(forResource: "data", withExtension: "mp4") else {
return
}
material.baseColor = try! .texture(.load(contentsOf: url))
How can I add a video file?
RealityKit video material
You can use a video material in RealityKit 2.0 and higher; RealityKit 1.0 doesn't support video materials. Here's code showing how to apply one:
Approach A
import SwiftUI
import RealityKit
import AVFoundation

struct ARViewContainer : UIViewRepresentable {
    let arView = ARView(frame: .zero)
    let anchor = AnchorEntity()

    func makeUIView(context: Context) -> ARView {
        anchor.position.z += 1.0
        self.loadVideoMaterial()
        return arView
    }

    func loadVideoMaterial() {
        guard let pathToVideo = Bundle.main.path(forResource: "video", ofType: "mp4")
        else { return }
        let videoURL = URL(fileURLWithPath: pathToVideo)
        let avPlayer = AVPlayer(url: videoURL)
        // 16:9 video
        let mesh = MeshResource.generatePlane(width: 1.92, height: 1.08)
        let material = VideoMaterial(avPlayer: avPlayer)
        let planeModel = ModelEntity(mesh: mesh, materials: [material])
        anchor.addChild(planeModel)
        arView.scene.anchors.append(anchor)
        avPlayer.volume = 0.05
        avPlayer.play()
    }

    func updateUIView(_ view: ARView, context: Context) { }
}

struct ContentView : View {
    var body: some View {
        ARViewContainer().ignoresSafeArea()
    }
}
Also, you can add RealityKit's VideoMaterial this way:
Approach B
// AVPlayer and AVPlayerItem
let url = Bundle.main.url(forResource: "video", withExtension: "mp4")
let asset = AVAsset(url: url!)
let playerItem = AVPlayerItem(asset: asset)
let avPlayer = AVPlayer()

// Entity
let mesh = MeshResource.generateSphere(radius: 1)
let material = VideoMaterial(avPlayer: avPlayer)
let entity = ModelEntity(mesh: mesh, materials: [material])

// Anchor
let anchor = AnchorEntity(world: [0, 0, -10])
anchor.addChild(entity)
arView.scene.anchors.append(anchor)

// Playback
avPlayer.replaceCurrentItem(with: playerItem)
avPlayer.play()
SceneKit video material
import SwiftUI
import SceneKit
import AVFoundation

struct VRViewContainer : UIViewRepresentable {
    let sceneView = SCNView(frame: .zero)

    func makeUIView(context: Context) -> SCNView {
        sceneView.scene = SCNScene()
        sceneView.backgroundColor = .black
        sceneView.pointOfView?.position.z += 0.5
        sceneView.isPlaying = true
        self.loadVideoMaterial()
        return sceneView
    }

    func loadVideoMaterial() {
        guard let pathToVideo = Bundle.main.path(forResource: "video", ofType: "mp4")
        else { return }
        let videoURL = URL(fileURLWithPath: pathToVideo)
        let avPlayer = AVPlayer(url: videoURL)
        // 16:9 video
        let material = SCNMaterial()
        material.diffuse.contents = avPlayer
        let mesh = SCNPlane(width: 1.92, height: 1.08)
        mesh.materials[0] = material
        let planeModel = SCNNode(geometry: mesh)
        sceneView.scene?.rootNode.addChildNode(planeModel)
        avPlayer.volume = 0.05
        avPlayer.play()
    }

    func updateUIView(_ view: SCNView, context: Context) { }
}

struct ContentView : View {
    var body: some View {
        VRViewContainer().ignoresSafeArea()
    }
}
I found a workaround, shown in the code below:
self.arView = arView
let scene = SCNScene()
scnView = SCNView(frame: arView.frame)
scnView?.scene = scene
scnView?.backgroundColor = UIColor.clear
scnView?.scene?.background.contents = UIColor.clear
Then add an SCNCamera node and set its transform from the ARFrame, like this:
let rotation = SCNMatrix4MakeRotation(.pi / 2.0, 0, 0, 1)
let cameraTransform = simd_mul(frame.camera.transform, simd_float4x4(rotation))
let projectionMatrix = SCNMatrix4(frame.camera.projectionMatrix(for: .portrait,
                                                                viewportSize: self.viewBounds!.size,
                                                                zNear: 0.001,
                                                                zFar: 10000))
self.arCameraNode?.simdTransform = cameraTransform
self.arCameraNode?.camera?.projectionTransform = projectionMatrix
Finally, add your SCN video node to the rootNode.
However, there is a little shifting, so you may prefer to wait for RealityKit to support video materials.
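For completeness (an editor's sketch following the SceneKit example above; videoURL and the plane size are placeholders): the video node can be an SCNPlane whose material uses an AVPlayer as its diffuse contents:

let avPlayer = AVPlayer(url: videoURL)                  // videoURL as in the examples above
let plane = SCNPlane(width: 1.6, height: 0.9)           // placeholder 16:9 size
plane.firstMaterial?.diffuse.contents = avPlayer        // AVPlayer as a SceneKit texture
let videoNode = SCNNode(geometry: plane)
scnView?.scene?.rootNode.addChildNode(videoNode)
avPlayer.play()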
I might be wrong, but currently RealityKit does not support video. A video is not a normal texture; it is a set of animated textures.

VNTrackObjectRequest not tracking face using VNFaceObservation in ARSCNView

I am trying to track a face using VNFaceObservation in an ARSCNView.
let response = observations.map({ (face) -> (observation: VNFaceObservation, image: CIImage, frame: ARFrame) in
    self.lastObservation = VNDetectedObjectObservation(boundingBox: face.boundingBox)
Then I do:
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    guard let lastObservation = self.lastObservation else { return }
    let request = VNTrackObjectRequest(detectedObjectObservation: lastObservation,
                                       completionHandler: self.handleVisionRequestUpdate)
    request.trackingLevel = .accurate
    do {
        try self.visionSequenceHandler.perform([request], on: frame.capturedImage)
    } catch {
        print("Throws: \(error)")
    }
}
But the request result doesn't track the face and returns the wrong rect:
guard let newObservation = request.results?.first as? VNDetectedObjectObservation else { return }
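An editor's note (a sketch, not a confirmed fix): ARFrame.capturedImage is delivered in the sensor's landscape orientation, while Vision's boundingBox is normalized with a bottom-left origin, so both the request orientation and the rect conversion matter. Something like:

func session(_ session: ARSession, didUpdate frame: ARFrame) {
    guard let lastObservation = self.lastObservation else { return }
    let request = VNTrackObjectRequest(detectedObjectObservation: lastObservation,
                                       completionHandler: self.handleVisionRequestUpdate)
    request.trackingLevel = .accurate
    do {
        // Tell Vision how the pixel buffer is oriented. For a portrait UI the
        // camera image is typically .right (verify this for your setup).
        try self.visionSequenceHandler.perform([request],
                                               on: frame.capturedImage,
                                               orientation: .right)
    } catch {
        print("Throws: \(error)")
    }
}

// When using the result, convert the normalized, bottom-left-origin rect
// into pixel coordinates:
// let imageRect = VNImageRectForNormalizedRect(newObservation.boundingBox,
//                                              CVPixelBufferGetWidth(frame.capturedImage),
//                                              CVPixelBufferGetHeight(frame.capturedImage))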