RealityKit – How to add a Video Material to a ModelEntity? - swift

I use the code to add a picture texture in RealityKit and it works fine.
var material = SimpleMaterial()
material.baseColor = try! .texture(.load(named: "image.jpg"))
I try to use this code to add a video file to be a texture, but it crashes!!!
guard let url = Bundle.main.url(forResource: "data", withExtension: "mp4") else {
return
}
material.baseColor = try! .texture(.load(contentsOf: url))
How can I add a video file?

RealityKit video material
You can use Video Material in RealityKit 2.0 and higher. RealityKit v1.0 doesn't support video materials. Here's a code showing you how to apply a video material:
Approach A
import SwiftUI
import RealityKit
import AVFoundation
// SwiftUI wrapper around a RealityKit ARView that plays a bundled mp4
// on a 16:9 plane using VideoMaterial (available in RealityKit 2.0+).
struct ARViewContainer : UIViewRepresentable {
let arView = ARView(frame: .zero)
let anchor = AnchorEntity()
// Creates the ARView, positions the anchor, and kicks off video loading.
func makeUIView(context: Context) -> ARView {
// NOTE(review): +1.0 on z moves the anchor toward the viewer — confirm this
// is the intended placement for the camera mode in use.
anchor.position.z += 1.0
self.loadVideoMaterial()
return arView
}
// Loads "video.mp4" from the main bundle, builds a VideoMaterial from an
// AVPlayer, and attaches the resulting plane model to the anchor.
func loadVideoMaterial() {
guard let pathToVideo = Bundle.main.path(forResource: "video",
ofType: "mp4")
else { return }  // silently bail if the asset is missing from the bundle
let videoURL = URL(fileURLWithPath: pathToVideo)
let avPlayer = AVPlayer(url: videoURL)
// 16:9 video
let mesh = MeshResource.generatePlane(width: 1.92, height: 1.08)
let material = VideoMaterial(avPlayer: avPlayer)
let planeModel = ModelEntity(mesh: mesh, materials: [material])
anchor.addChild(planeModel)
arView.scene.anchors.append(anchor)
avPlayer.volume = 0.05  // keep playback quiet
avPlayer.play()
}
func updateUIView(_ view: ARView, context: Context) { }
}
// Hosts the AR container full-screen, extending under the safe area.
struct ContentView : View {
var body: some View {
let container = ARViewContainer()
return container.ignoresSafeArea()
}
}
Also, you can add RealityKit's VideoMaterial this way:
Approach B
// AVPLAYER and PlayerItem
// Build the player item from a bundled mp4 (force-unwrapped: crashes if the
// file is missing from the bundle).
let url = Bundle.main.url(forResource: "video", withExtension: "mp4")
let asset = AVAsset(url: url!)
let playerItem = AVPlayerItem(asset: asset)
let avPlayer = AVPlayer()
// ENTITY
// A sphere whose surface shows the video via VideoMaterial (RealityKit 2.0+).
let mesh = MeshResource.generateSphere(radius: 1)
let material = VideoMaterial(avPlayer: avPlayer)
let entity = ModelEntity(mesh: mesh, materials: [material])
// ANCHOR
// World-space anchor placed at z = -10 (in front of the origin).
let anchor = AnchorEntity(world: [0,0,-10])
anchor.addChild(entity)
arView.scene.anchors.append(anchor)
// PLAYBACK
avPlayer.replaceCurrentItem(with: playerItem)
avPlayer.play()
SceneKit video material
import SwiftUI
import SceneKit
import AVFoundation
// SwiftUI wrapper around SCNView that plays a bundled mp4 on a 16:9 plane.
// SceneKit accepts an AVPlayer directly as a material's diffuse contents.
struct VRViewContainer : UIViewRepresentable {
let sceneView = SCNView(frame: .zero)
func makeUIView(context: Context) -> SCNView {
sceneView.scene = SCNScene()
sceneView.backgroundColor = .black
// NOTE(review): pointOfView may still be nil at this point, so this
// optional-chained move could be a no-op — confirm.
sceneView.pointOfView?.position.z += 0.5
// Keep the view rendering continuously so the video frames update.
sceneView.isPlaying = true
self.loadVideoMaterial()
return sceneView
}
// Loads "video.mp4" from the main bundle and shows it on a plane whose
// diffuse material contents are the AVPlayer itself.
func loadVideoMaterial() {
guard let pathToVideo = Bundle.main.path(forResource: "video",
ofType: "mp4")
else { return }  // silently bail if the asset is missing
let videoURL = URL(fileURLWithPath: pathToVideo)
let avPlayer = AVPlayer(url: videoURL)
// 16:9 video
let material = SCNMaterial()
material.diffuse.contents = avPlayer
let mesh = SCNPlane(width: 1.92, height: 1.08)
mesh.materials[0] = material
let planeModel = SCNNode(geometry: mesh)
sceneView.scene?.rootNode.addChildNode(planeModel)
avPlayer.volume = 0.05  // keep playback quiet
avPlayer.play()
}
func updateUIView(_ view: SCNView, context: Context) { }
}
// Hosts the SceneKit container full-screen, extending under the safe area.
struct ContentView : View {
var body: some View {
let container = VRViewContainer()
return container.ignoresSafeArea()
}
}

I found a workaround, shown in the code below:
self.arView = arView
let scene = SCNScene()
scnView = SCNView(frame: arView.frame)
scnView?.scene = scene
scnView?.backgroundColor = UIColor.clear
scnView?.scene?.background.contents = UIColor.clear
Then add SCN camera and set camera transform from ARFrame, such as:
let rotation = SCNMatrix4MakeRotation(.pi / 2.0, 0, 0, 1)
let cameraTransform = simd_mul(frame.camera.transform, simd_float4x4(rotation))
let projectionMatrix = SCNMatrix4(frame.camera.projectionMatrix(for: .portrait,
viewportSize: self.viewBounds!.size,
zNear: 0.001,
zFar: 10000) )
self.arCameraNode?.simdTransform = cameraTransform
self.arCameraNode?.camera?.projectionTransform = projectionMatrix
Finally, add your SCN video node into rootNode
However, there is a slight positional shift, so I think you may want to wait for RealityKit to support video materials natively.

I might be wrong, but currently RealityKit does not support videos. A video is not a normal texture, it is a set of animated textures.

Related

How do I configure an AVPlayer to present an image rather than a video?

I have a page view controller - a feed - which an AVPlayer is nested in. How would I configure the AVPlayer to present just an image, or add a switch statement, to present an ImageViewController when an image is loaded into the newsfeed hierarchy?
// Sets up a muted, looping video layer from the bundled "video.mp4" and adds
// it as a sublayer of this view controller's view.
// Fix: the original assigned `playerLayer.frame` twice in a row — the first
// assignment was immediately overwritten, so only the inset frame is kept.
private func configureVideo() {
guard let path = Bundle.main.path(forResource: "video", ofType: "mp4") else {
return  // video asset missing from the bundle
}
let url = URL(fileURLWithPath: path)
player = AVPlayer(url: url)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.videoGravity = .resizeAspectFill
playerLayer.frame = self.view.frame.inset(by: UIEdgeInsets(top: 0, left: 0.5, bottom: 80, right: 0.5))
self.view.layer.addSublayer(playerLayer)
player?.volume = 0  // play muted
player?.play()
guard let player = player else {
return
}
// Loop: when the item finishes, seek back to the start and play again.
playerDidFinishObserver = NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime,
object: player.currentItem,
queue: .main
) { _ in
player.seek(to: .zero)
player.play()
}
}
}

overlay UIView over AVPlayer - Swift - Programmatically

I have a AVPlayer which I set in this way:
// Creates an AVPlayer for the given URL string and attaches a full-screen
// player layer to this view controller's view.
// Fix: `#escaping` is a scrape artifact — the Swift attribute is `@escaping`.
// NOTE(review): `completion` is never invoked in this function, and the
// audio session is obtained but never configured — confirm the intent.
fileprivate func setUpPlayer(_ videoURL: String, completion: @escaping () -> Void) {
let url = URL(string: videoURL)
// NOTE(review): force-unwrap crashes on a malformed URL string.
self.player = AVPlayer(url: url!)
self.playerLayer = AVPlayerLayer(player: player)
let audioSession = AVAudioSession.sharedInstance()
playerLayer.frame = view.bounds
playerLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(playerLayer)
player.volume = 1
player.play()
player.actionAtItemEnd = .none
}
I want to put a UIView which contains textViews and drawings over it but I don't know how to do it.
The ideal would be something similar to UIGraphicsGetImageFromCurrentImageContext() with images

Issue with adding node to scene view

I am having an issue with the way the model is being placed on the scene view.
The problem is that the model downloads successfully but for some reason it appears on top of the scene view.
here is a video of the problem:
https://www.youtube.com/watch?v=-pTPEZTF9zo
view did load:
// Kicks off the model download, enables AR debug visualizations, starts
// horizontal plane detection, and registers the tap gesture.
override func viewDidLoad() {
super.viewDidLoad()
// NOTE(review): method name has a typo ("dowloadModel") — kept to match its definition.
self.dowloadModel()
self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
self.configuration.planeDetection = .horizontal
self.sceneView.session.run(configuration)
self.registerGestureRecogniser()
}
Gesture recogniser:
// Attaches a tap recogniser to the scene view, routing taps to `tapped(sender:)`.
func registerGestureRecogniser(){
let recogniser = UITapGestureRecognizer(target: self, action: #selector(tapped))
sceneView.addGestureRecognizer(recogniser)
}
Tapped function:
// Handles a tap: hit-tests against detected horizontal planes and, on a hit,
// places the downloaded model at that location.
// Fixes: `#objc` (scrape artifact) restored to `@objc`; the force cast `as!`
// is replaced with a safe guard so a non-ARSCNView sender cannot crash.
@objc func tapped(sender: UITapGestureRecognizer) {
guard let sceneView = sender.view as? ARSCNView else { return }
let tapLocation = sender.location(in: sceneView)
// NOTE: hitTest(_:types:) is deprecated in iOS 14+ in favour of raycast queries.
let hitTest = sceneView.hitTest(tapLocation, types: .existingPlaneUsingExtent)
if let firstHit = hitTest.first {
print("touched a horizontal surface")
self.addItem2(hitTestResult: firstHit)
} else {
print("no match")
}
}
Download model from AWS:
// Downloads the model file and caches it to Documents/Food.scn.
// Fixes: removed the unnecessary Data -> NSData round-trip, and replaced
// `try!` (which crashes on any disk-write failure) with do/catch.
private func dowloadModel(){
// NOTE(review): placeholder URL; the force-unwrap is only acceptable for a
// known-good literal.
let url = URL(string: "https://ENTER URL HERE")!
URLSession.shared.dataTask(with: url) { data, response, error in
if let error = error {
print(error.localizedDescription)
return
}
guard let data = data else { return }
print(data)
let documentDirectories = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
if let documentDirectory = documentDirectories.first {
let fileURL = documentDirectory.appendingPathComponent("Food.scn")
do {
try data.write(to: fileURL, options: .atomic)
print("Saved!")
} catch {
print(error)
}
}
}.resume()
}
Add Item 2 function:
// Places the cached model ("Food.scn") at the tapped plane's world position.
// Fix: the original built and configured an SCNMaterial but never applied it
// to the node (dead code) — it is now assigned to the node's geometry.
func addItem2(hitTestResult : ARHitTestResult){
let documentDirectories = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
guard let documentDirectory = documentDirectories.first else { return }
let fileURL = documentDirectory.appendingPathComponent("Food.scn")
do {
let scene = try SCNScene(url: fileURL, options: nil)
// NOTE(review): force-unwrap crashes if the scene has no "Burger" node.
let node = scene.rootNode.childNode(withName: "Burger", recursively: true)!
let material = SCNMaterial()
material.diffuse.contents = UIImage(named: "Hamburger_BaseColor")
material.diffuse.wrapT = SCNWrapMode.repeat
material.diffuse.wrapS = SCNWrapMode.repeat
material.isDoubleSided = true
// Apply the texture to the model (missing in the original code).
node.geometry?.firstMaterial = material
// Position the node at the hit-test's world position (translation column).
let transform = hitTestResult.worldTransform
let thirdColumn = transform.columns.3
node.position = SCNVector3(thirdColumn.x, thirdColumn.y, thirdColumn.z)
self.sceneView.scene.rootNode.addChildNode(node)
} catch {
print(error)
}
}
It seems that this issue entirely depends on a pivot point's position and a scale of your model.
Pivot point meets ARAnchor and helps you control a model's offset, orientation and scale of model on a floor, a table-top, or a wall.
node.scale = SCNVector3(x: 0.2, y: 0.2, z: 0.2)
node.pivot = SCNMatrix4MakeTranslation(0, -0.5, 0)
...or for positioning pivot use this approach:
node.simdPivot.columns.3.y = -0.5
So if you want your model to precisely stand on an invisible detected plane move a pivot point in SceneKit toward desired place or set its position in 3D authoring tool (like 3dsMax or Blender).

how to add CIFilters to a video at runtime

I have recorded a video using AVFoundation, and after tapping on the video it starts to play in a new view controller. The problem is that I am not able to add filter effects to the playing video. Following is the code of what I have achieved.
import UIKit
import AVFoundation
// Plays back a previously captured video full-screen and unwinds to the
// capture screen when playback ends.
// Fixes: `#IBOutlet`/`#objc` (scrape artifacts) restored to `@IBOutlet`/`@objc`;
// redundant `videoURL as URL` cast removed (videoURL is already URL!).
class VideoPlayback: UIViewController {
let avPlayer = AVPlayer()
var avPlayerLayer: AVPlayerLayer!
var videoURL: URL!   // set by the presenting controller before viewDidLoad
@IBOutlet weak var videoView: UIView!
override func viewDidLoad() {
super.viewDidLoad()
// NOTE(review): object is nil, so this observer fires for ANY player item
// reaching its end — consider passing the specific AVPlayerItem instead.
NotificationCenter.default.addObserver(self, selector: #selector(finishVideo), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
avPlayerLayer = AVPlayerLayer(player: avPlayer)
avPlayerLayer.frame = view.bounds
avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoView.layer.insertSublayer(avPlayerLayer, at: 0)
view.layoutIfNeeded()
let playerItem = AVPlayerItem(url: videoURL)
avPlayer.replaceCurrentItem(with: playerItem)
avPlayer.play()
}
// Called when playback finishes; unwinds back to the capture screen.
@objc func finishVideo()
{
print("Video Finished")
self.performSegue(withIdentifier: "unwindToFifteenSeconds", sender: self)
}
// Remove Observer
deinit {
NotificationCenter.default.removeObserver(self)
}
}
Above code is only to play a video that was captured by the camera using AVfoundation. I only need to add CIfilters to this played video.
You can apply a filter to a video like this:
// Build a player item whose frames are blurred live via a Core Image
// composition; the blur radius grows with playback time.
let filter = CIFilter(name: "CIGaussianBlur")!
let asset = AVAsset(url: streamURL)
let item = AVPlayerItem(asset: asset)
item.videoComposition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
// Clamp to avoid blurring transparent pixels at the image edges
let source = request.sourceImage.clampingToExtent()
filter.setValue(source, forKey: kCIInputImageKey)
// Vary filter parameters based on video timing
let seconds = CMTimeGetSeconds(request.compositionTime)
filter.setValue(seconds * 10.0, forKey: kCIInputRadiusKey)
// Crop the blurred output to the bounds of the original image.
// Fix: `cropping(to:)` is the old Swift 3 name; Swift 4+ uses `cropped(to:)`.
let output = filter.outputImage!.cropped(to: request.sourceImage.extent)
// Provide the filter output to the composition
request.finish(with: output, context: nil)
})

for loop in swift step by step?

I have the following array with urls:
let KStorePlayURL = [
https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/07/19/ATASTEOFDANCE_S1_EP3.mp4,
https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/05/23/ATasteOfDance_S1E1_Episode.mp4,
https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/05/23/ATasteOfDance_S1E1_Episode.mp4,
https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/05/23/ATasteOfDance_S1E1_Episode.mp4
]
How can I iterate over these URLs one after the other in a for loop? Also, how can one video play and then be followed by the next videos in this Bitmovin player?
Here is the code I have already tried:
// Question code: builds the playlist AND (incorrectly) recreates the player
// and player view on every loop iteration — the answer below explains why
// and shows the corrected version. Fix here: `#objc` (scrape artifact)
// restored to `@objc`.
@objc func setUpPlayerVideos1() {
print(KStorePlayURL)
for i in 0..<KStorePlayURL.count {
let streamURL = URL(string: KStorePlayURL[i])
playlist.append(PlaylistItem(url: streamURL!, title: "player"))
// Create player based with a default configuration
let player = BitmovinPlayer()
// Create player view and pass the player instance to it
let playerView = BMPBitmovinPlayerView(player: player, frame: .zero)
// Listen to player events
player.add(listener: self)
playerView.autoresizingMask = [.flexibleHeight, .flexibleWidth]
playerView.frame = view.bounds
view.addSubview(playerView)
view.bringSubview(toFront: playerView)
// store the reference to the player
self.Bitmovinplayer = player
}
// Start the playlist
playNextItem()
}
You shouldn't be creating the player inside a for-loop, because you are overriding the player in each iteration. What you need to do is fill the playlist using a for-loop or a more functional style like this:
// Build the playlist first. The guard already binds a non-optional URL, so
// no force-unwrap is needed (the original's `streamURL!` would not compile:
// you cannot force-unwrap a non-optional).
KStorePlayURL.forEach { urlString in
// Make sure that the url address is correct
guard let streamURL = URL(string: urlString) else {
fatalError("Error in stream url")
}
playlist.append(PlaylistItem(url: streamURL, title: "player"))
}
Here I am supposing that KStorePlayURL is an array of strings:
let KStorePlayURL = [
"https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/07/19/ATASTEOFDANCE_S1_EP3.mp4",
"https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/05/23/ATasteOfDance_S1E1_Episode.mp4",
"https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/05/23/ATasteOfDance_S1E1_Episode.mp4",
"https://source.s3-us-west-2.amazonaws.com/ENVOI/2018/05/23/ATasteOfDance_S1E1_Episode.mp4"
]
Your final code should look like this:
// Corrected version: fill the playlist once, create a single player and view,
// then start playback.
// Fixes: `#objc` (scrape artifact) restored to `@objc`; the closing braces
// were garbled so `playNextItem()` had fallen outside the function body —
// it now runs at the end of the function as intended.
@objc func setUpPlayerVideos1() {
KStorePlayURL.forEach { urlString in
guard let streamURL = URL(string: urlString) else {
fatalError("Error in stream url")
}
playlist.append(PlaylistItem(url: streamURL, title: "player"))
}
// Create player with a default configuration
let player = BitmovinPlayer()
// Create player view and pass the player instance to it
let playerView = BMPBitmovinPlayerView(player: player, frame: .zero)
// Listen to player events
player.add(listener: self)
playerView.autoresizingMask = [.flexibleHeight, .flexibleWidth]
playerView.frame = view.bounds
view.addSubview(playerView)
view.bringSubview(toFront: playerView)
// store the reference to the player
self.Bitmovinplayer = player
// Start the playlist
playNextItem()
}
You can find a complete sample code for playing a playlist with the bitmovin player here.