Apply GIF to SCNPlane - Swift

Right now my Swift code creates an SCNPlane with an image called we.jpg. What I would like to do is replace we.jpg with ball.gif. material.diffuse.contents is where the UIImage is attached to the SCNPlane:
let planeGeometry = SCNPlane(width: 0.2, height: 0.35)
let material = SCNMaterial()
material.diffuse.contents = UIImage(named: "we.jpg")
planeGeometry.materials = [material]

You can easily replace the SCNPlane's .jpg texture with a .gif texture. But remember that SceneKit has no support for animated GIF textures (the GIF will render as a static image).
Here's the code:
import ARKit
import SceneKit

class ViewController: UIViewController {

    @IBOutlet var sceneView: ARSCNView!    // in case you're using an AR app
    // @IBOutlet var sceneView: SCNView!   // in case you're using a VR app

    let planeNode = SCNNode()

    override func viewDidLoad() {
        super.viewDidLoad()

        let scene = SCNScene()
        sceneView.scene = scene
        sceneView.allowsCameraControl = true

        planeNode.geometry = SCNPlane(width: 0.2, height: 0.35)
        planeNode.position = SCNVector3(0, 0, -0.5)
        sceneView.scene?.rootNode.addChildNode(planeNode)

        DispatchQueue.main.asyncAfter(deadline: .now() + 3.0) {  // 3 sec delay
            let image = UIImage(named: "texture.jpg")
            self.planeNode.geometry?.firstMaterial?.diffuse.contents = image
        }
        // Other stuff...
    }

    // ...and you can replace a texture on a button's click:
    @IBAction func replaceTexture(_ sender: UIButton) {
        let image = UIImage(named: "texture.gif")
        planeNode.geometry?.firstMaterial?.diffuse.contents = image
    }
}
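If you actually need the GIF to animate, SceneKit won't do it for you, but you can extract the frames with ImageIO and cycle them on the material yourself. A minimal sketch, assuming a ball.gif in the app bundle and a fixed 10 fps frame rate (a real implementation would read each frame's delay from the GIF metadata):

import ImageIO
import UIKit

// Extracts the individual frames of a bundled GIF.
func gifFrames(named name: String) -> [UIImage] {
    guard let url = Bundle.main.url(forResource: name, withExtension: "gif"),
          let source = CGImageSourceCreateWithURL(url as CFURL, nil)
    else { return [] }
    return (0 ..< CGImageSourceGetCount(source))
        .compactMap { CGImageSourceCreateImageAtIndex(source, $0, nil) }
        .map { UIImage(cgImage: $0) }
}

// Inside viewDidLoad: cycle the frames on the plane's material.
let frames = gifFrames(named: "ball")
var frameIndex = 0
Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
    guard !frames.isEmpty else { return }
    self.planeNode.geometry?.firstMaterial?.diffuse.contents = frames[frameIndex]
    frameIndex = (frameIndex + 1) % frames.count
}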

Related

Change texture image for 3d model

I'm investigating iOS and trying to understand whether it's possible to change the texture of a model to any other picture.
Let's say I have model.obj with a related texture green.png applied to it, so the product appears green. Is it possible to choose another image, for example blue.png, apply it programmatically at runtime to the 3D model, and make the product appear blue?
I have one working example:
override func viewDidLoad() {
    super.viewDidLoad()

    let node = SCNNode()
    let geometry = SCNSphere(radius: 0.2)
    node.position = SCNVector3(0, 0, -1)
    sceneView.backgroundColor = .black
    sceneView.scene = SCNScene()
    node.geometry = geometry
    let image = UIImage(named: "art.scnassets/green.jpeg")
    print(image)
    node.geometry?.firstMaterial?.diffuse.contents = image
    sceneView.scene.rootNode.addChildNode(node)
}
But when I try to apply the image to a loaded 3D model, its appearance doesn't change. Here is the code:
override func viewDidLoad() {
    super.viewDidLoad()

    let tempScene = SCNScene(named: "art.scnassets/California_chair_1.obj")!
    let node = tempScene.rootNode
    node.position = SCNVector3(0, 0, -1)
    sceneView.scene = SCNScene()
    let image = UIImage(named: "art.scnassets/green.jpeg")
    print(image)

    let material = SCNMaterial()
    material.isDoubleSided = false
    material.diffuse.contents = image
    node.geometry?.materials = [material]

    node.geometry?.firstMaterial?.diffuse.contents = image
    sceneView.scene.rootNode.addChildNode(node)
}
How can I apply an image to any 3D model created by a designer?
Many thanks for any help!
To retrieve a model from an SCNScene, you may need to apply the .childNodes[0] subscript several times to get down to the geometry and its corresponding materials in the hierarchy.
import ARKit

class ViewController: UIViewController {

    @IBOutlet var sceneView: ARSCNView!
    var node: SCNNode?

    override func viewDidLoad() {
        super.viewDidLoad()

        sceneView.scene = SCNScene(named: "art.scnassets/California_chair.scn")!
        sceneView.autoenablesDefaultLighting = true

        node = sceneView.scene.rootNode.childNode(withName: "firstChair",
                                               recursively: true)
        let green = UIColor.green
        node?.childNodes[0].geometry?.firstMaterial?.diffuse.contents = green
    }

    @IBAction func changeTexture(_ sender: UIButton) {
        let blue = UIImage(named: "art.scnassets/blueTexture.png")
        node?.geometry?.firstMaterial?.diffuse.contents = blue
    }
}
Consider that the SCNGeometry may be nested deep inside the hierarchy:

node?.childNodes[0].childNodes[0].childNodes[0].geometry?.firstMaterial?.diffuse
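If the nesting depth varies from model to model, a small recursive helper can be more robust than chaining subscripts. A minimal sketch (firstGeometryNode is a hypothetical helper, not a SceneKit API):

// Walks the node hierarchy and returns the first node carrying geometry.
func firstGeometryNode(in node: SCNNode) -> SCNNode? {
    if node.geometry != nil { return node }
    for child in node.childNodes {
        if let found = firstGeometryNode(in: child) { return found }
    }
    return nil
}

// Usage with the chair node fetched above:
// if let meshNode = firstGeometryNode(in: node!) {
//     meshNode.geometry?.firstMaterial?.diffuse.contents = UIColor.green
// }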
Also, always check your node's size (scale) to find out whether your camera ends up inside the 3D model.
P.S.
If you use .obj models, use the texture images referenced by their corresponding .mtl files (an .mtl file itself can't be loaded as a UIImage):

sceneView.scene = SCNScene(named: "art.scnassets/file.obj")!

let obj = sceneView.scene.rootNode.childNode(withName: "default",
                                          recursively: true)

// a texture image referenced by the model's .mtl file
obj?.geometry?.firstMaterial?.diffuse.contents = UIImage(named:
                                          "art.scnassets/file.png")

How to add a semi-transparent background on top of camera ARKit

I'm trying to add a semi-transparent background that covers my camera view from ARKit.
I tried different things:
Adding a background to sceneView.scene, but that doesn't support transparency.
Adding an overlaySKScene, but the nodes in my scene get overlaid too.
Using a CIImage from session > capturedImage, but it's too slow.
Using this post: Reliable access and modify captured camera frames under SceneKit; it's OK for transforming to black and white, but I don't understand how I can keep the colors and blend in a gray color.
Looking into OpenGL or Metal, but I'm a noob!
So, do you have an idea how to achieve that in ARKit?
Thanks in advance.
You can do it this way:
import ARKit

class ViewController: UIViewController, ARSCNViewDelegate {

    @IBOutlet var sceneView: ARSCNView!

    override func viewDidLoad() {
        super.viewDidLoad()

        sceneView.delegate = self
        sceneView.session.delegate = self
        let scene = SCNScene()
        sceneView.scene = scene

        let planeNode = SCNNode()
        planeNode.geometry = SCNPlane(width: 100, height: 100)
        planeNode.geometry?.firstMaterial?.diffuse.contents = UIColor(white: 0,
                                                                      alpha: 0.9)
        planeNode.position.z = -5                       // 5 meters away
        sceneView.pointOfView?.addChildNode(planeNode)  // PINNING TO CAMERA
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let configuration = ARWorldTrackingConfiguration()
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        sceneView.session.pause()
    }
}
...or in an extension:
extension ViewController: ARSessionDelegate {

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // NOTE: this delegate method fires every frame,
        // so guard against adding a new plane each time in real code
        let planeNode = SCNNode()
        planeNode.geometry = SCNPlane(width: 100, height: 100)
        planeNode.position.z = -5
        planeNode.geometry?.firstMaterial?.diffuse.contents = UIColor(white: 0,
                                                                      alpha: 0.9)
        // var translation = matrix_identity_float4x4
        // translation.columns.3.z = -5
        // planeNode.simdTransform = matrix_multiply(translation,
        //                                           frame.camera.transform)
        sceneView.pointOfView?.addChildNode(planeNode)
    }
}
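If the goal is only to dim the camera image rather than to place real geometry in front of it, a lighter-weight option may be to lower the scene background's intensity, since ARSCNView renders the captured camera image as the scene background. A minimal sketch (the 0.5 value is an arbitrary choice):

// Dims the camera feed itself: 1.0 shows the full image, lower values darken it.
sceneView.scene.background.intensity = 0.5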

Color keying video with GPUImage on a SCNPlane in ARKit

I am trying to play a video showing transparency in an ARSCNView. An SCNPlane is used as a projection surface for the video, and I am trying to color-key this video with GPUImage.
I followed this example here. Unfortunately, I have not found a way to project that video back onto my videoSpriteKitNode, because the filter is rendered into a GPUImageView, while the SKVideoNode takes an AVPlayer.
I am not sure whether what I am trying to do is possible at all, so if anyone could share their insight I'd be very thankful!
import UIKit
import ARKit
import GPUImage

class ARTransVC: UIViewController {

    @IBOutlet weak var sceneView: ARSCNView!
    let configuration = ARWorldTrackingConfiguration()

    var movie: GPUImageMovie!
    var filter: GPUImageChromaKeyBlendFilter!
    var sourcePicture: GPUImagePicture!
    var player = AVPlayer()
    var gpuImageView: GPUImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin,
                                       ARSCNDebugOptions.showFeaturePoints]
        self.sceneView.session.run(configuration)

        self.gpuImageView = GPUImageView()
        self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false

        // a delay for ARKit to capture the surroundings
        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

            // A SpriteKit scene to contain the SpriteKit video node
            let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width,
                                                      height: self.sceneView.frame.height))
            spriteKitScene.scaleMode = .aspectFit

            // Create a video player, which will be responsible for playing the video material
            guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { return }
            let playerItem = AVPlayerItem(url: url)
            self.player.replaceCurrentItem(with: playerItem)

            // transparency
            self.filter = GPUImageChromaKeyBlendFilter()
            self.filter.thresholdSensitivity = 0.15
            self.filter.smoothing = 0.3
            self.filter.setColorToReplaceRed(0.322, green: 0.455, blue: 0.831)

            self.movie = GPUImageMovie(playerItem: playerItem)
            self.movie.playAtActualSpeed = true
            self.movie.addTarget(self.filter)
            self.movie.startProcessing()

            let backgroundImage = UIImage(named: "transparent.png")
            self.sourcePicture = GPUImagePicture(image: backgroundImage, smoothlyScaleOutput: true)!
            self.sourcePicture.addTarget(self.filter)
            self.sourcePicture.processImage()

            /// HERE I DON'T KNOW HOW TO CONTINUE
            self.filter.addTarget(self.gpuImageView)

            // To make the video loop
            self.player.actionAtItemEnd = .none
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(ARTransVC.playerItemDidReachEnd),
                name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                object: self.player.currentItem)

            // Create the SpriteKit video node, containing the video player
            let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
            videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0,
                                                  y: spriteKitScene.size.height / 2.0)
            videoSpriteKitNode.size = spriteKitScene.size
            videoSpriteKitNode.yScale = -1.0
            videoSpriteKitNode.play()
            spriteKitScene.addChild(videoSpriteKitNode)

            // Create the SceneKit scene
            let scene = SCNScene()
            self.sceneView.scene = scene
            self.sceneView.isPlaying = true

            // Create a SceneKit plane and add the SpriteKit scene as its material
            let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
            background.firstMaterial?.diffuse.contents = spriteKitScene
            let backgroundNode = SCNNode(geometry: background)
            backgroundNode.geometry?.firstMaterial?.isDoubleSided = true
            backgroundNode.position = SCNVector3(0, 0, -2.0)
            scene.rootNode.addChildNode(backgroundNode)
        }
    }

    @objc func playerItemDidReachEnd(notification: NSNotification) {
        if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
            playerItem.seek(to: kCMTimeZero, completionHandler: nil)
        }
    }
}
So, I now managed to chroma-key and play a now-transparent video in an ARSCNView, but it is still a somewhat patchy solution.
I stepped away from my former approach and implemented ChromaKeyMaterial from Lësha Turkowski!
Here it is, adjusted to the color I wanted to key:
import SceneKit

public class ChromaKeyMaterial: SCNMaterial {

    public var backgroundColor: UIColor {
        didSet { didSetBackgroundColor() }
    }

    public var thresholdSensitivity: Float {
        didSet { didSetThresholdSensitivity() }
    }

    public var smoothing: Float {
        didSet { didSetSmoothing() }
    }

    public init(backgroundColor: UIColor = .green,
                thresholdSensitivity: Float = 0.15,
                smoothing: Float = 0.0) {
        self.backgroundColor = backgroundColor
        self.thresholdSensitivity = thresholdSensitivity
        self.smoothing = smoothing
        super.init()

        didSetBackgroundColor()
        didSetThresholdSensitivity()
        didSetSmoothing()

        // chroma key shader is based on GPUImage
        // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m
        let surfaceShader =
        """
        uniform vec3 c_colorToReplace;
        uniform float c_thresholdSensitivity;
        uniform float c_smoothing;

        #pragma transparent
        #pragma body

        vec3 textureColor = _surface.diffuse.rgb;

        float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b;
        float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
        float maskCb = 0.5647 * (c_colorToReplace.b - maskY);

        float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
        float Cr = 0.7132 * (textureColor.r - Y);
        float Cb = 0.5647 * (textureColor.b - Y);

        float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));

        float a = blendValue;
        _surface.transparent.a = a;
        """

        shaderModifiers = [.surface: surfaceShader]
    }

    required public init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // setting the background color to be keyed out
    private func didSetBackgroundColor() {
        // getting the pixel values from the background color
        // let rgb = backgroundColor.cgColor.components!.map { Float($0) }
        // let vector = SCNVector3(x: rgb[0], y: rgb[1], z: rgb[2])
        let vector = SCNVector3(x: 0.216, y: 0.357, z: 0.663)
        setValue(vector, forKey: "c_colorToReplace")
    }

    private func didSetSmoothing() {
        setValue(smoothing, forKey: "c_smoothing")
    }

    private func didSetThresholdSensitivity() {
        setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity")
    }
}
Here is the code that plays the keyed video in ARKit on a SCNPlane:
import UIKit
import ARKit

class ARTransVC: UIViewController {

    @IBOutlet weak var arSceneView: ARSCNView!
    let configuration = ARWorldTrackingConfiguration()

    private var player: AVPlayer = {
        guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { fatalError() }
        return AVPlayer(url: url)
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        self.arSceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin,
                                         ARSCNDebugOptions.showFeaturePoints]
        self.arSceneView.session.run(configuration)

        // a delay for ARKit to capture the surroundings
        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

            // A SpriteKit scene to contain the SpriteKit video node
            let spriteKitScene = SKScene(size: CGSize(width: self.arSceneView.frame.width,
                                                      height: self.arSceneView.frame.height))
            spriteKitScene.scaleMode = .aspectFit
            spriteKitScene.backgroundColor = .clear

            // Create the SpriteKit video node, containing the video player
            let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
            videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0,
                                                  y: spriteKitScene.size.height / 2.0)
            videoSpriteKitNode.size = spriteKitScene.size
            videoSpriteKitNode.yScale = -1.0
            videoSpriteKitNode.play()
            spriteKitScene.addChild(videoSpriteKitNode)

            // To make the video loop
            self.player.actionAtItemEnd = .none
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(ARTransVC.playerItemDidReachEnd),
                name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                object: self.player.currentItem)

            // Create the SceneKit scene
            let scene = SCNScene()
            self.arSceneView.scene = scene

            // Create a SceneKit plane; the chroma-key material takes the player directly
            let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
            background.firstMaterial?.diffuse.contents = spriteKitScene

            let chromaKeyMaterial = ChromaKeyMaterial()
            chromaKeyMaterial.diffuse.contents = self.player

            let backgroundNode = SCNNode(geometry: background)
            backgroundNode.geometry?.firstMaterial?.isDoubleSided = true
            backgroundNode.geometry!.materials = [chromaKeyMaterial]
            backgroundNode.position = SCNVector3(0, 0, -2.0)
            scene.rootNode.addChildNode(backgroundNode)

            // the video does not start without delaying the player;
            // playing it right away just results in
            // [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)
            DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
                self.player.seek(to: CMTimeMakeWithSeconds(1, 1000))
                self.player.play()
            }
        }
    }

    @objc func playerItemDidReachEnd(notification: NSNotification) {
        if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
            playerItem.seek(to: kCMTimeZero, completionHandler: nil)
        }
    }
}
I was getting a [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef), which was apparently fixed in iOS 11.2. For now, I settled on a rather patchy workaround where I restart the video after a one-second delay. A better approach to that is greatly appreciated.
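One possibly cleaner alternative to the fixed delay (a sketch, not verified against that SceneKit error) is to key-value observe the player item's status and start playback only once it reports .readyToPlay:

import AVFoundation

var statusObservation: NSKeyValueObservation?  // keep a strong reference

statusObservation = player.currentItem?.observe(\.status) { [weak self] item, _ in
    if item.status == .readyToPlay {
        self?.player.play()
    }
}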
Try clearing the background and setting the scale mode on the SpriteKit scene:

spriteKitScene.backgroundColor = .clear
spriteKitScene.scaleMode = .aspectFit

Not able to add texture image to a cube in ARKit

I am not able to add an image to a cube in ARKit using an SCNMaterial object.
Here is the code:
import UIKit
import SceneKit
import ARKit

class SimpleBoxViewController: UIViewController, ARSCNViewDelegate {

    var sceneView: ARSCNView!

    override func viewDidLoad() {
        super.viewDidLoad()

        self.sceneView = ARSCNView(frame: self.view.frame)
        self.view.addSubview(self.sceneView)
        sceneView.delegate = self
        sceneView.showsStatistics = true

        let scene = SCNScene()
        let box = SCNBox(width: 0.2, height: 0.2, length: 0.2, chamferRadius: 0)
        let material = SCNMaterial()
        // This is not working
        material.diffuse.contents = UIImage(named: "<someImage>.png")
        let node = SCNNode()
        node.geometry = box
        node.geometry?.materials = [material]
        node.position = SCNVector3(0, -0.1, -0.5)
        scene.rootNode.addChildNode(node)
        sceneView.scene = scene
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let configuration = ARWorldTrackingConfiguration()
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Pause the view's session
        sceneView.session.pause()
    }
}
I tried adding various different images, but nothing works. The only one that works is the image named "textures.png", which comes preloaded in an ARKit project template.
Is there a specific requirement for an image to be loaded?
I'm not 100% sure on this one, but the issue might be that you're including the .png extension in the image name, since an image in your asset catalog is referenced by name alone.
Anyway, this code is working for me and does the same thing with regard to creating a cube with an image:
// pd supplies the desired dimensions (from the original code)
let box = SCNBox(width: pd.width, height: pd.height, length: 0.01,
                 chamferRadius: 0.0)
let imageMaterial = SCNMaterial()
let image = UIImage(named: "image")
imageMaterial.diffuse.contents = image
// one material per face of the box
box.materials = [imageMaterial, imageMaterial, imageMaterial,
                 imageMaterial, imageMaterial, imageMaterial]
let cube = SCNNode(geometry: box)
You have to add the path, like:

material.diffuse.contents = UIImage(named: "art.scnassets/textur")

This works for me.
I was also facing the same issue. I copied the .png files into the Assets.xcassets folder and it works for me.
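To sum up the two lookup styles the answers describe (the file names below are placeholders):

// Image added to Assets.xcassets: reference it by catalog name, no extension.
material.diffuse.contents = UIImage(named: "someImage")

// Image inside a folder reference such as art.scnassets: use the path.
material.diffuse.contents = UIImage(named: "art.scnassets/someImage.png")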

SceneKit Cocoa snapshot failed assertion

I am working on a Swift/Cocoa/Xcode application.
This application contains a SceneKit View. The rendering API is set to Default (I think this is Metal).
If I call snapshot() on this SceneKit view object, I get the error message below. What I want to do is capture an image of the scene, viewed from the camera.

Texture PixelFormat MTLPixelFormatBGRA8Unorm does not match Resolve PixelFormat MTLPixelFormatRGBA8Unorm

If I set the rendering API to OpenGL, I have no error and everything works.
I have tried the same thing in an iOS app; it works in both cases (Metal or OpenGL).
I do not understand why I get this error or what I should do to avoid it.
Here is sample code:
import SceneKit
import Cocoa

class ViewController: NSViewController {

    @IBOutlet weak var vue_scene: SCNView!
    @IBOutlet weak var img_snapshot: NSImageView!

    let camera_node = SCNNode()
    var box_node: SCNNode = SCNNode()

    override func viewDidLoad() {
        super.viewDidLoad()

        let scene = SCNScene()
        vue_scene.scene = scene
        vue_scene.backgroundColor = NSColor.clear
        vue_scene.showsStatistics = false
        vue_scene.allowsCameraControl = false
        vue_scene.autoenablesDefaultLighting = true

        camera_node.camera = SCNCamera()
        camera_node.camera?.zNear = 0.01
        camera_node.camera?.zFar = 1000000.0
        vue_scene.pointOfView = camera_node
        vue_scene.scene!.rootNode.addChildNode(camera_node)

        let box = SCNBox(width: 10.0,
                         height: 10.0,
                         length: 10.0,
                         chamferRadius: 0.0)
        box.firstMaterial?.diffuse.contents = NSColor.red
        box.firstMaterial?.isDoubleSided = true
        box_node = SCNNode(geometry: box)
        box_node.position = SCNVector3Make(0, 0, 0)
        box_node.opacity = 1.0
        vue_scene.scene!.rootNode.addChildNode(box_node)

        camera_node.position = SCNVector3Make(0.0, 0.0, 70.0)
    }

    @IBAction func on_btn(_ sender: Any) {
        // signal SIGABRT here:
        // /Library/Caches/com.apple.xbs/Sources/Metal/Metal-56.6.1/ToolsLayers/Debug/MTLDebugCommandBuffer.mm:215:
        // failed assertion `Texture PixelFormat MTLPixelFormatBGRA8Unorm does not match
        // Resolve PixelFormat MTLPixelFormatRGBA8Unorm'
        let image = vue_scene.snapshot()
        img_snapshot.image = image
    }
}
It works on Metal.
On macOS, use a Tab View (for example) to accommodate the SCNView and the NSImageView.
I tested it in Xcode 13.3 on macOS 12.3 Monterey.
import SceneKit
import Cocoa

class ViewController: NSViewController {

    @IBOutlet var sceneView: SCNView!
    @IBOutlet var imageView: NSImageView!

    override func viewDidLoad() {
        super.viewDidLoad()

        sceneView.scene = SCNScene()
        sceneView.pointOfView?.position.z = 20
        sceneView.allowsCameraControl = true
        sceneView.showsStatistics = true
        sceneView.backgroundColor = NSColor.black
        sceneView.autoenablesDefaultLighting = true

        let box = SCNBox(width: 1.0, height: 1.0,
                         length: 1.0, chamferRadius: 0.0)
        box.firstMaterial?.diffuse.contents = NSColor.systemTeal
        let boxNode = SCNNode(geometry: box)
        boxNode.position = SCNVector3(0, 0, -10)
        sceneView.scene!.rootNode.addChildNode(boxNode)
    }

    @IBAction func createSnapshot(_ sender: NSButton) {
        let image = sceneView.snapshot()
        imageView.image = image
    }
}
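If you need to stay on an older setup where the Metal snapshot still fails, the OpenGL fallback the asker mentions can also be selected in code rather than in Interface Builder. A sketch, assuming a programmatically created view on macOS:

import SceneKit

// Request the legacy OpenGL renderer instead of Metal.
let options: [String: Any] = [
    SCNView.Option.preferredRenderingAPI.rawValue: SCNRenderingAPI.openGLLegacy.rawValue
]
let sceneView = SCNView(frame: .zero, options: options)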