How to show a texture inside a USDZ model's frame? - swift

I'm using the code below to show an image in a .usdz picture-frame model:
// `texture` is a previously created TextureResource
var imageMaterial = UnlitMaterial()
imageMaterial.color = .init(tint: .white, texture: .init(texture))
modelEntity.model?.materials = [imageMaterial]
anchorEntity.addChild(modelEntity)
The issue is that the image shows up on the frame's border as well; we need it to appear only inside the frame, not on the border.
Frame.usdz file:
Frame after adding image:

If the frame and its canvas are a single object (a so-called mono-model), then you cannot apply a texture to just the surface of the canvas. You need the canvas to be a separate part. The simplest solution in this case would be to create a Plane primitive and apply the texture to it, then transform the plane as needed and attach it to the same anchor.
import UIKit
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // The frame model stays untouched
        let frameEntity = try! Entity.loadModel(named: "frame.usdz")

        // A separate plane acts as the canvas
        let canvas: MeshResource = .generatePlane(width: 0.3, height: 0.4)
        var material = SimpleMaterial()
        material.color = try! .init(texture: .init(.load(named: "canvas")))
        let canvasEntity = ModelEntity(mesh: canvas, materials: [material])

        // Fit the plane inside the frame's opening
        canvasEntity.position = [0.3, 0.1, 0.0]
        canvasEntity.scale *= 1.25

        let anchorEntity = AnchorEntity()
        anchorEntity.addChild(frameEntity)
        anchorEntity.addChild(canvasEntity)
        arView.scene.anchors.append(anchorEntity)
    }
}
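If your USDZ turns out not to be a mono-model, i.e. the canvas is a separately named part, you could texture just that part instead. A sketch, assuming a hypothetical part name "Canvas" (check your asset's actual hierarchy):

// Entity.load (unlike loadModel) preserves the entity hierarchy
let frame = try! Entity.load(named: "frame.usdz")

if let canvasPart = frame.findEntity(named: "Canvas") as? ModelEntity {
    var imageMaterial = UnlitMaterial()
    imageMaterial.color = try! .init(tint: .white,
                                     texture: .init(.load(named: "canvas")))
    canvasPart.model?.materials = [imageMaterial]
}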

Related

Implement a crosshair kind behaviour in RealityKit

What I want to achieve: attach a sphere to the camera position (so that it always stays at the center of the screen as the device moves) and detect when it is on top of other AR objects, to trigger actions/behaviour on those AR objects.
Approach: I have created the sphere and attached it to the center of the screen as shown below:
@IBOutlet var arView: ARView!

override func viewDidLoad() {
    super.viewDidLoad()
    let mesh = MeshResource.generateSphere(radius: 0.1)
    let sphere = ModelEntity(mesh: mesh)
    let anchor = AnchorEntity(.camera)
    sphere.setParent(anchor)
    arView.scene.addAnchor(anchor)
    sphere.transform.translation.z = -0.75
}
Next step: perform a hit-test or a raycast in session(_:didUpdate:):
// Normalized center: the 2D position of the camera (our sphere)
// in the view's coordinate system
let results = arView.hitTest(CGPoint(x: 0.5, y: 0.5), query: .all, mask: .default)
But I am constantly getting the ground plane as my result with this approach. Is there something I am missing, or is there a different approach to achieving this?
Note: just in case something is wrong with my basic scene: I want to track an image and add content on top of the image marker, so I created the scene in Reality Composer and use the .rcproject in Xcode; I have also enabled the collision property for all the overlaid items.
Try the following solution:
import ARKit
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!
    var sphere: ModelEntity?

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        // Hit-test from the screen center (in points, not normalized units)
        let touch = arView.center
        let results: [CollisionCastHit] = arView.hitTest(touch)

        if let result: CollisionCastHit = results.first {
            if result.entity.name == "Cube" && sphere?.isAnchored == true {
                print("BOOM!")
            }
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        // Crosshair: a small sphere pinned to the camera
        let mesh01 = MeshResource.generateSphere(radius: 0.01)
        sphere = ModelEntity(mesh: mesh01)
        sphere?.transform.translation.z = -0.15
        let cameraAnchor = AnchorEntity(.camera)
        sphere?.setParent(cameraAnchor)
        arView.scene.addAnchor(cameraAnchor)

        // Model for collision: a box placed on a detected plane
        let mesh02 = MeshResource.generateBox(size: 0.3)
        let box = ModelEntity(mesh: mesh02, materials: [SimpleMaterial()])
        box.generateCollisionShapes(recursive: true)
        box.name = "Cube"
        let planeAnchor = AnchorEntity(.plane(.any,
                                    classification: .any,
                                     minimumBounds: [0.2, 0.2]))
        box.setParent(planeAnchor)
        arView.scene.addAnchor(planeAnchor)
    }
}
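If you'd rather run the check continuously from session(_:didUpdate:), as in the question, the same collision-based hit-test works from the session delegate. A minimal sketch, assuming arView.session.delegate = self is set in viewDidLoad:

extension ViewController: ARSessionDelegate {

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // Use the view's center in screen points, not normalized coordinates
        let results: [CollisionCastHit] = arView.hitTest(arView.center)
        if results.first?.entity.name == "Cube" {
            print("Crosshair is over the cube")
        }
    }
}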

RealityKit – Material's Alpha transparency

Is it possible to have alpha transparency with textures?
I have a .png file that contains 8-bit RGBA, but for some reason the supposed-to-be-transparent parts are simply black.
I assign the material like this:
private func setupLightMeshes(_ scene: Entity) {
    let lightEntity = scene.findEntity(named: "LightWindow_Plane")!
    var lightMaterial = UnlitMaterial()
    lightMaterial.baseColor = try! MaterialColorParameter.texture(
        TextureResource.load(named: "light.png"))    // this is 8bpc RGBA
    var modelComponent = lightEntity.components[ModelComponent.self]!
    modelComponent = ModelComponent(mesh: modelComponent.mesh,
                               materials: [lightMaterial])
    lightEntity.components.set(modelComponent)
}
RealityKit 1.0
.tintColor is a multiplier for .baseColor
If you have a .png file with premultiplied alpha (RGB * A), all you need to do is additionally use the tintColor instance property with an alpha equal to 0.9999.
material.tintColor = UIColor(white: 1.0, alpha: 0.9999)
Here's how it looks in real code:
fileprivate func material() -> UnlitMaterial {
    var material = UnlitMaterial()
    material.baseColor = try! .texture(.load(named: "transparent.png"))
    material.tintColor = UIColor(white: 1.0, alpha: 0.9999)
    return material
}
override func viewDidLoad() {
    super.viewDidLoad()

    let sphere: MeshResource = .generateSphere(radius: 0.5)
    let entity = ModelEntity(mesh: sphere, materials: [material()])

    let anchor = AnchorEntity()
    anchor.orientation = simd_quatf(angle: .pi, axis: [0, 1, 0])
    anchor.addChild(entity)
    arView.scene.anchors.append(anchor)
}
P.S.
For me, it seems like a bug in RealityKit 1.0. I have no clue why the .load(named: "file.png") method doesn't work as expected.
RealityKit 2.0
The same story applies to partially transparent textures in RealityKit 2.0:
var material = SimpleMaterial()
material.color = try! .init(tint: .white.withAlphaComponent(0.9999),
                         texture: .init(.load(named: "semi.png", in: nil)))
The tint parameter is a multiplier for the texture as well.
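For completeness, a minimal sketch applying the RealityKit 2.0 version to an entity (assuming a semi.png with premultiplied alpha in the bundle, and an arView outlet as in the RealityKit 1.0 example above):

override func viewDidLoad() {
    super.viewDidLoad()

    var material = SimpleMaterial()
    // The sub-unit alpha of the tint activates alpha blending for the texture
    material.color = try! .init(tint: .white.withAlphaComponent(0.9999),
                             texture: .init(.load(named: "semi.png", in: nil)))

    let plane = ModelEntity(mesh: .generatePlane(width: 0.5, height: 0.5),
                       materials: [material])

    let anchor = AnchorEntity(world: [0, 0, -1])
    anchor.addChild(plane)
    arView.scene.anchors.append(anchor)
}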

Apply GIF to SCNPlane

Right now my Swift code creates an SCNPlane with an image called we.jpg. What I would like to do is replace we.jpg with ball.gif. material.diffuse.contents is where the UIImage is attached to the SCNPlane.
let planeGeometry = SCNPlane(width: 0.2, height: 0.35)
let material = SCNMaterial()
material.diffuse.contents = UIImage(named: "we.jpg")
planeGeometry.materials = [material]
You can easily replace an SCNPlane's .jpg texture with a .gif texture. But remember that SceneKit has no built-in support for animated GIF textures (a frame-by-frame workaround is sketched after the code below).
Here's the code:
import ARKit
import SceneKit

class ViewController: UIViewController {

    @IBOutlet var sceneView: ARSCNView!   // in case you're using an AR app
    // @IBOutlet var sceneView: SCNView!  // in case you're using a VR app

    let planeNode = SCNNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        let scene = SCNScene()
        sceneView.scene = scene
        sceneView.allowsCameraControl = true

        planeNode.geometry = SCNPlane(width: 0.2, height: 0.35)
        planeNode.position = SCNVector3(0, 0, -0.5)
        sceneView.scene?.rootNode.addChildNode(planeNode)

        // Replace the texture after a 3-second delay
        DispatchQueue.main.asyncAfter(deadline: .now() + 3.0) {
            let image = UIImage(named: "texture.jpg")
            self.planeNode.geometry?.firstMaterial?.diffuse.contents = image
        }
        // Other stuff...
    }

    // ...or replace the texture on a button click:
    @IBAction func replaceTexture(_ sender: UIButton) {
        let image = UIImage(named: "texture.gif")
        planeNode.geometry?.firstMaterial?.diffuse.contents = image
    }
}
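Since SceneKit won't advance GIF frames for you, one common workaround is to extract the frames with ImageIO and swap them onto the material yourself. A sketch: animateGIF(named:on:) is a hypothetical helper, and a fixed 10 fps is assumed instead of the GIF's real per-frame delays.

import SceneKit
import ImageIO

// Extracts every frame of a bundled GIF and cycles them on the material
func animateGIF(named name: String, on material: SCNMaterial) {
    guard let url = Bundle.main.url(forResource: name, withExtension: "gif"),
          let source = CGImageSourceCreateWithURL(url as CFURL, nil)
    else { return }

    var frames: [CGImage] = []
    for i in 0 ..< CGImageSourceGetCount(source) {
        if let frame = CGImageSourceCreateImageAtIndex(source, i, nil) {
            frames.append(frame)
        }
    }
    guard !frames.isEmpty else { return }

    var index = 0
    Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
        material.diffuse.contents = frames[index]
        index = (index + 1) % frames.count
    }
}

Calling animateGIF(named: "ball", on: planeNode.geometry!.firstMaterial!) would then cycle the frames on the plane.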

Combining 3D and 2D in macOS

I need to draw 2D legends, a 2D frame, etc. on top of a 3D scene. Here I am trying to display a 2D label next to a 3D sphere, so that the label always faces the user, even if they rotate the scene.
I am using an SCNView to draw the 3D scene (=> works fine)
I am using its NSView parent (subclassed to override its draw function) to display the 2D text (=> nothing is displayed)
(var twoDCoordinates: CGPoint is a global)
In my view controller:
@IBOutlet weak var sceneView: SCNView!

func sceneSetup() {
    let scene = SCNScene()
    // (I set the light and the camera...)

    // Adding the sphere
    let geometry = SCNSphere(radius: 4.0)
    geometry.firstMaterial!.diffuse.contents = CGColor.red
    geometry.firstMaterial!.specular.contents = CGColor.white
    sphereNode = SCNNode(geometry: geometry)
    sphereNode.position = SCNVector3Make(-6, 0, 0)
    scene.rootNode.addChildNode(sphereNode)

    sceneView.scene = scene
    sceneView.allowsCameraControl = true

    // Compute the 2D coordinates of the text to be displayed
    var q = sphereNode.position
    q.x += 6.0; q.y += 6.0; q.z += 6.0   // outside of the sphere
    let projected = sceneView.projectPoint(q)
    twoDCoordinates = CGPoint(x: projected.x, y: projected.y)

    sceneView.needsDisplay = true
}
=> The 3D sphere is correctly displayed; twoDCoordinates is correct
In the TwoDView: NSView class (which is the sceneView container):
override func draw(_ dirtyRect: NSRect) {
    super.draw(dirtyRect)

    // Drawing code here.
    let paragraphStyle = NSMutableParagraphStyle()
    paragraphStyle.lineBreakMode = .byTruncatingTail
    paragraphStyle.alignment = .left
    let textAttributes = [
        NSAttributedString.Key.font: NSFont.systemFont(ofSize: 72.0),
        NSAttributedString.Key.paragraphStyle: paragraphStyle]

    let label = "Sphere Label"
    let size = label.size(withAttributes: textAttributes)
    let rect = CGRect(x: twoDCoordinates.x, y: twoDCoordinates.y,
                  width: size.width, height: size.height)
    label.draw(in: rect, withAttributes: textAttributes)
}
=> The draw function is called, but no text is displayed.
I expect the text "Sphere Label" to be displayed next to the 3D sphere, but only the 3D scene shows up, even though draw is called.
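One likely explanation, assuming the hierarchy described above: an NSView draws beneath its subviews, so the SCNView is composited on top of whatever its parent paints, hiding the label. A sketch of the usual workaround: draw the text in a transparent view placed above the SCNView instead. OverlayView is a hypothetical name, and note that text without an explicit foregroundColor draws in black, which is easy to miss against a dark scene.

import Cocoa

// Transparent overlay placed ABOVE the SCNView in the view hierarchy,
// so its drawing is composited over the 3D scene
class OverlayView: NSView {

    var twoDCoordinates: CGPoint = .zero

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
        let textAttributes: [NSAttributedString.Key: Any] = [
            .font: NSFont.systemFont(ofSize: 72.0),
            .foregroundColor: NSColor.white   // explicit color
        ]
        ("Sphere Label" as NSString).draw(at: twoDCoordinates,
                              withAttributes: textAttributes)
    }
}

Add the overlay as a sibling of the SCNView (after it in the hierarchy) and set overlay.needsDisplay = true whenever twoDCoordinates changes.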

How to Add Material to ModelEntity programmatically in RealityKit?

The docs for RealityKit include the structs: OcclusionMaterial, SimpleMaterial, and UnlitMaterial for adding materials to a ModelEntity.
Alternatively you can load in a model with a material attached to it.
I want to add a custom material/texture to a ModelEntity programmatically. How can I achieve this on the fly, without adding the material to a model in Reality Composer or some other 3D software?
Updated: January 26, 2023
RealityKit materials
There are six types of materials in RealityKit 2.0 and RealityFoundation at the moment:
SimpleMaterial
UnlitMaterial
OcclusionMaterial (read this post to find out how to set up a SceneKit occlusion shader)
VideoMaterial (look at this post to find out how to set it up; a short sketch also follows this list)
PhysicallyBasedMaterial
CustomMaterial (Medium story)
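The code below covers SimpleMaterial and PhysicallyBasedMaterial; for the VideoMaterial case, here is a minimal sketch (assuming a hypothetical loop.mp4 bundled with the app):

import AVFoundation
import RealityKit

func makeVideoPlane() -> ModelEntity? {
    guard let url = Bundle.main.url(forResource: "loop", withExtension: "mp4")
    else { return nil }

    let player = AVPlayer(url: url)
    let material = VideoMaterial(avPlayer: player)
    let plane = ModelEntity(mesh: .generatePlane(width: 0.64, height: 0.36),
                       materials: [material])
    player.play()    // the texture animates while the player is playing
    return plane
}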
SwiftUI version
Here I use two macOS implementations (SwiftUI and Cocoa) to demonstrate how to programmatically assign RealityKit materials.
import SwiftUI
import RealityKit

struct VRContainer: NSViewRepresentable {

    let arView = ARView(frame: .zero)
    let anchor = AnchorEntity()

    func makeNSView(context: Context) -> ARView {
        var smpl = SimpleMaterial()
        smpl.color.tint = .blue
        smpl.metallic = 0.7
        smpl.roughness = 0.2

        var pbr = PhysicallyBasedMaterial()
        pbr.baseColor.tint = .green

        let mesh: MeshResource = .generateBox(width: 0.5,
                                             height: 0.5,
                                              depth: 0.5,
                                       cornerRadius: 0.02,
                                         splitFaces: true)

        let box = ModelEntity(mesh: mesh, materials: [smpl, pbr])
        box.orientation = Transform(pitch: .pi/4,
                                      yaw: .pi/4,
                                     roll: 0.0).rotation
        anchor.addChild(box)
        arView.scene.anchors.append(anchor)
        arView.environment.background = .color(.black)
        return arView
    }

    func updateNSView(_ view: ARView, context: Context) { }
}

struct ContentView: View {
    var body: some View {
        VRContainer().ignoresSafeArea()
    }
}
Cocoa version
import Cocoa
import RealityKit

class ViewController: NSViewController {

    @IBOutlet var arView: ARView!

    override func awakeFromNib() {
        let box = try! Experience.loadBox()

        var simpleMat = SimpleMaterial()
        simpleMat.color = .init(tint: .blue, texture: nil)
        simpleMat.metallic = .init(floatLiteral: 0.7)
        simpleMat.roughness = .init(floatLiteral: 0.2)

        var pbr = PhysicallyBasedMaterial()
        pbr.baseColor = .init(tint: .green, texture: nil)

        let mesh: MeshResource = .generateBox(width: 0.5,
                                             height: 0.5,
                                              depth: 0.5,
                                       cornerRadius: 0.02,
                                         splitFaces: true)

        let boxComponent = ModelComponent(mesh: mesh,
                                     materials: [simpleMat, pbr])

        box.steelBox?.children[0].components.set(boxComponent)
        box.steelBox?.orientation = Transform(pitch: .pi/4,
                                                yaw: .pi/4,
                                               roll: 0).rotation
        arView.scene.anchors.append(box)
    }
}
Read this post to find out how to load a texture for RealityKit's shaders.
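As a quick illustration, a sketch of loading a texture into a PhysicallyBasedMaterial slot (assuming an image asset named "wood" in the bundle):

var pbr = PhysicallyBasedMaterial()
// try! for brevity; .load(named:) throws if the asset is missing
pbr.baseColor = .init(tint: .white,
                   texture: .init(try! .load(named: "wood")))
pbr.roughness = .init(floatLiteral: 0.8)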
RealityKit shaders vs SceneKit shaders
We know that SceneKit has five different shading models, so we can use RealityKit's SimpleMaterial, PhysicallyBasedMaterial and UnlitMaterial to reproduce all five shaders we've grown accustomed to.
Let's see how that looks:
SCNMaterial.LightingModel.blinn   – SimpleMaterial(color: .gray,
                                               roughness: .float(0.5),
                                              isMetallic: false)

SCNMaterial.LightingModel.lambert – SimpleMaterial(color: .gray,
                                               roughness: .float(1.0),
                                              isMetallic: false)

SCNMaterial.LightingModel.phong   – SimpleMaterial(color: .gray,
                                               roughness: .float(0.0),
                                              isMetallic: false)

SCNMaterial.LightingModel.physicallyBased – PhysicallyBasedMaterial()

// These three shaders (.constant, UnlitMaterial and VideoMaterial)
// don't depend on lighting:
SCNMaterial.LightingModel.constant – UnlitMaterial(color: .gray)
                                   – VideoMaterial(avPlayer: avPlayer)
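To try the mapping out, here's a minimal sketch that lines up the three SimpleMaterial variants side by side (sphere sizes and positions are illustrative):

import RealityKit

func makeShaderComparisonAnchor() -> AnchorEntity {
    let anchor = AnchorEntity()
    // Roughness values from the mapping above: blinn 0.5, lambert 1.0, phong 0.0
    let roughnesses: [Float] = [0.5, 1.0, 0.0]

    for (i, value) in roughnesses.enumerated() {
        let material = SimpleMaterial(color: .gray,
                                  roughness: .float(value),
                                 isMetallic: false)
        let sphere = ModelEntity(mesh: .generateSphere(radius: 0.1),
                            materials: [material])
        sphere.position.x = Float(i) * 0.25 - 0.25   // spread along X
        anchor.addChild(sphere)
    }
    return anchor
}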