let height: Float = 1
let width: Float = 0.5
let box = MeshResource.generateBox(width: 0.02, height: height, depth: width)
This box's position is updated in real time to match the current camera position. In the AR world I have multiple boxes of different shapes, and I want to identify which object is intersecting the real-time box.
I cannot do this with position matching (finding the nearest one); I literally want to know which object is touching/intersecting the real-time box.
Thanks in advance.
You can easily do that using the subscribe() method. The following code is a reference (physics was enabled for both objects in Reality Composer):
import UIKit
import RealityKit
import Combine

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!
    var subscriptions: [Cancellable] = []

    override func viewDidLoad() {
        super.viewDidLoad()

        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)

        let floorEntity = boxScene.children[0].children[1]

        let subscription = arView.scene.subscribe(to: CollisionEvents.Began.self,
                                                  on: floorEntity) { event in
            print("Collision occurred")
            print(event.entityA.name)
            print(event.entityB.name)
        }
        self.subscriptions += [subscription]
    }
}
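If you build the real-time box in code rather than in Reality Composer, collision shapes and the camera anchoring can also be added programmatically. Below is a minimal sketch under that assumption (the box size matches the question; class and variable names are illustrative):

import UIKit
import RealityKit
import Combine

class CollisionViewController: UIViewController {

    @IBOutlet var arView: ARView!
    var subscriptions: [Cancellable] = []

    override func viewDidLoad() {
        super.viewDidLoad()

        // A box that follows the device, because its anchor targets the camera.
        let box = ModelEntity(mesh: .generateBox(width: 0.02, height: 1.0, depth: 0.5))
        box.generateCollisionShapes(recursive: true)    // adds a CollisionComponent

        let cameraAnchor = AnchorEntity(.camera)
        cameraAnchor.addChild(box)
        arView.scene.anchors.append(cameraAnchor)

        // Fires whenever an entity with a collision shape starts touching the box.
        let subscription = arView.scene.subscribe(to: CollisionEvents.Began.self,
                                                  on: box) { event in
            print("\(event.entityA.name) intersects \(event.entityB.name)")
        }
        subscriptions.append(subscription)
    }
}

Note that every other object you want to detect also needs a CollisionComponent, for example via generateCollisionShapes(recursive:).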
How can I create a border/outline on a ModelEntity in RealityKit?
Something like this blue border in Reality Composer:
You can achieve a similar effect in two ways: either using the Metal framework's features, or natively in RealityKit (though sometimes with visual artifacts). In RealityKit, such an outline can be rendered using the faceCulling property on a cloned model:
import UIKit
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        let scene = try! Experience2.loadScene()
        let scene2 = scene.clone(recursive: true)

        let outline = scene2.findEntity(named: "simpBld_root") as! ModelEntity
        outline.scale *= 1.02

        var material = PhysicallyBasedMaterial()
        material.emissiveColor.color = .white
        material.emissiveIntensity = 0.5
        // an outer surface doesn't contribute to the final image
        material.faceCulling = .front
        outline.model?.materials[0] = material

        arView.scene.anchors.append(scene)
        arView.scene.anchors.append(scene2)
    }
}
P.S.
In your case, the name of the rook is:
.findEntity(named: "chess_rook_white_base_iconic_lod0")
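If you aren't loading a Reality Composer scene at all, the same front-face-culling trick can be reproduced entirely in code. A rough sketch, assuming a generated sphere stands in for your model:

import UIKit
import RealityKit

class OutlineViewController: UIViewController {

    @IBOutlet var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // The model to be outlined.
        let sphere = ModelEntity(mesh: .generateSphere(radius: 0.1),
                                 materials: [SimpleMaterial(color: .gray, isMetallic: true)])

        // A slightly enlarged clone rendered inside-out produces the outline.
        let outline = sphere.clone(recursive: true)
        outline.scale *= 1.02

        var material = PhysicallyBasedMaterial()
        material.emissiveColor.color = .white
        material.emissiveIntensity = 0.5
        material.faceCulling = .front    // cull outer faces so only the shell's inside shows
        outline.model?.materials = [material]

        let anchor = AnchorEntity(plane: .horizontal)
        anchor.addChild(sphere)
        anchor.addChild(outline)
        arView.scene.anchors.append(anchor)
    }
}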
I composed a scene in Reality Composer and added 3 objects to it. The problem is that the shadows are too intense (dark).
I tried using the Directional Light in RealityKit from this answer rather than the default light from Reality Composer (since you don't have an option to adjust lighting in it).
Update
I implemented the SpotLight lighting as explained by @AndyFedo in the answer. The shadow is still so dark.
In case you need soft and semi-transparent shadows in your scene, use a SpotLight lighting fixture, which is available when you use the SpotLight class or implement the HasSpotLight protocol. By default, SpotLight is north-oriented. At the moment there's no opacity instance property for shadows in RealityKit.
The outerAngleInDegrees instance property must be no more than 179 degrees.
import RealityKit

class Lighting: Entity, HasSpotLight {

    required init() {
        super.init()
        self.light = SpotLightComponent(color: .yellow,
                                        intensity: 50000,
                                        innerAngleInDegrees: 90,
                                        outerAngleInDegrees: 179,  // greater angle – softer shadows
                                        attenuationRadius: 10)     // can't be zero
    }
}
Then create a shadow instance:
class ViewController: NSViewController {

    @IBOutlet var arView: ARView!

    override func awakeFromNib() {
        arView.environment.background = .color(.black)

        let spotLight = Lighting().light
        let shadow = Lighting().shadow

        let boxAndCurlAnchor = try! Experience.loadBoxAndCurl()
        boxAndCurlAnchor.components.set(shadow!)
        boxAndCurlAnchor.components.set(spotLight)
        arView.scene.anchors.append(boxAndCurlAnchor)
    }
}
Here's an image produced without this line: boxAndCurlAnchor.components.set(shadow!).
Here's an image produced with the following value outerAngleInDegrees = 140:
Here's an image produced with the following value outerAngleInDegrees = 179:
In a room, keep the SpotLight fixture at a height of 2...4 meters from the model.
For bigger objects, you must use higher values for intensity and attenuationRadius:
self.light = SpotLightComponent(color: .white,
                                intensity: 625000,
                                innerAngleInDegrees: 10,
                                outerAngleInDegrees: 120,
                                attenuationRadius: 10000)
Also you can read my STORY about RealityKit lights on Medium.
The shadows appear darker when I use a "Hide" action sequence on "Scene Start" and post a notification to call the "Show" action sequence on a tap gesture.
The shadows were fixed when I scaled the object to 0% and posted a notification to call the "Move, Rotate, Scale to" action sequence on a tap gesture.
(Screenshots: the scaled object, the unhidden object, and the difference between the hidden and scaled actions.)
import UIKit
import RealityKit
import ARKit

class Lighting: Entity, HasDirectionalLight {
    required init() {
        super.init()
        self.light = DirectionalLightComponent(color: .red,
                                               intensity: 1000,
                                               isRealWorldProxy: true)
    }
}

class SpotLight: Entity, HasSpotLight {
    required init() {
        super.init()
        self.light = SpotLightComponent(color: .yellow,
                                        intensity: 50000,
                                        innerAngleInDegrees: 90,
                                        outerAngleInDegrees: 179,  // greater angle – softer shadows
                                        attenuationRadius: 10)     // can't be zero
    }
}

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!

    enum TapObjects {
        case None
        case HiddenChair
        case ScaledChair
    }

    var furnitureAnchor: Furniture._Furniture!
    var tapObjects: TapObjects = .None

    override func viewDidLoad() {
        super.viewDidLoad()
        furnitureAnchor = try! Furniture.load_Furniture()
        arView.scene.anchors.append(furnitureAnchor)
        addTapGesture()
    }

    func addTapGesture() {
        let tapGesture = UITapGestureRecognizer(target: self, action: #selector(onTap))
        arView.addGestureRecognizer(tapGesture)
    }

    @objc func onTap(_ sender: UITapGestureRecognizer) {
        switch tapObjects {
        case .None:
            furnitureAnchor.notifications.unhideChair.post()
            tapObjects = .HiddenChair
        case .HiddenChair:
            furnitureAnchor.notifications.scaleChair.post()
            tapObjects = .ScaledChair
        default:
            break
        }
    }
}
I am not able to figure out how to make the "ball" entity a physics entity/body and apply a force to it.
// I'm using UIKit for the user interface and RealityKit +
// the models made in Reality Composer for the augmented reality and code

import RealityKit
import ARKit

class ViewController: UIViewController {

    var ball: (Entity & HasPhysics)? {
        try? Entity.load(named: "golfball") as? Entity & HasPhysics
    }

    @IBOutlet var arView: ARView!

    // referencing the Play Now button on the home screen
    @IBAction func playNow(_ sender: Any) { }

    // referencing the slider in the AR view - this slider will be used to
    // control the power of the swing. The slider values range from 10% to
    // 100% of swing power with a default value of 55%. The user will have
    // to gain experience in the game to know how much power to use.
    @IBAction func slider(_ sender: Any) { }

    // the following code will fire when the view loads
    override func viewDidLoad() {
        super.viewDidLoad()

        // defining the anchor - it looks for a flat surface 0.3 by 0.3
        // meters, so about a foot by a foot - on this surface, it anchors
        // the golf course and ball when you tap
        let anchor = AnchorEntity(plane: .horizontal, minimumBounds: [0.3, 0.3])

        // placing the anchor in the scene
        arView.scene.addAnchor(anchor)

        // defining my golf course entity - using ModelEntity so it
        // participates in the physics of the scene
        let entity = try? ModelEntity.load(named: "golfarnew")

        // defining the ball entity - again using ModelEntity so it
        // participates in the physics of the scene
        let ball = try? ModelEntity.load(named: "golfball")

        // loading my golf course entity
        anchor.addChild(entity!)

        // loading the golf ball
        anchor.addChild(ball!)

        // applying a force to the ball at the ball's position, with the
        // force relative to the ball (this is the call that doesn't work)
        ball.physicsBody(SIMD3(1.0, 1.0, 1.0), at: ball.position, relativeTo: ball)

        // sounds, add physics body to ball, iPad for shot direction,
        // connect slider to impulse force
    }
}
Use the following code to find out how to implement RealityKit physics.
Pay particular attention: Participates in Physics is ON in Reality Composer.
import ARKit
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        let boxScene = try! Experience.loadBox()
        let secondBoxAnchor = try! Experience.loadBox()

        let boxEntity = boxScene.steelBox as! (Entity & HasPhysics)

        let kinematics: PhysicsBodyComponent = .init(massProperties: .default,
                                                     material: nil,
                                                     mode: .kinematic)

        let motion: PhysicsMotionComponent = .init(linearVelocity: [0.1, 0, 0],
                                                   angularVelocity: [3, 3, 3])

        boxEntity.components.set(kinematics)
        boxEntity.components.set(motion)

        let anchor = AnchorEntity()
        anchor.addChild(boxEntity)

        arView.scene.addAnchor(anchor)
        arView.scene.addAnchor(secondBoxAnchor)

        print(boxEntity.isActive)  // the entity must be active!
    }
}
Also, look at THIS POST to find out how to implement RealityKit's physics with a custom class.
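For the golf-ball question specifically, a .dynamic body plus an impulse is closer to the stated goal than a kinematic one. Here's a hedged sketch, assuming the ball asset from the question loads as a ModelEntity via loadModel(named:):

import RealityKit

func launchBall(in arView: ARView) {
    let anchor = AnchorEntity(plane: .horizontal, minimumBounds: [0.3, 0.3])
    arView.scene.addAnchor(anchor)

    // loadModel (unlike load) returns a ModelEntity, which conforms to HasPhysics.
    guard let ball = try? ModelEntity.loadModel(named: "golfball") else { return }

    // A dynamic body reacts to gravity, forces and impulses.
    ball.generateCollisionShapes(recursive: true)
    ball.physicsBody = PhysicsBodyComponent(massProperties: .default,
                                            material: .default,
                                            mode: .dynamic)
    anchor.addChild(ball)

    // An instantaneous kick; scale this vector with the slider's value.
    // (Impulses take effect once the entity is active in the simulation.)
    ball.applyLinearImpulse([0, 0.5, -1.0], relativeTo: nil)
}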
I have created a very simple scene ("SpeechScene") using Reality Composer, with a single speech callout object ("Speech Bubble") anchored to a Face anchor.
I have loaded this scene into code via the following:
let speechAnchor = try! Experience.loadSpeechScene()
arView.scene.anchors.append(speechAnchor)
let bubble = (arView.scene as? Experience.SpeechScene)?.speechBubble
It renders as expected. However, I would like to dynamically change the text of this existing entity.
I found a similar question here, but it's unclear to me how to refer to the meshResource property of a vanilla RealityKit.Entity object.
Is this possible? Thank you!
First Approach
First you need to find out what the hierarchy of the Reality Composer scene containing the Speech Bubble object looks like. For that I used a simple print() command:
print(textAnchor.swift!.children[0].components.self) /* Bubble Plate */
print(textAnchor.swift!.children[1].components.self) /* Text Object */
Now I can extract a text entity object:
let textEntity: Entity = textAnchor.swift!.children[1].children[0].children[0]
And bubble plate entity object:
let bubbleEntity: Entity = textAnchor.swift!.children[0]
Here's a final code version that you can adapt for your needs:
import UIKit
import RealityKit

class GameViewController: UIViewController {

    @IBOutlet var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        let textAnchor = try! SomeText.loadTextScene()
        let textEntity: Entity = textAnchor.swift!.children[1].children[0].children[0]
        textAnchor.swift!.parent!.scale = [4, 4, 4]  // scale for both objects

        var textModelComp: ModelComponent = (textEntity.components[ModelComponent.self])!

        var material = SimpleMaterial()
        material.baseColor = .color(.red)
        textModelComp.materials[0] = material
        textModelComp.mesh = .generateText("Obj-C",
                                           extrusionDepth: 0.01,
                                           font: .systemFont(ofSize: 0.08),
                                           containerFrame: CGRect(),
                                           alignment: .left,
                                           lineBreakMode: .byCharWrapping)

        textEntity.position = [-0.1, -0.05, 0.01]
        textAnchor.swift!.children[1].children[0].children[0].components.set(textModelComp)
        arView.scene.anchors.append(textAnchor)
    }
}
Second Approach
And you can always use a simpler approach for this case – create several scenes in Reality Composer, each containing a different speech object.
Consider that this code isn't for tracking; it's just a test for dynamically switching two objects using a tap gesture. You then need to adapt this code for face tracking.
import UIKit
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!

    var counter = 0
    var bonjourObject: FaceExperience.Bonjour? = nil
    var holaObject: FaceExperience.Hola? = nil

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        // Reality Composer scene named "Bonjour"
        // model name – "french"
        bonjourObject = try! FaceExperience.loadBonjour()
        bonjourObject?.french?.scale = SIMD3(x: 2, y: 2, z: 2)
        bonjourObject?.french?.position.y = 0.25

        // Reality Composer scene named "Hola"
        // model name – "spanish"
        holaObject = try! FaceExperience.loadHola()
        holaObject?.spanish?.scale = SIMD3(x: 2, y: 2, z: 2)
        holaObject?.spanish?.position.z = 0.3
    }

    @IBAction func tapped(_ sender: UITapGestureRecognizer) {
        if (counter % 2) == 0 {
            arView.scene.anchors.removeAll()
            arView.scene.anchors.append(holaObject!)
        } else {
            arView.scene.anchors.removeAll()
            arView.scene.anchors.append(bonjourObject!)
        }
        counter += 1
    }
}
If you want the text portion to be in the same place, just copy and paste the object from one scene to the other.
@maxxfrazer is correct in his assertion that currently the only way to change text dynamically is to replace the ModelComponent of the Entity, assuming of course it adheres to the HasModel protocol.
I have written a simple extension which can help with this:
import UIKit
import RealityKit

//-------------------------
// MARK: - Entity Extensions
//-------------------------

extension Entity {

    /// Changes the text of an Entity
    /// - Parameter content: String
    func setText(_ content: String) {
        self.components[ModelComponent.self] = self.generatedModelComponent(text: content)
    }

    /// Generates a ModelComponent with the specified text
    /// - Parameter text: String
    func generatedModelComponent(text: String) -> ModelComponent {
        let modelComponent = ModelComponent(
            mesh: .generateText(text,
                                extrusionDepth: TextElements().extrusionDepth,
                                font: TextElements().font,
                                containerFrame: .zero,
                                alignment: .center,
                                lineBreakMode: .byTruncatingTail),
            materials: [SimpleMaterial(color: TextElements().colour, isMetallic: true)]
        )
        return modelComponent
    }
}

//--------------------
// MARK: - Text Elements
//--------------------

/// The base setup of the MeshResource
struct TextElements {
    let initialText = "Cube"
    let extrusionDepth: Float = 0.01
    let font: MeshResource.Font = .systemFont(ofSize: 0.05, weight: .bold)
    let colour: UIColor = .white
}
In order to use it, let's say you create an Entity called textEntity:
var textEntity = Entity()
You can then dynamically change the text at any time, replacing the ModelComponent and setting the MeshResource, by simply calling the following method:
textEntity.setText("Stack Overflow")
Of course, with regard to centering or aligning the text, you will need to do some simple calculations (which I have omitted here; see the sketch below).
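For reference, one way to approximate that centering is to measure the entity's visual bounds after the ModelComponent has been assigned, then shift the entity by half of its extents. A sketch:

// Approximate centering: offset the entity by half of its visual bounds.
// (A sketch; call this after setText(_:) has assigned the ModelComponent.)
func centerText(_ textEntity: Entity) {
    let bounds = textEntity.visualBounds(relativeTo: textEntity.parent)
    textEntity.position.x -= bounds.extents.x / 2
    textEntity.position.y -= bounds.extents.y / 2
}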
Hope it helps.
Find your model entity (maybe by putting a breakpoint and looking through the children initially), find the Entity that conforms to the HasModel protocol, then replace its model with a different one using generateText:
https://developer.apple.com/documentation/realitykit/meshresource/3244422-generatetext
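A minimal sketch of that approach, where "textModel" is a hypothetical entity name standing in for whatever your breakpoint inspection reveals:

import RealityKit

// Replace the mesh of a HasModel descendant with newly generated text.
// ("textModel" is a placeholder name, not something from the original scene.)
func replaceText(in anchor: Entity, with string: String) {
    guard let modelEntity = anchor.findEntity(named: "textModel") as? ModelEntity else { return }
    modelEntity.model?.mesh = .generateText(string,
                                            extrusionDepth: 0.01,
                                            font: .systemFont(ofSize: 0.05),
                                            containerFrame: .zero,
                                            alignment: .center,
                                            lineBreakMode: .byTruncatingTail)
}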
I'm having trouble converting a program from UIKit to SceneKit. The biggest difficulty for me is understanding how the delegate file, Tile, synced with the array, Board, is set up with SceneKit. It is a simple project. A screenshot: http://imgur.com/9hsv7X5. It displays a 3 x 5 array. The user taps an item and it becomes highlighted. Tapping another item highlights it and unhighlights the previous item.
Here is the UIKit project composed of 3 files:
VIEWCONTROLLER
import UIKit

struct BoardLoc {
    var x: Int
    var y: Int
}

class ViewController: UIViewController, TileDelegate {

    var tile: Tile!

    override func viewDidLoad() {
        super.viewDidLoad()
        let scene = Board()
        tile.tileDelegate = self
        tile.board = scene
    }

    func getTileAtLoc(tile: Tile, _ boardLoc: BoardLoc) {
        tile.boardLoc = boardLoc
    }
}
BOARD
import Foundation

class Board {

    var board: Array<Array<String>> = Array(count: 3, repeatedValue: Array(count: 5, repeatedValue: "foo"))

    func putTileAt(boardLoc: BoardLoc) -> String {
        return board[boardLoc.x][boardLoc.y]
    }
}
TILE
import UIKit

protocol TileDelegate {
    func getTileAtLoc(tile: Tile, _ boardLoc: BoardLoc)
}

class Tile: UIView {

    var boardLoc: BoardLoc?
    var board: Board?
    var tileDelegate: TileDelegate?

    required init(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        addGestureRecognizer(UITapGestureRecognizer(target: self, action: "handleTap:"))
    }

    override func drawRect(rect: CGRect) {
        for x in 0...2 {
            for y in 0...4 {
                let context = UIGraphicsGetCurrentContext()
                let red = UIColor.redColor().CGColor
                let orange = UIColor.orangeColor().CGColor
                let bigCircle = CGRectMake(CGFloat(106 * x), CGFloat(106 * y), 106, 106)
                let smallCircle = CGRectMake(CGFloat(106 * x) + 3, CGFloat(106 * y) + 3, 100, 100)
                if (boardLoc != nil && boardLoc!.x == x && boardLoc!.y == y) {
                    CGContextSetFillColorWithColor(context, red)
                    CGContextFillEllipseInRect(context, bigCircle)
                }
                if board!.putTileAt(BoardLoc(x: x, y: y)) == "foo" {
                    CGContextSetFillColorWithColor(context, orange)
                    CGContextFillEllipseInRect(context, smallCircle)
                }
            }
        }
    }

    func handleTap(gestureRecognizer: UIGestureRecognizer) {
        let point = gestureRecognizer.locationInView(self)
        let boardLoc = BoardLoc(x: Int(point.x) / 106, y: Int(point.y) / 106)
        tileDelegate!.getTileAtLoc(self, boardLoc)
        setNeedsDisplay()
    }
}
First of all, I recommend you read the Apple SceneKit documentation and some tutorials.
Scene Kit is a 3D-rendering Objective-C framework that combines a high-performance rendering engine with a high-level, descriptive API. Scene Kit supports the import, manipulation, and rendering of 3D assets without requiring the exact steps to render a scene the way OpenGL does.
http://www.objc.io/issue-18/scenekit.html
https://www.weheartswift.com/introduction-scenekit-part-1/
http://www.raywenderlich.com/83748/beginning-scene-kit-tutorial
http://tacow.org/assets/attachments/SceneKit.pdf
Scene Kit allows you to render a 3D scene easily, without OpenGL ES APIs. However, you should understand how Scene Kit works.
Basically, Scene Kit provides a view controller that maintains an animation loop. This loop follows a design pattern common in games and simulations, with two phases: update and render. In its actual implementation, Scene Kit has more phases, as shown in the figure from http://www.objc.io/issue-18/scenekit.html, but basically there are two phases: update and render.
So to create a Scene Kit project, the basics are:
Prepare an SCNView
Initialize the 3D scene
Create a touch event handler
Implement the update phase: update the game board using the touched object or the touched position, update the animation of the objects, and so on (a minimal delegate sketch follows this list)
Implement the render phase: basically, Scene Kit automatically renders the registered 3D objects and models
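If you want an explicit hook into the update phase described above, SCNSceneRendererDelegate provides per-frame callbacks. A minimal sketch in current Swift (the class name is illustrative):

import SceneKit
import UIKit

class PhaseDemoViewController: UIViewController, SCNSceneRendererDelegate {

    override func viewDidLoad() {
        super.viewDidLoad()
        let scnView = self.view as! SCNView
        scnView.delegate = self     // receive the animation-loop callbacks
        scnView.isPlaying = true    // keep the render loop running
    }

    // Update phase: called once per frame, before actions and physics are applied.
    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        // Move pieces, update the board state, and so on.
    }
}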
Thus, you should implement it as follows:
Use an SCNView instead of your custom UIView
Create a scene
Place the Board and Tiles as Scene Kit 3D objects
Use hitTest for touching a Tile, and update the Tiles in the update phase
import SceneKit

class GameViewController: UIViewController {

    struct BoardLoc {
        var x: Int
        var y: Int
    }

    enum Type {
        case Yellow
        case Orange
    }

    var boardArray: Array<Array<Type>> = []

    override func viewDidLoad() {
        super.viewDidLoad()

        for x in 0...2 {
            boardArray.append(Array(count: 5, repeatedValue: Type.Orange))
            for y in 0...4 {
                boardArray[x][y] = Type.Orange
            }
        }

        let scene = SCNScene(named: "art.scnassets/balls8.dae")
        let scnView = self.view as SCNView
        scnView.scene = scene
        scnView.autoenablesDefaultLighting = true

        let taps = NSMutableArray()
        let tap = UITapGestureRecognizer(target: self, action: "handleTap:")
        taps.addObject(tap)
        scnView.gestureRecognizers = taps
    }

    func handleTap(gestureRecognizer: UIGestureRecognizer) {
        let scnView = view as SCNView
        let point = gestureRecognizer.locationInView(scnView)
        if let hitResults = scnView.hitTest(point, options: nil) {
            if hitResults.count > 0 {
                let result: AnyObject! = hitResults[0]
                if !result.node!.name!.hasPrefix("Orange") {
                    return
                }
                let tapLoc = BoardLoc(x: Int(point.x) / 106, y: Int(point.y) / 106)
                boardArray[tapLoc.x][tapLoc.y] = Type.Yellow

                for col in 0...2 {
                    for row in 0...4 {
                        var yellowBall = scnView.scene!.rootNode.childNodeWithName("Yellow", recursively: true)
                        var sectorX = Float(col) * 16.5 - 16
                        var sectorY = 34 - (Float(row) * 16.5)
                        if boardArray[col][row] == Type.Yellow {
                            yellowBall!.runAction(SCNAction.moveTo(SCNVector3(x: sectorX, y: sectorY, z: 25), duration: 0.01))
                            boardArray[tapLoc.x][tapLoc.y] = Type.Orange
                        }
                    }
                }
            }
        }
    }
}
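One possible refinement (a sketch, not part of the original answer): rather than dividing the screen point by a hard-coded 106, the board location can be derived from the tapped node itself by inverting the sector math used above (sectorX = col * 16.5 - 16, sectorY = 34 - row * 16.5):

// Inside GameViewController: map a tapped node back to a board location
// from its scene position, instead of from screen coordinates.
func boardLoc(for node: SCNNode) -> BoardLoc {
    let col = Int(((node.position.x + 16) / 16.5).rounded())
    let row = Int(((34 - node.position.y) / 16.5).rounded())
    return BoardLoc(x: col, y: row)
}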