The ARViewContainer has a coordinator, created by makeCoordinator() -> Coordinator. A print("makeCoordinator()") in that method shows that it is only called once; it is the first print in the debug console. I also have a print("init") in the initializer of ARViewContainer, and according to the debug console that one is called many times.
Why is the ARViewContainer created many times (which I expect, since ContentView calls ARViewContainer() in its body), but the coordinator made only once?
import ARKit
import RealityKit
import SwiftUI

struct ARViewContainer: UIViewRepresentable {

    init() {
        print("init")
    }

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        let config = ARWorldTrackingConfiguration()
        config.planeDetection = [.horizontal, .vertical]
        config.environmentTexturing = .automatic
        if ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh) {
            config.sceneReconstruction = .mesh
        }
        arView.session.run(config)

        arView.debugOptions = [.showWorldOrigin, .showSceneUnderstanding]
        arView.session.delegate = context.coordinator
        return arView
    }

    func makeCoordinator() -> Coordinator {
        print("makeCoordinator()")
        return Coordinator()
    }

    func updateUIView(_ uiView: ARView, context: Context) { }

    // Minimal stub; the real Coordinator is elided in the question.
    class Coordinator: NSObject, ARSessionDelegate { }
}
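This is documented SwiftUI behavior rather than a bug: the representable struct is a plain value that SwiftUI recreates on every evaluation of the parent's body, while makeCoordinator() and makeUIView(context:) run only once, when the view is first instantiated; the coordinator is then kept alive in context for the lifetime of the view. A minimal sketch to observe this, replacing the empty updateUIView above with a logging version:

func updateUIView(_ uiView: ARView, context: Context) {
    // Fires on every SwiftUI update; the coordinator identity never changes.
    print("updateUIView, coordinator:", ObjectIdentifier(context.coordinator))
}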
Related
I have an rcproject file with about 12 scenes (500 MB or so). To lessen the load on iOS devices, I tried breaking it apart into separate rcproject files and switching the scene using notification triggers. However, when I do this and add the new scene as a child of the main anchor, the new scene renders in a new spot, breaking the AR experience. There must be a way to add the new scenes at the exact same anchor/position. Alternatively, is there a better way than separating the rcproject to lessen the load on RAM?
Here is my ARView
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        // The experience consists of a "Base" that acts as a permanent
        // platform for all the scenes to be rendered on.
        let baseAnchor = try! Base.loadIntro()
        let introAnchor = try! IntroSceneOM.loadIntro()
        introAnchor.actions.changeStoriesWithTrigger.onAction = loadStories

        arView.scene.anchors.append(baseAnchor)
        arView.scene.anchors.append(introAnchor)

        func loadStories(_ entity: Entity?) -> Void {
            arView.scene.anchors.remove(introAnchor)
            let storiesAnchor = try! StoriesSceneOM.loadStoriesScene()
            baseAnchor.addChild(storiesAnchor)
        }
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }
}
EDIT:
I recreated the project using an implementation of Andy Jazz's code:
func makeUIView(context: Context) -> ARView {
    let arView = ARView(frame: .zero)

    let scene01 = try! Experience.loadBoxScene()
    let scene02 = try! Experience.loadBallScene()
    let scene03 = try! Experience.loadFloppyScene()   // Base scene

    let anchor = AnchorEntity(.plane(.horizontal, classification: .any,
                                     minimumBounds: [0.1, 0.1]))
    scene01.actions.boxTapped.onAction = loadScene02
    scene02.actions.ballTapped.onAction = loadScene01

    anchor.addChild(scene01)
    anchor.addChild(scene03)
    arView.scene.anchors.append(anchor)

    func loadScene02(_ entity: Entity?) -> Void {
        scene01.removeFromParent()
        anchor.addChild(scene02)
    }
    func loadScene01(_ entity: Entity?) -> Void {
        scene02.removeFromParent()
        anchor.addChild(scene01)
    }
    return arView
}
However I still get the same issue where the anchor moves each time a new scene is added.
The code is quite simple, but the issue of loading scenes with a large number of polygons remains unresolved. At most, the current scene should contain no more than 100K polygons; ideally they should stay within 50...70K. Texture resolution should not exceed 2K.
import UIKit
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!
    @IBOutlet var label: UILabel!

    var anchor = AnchorEntity()
    let scene01 = try! Experience.loadBox()
    let scene02 = try! Experience.loadBall()
    var cube = ModelEntity()
    var sphere = ModelEntity()

    override func viewDidLoad() {
        super.viewDidLoad()

        self.cube = scene01.steelBox?.children[0] as! ModelEntity
        self.sphere = scene02.ball?.children[0] as! ModelEntity

        self.anchor = AnchorEntity(.plane(.horizontal, classification: .any,
                                          minimumBounds: [0.1, 0.1]))
        self.anchor.addChild(cube)
        arView.scene.anchors.append(anchor)

        DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
            self.label.text = String(describing: self.anchor.id)
        }
    }

    @IBAction func pressed(_ sender: UIButton) {
        self.cube.removeFromParent()
        self.anchor.addChild(sphere)
        self.label.text = String(describing: self.anchor.id)
    }
}
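On the RAM side of the question: instead of loading every scene up front with try!, heavy scenes can be loaded lazily off the main thread and attached to the same persistent anchor, which keeps the placement stable. A hedged sketch using RealityKit's Combine-based loader; "Stories" is a hypothetical resource name, not one from the projects above:

import Combine
import RealityKit

var loadCancellable: AnyCancellable?   // must be retained while loading

func loadStoriesAsync(into anchor: AnchorEntity) {
    loadCancellable = Entity.loadAsync(named: "Stories")
        .receive(on: DispatchQueue.main)
        .sink(receiveCompletion: { completion in
            if case .failure(let error) = completion { print("Load failed:", error) }
        }, receiveValue: { scene in
            anchor.addChild(scene)   // same parent anchor, so the position stays put
        })
}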
Hi, I'm trying to build an AR face-tracking project, but I get an error when assigning a face anchor to an AnchorEntity. The error message is "No exact matches in call to initializer". I've tried different ways, but nothing worked. I'm a new Swift learner; could anyone help me with this? Thanks.
func makeCoordinator() -> Coordinator {
    Coordinator(self)
}

class Coordinator: NSObject, ARSessionDelegate {
    var parent: ARViewContainer
    var faceAnchorEntity: AnchorEntity
    var arView: ARView

    init(_ parent: ARViewContainer) {
        self.parent = parent
        self.faceAnchorEntity = AnchorEntity()
        self.arView = ARView()
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let faceAnchor = anchors[0] as? ARFaceAnchor else { return }
        parent.viewModel.vertices = faceAnchor.geometry.vertices
        faceAnchorEntity = AnchorEntity(anchor: faceAnchor)   // the line that errors
        arView.scene.addAnchor(faceAnchorEntity)              // add the entity, not the ARAnchor
    }
}
You can use RealityKit's native .face target, which is much easier to implement.
import SwiftUI
import RealityKit
import ARKit

func makeUIView(context: Context) -> ARView {
    let arView = ARView(frame: .zero)
    let ball = ModelEntity(mesh: .generateSphere(radius: 0.07))

    arView.session.run(ARFaceTrackingConfiguration())

    let anchor = AnchorEntity(.face)
    anchor.position.y += 0.04
    anchor.addChild(ball)
    arView.scene.addAnchor(anchor)
    return arView
}
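If you still need the raw face-mesh vertices from the question, you can keep a session delegate alongside the .face anchor. A minimal sketch (FaceMeshDelegate is a hypothetical name; an instance must be retained and assigned to arView.session.delegate):

class FaceMeshDelegate: NSObject, ARSessionDelegate {
    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        guard let faceAnchor = anchors.compactMap({ $0 as? ARFaceAnchor }).first else { return }
        print(faceAnchor.geometry.vertices.count)   // the raw vertices the question asked about
    }
}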
I'm simply trying to print "Hello" when I tap an object in the Reality file I made in Reality Composer, but I'm not able to set up the link between the Notify action and in-app actions.
import SwiftUI
import RealityKit

struct ContentView: View {
    var body: some View {
        ZStack {
            ARViewContainer()
            Text("Level 1")
        }
    }
}

struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        let yellowEntity = try! ModelEntity.load(named: "Yellow")
        let anchorEntity = AnchorEntity(plane: .horizontal)
        anchorEntity.addChild(yellowEntity)
        arView.scene.addAnchor(anchorEntity)

        yellowEntity.actions.Yellowtapped.onAction = handleTap(_:)

        func handleTap(_ entity: Entity?) {
            guard entity != nil else { return }
            print("Hello")
        }
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }
}
If you use an .rcproject, everything works perfectly:
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        let scene = try! Experience.loadBox()

        scene.actions.notifier.onAction = printer

        let anchor = AnchorEntity(plane: .horizontal)
        anchor.addChild(scene)
        arView.scene.addAnchor(anchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }

    func printer(_ entity: Entity?) -> Void { print("Hello") }
}
P.S. Do not forget to merge the two actions together (in Reality Composer).
I'm really stuck on this. I'm trying to pass the ARView from makeUIView to makeCoordinator, because I need the ARView inside of @objc func handleTap.
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let myARView = ARView(frame: .zero)
        // ...config and things...
        let tapGesture = UITapGestureRecognizer(target: context.coordinator,
                            action: #selector(context.coordinator.handleTap(_:)))
        myARView.addGestureRecognizer(tapGesture)
        return myARView
    }

    func makeCoordinator() -> Coordinator {
        Coordinator("whatshouldiusehere", self.$focusObject, self.$focusName)
    }

    class Coordinator: NSObject {
        private let view: ARView
        private var object: Binding<Entity?>
        private var objectname: Binding<String?>

        init(_ view: ARView, _ obj: Binding<Entity?>, _ objname: Binding<String?>) {
            self.objectname = objname
            self.object = obj
            self.view = view
            super.init()
        }

        @objc func handleTap(_ sender: UIGestureRecognizer? = nil) {
            guard let touchInView = sender?.location(in: view) else { return }
            guard let hitEntity = view.entity(at: touchInView) else { return }
            // doing something with the object here, assigning to a @Binding for example
        }
    }
}
I can't move myARView = ARView(frame: .zero) outside of makeUIView, because I'm using SwiftUI and the struct is re-initialized every time its variables change.
But how can I pass it across?
Or is there any other option to access both the Binding and the ARView at the same time?
A coordinator is available via context, so you can inject the view into it via a property, like this:
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let myARView = ARView(frame: .zero)
        // ...config and things...
        let tapGesture = UITapGestureRecognizer(target: context.coordinator,
                            action: #selector(context.coordinator.handleTap(_:)))
        myARView.addGestureRecognizer(tapGesture)
        context.coordinator.view = myARView   // << inject here !!
        return myARView
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(self.$focusObject, self.$focusName)
    }

    class Coordinator: NSObject {
        var view: ARView?   // << optional initially
        private var object: Binding<Entity?>
        private var objectname: Binding<String?>

        init(_ obj: Binding<Entity?>, _ objname: Binding<String?>) {
            self.objectname = objname
            self.object = obj
            super.init()
        }

        // ... other code updated accordingly
    }
}
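For completeness, a hedged sketch of what the updated handleTap might look like once the view is optional (the bindings are the ones from the question):

@objc func handleTap(_ sender: UIGestureRecognizer? = nil) {
    guard let view = view,                                 // injected in makeUIView
          let touchInView = sender?.location(in: view),
          let hitEntity = view.entity(at: touchInView) else { return }
    object.wrappedValue = hitEntity                        // write back through the bindings
    objectname.wrappedValue = hitEntity.name
}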
I am trying to update the globalDataTransfer class from the ARView extension and reflect the changes in my view. Below is the globalDataTransfer class:
class globalDataTransfer: ObservableObject {
    @Published var val: String = "No Value"

    required init(any: String) {
        self.val = any
    }

    func check() -> String {
        return self.val
    }
}
My ARViewContainer
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: UIViewRepresentableContext<ARViewContainer>) -> ARView {
        let arView = ARView(frame: .zero, cameraMode: .ar,
                            automaticallyConfigureSession: true)
        arView.setupForBodyTracking()
        arView.scene.addAnchor(bodySkeletonAnchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }

    typealias UIViewType = ARView
}
My ARView extension is
extension ARView: ARSessionDelegate {

    func setupForBodyTracking() {
        let config = ARBodyTrackingConfiguration()
        self.session.run(config)
        self.session.delegate = self
    }

    public func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            if let bodyAnchor = anchor as? ARBodyAnchor {
                if let skeleton = bodySkeleton {
                    skeleton.update(with: bodyAnchor)
                } else {
                    let skeleton = BodySkeleton(for: bodyAnchor)
                    bodySkeleton = skeleton
                    bodySkeletonAnchor.addChild(skeleton)
                }
                let val: String = calcAngle(anchor: bodyAnchor)   // function that returns joint angles and lengths
            }
        }
    }
}
Content View:
struct ArView: View {
    @ObservedObject var forGlobalValue: globalDataTransfer = globalDataTransfer(any: "No Value")

    var body: some View {
        ZStack {
            ARViewContainer()
            VStack {
                Text(forGlobalValue.val)
                Text(" This is \(test(any: forGlobalValue))")
            }
        }
    }
}
Can anyone let me know how to pass this observable object to the ARView, or whether there is another way of getting the val variable out of the ARView extension and updating the Text in my view regularly?
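One possible approach, sketched under assumptions: an extension cannot add stored properties, so subclass ARView so it can hold a weak reference to the observable model, inject the model from the representable, and publish updates on the main thread. calcAngle is assumed to be the function from the question; BodyARView is a hypothetical name:

import ARKit
import RealityKit
import SwiftUI

// A subclass can hold a reference to the model, which an extension cannot.
class BodyARView: ARView, ARSessionDelegate {
    weak var dataModel: globalDataTransfer?

    func setupForBodyTracking() {
        session.run(ARBodyTrackingConfiguration())
        session.delegate = self
    }

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for case let bodyAnchor as ARBodyAnchor in anchors {
            let angles = calcAngle(anchor: bodyAnchor)   // assumed from the question
            DispatchQueue.main.async {
                self.dataModel?.val = angles   // @Published, so the Text updates
            }
        }
    }
}

struct ARViewContainer: UIViewRepresentable {
    @ObservedObject var forGlobalValue: globalDataTransfer

    func makeUIView(context: Context) -> BodyARView {
        let arView = BodyARView(frame: .zero)
        arView.dataModel = forGlobalValue   // inject the shared model
        arView.setupForBodyTracking()
        return arView
    }
    func updateUIView(_ uiView: BodyARView, context: Context) { }
}

In ArView's body you would then create the container with the same instance, e.g. ARViewContainer(forGlobalValue: forGlobalValue), so the session callback and the Text views observe one object.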