No exact matches in call to initializer for AnchorEntity - swift

Hi, I'm trying to build an AR face-tracking project, but I get an error when assigning a face anchor to an AnchorEntity. The error message is "No exact matches in call to initializer". I've tried different approaches, but none of them worked. I'm new to Swift; could anyone help me with this? Thanks.
func makeCoordinator() -> Coordinator {
    Coordinator(self)
}

class Coordinator: NSObject, ARSessionDelegate {
    var parent: ARViewContainer
    var faceAnchorEntity: AnchorEntity
    var arView: ARView

    init(_ parent: ARViewContainer) {
        self.parent = parent
        self.faceAnchorEntity = AnchorEntity()
        self.arView = ARView()
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let faceAnchor = anchors[0] as? ARFaceAnchor else { return }
        parent.viewModel.vertices = faceAnchor.geometry.vertices
        faceAnchorEntity = AnchorEntity(anchor: faceAnchor)
        arView.scene.addAnchor(faceAnchor)
    }
}

You can use RealityKit's native .face target, which is much easier to implement.
import SwiftUI
import RealityKit
import ARKit
func makeUIView(context: Context) -> ARView {
    let arView = ARView(frame: .zero)
    let ball = ModelEntity(mesh: .generateSphere(radius: 0.07))

    arView.session.run(ARFaceTrackingConfiguration())

    let anchor = AnchorEntity(.face)
    anchor.position.y += 0.04
    anchor.addChild(ball)
    arView.scene.addAnchor(anchor)
    return arView
}
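If you'd rather keep the delegate-based approach from the question, the likely culprit is that the raw ARAnchor is being passed where RealityKit expects an AnchorEntity. A minimal sketch of the corrected delegate method, assuming the same Coordinator properties as in the question:

func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
    guard let faceAnchor = anchors.first as? ARFaceAnchor else { return }
    parent.viewModel.vertices = faceAnchor.geometry.vertices
    // Wrap the ARFaceAnchor in an AnchorEntity...
    faceAnchorEntity = AnchorEntity(anchor: faceAnchor)
    // ...and add the AnchorEntity (not the raw ARAnchor) to the scene.
    arView.scene.addAnchor(faceAnchorEntity)
}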

Related

How can I move a model using facial expression like look-right?

I have set a virtual object in the rear-camera view. I want to move that object using facial expressions, relative to the world origin, and measure the displacement angles of the virtual object.
Is that possible using ARKit or RealityKit?
Use the following solution. First, set up a configuration:
import RealityKit
import ARKit

class ViewController: UIViewController, ARSessionDelegate {

    @IBOutlet var arView: ARView!

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        arView.session.delegate = self
        arView.automaticallyConfigureSession = false
        let config = ARFaceTrackingConfiguration()
        config.isWorldTrackingEnabled = true    // simultaneous face and world tracking
        arView.session.run(config)
    }
}
Run your transform animation when a defined facial expression occurs:
func facialExpression(anchor: ARFaceAnchor) {
    let eyeUpLeft = anchor.blendShapes[.eyeLookUpLeft]
    let eyeUpRight = anchor.blendShapes[.eyeLookUpRight]

    if ((eyeUpLeft?.decimalValue ?? 0.0) +
        (eyeUpRight?.decimalValue ?? 0.0)) > 0.75 {
        // ModelEntity's animation goes here
    }
}
Delegate's method (running at 60 fps):
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    guard let faceAnchor = anchors[0] as? ARFaceAnchor else { return }
    self.facialExpression(anchor: faceAnchor)
}
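For concreteness, here is a minimal sketch of what could go inside the if-branch above, assuming a model ModelEntity property that has already been added to the scene (the property name and target transform are illustrative):

// Inside the expression handler's if-branch:
var transform = model.transform
transform.translation.x += 0.5    // illustrative: shift 0.5 m to the right
model.move(to: transform, relativeTo: model.parent, duration: 1.0)
// Note: the delegate fires every frame, so a flag may be needed
// to avoid re-triggering the animation while the expression is held.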
The answer to your second question can be seen HERE.

How to add a new scene to an existing anchor and remove the previous scene?

I have an rcproject file with about 12 scenes (500 MB or so). To lessen the load on iOS devices, I tried breaking it apart into separate rcproject files and changing the scene using notification triggers. However, when doing this and adding the new scene as a child to the main anchor, the new scene renders in a new spot, breaking the AR experience. There must be a way to add the new scenes at the exact same anchor/position. Alternatively, is there a better way to lessen the load on RAM, etc., than separating the rcproject?
Here is my ARView
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        // The experience consists of a "Base" (it acts as a permanent
        // platform for all the scenes to be rendered on)
        let baseAnchor = try! Base.loadIntro()
        let introAnchor = try! IntroSceneOM.loadIntro()
        introAnchor.actions.changeStoriesWithTrigger.onAction = loadStories

        arView.scene.anchors.append(baseAnchor)
        arView.scene.anchors.append(introAnchor)

        func loadStories(_ entity: Entity?) -> Void {
            arView.scene.anchors.remove(introAnchor)
            let storiesAnchor = try! StoriesSceneOM.loadStoriesScene()
            baseAnchor.addChild(storiesAnchor)
        }
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {}
}
EDIT:
Recreated the project using an implementation of Andy Jazz's code.
func makeUIView(context: Context) -> ARView {
    let arView = ARView(frame: .zero)
    var anchor = AnchorEntity()

    let scene01 = try! Experience.loadBoxScene()
    let scene02 = try! Experience.loadBallScene()
    // Base Scene
    let scene03 = try! Experience.loadFloppyScene()

    anchor = AnchorEntity(.plane(.horizontal, classification: .any,
                                 minimumBounds: [0.1, 0.1]))

    scene01.actions.boxTapped.onAction = loadScene02
    scene02.actions.ballTapped.onAction = loadScene01

    anchor.addChild(scene01)
    anchor.addChild(scene03)
    arView.scene.anchors.append(anchor)

    func loadScene02(_ entity: Entity?) -> Void {
        scene01.removeFromParent()
        anchor.addChild(scene02)
    }
    func loadScene01(_ entity: Entity?) -> Void {
        scene02.removeFromParent()
        anchor.addChild(scene01)
    }
    return arView
}
However, I still get the same issue where the anchor moves each time a new scene is added.
The code is quite simple, but the issue of loading scenes with a large number of polygons remains unresolved. At the maximum, the current scene should contain no more than 100K polygons, but ideally they should stay within 50...70K. Texture resolution should not exceed 2K.
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!
    @IBOutlet var label: UILabel!

    var anchor = AnchorEntity()
    let scene01 = try! Experience.loadBox()
    let scene02 = try! Experience.loadBall()
    var cube = ModelEntity()
    var sphere = ModelEntity()

    override func viewDidLoad() {
        super.viewDidLoad()

        self.cube = scene01.steelBox?.children[0] as! ModelEntity
        self.sphere = scene02.ball?.children[0] as! ModelEntity

        self.anchor = AnchorEntity(.plane(.horizontal, classification: .any,
                                          minimumBounds: [0.1, 0.1]))
        self.anchor.addChild(cube)
        arView.scene.anchors.append(anchor)

        DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
            self.label.text = String(describing: self.anchor.id)
        }
    }

    @IBAction func pressed(_ sender: UIButton) {
        self.cube.removeFromParent()
        self.anchor.addChild(sphere)
        self.label.text = String(describing: self.anchor.id)
    }
}
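To further reduce memory pressure with a heavy .rcproject, it may also help to load scenes on demand rather than up front. Here is a minimal sketch using RealityKit's Combine-based async loading; the asset name is hypothetical, and Reality Composer also generates per-scene async loaders:

import Combine
import RealityKit

var cancellable: AnyCancellable?

func loadSceneOnDemand(into anchor: AnchorEntity) {
    cancellable = Entity.loadAsync(named: "FloppyScene")    // hypothetical asset name
        .sink(receiveCompletion: { completion in
            if case .failure(let error) = completion {
                print("Unable to load: \(error.localizedDescription)")
            }
        }, receiveValue: { entity in
            // Reusing the same anchor keeps the position consistent.
            anchor.addChild(entity)
        })
}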

How can I update the Observable Object from an ARView extension in SwiftUI?

I am trying to update the globalDataTransfer class from the ARView extension and reflect the changes onto ArView. Below is the globalDataTransfer class:
class globalDataTransfer: ObservableObject {

    @Published var val: String = "No Value"

    required init(any: String) {
        self.val = any
    }
    func check() -> String {
        return self.val
    }
}
My ARViewContainer
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: UIViewRepresentableContext<ARViewContainer>) -> ARView {
        let arView = ARView(frame: .zero, cameraMode: .ar, automaticallyConfigureSession: true)
        arView.setupForBodyTracking()
        arView.scene.addAnchor(bodySkeletonAnchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }

    typealias UIViewType = ARView
}
My ARView extension is:
extension ARView: ARSessionDelegate {

    func setupForBodyTracking() {
        let config = ARBodyTrackingConfiguration()
        self.session.run(config)
        self.session.delegate = self
    }

    public func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            if let bodyAnchor = anchor as? ARBodyAnchor {
                if let skeleton = bodySkeleton {
                    skeleton.update(with: bodyAnchor)
                } else {
                    let skeleton = BodySkeleton(for: bodyAnchor)
                    bodySkeleton = skeleton
                    bodySkeletonAnchor.addChild(skeleton)
                }
                var Val: String = calcAngle(anchor: bodyAnchor)  // function that returns joint angles and lengths
            }
        }
    }
}
Content View:
struct ArView: View {

    @ObservedObject var forGlobalValue: globalDataTransfer = globalDataTransfer(any: "No Value")

    var body: some View {
        ZStack {
            ARViewContainer()
            VStack {
                Text(forGlobalValue.val)
                Text(" This is \(test(any: forGlobalValue))")
            }
        }
    }
}
Can anyone let me know how to pass this observable object to the ARView, or whether there is any way of getting the Val variable from the ARView extension and updating it regularly in my view's Text?
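One common pattern, sketched here as an assumption rather than a verified answer: inject the observable object into ARViewContainer and make a Coordinator (instead of an ARView extension) the session delegate, publishing updates on the main thread:

struct ARViewContainer: UIViewRepresentable {

    @ObservedObject var dataModel: globalDataTransfer    // injected by the parent view

    func makeCoordinator() -> Coordinator { Coordinator(dataModel: dataModel) }

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        arView.session.run(ARBodyTrackingConfiguration())
        arView.session.delegate = context.coordinator    // the Coordinator, not the ARView itself
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }

    class Coordinator: NSObject, ARSessionDelegate {
        let dataModel: globalDataTransfer
        init(dataModel: globalDataTransfer) { self.dataModel = dataModel }

        func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
            guard let bodyAnchor = anchors.compactMap({ $0 as? ARBodyAnchor }).first else { return }
            DispatchQueue.main.async {
                // Publish on the main thread so SwiftUI views can react.
                self.dataModel.val = String(describing: bodyAnchor.transform.columns.3)
            }
        }
    }
}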

Problem loading .usdz into a custom Entity class

Is there any way to load a .usdz model into a custom Entity class?
I tried to cast the returned ModelEntity to my custom class, but it didn't work out.
let entity: CustomEntity = try! CustomEntity.load(named: name) as! CustomEntity
UIKit version
import UIKit
import RealityKit

class CustomClass: Entity, HasModel {

    let modelName: String? = "gramophone"
    let myAnchor = AnchorEntity()

    func loader() -> AnchorEntity {
        if let name = self.modelName {
            let modelEntity = try! CustomClass.loadModel(named: name)
            myAnchor.addChild(modelEntity)
        }
        return myAnchor
    }
}

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!
    let modelName: String? = "gramophone"

    override func viewDidLoad() {
        super.viewDidLoad()
        let usdz = CustomClass().loader()
        arView.scene.anchors.append(usdz)
    }
}
SwiftUI version:
import SwiftUI
import RealityKit

class CustomClass: Entity, HasModel {
    func printer() {
        print("I'm inside CustomClass...")
    }
}

struct ARViewContainer: UIViewRepresentable {

    let modelName: String? = "gramophone"

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        typealias CustomEntity = ModelEntity
        var modelEntity = CustomEntity()

        if let name = self.modelName {
            modelEntity = try! CustomClass.loadModel(named: name)
            let anchor = AnchorEntity()
            anchor.addChild(modelEntity)
            arView.scene.anchors.append(anchor)
        }
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        CustomClass().printer()
    }
}
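If the model really needs to live inside a custom subclass, one workaround (a sketch, not the original answer's code) is to load the ModelEntity and transfer its ModelComponent into the subclass instead of downcasting:

import RealityKit

class CustomEntity: Entity, HasModel {

    required init() { super.init() }

    // The failable downcast is avoided by copying the loaded
    // ModelComponent into this subclass.
    convenience init(named name: String) throws {
        self.init()
        let loaded = try Entity.loadModel(named: name)    // returns a ModelEntity
        self.model = loaded.model
    }
}

// Usage (the "gramophone" asset name comes from the question):
// let entity = try CustomEntity(named: "gramophone")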

Pass value from SwiftUI to ARKit dynamically

I'm currently working on a personal project using ARKit with Swift.
In this app, a skeleton appears over a detected body and tracks the body's motion.
What I want to know is how to change the position of the skeleton dynamically with a slider.
The position of the skeleton is defined in the ARKit class ARDelegateHandler as self.characterOffset = [1, 0, 0], which means 1 m to the right of the detected body.
I want to change characterOffset's x-axis value with a slider in SwiftUI (something like self.characterOffset = [x, 0, 0]).
Here is my code.
ARViewContainer.swift
import SwiftUI
import RealityKit
import ARKit
import Combine

struct ARViewContainer: UIViewRepresentable {

    let characterAnchor = AnchorEntity()
    @Binding var offsetValue: Float
    @ObservedObject var offsetInstance = Offset()

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        arView.session.delegate = context.coordinator
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        let configuration = ARBodyTrackingConfiguration()
        uiView.session.run(configuration)
        uiView.scene.addAnchor(characterAnchor)
        print(offsetValue)
    }

    func makeCoordinator() -> ARDelegateHandler {
        ARDelegateHandler(self, anchor: characterAnchor)
    }

    class ARDelegateHandler: NSObject, ARSessionDelegate {

        var arVC: ARViewContainer
        let characterAnchor: AnchorEntity
        var character: BodyTrackedEntity?
        var characterOffset: SIMD3<Float>

        init(_ control: ARViewContainer, anchor: AnchorEntity) {
            self.arVC = control
            self.characterAnchor = anchor
            self.characterOffset = [1, 0, 0]
            super.init()
            setSkeleton()
        }

        func setSkeleton() {
            var cancellable: AnyCancellable? = nil
            cancellable = Entity.loadBodyTrackedAsync(named: "character/robot").sink(
                receiveCompletion: { completion in
                    if case let .failure(error) = completion {
                        print("Error: Unable to load model: \(error.localizedDescription)")
                    }
                    cancellable?.cancel()
                }, receiveValue: { (character: Entity) in
                    if let character = character as? BodyTrackedEntity {
                        character.scale = [1, 1, 1]
                        self.character = character
                        cancellable?.cancel()
                    } else {
                        print("Error: Unable to load model as BodyTrackedEntity")
                    }
                })
        }

        func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
            for anchor in anchors {
                guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }
                let bodyPosition = simd_make_float3(bodyAnchor.transform.columns.3)
                characterAnchor.position = bodyPosition + characterOffset
                characterAnchor.orientation = Transform(matrix: bodyAnchor.transform).rotation
                if let character = character, character.parent == nil {
                    characterAnchor.addChild(character)
                }
            }
        }
    }
}
ContentView.swift (the user is expected to touch the slider, which changes offsetValue; SlideView has already been implemented)
import SwiftUI
import RealityKit
import ARKit
import Combine

struct ContentView: View {

    @State var offsetValue: Float = 0.0
    @ObservedObject var offsetInstance = Offset()

    var body: some View {
        VStack {
            Button(action: { self.offsetInstance.setOffset(offset: 1.0) },
                   label: { Text("check") })
            ZStack(alignment: .bottom) {
                ARViewContainer(offsetValue: $offsetValue)
                SlideView(offSetValue: $offsetValue)
            }
        }
    }
}
Thank you very much for your help!
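One way this is commonly wired up, offered as a sketch rather than a verified answer: since updateUIView(_:context:) runs whenever the bound offsetValue changes, the coordinator's offset can be refreshed there (names follow the question's code):

func updateUIView(_ uiView: ARView, context: Context) {
    // Push the latest slider value into the session delegate;
    // the next session(_:didUpdate:) pass will apply it.
    context.coordinator.characterOffset = [offsetValue, 0, 0]
}

Running the configuration once in makeUIView, rather than on every updateUIView pass, would also avoid restarting the session each time the slider moves.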