How can I move a model using a facial expression like look-right? - swift

I have placed a virtual object in the rear camera view. I want to move that object using facial expressions, relative to the world origin, and measure the displacement angles of the virtual object.
Is that possible using ARKit or RealityKit?

Use the following solution. First, set up a configuration:
import RealityKit
import ARKit

class ViewController: UIViewController, ARSessionDelegate {

    @IBOutlet var arView: ARView!

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        arView.session.delegate = self
        arView.automaticallyConfigureSession = false

        let config = ARFaceTrackingConfiguration()
        config.isWorldTrackingEnabled = true    // simultaneous face and world tracking
        arView.session.run(config)
    }
}
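Simultaneous face and world tracking isn't available on every device, so it may be worth guarding the flag before enabling it. A small sketch using ARFaceTrackingConfiguration.supportsWorldTracking (the fallback behaviour is up to you):

    let config = ARFaceTrackingConfiguration()
    // Enable simultaneous world tracking only where the hardware supports it.
    if ARFaceTrackingConfiguration.supportsWorldTracking {
        config.isWorldTrackingEnabled = true
    }
    arView.session.run(config)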
Run your transform animation when a defined facial expression occurs:
func facialExpression(anchor: ARFaceAnchor) {
    let eyeUpLeft = anchor.blendShapes[.eyeLookUpLeft]
    let eyeUpRight = anchor.blendShapes[.eyeLookUpRight]

    if ((eyeUpLeft?.decimalValue ?? 0.0) +
        (eyeUpRight?.decimalValue ?? 0.0)) > 0.75 {
        // ModelEntity's animation goes here
    }
}
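For the placeholder comment above, here is a minimal sketch of what the animation could look like; the helper name, the 25 cm offset, and the one-second duration are arbitrary choices, not from the original answer. You would call it from inside the if block of facialExpression(anchor:).

    // Hypothetical helper: nudge the anchored model to the right when the expression fires.
    func moveModelToTheRight(_ entity: ModelEntity) {
        var transform = entity.transform
        transform.translation.x += 0.25                          // 25 cm along the X axis
        entity.move(to: transform,
                    relativeTo: entity.parent,                   // animate relative to its parent
                    duration: 1.0)
    }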
Delegate's method (running at 60 fps):
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    guard let faceAnchor = anchors[0] as? ARFaceAnchor else { return }
    self.facialExpression(anchor: faceAnchor)
}
You can find the answer to your second question HERE.

Related

No exact matches in call to initializer for AnchorEntity

Hi, I'm trying to make an AR face tracking project, but I get an error when assigning a face anchor to an AnchorEntity. The error message is "No exact matches in call to initializer". I tried different ways but it didn't work at all. I'm a new Swift learner, could anyone help me with this? Thanks
func makeCoordinator() -> Coordinator {
    Coordinator(self)
}

class Coordinator: NSObject, ARSessionDelegate {
    var parent: ARViewContainer
    var faceAnchorEntity: AnchorEntity
    var arView: ARView

    init(_ parent: ARViewContainer) {
        self.parent = parent
        self.faceAnchorEntity = AnchorEntity()
        self.arView = ARView()
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let faceAnhcor = anchors[0] as? ARFaceAnchor else { return }
        parent.viewModel.vertices = faceAnhcor.geometry.vertices
        faceAnchorEntity = AnchorEntity(anchor: faceAnhcor)
        arView.scene.addAnchor(faceAnhcor)
    }
}
You can use RealityKit's native .face target, which is much easier to implement.
import SwiftUI
import RealityKit
import ARKit

struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        let ball = ModelEntity(mesh: .generateSphere(radius: 0.07))
        arView.session.run(ARFaceTrackingConfiguration())

        let anchor = AnchorEntity(.face)
        anchor.position.y += 0.04
        anchor.addChild(ball)
        arView.scene.addAnchor(anchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }
}
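If you would rather keep the delegate-based Coordinator from the question, a sketch of the likely fix (assuming the parent.viewModel.vertices property and the Coordinator stored properties from the question) is to add the AnchorEntity to the RealityKit scene rather than the ARFaceAnchor itself:

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let faceAnchor = anchors.first as? ARFaceAnchor else { return }
        parent.viewModel.vertices = faceAnchor.geometry.vertices

        // Wrap the ARKit anchor in a RealityKit AnchorEntity...
        faceAnchorEntity = AnchorEntity(anchor: faceAnchor)
        // ...and add the entity (not the ARAnchor) to the RealityKit scene.
        arView.scene.addAnchor(faceAnchorEntity)
    }

Note also that the Coordinator creates its own ARView() in init; it should reference the ARView that is actually on screen.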

How to render a Canonical Face Mesh with RealityKit?

I’m trying to render a face mesh with RealityKit, with no success so far. When ARKit detects a human face, the ARSession generates an ARFaceAnchor that contains a face geometry mesh.
But I can't turn that mesh into a ModelEntity.
Could anyone help with this?
Canonical Face Mesh in RealityKit
To programmatically generate and render ARKit's canonical face mesh (an ARFaceGeometry object consisting of 1220 vertices) in RealityKit 2.0, use the following code:
import ARKit
import RealityKit

class ControllerView: UIViewController {

    @IBOutlet var arView: ARView!
    var anchor = AnchorEntity()
    var model = ModelEntity()

    override func viewDidLoad() {
        super.viewDidLoad()
        arView.automaticallyConfigureSession = false
        arView.session.delegate = self

        guard ARFaceTrackingConfiguration.isSupported else {
            fatalError("We can't run face tracking config")
        }
        let config = ARFaceTrackingConfiguration()
        config.maximumNumberOfTrackedFaces = 1
        arView.session.run(config)
    }
}
Then create a method for converting the face anchor's sub-properties. Note that I used a for-in loop to convert the indices from [Int16] to [UInt32] (a simple type cast doesn't work here).
extension ControllerView {

    private func nutsAndBoltsOf(_ anchor: ARFaceAnchor) -> MeshDescriptor {
        let vertices: [simd_float3] = anchor.geometry.vertices
        var triangleIndices: [UInt32] = []
        let texCoords: [simd_float2] = anchor.geometry.textureCoordinates

        for index in anchor.geometry.triangleIndices {    // [Int16]
            triangleIndices.append(UInt32(index))
        }
        print(vertices.count)                             // 1220 vertices

        var descriptor = MeshDescriptor(name: "canonical_face_mesh")
        descriptor.positions = MeshBuffers.Positions(vertices)
        descriptor.primitives = .triangles(triangleIndices)
        descriptor.textureCoordinates = MeshBuffers.TextureCoordinates(texCoords)
        return descriptor
    }
}
And, finally, let's implement a delegate method to feed the mesh resource:
extension ControllerView: ARSessionDelegate {

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let faceAnchor = anchors[0] as? ARFaceAnchor else { return }
        arView.session.add(anchor: faceAnchor)

        self.anchor = AnchorEntity(anchor: faceAnchor)
        self.anchor.scale *= 1.2

        let mesh: MeshResource = try! .generate(from: [nutsAndBoltsOf(faceAnchor)])
        let material = SimpleMaterial(color: .magenta, isMetallic: true)

        self.model = ModelEntity(mesh: mesh, materials: [material])
        self.anchor.addChild(self.model)
        arView.scene.anchors.append(self.anchor)
    }
}
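The mesh above is generated only once, when the anchor is added. If you also want it to follow facial expressions, one possible extension (a sketch under that assumption, not part of the original answer) is to regenerate the descriptor in session(_:didUpdate:) and swap the new MeshResource into the model. This method belongs in the same ARSessionDelegate extension as session(_:didAdd:).

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        guard let faceAnchor = anchors[0] as? ARFaceAnchor else { return }

        // Rebuilding a MeshResource every frame is not free; profile this on a real device.
        if let updatedMesh = try? MeshResource.generate(from: [nutsAndBoltsOf(faceAnchor)]) {
            self.model.model?.mesh = updatedMesh
        }
    }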
Result (tested on iPad Pro 4th gen in iPadOS 16.2).
I also recommend you take a look at the post about visualizing detected planes in RealityKit 2.0.
Merry Christmas!

How to implement a Billboard effect (LookAt camera) in RealityKit?

I want to achieve a billboard effect in RealityKit (the plane always looks at the camera). I used the Entity.look() method, but the result is weird: I can't even see the plane. The code I used is below; what is the problem?
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        let config = ARWorldTrackingConfiguration()
        config.planeDetection = .horizontal
        arView.session.run(config, options: [])
        arView.session.delegate = arView
        arView.createPlane()
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }
}
var planeMesh = MeshResource.generatePlane(width: 0.15, height: 0.15)
var planeMaterial = SimpleMaterial(color: .white, isMetallic: false)
var planeEntity = ModelEntity(mesh: planeMesh, materials: [planeMaterial])
var arCameraPostion: SIMD3<Float>!
var isPlaced = false

extension ARView: ARSessionDelegate {

    func createPlane() {
        let planeAnchor = AnchorEntity(plane: .horizontal)
        planeAnchor.addChild(planeEntity)
        self.scene.addAnchor(planeAnchor)
        // planeAnchor.transform.rotation = simd_quatf(angle: .pi, axis: [0,1,0])
    }

    public func session(_ session: ARSession, didUpdate frame: ARFrame) {
        guard let arCamera = session.currentFrame?.camera else { return }

        if isPlaced {
            arCameraPostion = SIMD3(arCamera.transform.columns.3.x, 0, arCamera.transform.columns.3.z)
            planeEntity.look(at: arCameraPostion, from: planeEntity.position, upVector: [0, 1, 0], relativeTo: nil)
        }
    }

    public func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        isPlaced = true
    }
}
session(_:didUpdate:) method
Try the following logic to implement a "billboard" behavior for the RealityKit camera. You can use this code as a starting point. It generates a rotation of the model around its local Y axis based on the camera position.
import RealityKit
import ARKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!
    var model = Entity()

    override func viewDidLoad() {
        super.viewDidLoad()
        arView.session.delegate = self

        let config = ARWorldTrackingConfiguration()
        arView.session.run(config)

        self.model = try! ModelEntity.load(named: "drummer")
        let anchor = AnchorEntity(world: [0, 0, 0])
        anchor.addChild(self.model)
        arView.scene.anchors.append(anchor)
    }
}
The model's pivot point must be at its center (not at some distance from the model); see the sketch after the delegate extension below for one way to compensate for an off-center pivot.
extension ViewController: ARSessionDelegate {

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        let camTransform: float4x4 = arView.cameraTransform.matrix
        let alongXZPlane: simd_float4 = camTransform.columns.3
        let yaw: Float = atan2(alongXZPlane.x - model.position.x,
                               alongXZPlane.z - model.position.z)
        print(yaw)

        // identity 4x4 matrix
        var positionAndScale = matrix_identity_float4x4
        // position
        positionAndScale.columns.3.z = -0.25
        // scale
        positionAndScale.columns.0.x = 0.01
        positionAndScale.columns.1.y = 0.01
        positionAndScale.columns.2.z = 0.01

        // orientation matrix
        let orientation = Transform(pitch: 0, yaw: yaw, roll: 0).matrix

        // matrix multiplication
        let transform = simd_mul(positionAndScale, orientation)
        self.model.transform.matrix = transform
    }
}
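Regarding the pivot note above, here is an assumption-based workaround (not part of the original answer): wrap the loaded model in a parent entity, offset the child so the parent's origin lands on the model's visual center, and then drive the parent's transform instead. The 0.5 m value is an arbitrary example; measure your own asset.

    // Hypothetical helper: compensate for a model whose pivot sits away from its visual center.
    func recentered(_ loadedModel: Entity) -> Entity {
        let pivotFix = Entity()
        loadedModel.position = [0, 0, 0.5]    // offset the child by the inverse of the pivot error
        pivotFix.addChild(loadedModel)
        return pivotFix                       // rotate and position this parent instead of the model
    }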
subscribe(to:on:_:) method
Alternatively, you can implement a subscription to the event stream.
import UIKit
import RealityKit
import Combine

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!
    var model = Entity()
    var subs: [AnyCancellable] = []

    override func viewDidLoad() {
        super.viewDidLoad()
        self.model = try! ModelEntity.load(named: "drummer")
        let anchor = AnchorEntity(world: [0, 0, 0])
        anchor.addChild(self.model)
        arView.scene.anchors.append(anchor)

        arView.scene.subscribe(to: SceneEvents.Update.self) { _ in
            let camTransform: float4x4 = self.arView.cameraTransform.matrix
            let alongXZPlane: simd_float4 = camTransform.columns.3
            let yaw: Float = atan2(alongXZPlane.x - self.model.position.x,
                                   alongXZPlane.z - self.model.position.z)

            // identity 4x4 matrix
            var positionAndScale = matrix_identity_float4x4
            positionAndScale.columns.3.z = -0.25
            positionAndScale.columns.0.x = 0.01
            positionAndScale.columns.1.y = 0.01
            positionAndScale.columns.2.z = 0.01

            let orientation = Transform(pitch: 0, yaw: yaw, roll: 0).matrix
            let transform = simd_mul(positionAndScale, orientation)
            self.model.transform.matrix = transform
        }.store(in: &subs)
    }
}
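Note that SceneEvents.Update fires once per rendered frame, so both variants keep the model facing the camera continuously; the subscription version simply avoids conforming the view controller to ARSessionDelegate.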

Face Recognition iPhone X Swift

I did some experiments with ARFaceAnchor to recognize some emotions, like blinking eyes and so on. I'm sure the face anchor is set up correctly, because I can see its coordinates in the debugger, but it doesn't seem to recognize any of the emotions I set...
Attached you will find the ViewController; the Emotions are in a separate class.
Any ideas? Thank you!
// ViewController.swift
//
import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSessionDelegate {

    @IBOutlet var sceneView: ARSCNView!
    let session = ARSession()

    override func viewDidLoad() {
        super.viewDidLoad()
        self.sceneView.scene = SCNScene()
        self.sceneView.rendersContinuously = true

        // Configure our ARKit tracking session for facial recognition
        let config = ARFaceTrackingConfiguration()
        config.worldAlignment = .gravity
        session.delegate = self
        session.run(config, options: [])
    }

    // AR Session
    var currentFaceAnchor: ARFaceAnchor?
    var currentFrame: ARFrame?

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        self.currentFrame = frame
        DispatchQueue.main.async {
        }
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
    }

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        guard let faceAnchor = anchors.first as? ARFaceAnchor else { return }
        self.currentFaceAnchor = faceAnchor
        print("Face", faceAnchor)
    }

    func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
    }

    var expressionsToUse: [Expression] = [SmileExpression(), EyebrowsRaisedExpression(), EyeBlinkLeftExpression(), EyeBlinkRightExpression(), JawOpenExpression(), LookLeftExpression(), LookRightExpression()]    // All the expressions

    var currentExpression: Expression? = nil {
        didSet {
            if currentExpression != nil {
                self.currentExpressionShownAt = Date()
            } else {
                self.currentExpressionShownAt = nil
            }
        }
    }
    var currentExpressionShownAt: Date? = nil
}
The reason no Expressions are being detected is that you aren't actually doing anything with them, apart from adding them to the expressionsToUse array.
Each Expression has three functions, which you aren't currently using:
func name() -> String {}
func isExpressing(from: ARFaceAnchor) -> Bool {}
func isDoingWrongExpression(from: ARFaceAnchor) -> Bool {}
Since you want to detect the Emotions, you need to hook these functions into the following delegate method:
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { }
As such, something like this will point you in the right direction:
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {

    //1. Check To See We Have A Valid ARFaceAnchor
    guard let faceAnchor = anchors.first as? ARFaceAnchor else { return }
    self.currentFaceAnchor = faceAnchor

    //2. Loop Through Each Of The Expressions & Determine Which One Is Being Used
    expressionsToUse.forEach { (possibleExpression) in

        //a. If The User Is Doing A Particular Expression Then Assign It To The currentExpression Variable
        if possibleExpression.isExpressing(from: faceAnchor) {
            currentExpression = possibleExpression

            print("""
            Current Detected Expression = \(possibleExpression.name())
            It Was Detected On \(currentExpressionShownAt!)
            """)

        } else if possibleExpression.isDoingWrongExpression(from: faceAnchor) {
            print("Incorrect Detected Expression = \(possibleExpression.name())")
        }
    }
}
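The Expression classes themselves live in your separate file, so here is a purely hypothetical sketch of what one of them might look like, assuming Expression is a protocol with the three requirements listed above; the blend-shape pairing and the 0.6 threshold are assumptions to tune, not taken from your code:

    // Assumption: Expression is a protocol with the three requirements shown above.
    protocol Expression {
        func name() -> String
        func isExpressing(from anchor: ARFaceAnchor) -> Bool
        func isDoingWrongExpression(from anchor: ARFaceAnchor) -> Bool
    }

    class LookRightExpression: Expression {

        func name() -> String { "Look Right" }

        func isExpressing(from anchor: ARFaceAnchor) -> Bool {
            // Assumption: "look right" roughly maps to the right eye looking outward
            // and the left eye looking inward; tune the 0.6 threshold on a real device.
            let rightOut = anchor.blendShapes[.eyeLookOutRight]?.floatValue ?? 0
            let leftIn = anchor.blendShapes[.eyeLookInLeft]?.floatValue ?? 0
            return rightOut > 0.6 && leftIn > 0.6
        }

        func isDoingWrongExpression(from anchor: ARFaceAnchor) -> Bool {
            // The opposite gaze direction counts as the "wrong" expression here.
            let leftOut = anchor.blendShapes[.eyeLookOutLeft]?.floatValue ?? 0
            return leftOut > 0.6
        }
    }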
Hope it helps...

ARKIT stereo - move objects

I have two ARSCNView sessions side by side for stereo view. I am trying to create a box in stereo view and then make it spin.
Everything works fine until I move the parentNode using self.parentNode.runAction.
The movement only occurs in the right-side view (SceneView2); no movement occurs in the left-side view. The views are also offset from each other. I need the movement to be synchronized in both the left and right views.
Thanks.
Here is the code:
import UIKit
import ARKit
import SceneKit
import CoreLocation
import GLKit

class ViewController1: UIViewController, ARSCNViewDelegate {

    @IBOutlet weak var sceneView: ARSCNView!
    @IBOutlet weak var SceneView2: ARSCNView!
    @IBOutlet weak var Label: UILabel!

    var parentNode: SCNNode!

    override func viewDidLoad() {
        super.viewDidLoad()
        addBox()
        // Set the view's delegate
        sceneView.delegate = self
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let configuration = ARWorldTrackingConfiguration()
        configuration.worldAlignment = .gravityAndHeading
        configuration.planeDetection = .horizontal
        sceneView.session.run(configuration)
        movebox()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        sceneView.session.pause()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Release any cached data, images, etc that aren't in use.
    }

    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user
    }

    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required
    }
    // UPDATE EVERY FRAME:
    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        DispatchQueue.main.async {
            self.updateFrame()
            // self.movebox()
        }
    }

    func updateFrame() {
        // Clone pointOfView for Second View
        let pointOfView: SCNNode = (sceneView.pointOfView?.clone())!

        // Determine Adjusted Position for Right Eye
        let orientation: SCNQuaternion = pointOfView.orientation
        let orientationQuaternion: GLKQuaternion = GLKQuaternionMake(orientation.x, orientation.y, orientation.z, orientation.w)
        let eyePos: GLKVector3 = GLKVector3Make(1.0, 0.0, 0.0)
        let rotatedEyePos: GLKVector3 = GLKQuaternionRotateVector3(orientationQuaternion, eyePos)
        let rotatedEyePosSCNV: SCNVector3 = SCNVector3Make(rotatedEyePos.x, rotatedEyePos.y, rotatedEyePos.z)

        let mag: Float = 0.066    // The distance between the two pupils (in metres), i.e. the Interpupillary Distance (IPD).
        pointOfView.position.x += rotatedEyePosSCNV.x * mag
        pointOfView.position.y += rotatedEyePosSCNV.y * mag
        pointOfView.position.z += rotatedEyePosSCNV.z * mag

        // Set PointOfView for SecondView
        SceneView2.pointOfView = pointOfView
    }

    func addBox() {
        let sideMaterial = SCNMaterial()
        sideMaterial.diffuse.contents = UIColor.orange
        sideMaterial.locksAmbientWithDiffuse = true

        let box = SCNBox(width: 0.3, height: 0.6, length: 0.1, chamferRadius: 0.005)
        box.materials = [sideMaterial, sideMaterial, sideMaterial, sideMaterial, sideMaterial, sideMaterial]

        let boxNode = SCNNode()
        boxNode.geometry = box
        boxNode.position = SCNVector3(0, 0, -0.2)

        let scene = SCNScene()
        scene.rootNode.addChildNode(boxNode)
        parentNode = scene.rootNode
        parentNode.position = SCNVector3(0, 0, -1.0)
        sceneView.scene = scene

        // Set up SceneView2 (Right Eye)
        SceneView2.scene = scene
        SceneView2.showsStatistics = sceneView.showsStatistics
        SceneView2.isPlaying = true    // Turn on isPlaying to ensure this ARSCNView receives updates.
    }

    func movebox() {
        DispatchQueue.main.async {
            let rotate = SCNAction.rotateBy(x: 0, y: 5, z: 0, duration: 20)
            let moveSequence = SCNAction.sequence([rotate])
            let moveLoop = SCNAction.repeatForever(moveSequence)
            self.parentNode.runAction(moveLoop)
        }
    }
}