Face Recognition iPhone X Swift

I've done some experiments with ARFaceAnchor to recognize expressions such as eye blinks and so on. I'm sure the face anchor is set up correctly, because in the debugger I can see its coordinates, but it doesn't seem to recognize any of the expressions I defined...
Attached you will find the ViewController; the expressions live in a separate class.
Any ideas? Thank you!
// ViewController.swift
//
import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSessionDelegate {

    @IBOutlet var sceneView: ARSCNView!
    let session = ARSession()

    override func viewDidLoad() {
        super.viewDidLoad()
        self.sceneView.scene = SCNScene()
        self.sceneView.rendersContinuously = true

        // Configure our ARKit tracking session for facial recognition
        let config = ARFaceTrackingConfiguration()
        config.worldAlignment = .gravity
        session.delegate = self
        session.run(config, options: [])
    }

    // AR Session
    var currentFaceAnchor: ARFaceAnchor?
    var currentFrame: ARFrame?

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        self.currentFrame = frame
        DispatchQueue.main.async {
        }
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
    }

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        guard let faceAnchor = anchors.first as? ARFaceAnchor else { return }
        self.currentFaceAnchor = faceAnchor
        print("Face", faceAnchor)
    }

    func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
    }

    // All the expressions
    var expressionsToUse: [Expression] = [SmileExpression(), EyebrowsRaisedExpression(), EyeBlinkLeftExpression(), EyeBlinkRightExpression(), JawOpenExpression(), LookLeftExpression(), LookRightExpression()]

    var currentExpression: Expression? = nil {
        didSet {
            if currentExpression != nil {
                self.currentExpressionShownAt = Date()
            } else {
                self.currentExpressionShownAt = nil
            }
        }
    }

    var currentExpressionShownAt: Date? = nil
}

The reason no Expressions are being detected is that you aren't actually doing anything with them, apart from adding them to the expressionsToUse array.
Each Expression has three functions, which you aren't currently using:
func name() -> String {}
func isExpressing(from: ARFaceAnchor) -> Bool {}
func isDoingWrongExpression(from: ARFaceAnchor) -> Bool {}
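For reference, here is a rough sketch of how one of those expressions might be implemented, assuming Expression is a protocol (or base class) with the three requirements above; the blend-shape keys are real ARKit blend shapes, but the thresholds and the frown check are purely illustrative:
class SmileExpression: Expression {

    func name() -> String { return "Smile" }

    func isExpressing(from anchor: ARFaceAnchor) -> Bool {
        // Average the left and right smile blend shapes and compare to a threshold
        guard let left = anchor.blendShapes[.mouthSmileLeft]?.doubleValue,
              let right = anchor.blendShapes[.mouthSmileRight]?.doubleValue else { return false }
        return (left + right) / 2 > 0.5
    }

    func isDoingWrongExpression(from anchor: ARFaceAnchor) -> Bool {
        // Illustrative "wrong" expression: the user is frowning instead
        return (anchor.blendShapes[.mouthFrownLeft]?.doubleValue ?? 0) > 0.5
    }
}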
Since you want to detect the expressions, you need to hook these functions into the following delegate method:
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { }
Something like this should point you in the right direction:
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {

    //1. Check To See We Have A Valid ARFaceAnchor
    guard let faceAnchor = anchors.first as? ARFaceAnchor else { return }
    self.currentFaceAnchor = faceAnchor

    //2. Loop Through Each Of The Expressions & Determine Which One Is Being Used
    expressionsToUse.forEach { (possibleExpression) in

        //a. If The User Is Doing A Particular Expression Then Assign It To The currentExpression Variable
        if possibleExpression.isExpressing(from: faceAnchor) {
            currentExpression = possibleExpression

            print("""
            Current Detected Expression = \(possibleExpression.name())
            It Was Detected On \(currentExpressionShownAt!)
            """)

        } else if possibleExpression.isDoingWrongExpression(from: faceAnchor) {
            print("Incorrect Detected Expression = \(possibleExpression.name())")
        }
    }
}
Hope it helps...

Related

Selector type of expression is ambiguous without more context

I'm trying to get an ARSessionDelegate method using a selector, but I'm getting this error:
Type of expression is ambiguous without more context
Here is my code:
#selector(ARSessionDelegate.session(_:didUpdate:) as ((ARSessionDelegate) -> (ARSession, ARFrame) -> Void))
This is how the method is declared:
public protocol ARSessionDelegate : ARSessionObserver {
    optional func session(_ session: ARSession, didUpdate frame: ARFrame)
}
Also, I am trying to make an Rx extension for the ARKit session using this answer, but I'm not sure whether that's what is causing the problem.
Because there are multiple methods with the same name, you are forced to implement the method in the delegate proxy and forward the calls using a subject.
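For context, these are the two ARSessionDelegate requirements that share the Swift name session(_:didUpdate:), which is what makes the expression ambiguous:
// Both requirements share the Swift name session(_:didUpdate:),
// so the compiler can't tell which one the #selector cast refers to:
optional func session(_ session: ARSession, didUpdate frame: ARFrame)
optional func session(_ session: ARSession, didUpdate anchors: [ARAnchor])
With that constraint in mind, a delegate proxy that forwards the frames through a subject looks like this: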
extension ARSession: HasDelegate { }

extension Reactive where Base: ARSession {

    var delegate: ARSessionDelegateProxy {
        return ARSessionDelegateProxy.proxy(for: base)
    }

    var didUpdate: Observable<ARFrame> {
        return delegate.didUpdate.asObservable()
    }
}

final class ARSessionDelegateProxy
    : DelegateProxy<ARSession, ARSessionDelegate>
    , DelegateProxyType
    , ARSessionDelegate {

    init(parentObject: ARSession) {
        super.init(
            parentObject: parentObject,
            delegateProxy: ARSessionDelegateProxy.self
        )
    }

    deinit {
        didUpdate.onCompleted()
    }

    public static func registerKnownImplementations() {
        self.register { ARSessionDelegateProxy(parentObject: $0) }
    }

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        didUpdate.onNext(frame)
    }

    fileprivate let didUpdate = PublishSubject<ARFrame>()
}

How can I move a model using facial expression like look-right?

I have placed a virtual object in the rear camera view. I want to move that object using facial expressions, with respect to the world origin, and measure the displacement angles of the virtual object.
Is that possible using ARKit or RealityKit?
Use the following solution. First, set up a configuration:
import RealityKit
import ARKit

class ViewController: UIViewController, ARSessionDelegate {

    @IBOutlet var arView: ARView!

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        arView.session.delegate = self
        arView.automaticallyConfigureSession = false

        let config = ARFaceTrackingConfiguration()
        config.isWorldTrackingEnabled = true // Simultaneous face and world tracking
        arView.session.run(config)
    }
}
Run your transform animation when a defined facial expression occurs:
func facialExpression(anchor: ARFaceAnchor) {

    let eyeUpLeft = anchor.blendShapes[.eyeLookUpLeft]
    let eyeUpRight = anchor.blendShapes[.eyeLookUpRight]

    if ((eyeUpLeft?.decimalValue ?? 0.0) +
        (eyeUpRight?.decimalValue ?? 0.0)) > 0.75 {

        // ModelEntity's animation goes here
    }
}
Delegate's method (running at 60 fps):
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    guard let faceAnchor = anchors[0] as? ARFaceAnchor else { return }
    self.facialExpression(anchor: faceAnchor)
}
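As a rough sketch of what "ModelEntity's animation goes here" could look like, assuming modelEntity is an entity you added to the scene earlier (both the name and the rotation values are illustrative):
// Hypothetical animation: nudge the entity's orientation when the expression fires.
var transform = modelEntity.transform
transform.rotation = simd_quatf(angle: .pi / 8, axis: [0, 1, 0])
modelEntity.move(to: transform, relativeTo: modelEntity.parent, duration: 0.25)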
You can see the answer to your second question HERE.

No exact matches in call to initializer for AnchorEntity

Hi, I'm trying to make an AR face tracking project, but I have an error when assigning a face anchor to an AnchorEntity. The error message is "No exact matches in call to initializer". I tried different ways but it didn't work at all. I'm a new Swift learner; could anyone help me with this? Thanks.
func makeCoordinator() -> Coordinator {
    Coordinator(self)
}

class Coordinator: NSObject, ARSessionDelegate {

    var parent: ARViewContainer
    var faceAnchorEntity: AnchorEntity
    var arView: ARView

    init(_ parent: ARViewContainer) {
        self.parent = parent
        self.faceAnchorEntity = AnchorEntity()
        self.arView = ARView()
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let faceAnhcor = anchors[0] as? ARFaceAnchor else { return }
        parent.viewModel.vertices = faceAnhcor.geometry.vertices
        faceAnchorEntity = AnchorEntity(anchor: faceAnhcor)
        arView.scene.addAnchor(faceAnhcor)
    }
}
You can use RealityKit's native .face target, which is much easier to implement.
import SwiftUI
import RealityKit
import ARKit

struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {

        let arView = ARView(frame: .zero)
        let ball = ModelEntity(mesh: .generateSphere(radius: 0.07))

        arView.session.run(ARFaceTrackingConfiguration())

        // A face-target anchor needs no ARFaceAnchor plumbing at all
        let anchor = AnchorEntity(.face)
        anchor.position.y += 0.04
        anchor.addChild(ball)
        arView.scene.addAnchor(anchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }
}
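The container can then be embedded in SwiftUI as usual; ContentView here is just a placeholder name:
// Hypothetical SwiftUI host for the container above:
struct ContentView: View {
    var body: some View {
        ARViewContainer().ignoresSafeArea()
    }
}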

RealityKit – Synchronization Component

I am trying to build a collaborative session using RealityKit. As far as I understand, I need to add a SynchronizationComponent to my Entity.
So my question is: is there any documentation about using this component? In particular, I don't understand whether I should handle any delegate methods on the recipient's side for handling the received data.
One thing that I want to stress: I am looking for a solution with RealityKit, not ARKit 3. As per Apple at WWDC:
If you are using RealityKit, this is the only new code you need to add
to use collaborative session. If you are not using RealityKit, then
you need to implement additional two delegate functions to transmit
the collaboration data.
Thanks.
Adopt the ARSessionDelegate and MCSessionDelegate protocols, implement their delegate methods, and set the isCollaborationEnabled instance property to true:
import RealityKit
import ARKit
import MultipeerConnectivity

class ViewController: UIViewController, ARSessionDelegate, MCSessionDelegate {

    @IBOutlet var arView: ARView!

    let myPeerID = MCPeerID(displayName: UIDevice.current.name)
    var mcSession: MCSession?

    override func viewDidLoad() {
        super.viewDidLoad()

        arView.session.delegate = self

        // Create the Multipeer session before assigning its delegate
        mcSession = MCSession(peer: myPeerID,
                  securityIdentity: nil,
              encryptionPreference: .required)
        mcSession!.delegate = self

        let config = ARWorldTrackingConfiguration()
        config.planeDetection = [.horizontal]
        config.isCollaborationEnabled = true
        arView.debugOptions = [.showFeaturePoints]
        arView.session.run(config)
    }
}
Then use ARSessionDelegate's and MCSessionDelegate's session() instance methods:
extension ViewController {

    func session(_ session: ARSession,
                 didOutputCollaborationData data: ARSession.CollaborationData) {

        guard let mcSession = mcSession,
              !mcSession.connectedPeers.isEmpty else { return }

        do {
            try mcSession.send(data.collaborationData(),
                      toPeers: mcSession.connectedPeers,
                         with: .reliable)
        } catch {
            print("Error while outputting collaboration data: \(error)")
        }
    }

    func session(_ session: MCSession,
                 didReceive data: Data,
                 fromPeer peerID: MCPeerID) {

        self.arView.session.update(with: data.data())
    }

    func session(_ session: ARSession,
                 didRemove anchors: [ARAnchor]) {

        for anchor in anchors {
            if anchor.sessionIdentifier == session.identifier {
                // your anchors here...
            }
        }
    }
}
extension ARSession.CollaborationData {
    func collaborationData() -> Data {
        // One common approach: archive the collaboration data so it can be sent over the network
        return try! NSKeyedArchiver.archivedData(withRootObject: self,
                                           requiringSecureCoding: true)
    }
}

extension Data {
    func data() -> ARSession.CollaborationData {
        // ...and unarchive it on the receiving side
        let data = try! NSKeyedUnarchiver.unarchivedObject(
                             ofClass: ARSession.CollaborationData.self,
                                from: self)
        return data!
    }
}
You can read and watch about it here.

watchSession.sendMessage works fine on simulator, times out IRL

I wrote two applications: the first used transferUserInfo, which caused too much lag (I believe because it queues the transfer and sends it in the background). I switched to sendMessage and was very happy with the results (faster response time). But when I attempted to run the application on my real iPhone and Apple Watch, I received "Transfer timed out." The full code and an example of one of the debug logs are below.
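For context, here is a minimal sketch of the two WCSession sending styles being compared (the calls are real WatchConnectivity methods; the dictionary key is only illustrative):
// transferUserInfo(_:) queues the dictionary and delivers it opportunistically,
// even while the counterpart app is in the background:
_ = WCSession.default.transferUserInfo(["DataKey": 0])

// sendMessage(_:replyHandler:errorHandler:) delivers immediately, but requires
// the counterpart to be reachable; otherwise the errorHandler fires:
WCSession.default.sendMessage(["DataKey": 0], replyHandler: nil) { error in
    print(error.localizedDescription)
}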
iPhone:
// ViewController.swift
import UIKit
import Foundation
import WatchConnectivity

class WatchManager: UIViewController, WCSessionDelegate {

    var counter = 0

    var watchSession: WCSession? {
        didSet {
            if let session = watchSession {
                session.delegate = self
                session.activate()
            }
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        watchSession = WCSession.default
    }

    private func sendDict(_ dict: [String: Any]) {
        self.watchSession?.sendMessage(dict, replyHandler: nil, errorHandler: { error in
            print(error.localizedDescription)
        })
    }

    @IBOutlet weak var transferButton: UIButton!
    @IBOutlet weak var label: UILabel!

    @IBAction func dataTransfer(_ sender: Any) {
        sendDict(["DataKey": counter])
        counter += 1
        print("sent")
    }

    func session(_ session: WCSession, activationDidCompleteWith activationState: WCSessionActivationState, error: Error?) {
        print("Session activation did complete")
    }

    public func sessionDidBecomeInactive(_ session: WCSession) {
        print("session did become inactive")
    }

    public func sessionDidDeactivate(_ session: WCSession) {
        print("session did deactivate")
    }

    func session(_ session: WCSession, didReceiveMessage message: [String : Any]) {
        print("phone received app context: ", message)
        if let temperature = message["DataKey"] as? String {
            DispatchQueue.main.async {
                self.transferButton.setTitle(temperature, for: .normal)
                self.label.text = temperature
            }
        }
    }
}
Apple Watch:
// InterfaceController.swift
import WatchKit
import Foundation
import WatchConnectivity

class InterfaceController: WKInterfaceController {

    var watchSession: WCSession? {
        didSet {
            if let session = watchSession {
                session.delegate = self
                session.activate()
            }
        }
    }

    @IBOutlet weak var temperatureLabel: WKInterfaceButton!

    private func sendDict(_ dict: [String: Any]) {
        self.watchSession?.sendMessage(dict, replyHandler: nil, errorHandler: { error in
            print(error.localizedDescription)
        })
    }

    @IBAction func button() {
        let urg = ["DataKey": UUID().uuidString]
        sendDict(urg)
        print("watch sent app context \(urg)")
    }
}

extension InterfaceController: WCSessionDelegate {

    #if os(iOS)
    public func sessionDidBecomeInactive(_ session: WCSession) { }

    public func sessionDidDeactivate(_ session: WCSession) {
        session.activate()
    }
    #endif

    func session(_ session: WCSession, activationDidCompleteWith activationState: WCSessionActivationState, error: Error?) {
        print("Session activation did complete")
    }

    func session(_ session: WCSession, didReceiveMessage message: [String : Any]) {
        print("watch received app context: ", message)
        if let temperature = message["DataKey"] as? Int {
            self.temperatureLabel.setTitle(String(temperature))
        }
    }

    override func willActivate() {
        // This method is called when the watch view controller is about to be visible to the user
        super.willActivate()
        watchSession = WCSession.default
    }

    override func didDeactivate() {
        // This method is called when the watch view controller is no longer visible
        super.didDeactivate()
    }
}
Debug Example:
Session activation did complete
watch sent app context ["DataKey": "AF793FC6-7A16-4D7D-9A3B-D3BB960EC9D9"]
2019-01-13 21:07:43.524717-0800 testApp WatchKit Extension[1240:1178401] [WC] -[WCSession onqueue_handleMessageCompletionWithError:withMessageID:] C385FF5F-5EA1-478B-A930-54066C2F0B0F due to WCErrorCodeTransferTimedOut -> IDSErrorTypeTimedOut -> IDSResponseTimedOut
2019-01-13 21:07:43.525001-0800 testApp WatchKit Extension[1240:1178401] [WC] -[WCSession _onqueue_notifyOfMessageError:messageID:withErrorHandler:] C385FF5F-5EA1-478B-A930-54066C2F0B0F errorHandler: YES with WCErrorCodeTransferTimedOut -> IDSErrorTypeTimedOut -> IDSResponseTimedOut
Transfer timed out.