ios · augmented-reality · scenekit · arkit

ARKit: Tracking Head Up and Down Movement in Xcode


I have implemented the following code, which lets me track a person's different facial expressions. However, with this code I am unable to track the person's head-up and head-down movements using ARKit in Xcode. How can I track the person's head moving up and down with ARKit?

import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSCNViewDelegate {

@IBOutlet var sceneView: ARSCNView!
@IBOutlet weak var faceLabel: UILabel!
@IBOutlet weak var labelView: UIView!
var analysis = ""

override func viewDidLoad() {
    super.viewDidLoad()
    
    labelView.layer.cornerRadius = 10
    
    sceneView.delegate = self
    sceneView.showsStatistics = true
    guard ARFaceTrackingConfiguration.isSupported else {
        fatalError("Face tracking is not supported on this device")
    }
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    
    // Create a session configuration
    let configuration = ARFaceTrackingConfiguration()

    // Run the view's session
    sceneView.session.run(configuration)
}

override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    
    // Pause the view's session
    sceneView.session.pause()
}

// MARK: - ARSCNViewDelegate
func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
    let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
    let node = SCNNode(geometry: faceMesh)
    node.geometry?.firstMaterial?.fillMode = .lines
    return node
}

func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
    if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
        faceGeometry.update(from: faceAnchor.geometry)
        expression(anchor: faceAnchor)
        
        DispatchQueue.main.async {
            self.faceLabel.text = self.analysis
        }
        
    }
}

func expression(anchor: ARFaceAnchor) {
    let smileLeft = anchor.blendShapes[.mouthSmileLeft]
    let smileRight = anchor.blendShapes[.mouthSmileRight]
    let cheekPuff = anchor.blendShapes[.cheekPuff]
    let tongue = anchor.blendShapes[.tongueOut]
    self.analysis = ""
    
    if ((smileLeft?.decimalValue ?? 0.0) + (smileRight?.decimalValue ?? 0.0)) > 0.9 {
        self.analysis += "You are smiling. "
    }
    
    if cheekPuff?.decimalValue ?? 0.0 > 0.1 {
        self.analysis += "Your cheeks are puffed. "
    }
    
    if tongue?.decimalValue ?? 0.0 > 0.1 {
        self.analysis += "Don't stick your tongue out! "
    }
}
}

Solution

  • The blendShapes instance property is a dictionary of 52 coefficients representing facial expressions. A head nod is not a facial expression, so the fastest way to detect nodding is to check the ARFaceAnchor's lookAtPoint (gaze direction) or the face node's orientation, as in the snippet below.

    func renderer(_ renderer: SCNSceneRenderer,
                  didUpdate node: SCNNode,
                  for anchor: ARAnchor) {

        if let faceAnchor = anchor as? ARFaceAnchor,
           node.geometry is ARSCNFaceGeometry {

            // lookAtPoint is the estimated gaze point in face-anchor space;
            // its y component goes negative when the eyes look below the
            // face's horizontal plane
            if faceAnchor.lookAtPoint.y <= 0 {
                print("A head is...")
            }

            // node.orientation is a quaternion; for small head rotations its
            // x component grows roughly with the pitch angle, so a small
            // threshold works as a crude "head tilted" test
            if node.orientation.x >= Float.pi / 32 {
                print("A head is...")
            }
        }
    }
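
  • If you want a signed pitch angle rather than a raw quaternion component, you can read the face node's eulerAngles.x (rotation about the X axis, in radians) in the same delegate callback. The sketch below is illustrative only: the roughly 10° threshold and the printed strings are assumed values, and which sign corresponds to "up" versus "down" should be verified on device, since it depends on the anchor's axis convention and the mirrored front-camera view.

    func renderer(_ renderer: SCNSceneRenderer,
                  didUpdate node: SCNNode,
                  for anchor: ARAnchor) {

        guard anchor is ARFaceAnchor else { return }

        // Pitch of the face node in radians; confirm the sign convention
        // empirically on device.
        let pitch = node.eulerAngles.x
        let threshold: Float = .pi / 18      // ≈ 10°, an assumed cutoff

        if pitch > threshold {
            print("Head tilted one way")
        } else if pitch < -threshold {
            print("Head tilted the other way")
        }
    }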