Search code examples
ios · swift · image-processing · swift3

How to use GoogleMobileVision in real time frame capture?


I am trying to detect smile probability in real time using GoogleMobileVision, but the app crashes because of the options parameter I am passing to GMVDetector.

Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '-[_SwiftValue intValue]

Also, when I pass the options as nil, it causes a memory issue.

My Code:

import UIKit
import GoogleMobileVision

/// Detects faces in frames delivered by `FrameExtractor` and shows the
/// smiling probability of any face that is likely smiling.
class ViewController: UIViewController, FrameExtractorDelegate {

    @IBOutlet weak var lblSmiling: UILabel!
    @IBOutlet weak var imageView: UIImageView!

    var frameExtractor: FrameExtractor!
    // Replaced with a configured detector in viewDidLoad; the placeholder
    // instance avoids making this property optional.
    var faceDetector = GMVDetector()

    @IBAction func flipButton(_ sender: UIButton) {
        frameExtractor.flipCamera()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        frameExtractor = FrameExtractor()
        frameExtractor.delegate = self
        // GMVDetector is an Objective-C API, so option values must bridge to
        // ObjC numbers/booleans. Putting the Swift enum cases themselves into
        // the dictionary boxes them as _SwiftValue and crashes with
        // "-[_SwiftValue intValue]" — pass their .rawValue instead.
        let options: [AnyHashable: Any] = [
            GMVDetectorFaceLandmarkType: GMVDetectorFaceLandmark.all.rawValue,
            GMVDetectorFaceClassificationType: GMVDetectorFaceClassification.all.rawValue,
            GMVDetectorFaceTrackingEnabled: true
        ]
        self.faceDetector = GMVDetector(ofType: GMVDetectorTypeFace, options: options)
    }

    /// `FrameExtractorDelegate` callback delivering each captured frame.
    func captured(image: UIImage) {
        processImage(image: image)
        imageView.image = image
    }

    /// Runs face detection on one frame and updates the smiling label for
    /// faces whose smiling probability exceeds 0.4.
    func processImage(image: UIImage) {
        // Conditional cast instead of `as!` — an unexpected element type or a
        // nil result then yields an empty list rather than a crash.
        let faces = faceDetector.features(in: image, options: nil) as? [GMVFaceFeature] ?? []

        for face in faces where face.hasSmilingProbability && face.smilingProbability > 0.4 {
            lblSmiling.text = String(describing: face.smilingProbability)
        }
    }
}


Solution

  • After a lot of searching, and by adding a busy flag, I solved it myself.
    Here is my working code:

    import UIKit
    import GoogleMobileVision
    
    /// Detects faces in frames from `FrameExtractor`, draws a highlight box
    /// over each detected face, and displays its smiling probability.
    class ViewController: UIViewController, FrameExtractorDelegate {

        @IBOutlet weak var lblSmiling: UILabel!
        @IBOutlet weak var imageView: UIImageView!

        /// Overlay view highlighting the most recently detected face.
        var newView = UIView()
        /// Serial queue so at most one frame is in the detector at a time.
        private let ssQ = DispatchQueue(label: "process queue")
        var frameExtractor: FrameExtractor!
        var faceDetector: GMVDetector?
        var faces = [GMVFaceFeature]()
        /// True while a frame is being processed; further frames are dropped.
        var imgIsProcessing = false
        /// Consecutive face-less frames; the overlay is cleared after 30.
        var sessionCountToClr = 0

        override func viewDidLoad() {
            super.viewDidLoad()
            frameExtractor = FrameExtractor()
            frameExtractor.delegate = self
            // Option values must be .rawValue — the dictionary is bridged to
            // Objective-C, and raw Swift enum cases box as _SwiftValue and
            // crash with "-[_SwiftValue intValue]".
            self.faceDetector = GMVDetector(ofType: GMVDetectorTypeFace,
                                            options: [GMVDetectorFaceLandmarkType: GMVDetectorFaceLandmark.all.rawValue,
                                                      GMVDetectorFaceClassificationType: GMVDetectorFaceClassification.all.rawValue,
                                                      GMVDetectorFaceMinSize: 0.3,
                                                      GMVDetectorFaceTrackingEnabled: true])
        }

        @IBAction func flipButton(_ sender: UIButton) {
            frameExtractor.flipCamera()
        }

        /// `FrameExtractorDelegate` callback: show the frame and run detection.
        func captured(image: UIImage) {
            DispatchQueue.main.async {
                self.processImage(image: image)
                self.imageView.image = image
            }
        }

        /// Detects faces on a background queue, then updates the UI on the
        /// main queue. Frames arriving while busy are dropped.
        func processImage(image: UIImage) {
            guard !imgIsProcessing else { return }
            imgIsProcessing = true

            // [weak self]: the controller can be deallocated while a frame is
            // in flight; [unowned self] would crash in that case.
            ssQ.async { [weak self] in
                guard let self = self else { return }
                // Conditional cast instead of `faceDetector!` + `as!` — a nil
                // detector or unexpected result type yields an empty list.
                let detected = self.faceDetector?.features(in: image, options: nil) as? [GMVFaceFeature] ?? []
                DispatchQueue.main.async {
                    // Touch self.faces only on the main queue. The original
                    // assigned `self.faces = []` from the background queue
                    // after scheduling this block, racing with the reads here.
                    self.faces = detected
                    if !detected.isEmpty {
                        for face in detected {
                            // +100 on y: presumably compensates for the
                            // preview's vertical offset on screen — confirm
                            // against the storyboard layout.
                            let rect = CGRect(x: face.bounds.minX,
                                              y: face.bounds.minY + 100,
                                              width: face.bounds.size.width,
                                              height: face.bounds.size.height)
                            self.drawFaceIndicator(rect: rect)
                            self.lblSmiling.text = String(format: "%.3f", face.smilingProbability)
                        }
                        self.sessionCountToClr = 0
                    } else {
                        // Clear the overlay only after 30 consecutive empty
                        // frames, so a single missed detection doesn't flicker.
                        if self.sessionCountToClr == 30 {
                            self.newView.removeFromSuperview()
                            self.lblSmiling.text = "0.0"
                            self.sessionCountToClr = 0
                        } else {
                            self.sessionCountToClr += 1
                        }
                    }
                    self.faces = []
                    self.imgIsProcessing = false
                }
            }
        }

        /// Replaces the previous overlay with a rounded, semi-transparent
        /// green-bordered box at `rect` (in the view's coordinate space).
        func drawFaceIndicator(rect: CGRect) {
            newView.removeFromSuperview()
            newView = UIView(frame: rect)
            newView.layer.cornerRadius = 10
            newView.alpha = 0.3
            newView.layer.borderColor = #colorLiteral(red: 0.3411764801, green: 0.6235294342, blue: 0.1686274558, alpha: 1)
            newView.layer.borderWidth = 4
            self.view.addSubview(newView)
        }
    }
    

    I have uploaded my entire project on GitHub; feel free to use it.