I am trying to make a face detector using CIDetector that enables a button for as long as a face is detected. The part I searched for and couldn't find is how to trigger a function when a face is detected, and how to disable the button again when the face leaves the camera frame.
Here is the code that I have so far:
.h file:
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@property (weak, nonatomic) IBOutlet UIButton *actionButton;
//Update 2:
@property (weak, nonatomic) IBOutlet UIView *containerView;
- (IBAction)actionButton:(id)sender;
@end
.m file:
#import "ViewController.h"
@import AVFoundation;
@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate> {
    AVCaptureVideoPreviewLayer *_previewLayer;
    AVCaptureSession *_session;
    CIDetector *_faceDetector;
    CIContext *_ciContext;
}
@end
@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.

    // Create a new AVCaptureSession
    _session = [[AVCaptureSession alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input) {
        // Add the input to the session
        [_session addInput:input];
    } else {
        NSLog(@"error: %@", error);
        return;
    }

    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    // Have to add the output before setting metadata types
    [_session addOutput:output];
    // Restrict the output metadata to faces
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeFace]];
    // This VC is the delegate. Please call us on the main queue
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    // Display on screen
    _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _previewLayer.bounds = self.view.bounds;
    _previewLayer.position = CGPointMake(CGRectGetMidX(self.view.bounds), CGRectGetMidY(self.view.bounds));
    // Update 2 change
    [self.containerView.layer addSublayer:_previewLayer];

    // Hide the button until a face is detected
    self.actionButton.hidden = YES;

    // Start the AVSession running
    [_session startRunning];
}
// Update 1:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    for (AVMetadataObject *metadataObject in metadataObjects) {
        if ([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
            // A face is in frame: show the button
            self.actionButton.hidden = NO;
        }
    }
}
- (IBAction)actionButton:(id)sender {
}
@end
In your storyboard you should add a new view to the main view and create the outlet:
@property (weak, nonatomic) IBOutlet UIView *containerView;
The button you add should be at the same hierarchical level as the newly created subview, and it should sit in front of that subview so the preview layer does not cover it.
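If you prefer to set this up in code rather than in the storyboard, here is a minimal sketch of the same hierarchy (the frame values and button title are illustrative assumptions):
// Programmatic equivalent of the storyboard layout described above
UIView *containerView = [[UIView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:containerView];
self.containerView = containerView;

UIButton *actionButton = [UIButton buttonWithType:UIButtonTypeSystem];
actionButton.frame = CGRectMake(20.0, self.view.bounds.size.height - 64.0,
                                self.view.bounds.size.width - 40.0, 44.0);
[actionButton setTitle:@"Action" forState:UIControlStateNormal];
[actionButton addTarget:self action:@selector(actionButton:)
       forControlEvents:UIControlEventTouchUpInside];
// Adding the button after the container keeps it in front of the preview
[self.view addSubview:actionButton];
self.actionButton = actionButton;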
And in your code change:
[self.view.layer addSublayer:_previewLayer];
to:
[self.containerView.layer addSublayer:_previewLayer];
Hope this helps.
Update:
If you had a gesture recognizer and no UI for it, then you could have used this quick and easy fix:
NSTimer *timer = [NSTimer timerWithTimeInterval:0.2f target:self selector:@selector(hideButton) userInfo:nil repeats:YES];
[[NSRunLoop mainRunLoop] addTimer:timer forMode:NSRunLoopCommonModes];
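Note that timerWithTimeInterval: does not schedule the timer by itself, which is why it is added to the main run loop explicitly; NSRunLoopCommonModes keeps it firing even while the run loop is tracking touches (for example during scrolling).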
Where:
- (void)hideButton {
    // Every third timer tick (roughly every 0.6 s), check whether
    // captureOutput has fired since the last check; if it has not,
    // no face is in frame anymore, so hide the button.
    if (counterSeconds == 2) {
        if (counterCaptureOutput == 0) {
            NSLog(@"hide button");
            [self.actionButton setHidden:YES];
        }
        counterCaptureOutput = 0;
        counterSeconds = 0;
    }
    counterSeconds++;
}
and:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    for (AVMetadataObject *metadataObject in metadataObjects) {
        if ([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
            self.actionButton.hidden = NO;
            // Record that a face was seen since the last timer check
            counterCaptureOutput++;
            NSLog(@"ENTER FUNCTION");
        }
    }
}
Also declare the two counters in your .m, at file scope above the @implementation:
int counterCaptureOutput;
int counterSeconds;
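As an alternative to the timer-and-counters approach, here is a sketch (not part of the original fix) that debounces the hide directly in the delegate callback. It assumes the delegate queue is the main queue, as set in viewDidLoad, and it replaces hideButton, the timer, and both counters:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    for (AVMetadataObject *metadataObject in metadataObjects) {
        if ([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
            // A face is in frame: show the button immediately
            self.actionButton.hidden = NO;
            // Cancel any pending hide and schedule a new one; as long as
            // face callbacks keep arriving, the hide keeps getting pushed back.
            [NSObject cancelPreviousPerformRequestsWithTarget:self
                                                     selector:@selector(hideButton)
                                                       object:nil];
            [self performSelector:@selector(hideButton) withObject:nil afterDelay:0.5];
        }
    }
}

- (void)hideButton {
    // No face has been reported for 0.5 s: assume it left the frame
    self.actionButton.hidden = YES;
}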