Tags: android, c++, opencv, java-native-interface

How can I change this project into an OpenCV real-time face detection app?


The project is a real-time image processor like mine, but it uses two values, an input and an output (as far as I remember, these projects process the camera frame that way). I changed its native-lib.cpp file from this

#include <jni.h>
#include "opencv2/objdetect.hpp"
#include "opencv2/highgui.hpp"
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <ctime>   // for clock() and CLOCKS_PER_SEC

#define TAG "NativeLib"

using namespace std;
using namespace cv;

extern "C" {
//void detect(Mat& input);
void JNICALL Java_com_example_nativeopencvandroidtemplate_MainActivity_adaptiveThresholdFromJNI(JNIEnv *env, jobject instance, jlong inputAddr, jlong outputAddr) {

    Mat &input = *(Mat *) inputAddr;
    Mat &output = *(Mat *) outputAddr;

    clock_t begin = clock();

    cv::adaptiveThreshold(input, output, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, 21, 5);

    double total_time = double (clock() - begin ) / CLOCKS_PER_SEC;
    __android_log_print(ANDROID_LOG_INFO, TAG, "adaptiveThreshold computation time = %f seconds\n",  total_time);
}
}

to this

#include <jni.h>
#include "opencv2/objdetect.hpp"
#include "opencv2/highgui.hpp"
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>



using namespace std;
using namespace cv;

extern "C" {
void detect(Mat &input);
void JNICALL
Java_com_example_nativeopencvandroidtemplate_MainActivity_adaptiveThresholdFromJNI(JNIEnv *env, jobject instance,
                                                                                   jlong inputAddr) {

    Mat &input = *(Mat *) inputAddr;
    detect(input);
 }
void detect(Mat &input) {
    String face_cascade_name = "/storage/emulated/0/ony.xml";
    String eyes_cascade_name = "/storage/emulated/0/moe.xml";
    CascadeClassifier face_cascade;
    CascadeClassifier eyes_cascade;


    if (!face_cascade.load(face_cascade_name)) {
        printf("--(!)Error loading\n");
        return;
    };
    if (!eyes_cascade.load(eyes_cascade_name)) {
        printf("--(!)Error loading\n");
        return;
    };


    std::vector<Rect> faces;
    Mat frame_gray;


    cvtColor( input, frame_gray, COLOR_RGB2GRAY );
    equalizeHist(frame_gray, frame_gray);

    //-- Detect faces
    face_cascade.detectMultiScale(frame_gray, faces, 1.1, 2, 0 | CASCADE_SCALE_IMAGE, Size(30, 30));

    for (size_t i = 0; i < faces.size(); i++) {
        Point center(faces[i].x + faces[i].width * 0.5, faces[i].y + faces[i].height * 0.5);
        ellipse(input, center, Size(faces[i].width * 0.5, faces[i].height * 0.5), 0, 0, 360, Scalar(255, 0, 255), 4, 8,
                0);

        Mat faceROI = frame_gray(faces[i]);
        std::vector<Rect> eyes;

        //-- In each face, detect eyes
        eyes_cascade.detectMultiScale(faceROI, eyes, 1.1, 2, 0 | CASCADE_SCALE_IMAGE, Size(30, 30));

        for (size_t j = 0; j < eyes.size(); j++) {
            Point center(faces[i].x + eyes[j].x + eyes[j].width * 0.5, faces[i].y + eyes[j].y + eyes[j].height * 0.5);
            int radius = cvRound((eyes[j].width + eyes[j].height) * 0.25);
            circle(input, center, radius, Scalar(255, 0, 0), 4, 8, 0);
        }
    }
}
}

But on my phone there was a black screen for about five seconds, and then the app kept stopping repeatedly.

Note: Sync and build were successful, and before I changed the cpp file the app was working fine.

Please help me with my project.

Thanks


Solution

  • Here you changed the definition of the C++ method Java_com_example_nativeopencvandroidtemplate_MainActivity_adaptiveThresholdFromJNI, so you will have to reflect these changes on the Kotlin side, as this method is called from your MainActivity.kt through JNI. Here is the code you'll have to adapt in MainActivity.kt:

    class MainActivity : Activity(), CameraBridgeViewBase.CvCameraViewListener2 {
    
        ...
    
        override fun onCameraFrame(inputFrame: CameraBridgeViewBase.CvCameraViewFrame): Mat {
            val mat = inputFrame.gray()
    
            adaptiveThresholdFromJNI(mat.nativeObjAddr)
    
            return mat
        }
    
        private external fun adaptiveThresholdFromJNI(matAddr: Long)
    
        ...
    
    }
    

    Here adaptiveThresholdFromJNI was adapted to take only one argument (as you did with the C++ equivalent), and the Mat behind that address is then returned from onCameraFrame to be displayed on the screen.

    I see in your C++ code that the first thing you try to do is to convert your input Mat to gray, but this is not necessary because the Mat passed to your C++ code is already gray (see val mat = inputFrame.gray()).
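
    In fact, calling cvtColor with COLOR_RGB2GRAY on a Mat that is already single-channel can itself throw a cv::Exception, which is another way to crash the app from native code. As a minimal sketch (my suggestion, not part of your original code), the top of detect() could guard the conversion like this:

        Mat frame_gray;
        if (input.channels() == 1) {
            // inputFrame.gray() already delivers a single-channel frame;
            // clone it so equalizeHist does not overwrite the displayed Mat
            frame_gray = input.clone();
        } else {
            // only convert when a colour frame is actually passed in
            cvtColor(input, frame_gray, COLOR_RGB2GRAY);
        }
        equalizeHist(frame_gray, frame_gray);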

    If you want to keep your C++ code intact, you can also pass the colorized version of the camera view frame to your C++ code by using val mat = inputFrame.rgba().
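
    If you go the inputFrame.rgba() route, here is a small sketch of how the start of detect() could then look (an assumption on my side: the rgba() frame is a 4-channel RGBA Mat, so the RGBA-specific conversion is used):

        // input is the 4-channel RGBA camera frame in this variant
        Mat frame_gray;
        cvtColor(input, frame_gray, COLOR_RGBA2GRAY);   // explicit RGBA -> gray
        equalizeHist(frame_gray, frame_gray);
        // detectMultiScale then runs on frame_gray, and the ellipses/circles
        // are still drawn on input, exactly as in your current code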