import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_highgui.*;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
import static com.googlecode.javacv.cpp.opencv_contrib.createLBPHFaceRecognizer;
import static com.googlecode.javacv.cpp.opencv_objdetect.cvHaarDetectObjects;
import java.io.File;
import java.io.FileInputStream;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.CanvasFrame;
import com.googlecode.javacv.FrameGrabber;
import com.googlecode.javacv.OpenCVFrameGrabber;
import com.googlecode.javacv.cpp.opencv_contrib.FaceRecognizer;
import com.googlecode.javacv.cpp.opencv_core.CvMat;
import com.googlecode.javacv.cpp.opencv_core.CvMemStorage;
import com.googlecode.javacv.cpp.opencv_core.CvRect;
import com.googlecode.javacv.cpp.opencv_core.CvSeq;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_core.MatVector;
import static com.googlecode.javacv.cpp.opencv_highgui.cvLoadImage;
import com.googlecode.javacv.cpp.opencv_objdetect;
import static com.googlecode.javacv.cpp.opencv_objdetect.CV_HAAR_DO_CANNY_PRUNING;
import com.googlecode.javacv.cpp.opencv_objdetect.CvHaarClassifierCascade;
import java.io.OutputStream;
import java.io.FileOutputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import static javax.swing.JFrame.EXIT_ON_CLOSE;
public class LBPFaceRecognizer {

    private static String faceDataFolder = "F:\\FaceID\\ID\\";
    public static String imageDataFolder = faceDataFolder + "images\\";
    private static final String CASCADE_FILE = "C:\\Users\\chatur\\Documents\\NetBeansProjects\\NetbeansWorkspace\\DemoFaceRecognize-master\\haarcascade_frontalface_default.xml";
    public static final String personNameMappingFileName = faceDataFolder + "personNumberMap.properties";

    static final CvHaarClassifierCascade cascade = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE));
    private static LBPFaceRecognizer instance = new LBPFaceRecognizer();

    Properties dataMap = new Properties();
    private static final int NUM_IMAGES_PER_PERSON = 10;
    double binaryThreshold = 100;
    int highConfidenceLevel = 70;
    private FaceRecognizer fr_binary = null;

    private LBPFaceRecognizer() {
        if (cascade != null) {
            System.out.println(cascade + "\t" + CASCADE_FILE);
        }
        createModels();
        loadTrainingData();
    }

    public static LBPFaceRecognizer getInstance() {
        return instance;
    }

    private void createModels() {
        // LBPH recognizer: radius 1, 8 neighbours, 8x8 grid, distance threshold binaryThreshold
        fr_binary = createLBPHFaceRecognizer(1, 8, 8, 8, binaryThreshold);
    }
    protected CvSeq detectFace(IplImage originalImage) {
        System.out.println("in detection");
        CvSeq faces = null;
        Loader.load(opencv_objdetect.class);
        try {
            IplImage grayImage = IplImage.create(originalImage.width(), originalImage.height(), IPL_DEPTH_8U, 1);
            cvCvtColor(originalImage, grayImage, CV_BGR2GRAY);
            CvMemStorage storage = CvMemStorage.create();
            faces = cvHaarDetectObjects(grayImage, cascade, storage, 1.1, 3, CV_HAAR_DO_CANNY_PRUNING);
            for (int i = 0; i < faces.total(); i++) {
                CvRect r = new CvRect(cvGetSeqElem(faces, i));
                cvRectangle(originalImage, cvPoint(r.x(), r.y()),
                        cvPoint(r.x() + r.width(), r.y() + r.height()),
                        CvScalar.YELLOW, 1, CV_AA, 0);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return faces;
    }
    public String identifyFace(IplImage image) {
        System.err.println("==========================================================");
        String personName = "";
        Set keys = dataMap.keySet();
        if (keys.size() > 0) {
            int[] ids = new int[1];
            double[] distance = new double[1];
            int result = -1;
            fr_binary.predict(image, ids, distance);
            // just deriving a confidence number against the threshold
            result = ids[0];
            if (result > -1 && distance[0] < highConfidenceLevel) {
                personName = (String) dataMap.get("" + result);
            }
        }
        return personName;
    }

    public boolean learnNewFace(String personName, IplImage[] images) throws Exception {
        System.out.println("in learn new face");
        int memberCounter = dataMap.size();
        System.out.println(memberCounter);
        if (dataMap.containsValue(personName)) {
            Set keys = dataMap.keySet();
            Iterator ite = keys.iterator();
            while (ite.hasNext()) {
                String personKeyForTraining = (String) ite.next();
                String personNameForTraining = (String) dataMap.getProperty(personKeyForTraining);
                if (personNameForTraining.equals(personName)) {
                    memberCounter = Integer.parseInt(personKeyForTraining);
                    System.out.println(memberCounter);
                    System.err.println("Person already exists... re-learning...");
                }
            }
        }
        dataMap.put("" + memberCounter, personName);
        storeTrainingImages(personName, images);
        retrainAll();
        return true;
    }
    public IplImage preprocessImage(IplImage image, CvRect r) {
        IplImage gray = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1);
        IplImage roi = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1);
        System.out.println("in preprocessing");
        CvRect r1 = new CvRect(r.x() - 10, r.y() - 10, r.width() + 10, r.height() + 10);
        cvCvtColor(image, gray, CV_BGR2GRAY);
        cvSetImageROI(gray, r1);
        cvResize(gray, roi, CV_INTER_LINEAR);
        cvEqualizeHist(roi, roi);
        return roi;
    }
    private void retrainAll() throws Exception {
        System.out.println("in retrainAll");
        Set keys = dataMap.keySet();
        if (keys.size() > 0) {
            MatVector trainImages = new MatVector(keys.size() * NUM_IMAGES_PER_PERSON);
            CvMat trainLabels = CvMat.create(keys.size() * NUM_IMAGES_PER_PERSON, 1, CV_32SC1);
            Iterator ite = keys.iterator();
            int count = 0;
            System.err.print("Loading images for training...");
            while (ite.hasNext()) {
                String personKeyForTraining = (String) ite.next();
                String personNameForTraining = (String) dataMap.getProperty(personKeyForTraining);
                IplImage imagesForTraining[] = new IplImage[NUM_IMAGES_PER_PERSON];
                imagesForTraining = readImages(personNameForTraining);
                System.out.println("length\t" + imagesForTraining.length + "\t" + personNameForTraining);
                IplImage grayImage[] = new IplImage[NUM_IMAGES_PER_PERSON];
                for (int j = 0; j < imagesForTraining.length; j++) {
                    grayImage[j] = IplImage.create(imagesForTraining[j].width(), imagesForTraining[j].height(), IPL_DEPTH_8U, 1);
                }
                for (int i = 0; i < imagesForTraining.length; i++) {
                    trainLabels.put(count, 0, Integer.parseInt(personKeyForTraining));
                    cvCvtColor(imagesForTraining[i], grayImage[i], CV_BGR2GRAY);
                    trainImages.put(count, grayImage[i]);
                    count++;
                }
            }
            System.err.println("done.");
            fr_binary.train(trainImages, trainLabels);
            System.err.println("done.");
            storeTrainingData();
        }
    }
    private void loadTrainingData() {
        try {
            File personNameMapFile = new File(personNameMappingFileName);
            if (personNameMapFile.exists()) {
                System.out.print("in if ");
                FileInputStream fis = new FileInputStream(personNameMapFile);
                dataMap.load(fis);
                System.out.println("dataMap" + dataMap);
                fis.close();
                dataMap.list(System.out);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void storeTrainingData() throws Exception {
        System.err.print("Storing training models ....");
        File personNameMapFile = new File(personNameMappingFileName);
        if (personNameMapFile.exists()) {
            personNameMapFile.delete();
        }
        FileOutputStream fos = new FileOutputStream(personNameMapFile, false);
        dataMap.store(fos, "");
        fos.close();
        System.err.println("done.");
    }
    public void storeTrainingImages(String personName, IplImage[] images) {
        System.out.println("in store");
        for (int i = 0; i < images.length; i++) {
            String imageFileName = imageDataFolder + "training\\" + personName + "_" + i + ".bmp";
            File imgFile = new File(imageFileName);
            if (imgFile.exists()) {
                imgFile.delete();
            }
            cvSaveImage(imageFileName, images[i]);
        }
        System.out.println("exit store");
    }

    private IplImage[] readImages(String personName) {
        File imgFolder = new File(imageDataFolder);
        IplImage[] images = null;
        if (imgFolder.isDirectory() && imgFolder.exists()) {
            images = new IplImage[NUM_IMAGES_PER_PERSON];
            for (int i = 0; i < NUM_IMAGES_PER_PERSON; i++) {
                String imageFileName = imageDataFolder + "training\\" + personName + "_" + i + ".bmp";
                IplImage img = cvLoadImage(imageFileName);
                images[i] = img;
            }
        }
        return images;
    }
    public static void main(String ar[]) throws FrameGrabber.Exception, Exception {
        try {
            //LBPFaceRecognizer fr=new LBPFaceRecognizer();
            //fr.getInstance();
            LBPFaceRecognizer fr = LBPFaceRecognizer.getInstance();
            FrameGrabber grabber = new OpenCVFrameGrabber(0);
            grabber.start();
            IplImage img = null;
            CvSeq faces = null;
            CvRect r = null;
            CanvasFrame canvas = new CanvasFrame("webcam");
            canvas.setDefaultCloseOperation(EXIT_ON_CLOSE);
            canvas.setSize(750, 750);
            boolean flag = true;
            while (flag) {
                img = grabber.grab();
                IplImage snapshot = cvCreateImage(cvGetSize(img), img.depth(), img.nChannels());
                cvFlip(img, snapshot, 1);
                faces = fr.detectFace(img);
                if (img != null) {
                    canvas.showImage(img);
                }
                r = new CvRect(cvGetSeqElem(faces, 0));
                int imageCounter = 0;
                IplImage trainImages[] = new IplImage[1];
                trainImages[0] = fr.preprocessImage(img, r);
                fr.learnNewFace("p", trainImages);
                fr.identifyFace(fr.preprocessImage(img, r));
            }
        } catch (ExceptionInInitializerError e) {
            System.out.println(e);
        }
    }
}
The above code is for face recognition. It detects faces, but during training it throws a java.lang.NullPointerException in the retrainAll() method, at this statement:
grayImage[j] = IplImage.create(imagesForTraining[j].width(), imagesForTraining[j].height(), IPL_DEPTH_8U, 1);
Please help me sort this out.
IplImage img = cvLoadImage(imageFileName);
That call can return null, so one of your imagesForTraining[j] entries is null. That is what causes the NullPointerException.
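A quick way to confirm which file is failing is to log it inside readImages(), right after the cvLoadImage() call. This is only a small sketch reusing the imageFileName variable and images array that method already has:
IplImage img = cvLoadImage(imageFileName);
if (img == null) {
    // cvLoadImage gives back null when the file is missing or cannot be decoded
    System.err.println("Could not load training image: " + imageFileName);
}
images[i] = img;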
If you change your code to:
int width = imagesForTraining[j].width();
int height = imagesForTraining[j].height();
grayImage[j] = IplImage.create(width, height, IPL_DEPTH_8U, 1);
the exception will be thrown on the first of these three lines. You should think about why the image is not loaded in your scenario and handle that case. A simple implementation is something like:
if (imagesForTraining[j] == null) continue; // or do something else, e.g. try to load it again
int width = imagesForTraining[j].width();
int height = imagesForTraining[j].height();
grayImage[j] = IplImage.create(width, height, IPL_DEPTH_8U, 1);
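One more thing to watch: a bare continue in that first loop still leaves the matching grayImage[j] slot null, so the second loop (the one calling cvCvtColor) will run into the same null entries. Also, in your main() only a single snapshot is ever passed to learnNewFace(), so only the "_0.bmp" file gets written, while readImages() tries to load NUM_IMAGES_PER_PERSON (10) files; the other nine cvLoadImage() calls return null. Below is a rough sketch of how the loading part of retrainAll() could skip missing files and size the training containers to the images that actually loaded. It only uses the fields and helpers already in your class (the loadedFaces/loadedLabels locals are just illustrative names), and it is a sketch, not a tested drop-in replacement:
// illustrative locals: collect only the images that really loaded
java.util.List<IplImage> loadedFaces = new java.util.ArrayList<IplImage>();
java.util.List<Integer> loadedLabels = new java.util.ArrayList<Integer>();
Iterator ite = dataMap.keySet().iterator();
while (ite.hasNext()) {
    String personKey = (String) ite.next();
    IplImage[] imgs = readImages(dataMap.getProperty(personKey));
    if (imgs == null) {
        continue; // image folder missing, nothing to load for this person
    }
    for (IplImage img : imgs) {
        if (img == null) {
            continue; // file missing or unreadable, skip it instead of crashing
        }
        // convert to 8-bit grayscale before handing the image to the recognizer
        IplImage gray = IplImage.create(img.width(), img.height(), IPL_DEPTH_8U, 1);
        cvCvtColor(img, gray, CV_BGR2GRAY);
        loadedFaces.add(gray);
        loadedLabels.add(Integer.parseInt(personKey));
    }
}
if (!loadedFaces.isEmpty()) {
    // size the containers to the number of images that actually loaded
    MatVector trainImages = new MatVector(loadedFaces.size());
    CvMat trainLabels = CvMat.create(loadedFaces.size(), 1, CV_32SC1);
    for (int i = 0; i < loadedFaces.size(); i++) {
        trainImages.put(i, loadedFaces.get(i));
        trainLabels.put(i, 0, loadedLabels.get(i));
    }
    fr_binary.train(trainImages, trainLabels);
    storeTrainingData();
}
Whether you skip missing files like this or simply capture NUM_IMAGES_PER_PERSON snapshots before calling learnNewFace() is up to you; the key point is that every image handed to train() must be non-null.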