I am trying to do image registration with ORB features, and I have run into a problem using warpAffine. The compiler says it is not possible to convert parameter 1 from cv::Mat * to cv::InputArray. Here is my code:
#pragma once
// Standard C++ I/O library.
#include <iostream>
#include <string>
#include <iomanip>
#include <vector>
// OpenCV library.
#include <cv.h>
#include <highgui.h>
// OpenCV feature library.
#include <opencv2/opencv.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <nonfree/features2d.hpp>
// main().
int main(int argv, char ** argc)
{
    cv::Mat im_ref, im_cmp;
    std::string str_ref, str_cmp;
    // Read reference image.
    //std::cout<<"Input reference image filename: ";
    //std::cin>>str_ref;
    std::cout<<"-> Reading images."<<std::endl;
    str_ref = "F:\\CPPs\\ImageRegistration\\OpenCVTest\\206.png";
    im_ref = cv::imread(str_ref);
    cv::imshow("Reference image", im_ref);
    // Read testing image.
    //std::cout<<"Input testing image filename: ";
    //std::cin>>str_cmp;
    str_cmp = "F:\\CPPs\\ImageRegistration\\OpenCVTest\\227.png";
    im_cmp = cv::imread(str_cmp);
    cv::imshow("Testing image", im_cmp);
    std::cout<<"Press any key to continue."<<std::endl;
    cvWaitKey(0);
    // Feature detection.
    std::cout<<"-> Feature detection."<<std::endl;
    std::vector<cv::KeyPoint> key_ref, key_cmp; // Vectors for features extracted from reference and testing images.
    cv::Mat des_ref, des_cmp;                   // Descriptors for features of the two images.
    cv::ORB orb1;                               // An ORB object.
    orb1(im_ref, cv::Mat(), key_ref, des_ref);  // Feature extraction.
    orb1(im_cmp, cv::Mat(), key_cmp, des_cmp);
    // Show keypoints.
    std::cout<<"-> Show keypoints."<<std::endl;
    cv::Mat drawkey_ref, drawkey_cmp;                // Output images for keypoint drawing.
    cv::drawKeypoints(im_ref, key_ref, drawkey_ref); // Generate image for keypoint drawing.
    cv::imshow("Keypoints of reference", drawkey_ref);
    cv::drawKeypoints(im_cmp, key_cmp, drawkey_cmp);
    cv::imshow("Keypoints of test", drawkey_cmp);
    cvWaitKey(0);
    // Matching.
    std::cout<<"-> Matching."<<std::endl;
    cv::FlannBasedMatcher matcher1(new cv::flann::LshIndexParams(20,10,2));
    std::vector<cv::DMatch> matches1;
    matcher1.match(des_ref, des_cmp, matches1); // Match two sets of features.
    double max_dist = 0;
    double min_dist = 100;
    // Find the minimum and maximum of all distances.
    for( int i = 0; i < des_ref.rows; i++ )
    {
        double dist = matches1[i].distance;
        if( dist < min_dist ) min_dist = dist;
        if( dist > max_dist ) max_dist = dist;
    }
    cvWaitKey(0);
    // Eliminate relatively bad points.
    std::cout<<"-> Bad points elimination"<<std::endl;
    std::vector<cv::KeyPoint> kgood_ref, kgood_cmp;
    std::vector<cv::DMatch> goodMatch;
    for (int i=0; i<matches1.size(); i++)
    {
        if(matches1[i].distance < 2*min_dist) // Keep points within 2 times the minimum distance.
        {
            goodMatch.push_back(matches1[i]);
            kgood_ref.push_back(key_ref[i]);
            kgood_cmp.push_back(key_cmp[i]);
        } // end if
    } // end for
    cvWaitKey(0);
    // Calculate affine transform matrix.
    std::cout<<"-> Calculating affine transformation."<<std::endl;
    std::vector<cv::Point2f> frm1_feature, frm2_feature;
    const int p_size = goodMatch.size();
    // * tmpP = new tmpPoint[p_size];
    cv::Point2f tmpP;
    for(int i=0; i<goodMatch.size(); i++)
    {
        tmpP.x = kgood_ref[i].pt.x;
        tmpP.y = kgood_ref[i].pt.y;
        frm1_feature.push_back(tmpP);
        tmpP.x = kgood_cmp[i].pt.x;
        tmpP.y = kgood_cmp[i].pt.y;
        frm2_feature.push_back(tmpP);
    }
    cv::Mat affine_mat = cv::estimateRigidTransform(frm1_feature, frm2_feature, true);
    cv::Mat im_transformed;
    // Output results.
    cv::warpAffine(&im_cmp, &im_transformed, affine_mat, CV_INTER_LINEAR|CV_WARP_FILL_OUTLIERS); // error comes from here.
    cv::imshow("Transformed image", im_transformed);
    cvWaitKey(0);
    return 0;
}
I had already gotten a result before applying the answer given by Evgeniy. The transform I had used is
//cv::warpAffine( im_cmp, im_transformed, affine_mat, cv::Size(im_cmp.cols, im_cmp.rows) );
The transformed result looks quite strange.
What I want to do in the end is merge the reference image with this transformed image; the warp is actually just my first step. Is the strange result a problem with the transformation parameters I pass to warpAffine()?
Finally, I want to get a result like the example here (two images taken at different positions that are finally aligned).
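For what it is worth, here is a minimal sketch of the kind of merge I have in mind for that last step, assuming the warp succeeds and im_transformed ends up with the same size and type as im_ref (the 50/50 weights are just a placeholder):
// Hypothetical merging step (not yet in my code): a simple 50/50 blend of the
// reference image and the warped test image.
cv::Mat im_merged;
cv::addWeighted(im_ref, 0.5, im_transformed, 0.5, 0.0, im_merged);
cv::imshow("Merged image", im_merged);
cv::waitKey(0);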
You are giving it a pointer, but warpAffine accepts a reference to a cv::Mat (the cv::InputArray / cv::OutputArray parameters are constructed implicitly from a cv::Mat, not from a cv::Mat*). You can change your code like this:
cv::warpAffine(im_cmp, im_transformed, affine_mat, cv::Size(), CV_INTER_LINEAR|CV_WARP_FILL_OUTLIERS);
Just remove the '&'.
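For reference, this is (roughly) the OpenCV 2.x signature being matched, plus a sketch of the call with an explicit output size; im_cmp.size() is just one reasonable choice for dsize, not the only one:
// warpAffine takes InputArray/OutputArray parameters, which a cv::Mat converts
// to implicitly; a cv::Mat* does not, hence the conversion error.
// void cv::warpAffine(InputArray src, OutputArray dst, InputArray M, Size dsize,
//                     int flags = INTER_LINEAR, int borderMode = BORDER_CONSTANT,
//                     const Scalar& borderValue = Scalar());
cv::Mat im_transformed;
cv::warpAffine(im_cmp, im_transformed, affine_mat, im_cmp.size(),
               CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS);
cv::imshow("Transformed image", im_transformed);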