Finding a homography matrix
What is wrong with my code? I detect SURF features in a set of images, match the descriptors between every pair, estimate a homography with findHomography and RANSAC, and then warp one image onto the other. For some reason the resulting matrix is bad and the warped overlay does not line up.
#include <opencv2/opencv.hpp>
#include <opencv2/stitching/detail/matchers.hpp>
#include <iostream>

using namespace std;
using namespace cv;
using namespace cv::detail;

// vec_im is a vector<Mat> holding the input images.
SurfFeaturesFinder finder(1);
vector<ImageFeatures> vec_features;

// Detect SURF features in every image.
for (int i = 0; i < vec_im.size(); ++i)
{
    ImageFeatures t;
    finder(vec_im[i], t);
    t.img_idx = i;
    vec_features.push_back(t);
}
finder.collectGarbage();

// Match every pair of images and estimate a homography between them.
int k = 0;
for (int i = 0; i < vec_features.size(); ++i)
{
    ++k;
    for (int j = k; j < vec_features.size(); ++j)
    {
        Ptr<DescriptorMatcher> matcher = new BFMatcher(NORM_L2);
        vector<Point2f> prev_keys, curr_keys;
        std::vector<DMatch> matches;
        matcher->match(vec_features[i].descriptors, vec_features[j].descriptors, matches);

        // Collect the matched keypoint coordinates from both images.
        for (int p = 0; p < (int)matches.size(); ++p)
        {
            prev_keys.push_back(vec_features[i].keypoints[matches[p].queryIdx].pt);
            curr_keys.push_back(vec_features[j].keypoints[matches[p].trainIdx].pt);
        }

        // Estimate the homography with RANSAC.
        Mat M = findHomography(prev_keys, curr_keys, CV_RANSAC);
        cout << M << " " << M.rows << "x" << M.cols << endl;
        cout << M(Rect(0, 0, 3, 2)) << endl;

        // Warp image j with the top 2x3 part of the matrix and blend it over image i.
        Mat im = Mat::zeros(vec_im[j].rows, vec_im[j].cols, vec_im[j].type());
        warpAffine(vec_im[j], im, M(Rect(0, 0, 3, 2)), im.size());
        imshow("im", im);

        Mat merged = Mat::zeros(vec_im[i].rows, vec_im[i].cols, vec_im[i].type());
        addWeighted(vec_im[i], 0.5, im, 0.5, 0.0, merged);
        imshow("out", merged);
        cvWaitKey(0);
    }
}
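
For comparison, here is a minimal, self-contained sketch of the same pipeline for two images, written against the plain features2d API. It is my own assumption of how the steps could look, not the code I am actually running: it assumes OpenCV 3+, uses ORB instead of SURF (so it builds without the nonfree module), keeps only matches that pass a ratio test, and applies the full 3x3 matrix with warpPerspective.

// Minimal sketch (assumes OpenCV 3+): estimate a homography between two
// images and overlay the warped second image on the first. ORB, the ratio
// test and warpPerspective are assumptions, not taken from the code above.
#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>

int main(int argc, char** argv)
{
    cv::Mat img1 = cv::imread(argv[1], cv::IMREAD_GRAYSCALE);
    cv::Mat img2 = cv::imread(argv[2], cv::IMREAD_GRAYSCALE);

    // Detect keypoints and compute descriptors (ORB instead of SURF).
    cv::Ptr<cv::ORB> orb = cv::ORB::create(2000);
    std::vector<cv::KeyPoint> kp1, kp2;
    cv::Mat desc1, desc2;
    orb->detectAndCompute(img1, cv::noArray(), kp1, desc1);
    orb->detectAndCompute(img2, cv::noArray(), kp2, desc2);

    // k-NN matching; keep matches that pass the ratio test.
    cv::BFMatcher matcher(cv::NORM_HAMMING);
    std::vector<std::vector<cv::DMatch> > knn;
    matcher.knnMatch(desc1, desc2, knn, 2);

    std::vector<cv::Point2f> pts1, pts2;
    for (size_t m = 0; m < knn.size(); ++m)
    {
        if (knn[m].size() == 2 && knn[m][0].distance < 0.75f * knn[m][1].distance)
        {
            pts1.push_back(kp1[knn[m][0].queryIdx].pt);
            pts2.push_back(kp2[knn[m][0].trainIdx].pt);
        }
    }

    // Homography mapping points of img2 onto img1, estimated with RANSAC.
    cv::Mat H = cv::findHomography(pts2, pts1, cv::RANSAC, 3.0);
    std::cout << "H = " << H << std::endl;

    // Apply the full 3x3 matrix with warpPerspective and blend the result.
    cv::Mat warped;
    cv::warpPerspective(img2, warped, H, img1.size());
    cv::Mat merged;
    cv::addWeighted(img1, 0.5, warped, 0.5, 0.0, merged);
    cv::imshow("out", merged);
    cv::waitKey(0);
    return 0;
}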