Hello everyone, I am working on a feature-detection project based on GridAdaptedFeatureDetector. I need to count the number of keypoints in each grid cell, but I am getting inconsistent results: it returns "0" for some cells even though those cells contain keypoints and matches. I am fairly sure the mistake is in my counting code. Is there a simpler way to obtain the number of features in each grid cell?
Entire code is below, I'd appreciate any insight.
int main()
{
CvCapture* cap = cvCreateFileCapture(VIDEO_NAME);
int height = (int) cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT);
int width = (int) cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH);
int x = 300;
int y = 300;
int counts[4][4];
int all[4][4];
int inx = 200;
IplImage* src1= cvQueryFrame(cap);
Mat frame_prev(src1);
cv::Mat imageROI1,imageROI2;
imageROI1=frame_prev(Rect(x,y,width-x-inx,height-y));
//ORB detector(500);
BriefDescriptorExtractor extractor;
//SURF extractor;
//ORB extractor;
vector<KeyPoint> keypoints1, keypoints2;
Mat descriptors1, descriptors2;
BFMatcher matcher(NORM_HAMMING);
vector<vector<DMatch>> matches, good_matches;
vector<DMatch>matches2,good_matches2;
Ptr<FeatureDetector> detector = FeatureDetector::create("ORB");
cv::GridAdaptedFeatureDetector det(detector,5000);
IplImage* src2;
det.detect(imageROI1,keypoints1);
extractor.compute(imageROI1,keypoints1,descriptors1);
while(src2 = cvQueryFrame(cap))
{
for(int a = 0; a<4; a++)
{for(int b = 0; b<4; b++)
{counts[a][b]=0;
all[a][b]=0;
}}
Mat frame(src2);
imageROI2=frame(Rect(x,y,width-x-inx,height-y));
det.detect(imageROI2,keypoints2);
extractor.compute(imageROI2,keypoints2,descriptors2);
matcher.radiusMatch(descriptors2,descriptors1,matches,5);
//matcher.match(descriptors2,descriptors1,matches2);
for(int i=0; i<matches.size(); i++)
{int num = matches[i].size();
for(int k=0; k<num; k++)
{if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/4))
counts[0][0]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/2) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/4) )
counts[0][1]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(3*imageROI2.cols/4) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/2) )
counts[0][2]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols) && keypoints2[matches[i][k].queryIdx].pt.y>(3*imageROI2.cols/4) )
counts[0][3]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/4))
counts[1][0]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/2) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/4) )
counts[1][1]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(3*imageROI2.cols/4) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/2) )
counts[1][2]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols) && keypoints2[matches[i][k].queryIdx].pt.y>(3*imageROI2.cols/4) )
counts[1][3]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/4))
counts[2][0]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/2) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/4) )
counts[2][1]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.y<=(3*imageROI2.cols/4) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/2) )
counts[2][2]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.x>(imageROI2.rows/2) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols) && keypoints2[matches[i][k].queryIdx].pt.y>(3*imageROI2.cols/4) )
counts[2][3]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows) && keypoints2[matches[i][k].queryIdx].pt.x>(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/4))
counts[3][0]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows) && keypoints2[matches[i][k].queryIdx].pt.x>(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols/2) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/4) )
counts[3][1]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows) && keypoints2[matches[i][k].queryIdx].pt.x>(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(3*imageROI2.cols/4) && keypoints2[matches[i][k].queryIdx].pt.y>(imageROI2.cols/2) )
counts[3][2]++;
if(keypoints2[matches[i][k].queryIdx].pt.x<=(imageROI2.rows) && keypoints2[matches[i][k].queryIdx].pt.x>(3*imageROI2.rows/4) && keypoints2[matches[i][k].queryIdx].pt.y<=(imageROI2.cols) && keypoints2[matches[i][k].queryIdx].pt.y>(3*imageROI2.cols/4) )
counts[3][3]++;
}}
for(int i=0; i<keypoints2.size(); i++)
{{if(keypoints2[i].pt.x<=(imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols/4))
all[0][0]++;
if(keypoints2[i].pt.x<=(imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols/2) && keypoints2[i].pt.y>(imageROI2.cols/4) )
all[0][1]++;
if(keypoints2[i].pt.x<=(imageROI2.rows/4) && keypoints2[i].pt.y<=(3*imageROI2.cols/4) && keypoints2[i].pt.y>(imageROI2.cols/2) )
all[0][2]++;
if(keypoints2[i].pt.x<=(imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols) && keypoints2[i].pt.y>(3*imageROI2.cols/4) )
all[0][3]++;
if(keypoints2[i].pt.x<=(imageROI2.rows/2) && keypoints2[i].pt.x>(imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols/4))
all[1][0]++;
if(keypoints2[i].pt.x<=(imageROI2.rows/2) && keypoints2[i].pt.x>(imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols/2) && keypoints2[i].pt.y>(imageROI2.cols/4) )
all[1][1]++;
if(keypoints2[i].pt.x<=(imageROI2.rows/2) && keypoints2[i].pt.x>(imageROI2.rows/4) && keypoints2[i].pt.y<=(3*imageROI2.cols/4) && keypoints2[i].pt.y>(imageROI2.cols/2) )
all[1][2]++;
if(keypoints2[i].pt.x<=(imageROI2.rows/2) && keypoints2[i].pt.x>(imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols) && keypoints2[i].pt.y>(3*imageROI2.cols/4) )
all[1][3]++;
if(keypoints2[i].pt.x<=(3*imageROI2.rows/4) && keypoints2[i].pt.x>(imageROI2.rows/2) && keypoints2[i].pt.y<=(imageROI2.cols/4))
all[2][0]++;
if(keypoints2[i].pt.x<=(3*imageROI2.rows/4) && keypoints2[i].pt.x>(imageROI2.rows/2) && keypoints2[i].pt.y<=(imageROI2.cols/2) && keypoints2[i].pt.y>(imageROI2.cols/4) )
all[2][1]++;
if(keypoints2[i].pt.x<=(3*imageROI2.rows/4) && keypoints2[i].pt.x>(imageROI2.rows/2) && keypoints2[i].pt.y<=(3*imageROI2.cols/4) && keypoints2[i].pt.y>(imageROI2.cols/2) )
all[2][2]++;
if(keypoints2[i].pt.x<=(3*imageROI2.rows/4) && keypoints2[i].pt.x>(imageROI2.rows/2) && keypoints2[i].pt.y<=(imageROI2.cols) && keypoints2[i].pt.y>(3*imageROI2.cols/4) )
all[2][3]++;
if(keypoints2[i].pt.x<=(imageROI2.rows) && keypoints2[i].pt.x>(3*imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols/4))
all[3][0]++;
if(keypoints2[i].pt.x<=(imageROI2.rows) && keypoints2[i].pt.x>(3*imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols/2) && keypoints2[i].pt.y>(imageROI2.cols/4) )
all[3][1]++;
if(keypoints2[i].pt.x<=(imageROI2.rows) && keypoints2[i].pt.x>(3*imageROI2.rows/4) && keypoints2[i].pt.y<=(3*imageROI2.cols/4) && keypoints2[i].pt.y>(imageROI2.cols/2) )
all[3][2]++;
if(keypoints2[i].pt.x<=(imageROI2.rows) && keypoints2[i].pt.x>(3*imageROI2.rows/4) && keypoints2[i].pt.y<=(imageROI2.cols) && keypoints2[i].pt.y>(3*imageROI2.cols/4) )
all[3][3]++;
}}
namedWindow("matches", 1);
Mat img_matches;
drawMatches(imageROI2, keypoints2, imageROI1, keypoints1, matches, img_matches);
imshow("matches", img_matches);
imageROI1=imageROI2;
det.detect(imageROI1,keypoints1);
extractor.compute(imageROI1,keypoints1,descriptors1);
printf("First row:\n");
printf("All features: %d\n Matched features: %d\n",all[0][0],counts[0][0]);
printf("All features: %d\n Matched features: %d\n",all[0][1],counts[0][1]);
printf("All features: %d\n Matched features: %d\n",all[0][2],counts[0][2]);
printf("All features: %d\n Matched features: %d\n",all[0][3],counts[0][3]);
printf("\n\n");
printf("Second row:\n");
printf("All features: %d\n Matched features: %d\n",all[1][0],counts[1][0]);
printf("All features: %d\n Matched features: %d\n",all[1][1],counts[1][1]);
printf("All features: %d\n Matched features: %d\n",all[1][2],counts[1][2]);
printf("All features: %d\n Matched features: %d\n",all[1][3],counts[1][3]);
printf("\n\n");
printf("Third row:\n");
printf("All features: %d\n Matched features: %d\n",all[2][0],counts[2][0]);
printf("All features: %d\n Matched features: %d\n",all[2][1],counts[2][1]);
printf("All features: %d\n Matched features: %d\n",all[2][2],counts[2][2]);
printf("All features: %d\n Matched features: %d\n",all[2][3],counts[2][3]);
printf("\n\n");
printf("Fourth row:\n");
printf("All features: %d\n Matched features: %d\n",all[3][0],counts[3][0]);
printf("All features: %d\n Matched features: %d\n",all[3][1],counts[3][1]);
printf("All features: %d\n Matched features: %d\n",all[3][2],counts[3][2]);
printf("All features: %d\n Matched features: %d\n",all[3][3],counts[3][3]);
printf("\n\n");
waitKey(5000);
}
return 0;
}