Epipolar lines wrong [closed]

asked 2017-01-31 06:51:41 -0600

imgpro gravatar image

Hi. I am trying to draw epipolar lines using ORB features. I tried it on nearly 10 different image pairs. For some images it shows correct lines, but usually the epipoles (the images of the other camera's center, where all epipolar lines intersect) end up in the wrong places. The epipolar lines do intersect at a single point in all images (for some of them at infinity). For debugging I tried two images with pre-determined corresponding points; the lines were correct only in the left image, while in the right image they were wrong.
Also, please tell me whether I need to change the parameters I pass to the computeCorrespondEpilines function. Thanks.

`

// Load the stereo pair as single-channel grayscale images.
// NOTE(review): img1 is the *right* image ("r1.jpg") and img2 the *left*
// ("l1.jpg") — make sure this matches the left/right naming used below.
Mat img1 = imread("r1.jpg", CV_LOAD_IMAGE_GRAYSCALE);
Mat img2 = imread("l1.jpg", CV_LOAD_IMAGE_GRAYSCALE);

// --- Keypoint detection (OpenCV 2.x ORB API) ---

// Detect up to 2500 ORB keypoints per image.
OrbFeatureDetector detector(2500);
std::vector<KeyPoint> keypoints1, keypoints2;

detector.detect(img1, keypoints1);
detector.detect(img2, keypoints2);
std::cout<< "kp1 size : "<< keypoints1.size() << " kp2 size : "<<keypoints2.size() << std::endl;

// --- Descriptor extraction (binary 256-bit ORB descriptors) ---
OrbDescriptorExtractor extractor;
Mat descriptors1, descriptors2;
extractor.compute(img1, keypoints1, descriptors1);
extractor.compute(img2, keypoints2, descriptors2);

std::cout << "descriptor1 size "<< descriptors1.size() << " descriptor2 size  "<< descriptors2.size() << std::endl ;
// --- Brute-force matching: img1 descriptors (query) vs img2 (train) ---
BFMatcher matcher(NORM_HAMMING);// NORM_HAMMING should be used with ORB
std::vector< DMatch > matches;
matcher.match(descriptors1, descriptors2, matches);

double max_dist = 0; double min_dist = 100;

//-- Quick calculation of max and min distances between keypoints
// (matches.size() == descriptors1.rows because BFMatcher::match returns
// exactly one match per query descriptor)
for( int i = 0; i < descriptors1.rows; i++ )
{ double dist = matches[i].distance;
  if( dist < min_dist ) min_dist = dist;
  if( dist > max_dist ) max_dist = dist;
}
std::cout << "min_dist is " << min_dist << std::endl;
std::cout << "max_dist is " << max_dist << std::endl;

//-- Keep only "good" matches: distance <= 3*min_dist, with a small floor
//-- (0.02) in case min_dist itself is nearly zero.
//-- NOTE(review): a ratio test (2-NN knnMatch + Lowe's ratio) typically
//-- rejects ORB outliers more reliably than this min-distance heuristic;
//-- outliers surviving into pts1/pts2 will corrupt F below.
std::vector< DMatch > good_matches;
vector<Point2f> pts1,pts2;


for( int i = 0; i < descriptors1.rows; i++ )
{ if( matches[i].distance <= max(3*min_dist, 0.02) )
  {
    good_matches.push_back( matches[i]);
    // queryIdx indexes keypoints1 (img1), trainIdx indexes keypoints2 (img2);
    // pts1[i] and pts2[i] are therefore a corresponding pair.
    pts2.push_back( keypoints2[matches[i].trainIdx].pt );
    pts1.push_back( keypoints1[matches[i].queryIdx].pt );
  }
}
std::cout << "Ratio of good matches is " << good_matches.size() << "/" << matches.size()<< " = "<< (float) good_matches.size()/matches.size() << std::endl;
for( int i = 0; i < (int)good_matches.size(); i++ )
  { printf( "-- Good Match [%d] Keypoint 1: %d  -- Keypoint 2: %d  \n", i, good_matches[i].queryIdx, good_matches[i].trainIdx ); }

// --- Fundamental matrix estimation ---
// NOTE(review): with FM_LMEDS the 3rd numeric argument (pixel threshold, 3)
// is ignored — it is only used by FM_RANSAC. LMedS also tolerates at most
// ~50% outliers; if the match set is dirtier than that, F (and hence every
// epiline) will be wrong, which matches the reported symptom.
Mat F = findFundamentalMat(pts1, pts2, FM_LMEDS, 3, 0.99);

// --- Epipolar lines ---
// whichImage=1: pts1 live in image 1, so the resulting lines lie in image 2
// (stored here in epilines2); symmetrically for whichImage=2.
// Each line is (a,b,c) with a*x + b*y + c = 0 and a^2 + b^2 = 1.
vector<Vec<float,3> > epilines1, epilines2;
computeCorrespondEpilines(pts1, 1, F, epilines2); //Index starts with 1
computeCorrespondEpilines(pts2, 2, F, epilines1);

CV_Assert(pts1.size() == pts2.size() &&
        pts2.size() == epilines1.size() &&
        epilines1.size() == epilines2.size());

// Convert to BGR so the colored lines/markers are visible.
cvtColor(img1,img1,COLOR_GRAY2BGR);
cvtColor(img2,img2,COLOR_GRAY2BGR);

cv::RNG rng(0);

// NOTE(review): only the first half of the correspondences is drawn
// (pts1.size()/2) — presumably to reduce clutter; confirm this is intended.
for(unsigned int i=0; i<pts1.size()/2; i++)
{

// Draw each epiline by intersecting a*x + b*y + c = 0 with the image's
// left (x=0) and right (x=cols) borders: y = -(c + a*x)/b.
// NOTE(review): this divides by b (= epilines[i][1]); for a near-vertical
// epiline b ~ 0 and the endpoints blow up, producing garbage lines —
// another candidate cause of the "wrong lines in one image" symptom.

cv::Scalar color(rng(256),rng(256),rng(256)); line(img1, Point(0,-epilines1[i][2]/epilines1[i][1]), Point(img1.cols,-(epilines1[i][2]+epilines1[i][0]*img1.cols)/epilines1[i][1]),color,1); circle(img1, pts1[i], 6, color, -1, CV_AA);

   line(img2, Point(0,-epilines2[i][2]/epilines2[i][1]), Point(img2.cols,-(epilines2[i][2]+epilines2[i][0]*img2.cols)/epilines2[i][1]),color,1);
   circle(img2 ...
(more)
edit retag flag offensive reopen merge delete

Closed for the following reason: the question is not relevant or is outdated — closed by sturkmen
close date 2020-10-07 00:14:51.671607