Anyone looked into Xtion reprojection error?

Hi there,

I am trying to use the Xtion depth sensor. However, when I compute the reprojection error, i.e. when I project the given 3D coordinates onto the corresponding RGB image without moving the camera (so rvec = [0, 0, 0] and tvec = [0, 0, 0]), I get reprojection errors as high as 6 pixels. After calibrating the RGB camera the error dropped to 4 pixels, which is still large.
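
As a sanity check on the projection itself: with rvec = tvec = 0 and no distortion, projectPoints reduces to the plain pinhole model, so individual points can be verified by hand. A minimal sketch (fx, fy, cx, cy are assumed to come from the calibrated RGB camera matrix):

#include <opencv2/core/core.hpp>

// Pinhole projection of a point expressed in the RGB camera frame; with
// rvec = tvec = 0 and zero distortion this is exactly what projectPoints computes.
cv::Point2f projectPinhole (const cv::Point3f &P, float fx, float fy, float cx, float cy) {
    return cv::Point2f (fx * P.x / P.z + cx,
                        fy * P.y / P.z + cy);
}

Checking a few keypoints this way should tell whether the offset comes from the projection call or from the registered 3D data itself.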

I use the OpenNI drivers from the ROS package openni_launch and retrieve the depth_registered 3D points, relying on the factory registration.
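
For reference, this is roughly how I retrieve the registered 3D points and turn them into the CV_32FC3 depth image used in the code below. It is only a sketch of my pipeline: the topic name is the openni_launch default and may differ, and it assumes pcl_conversions is available.

#include <ros/ros.h>
#include <sensor_msgs/PointCloud2.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <opencv2/core/core.hpp>

void cloudCallback (const sensor_msgs::PointCloud2ConstPtr &msg) {
    // Organized cloud from depth_registered: one XYZ point (in meters) per RGB pixel.
    pcl::PointCloud<pcl::PointXYZ> cloud;
    pcl::fromROSMsg (*msg, cloud);

    // Copy into a CV_32FC3 Mat so it can be indexed per pixel like `depth` below.
    cv::Mat depth (cloud.height, cloud.width, CV_32FC3);
    for (int v = 0; v < (int)cloud.height; ++v)
        for (int u = 0; u < (int)cloud.width; ++u) {
            const pcl::PointXYZ &p = cloud.at (u, v); // NaN where depth is missing
            depth.at<cv::Vec3f> (v, u) = cv::Vec3f (p.x, p.y, p.z);
        }
    // ... run the keypoint extraction / reprojection check on `depth` ...
}

int main (int argc, char **argv) {
    ros::init (argc, argv, "xtion_reprojection_check");
    ros::NodeHandle nh;
    ros::Subscriber sub = nh.subscribe ("/camera/depth_registered/points", 1, cloudCallback);
    ros::spin ();
    return 0;
}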

The steps I use are the following (code below):

1. Extract keypoints from an image.
2. Filter out the keypoints whose 3D coordinates contain NaN.
3. Compute the reprojection error between the projected 3D points and the detected 2D keypoints.


#include "opencv2/core/core.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/nonfree/nonfree.hpp"

using namespace std;
using namespace cv;

// Keep only the keypoints whose registered 3D coordinates are valid (not NaN).
void filterNaNKeyPoints (const Mat &depth, const vector<KeyPoint> &kpts,
                         vector<KeyPoint> &filtered_kpts,
                         vector<Point3f> &filtered_p3d) {
    filtered_kpts.clear();
    filtered_p3d.clear();
    for (size_t i = 0; i < kpts.size(); ++i) {
        Vec3f p3 = depth.at<Vec3f>(cvRound(kpts[i].pt.y), cvRound(kpts[i].pt.x)); //* 1e3; // in meters, convert to millimeters
        if ( !isnan(p3[0]) && !isnan(p3[1]) && !isnan(p3[2]) ) {
            filtered_kpts.push_back (kpts[i]);
            filtered_p3d.push_back (p3); // looks like Vec3f ~= Point3f
        }
    }
}

// RMS distance between the detected 2D points and the projections of their 3D coordinates.
double meanReprojectionError (const vector<Point3f> &p3d, const vector<Point2f> &p2d,
                              const Mat &rvec, const Mat &tvec, const Mat &K) {
    vector<float> d; // empty: no distortion (or distortion already handled)
    vector<Point2f> proj_p2d;
    projectPoints (p3d, rvec, tvec, K, d, proj_p2d);

    double error = norm (Mat(p2d), Mat(proj_p2d), CV_L2);
    int num_points = p3d.size();
    return sqrt(error*error/num_points); // RMS reprojection error in pixels
}

int main () {
    // Retrieve by some way a grayscale image, the corresponding registered depth
    // image (CV_32FC3, one XYZ point per pixel) and the calibrated RGB camera matrix
    Mat image = ...
    Mat depth = ...
    Mat K = ...

    Ptr<FeatureDetector> detector = new SIFT();
    vector<KeyPoint> keypoints;
    detector->detect (image, keypoints);

    // Keep only the keypoints with valid (non-NaN) registered 3D coordinates.
    vector<Point3f> points3d;
    vector<KeyPoint> filtered_keypoints;
    filterNaNKeyPoints (depth, keypoints, filtered_keypoints, points3d);

    // 2D image coordinates of the surviving keypoints, in the same order as points3d.
    vector<Point2f> points2d;
    for ( size_t i = 0; i < filtered_keypoints.size(); ++i ) {
        points2d.push_back (filtered_keypoints[i].pt);
    }

    Mat rvec = Mat::zeros(3, 1, CV_32F);
    Mat tvec = Mat::zeros(3, 1, CV_32F);
    double error = meanReprojectionError (points3d, points2d, rvec, tvec, K);

    cout << "Reprojection error : " << error << endl;
    return 0;
}

Has anyone seen this problem too? Is this error coming from a too-approximate factory registration between the RGB and IR cameras? Any idea how to improve the reprojection error, apart from calibrating the IR/RGB poses? Is there an easy way to calibrate the IR/RGB pose in OpenCV?
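
Regarding the last point, the direction I was considering: detect checkerboard corners in synchronized RGB and IR image pairs (with the IR projector covered so the pattern is visible in the IR image) and feed them to cv::stereoCalibrate to recover the IR-to-RGB rotation and translation. A rough, unverified sketch (same includes and using-directives as the code above, corner collection left out):

// Estimate the rotation and translation between the RGB and IR cameras from
// checkerboard corners seen in both views. objectPoints / rgbCorners / irCorners
// are assumed to be filled beforehand, e.g. with findChessboardCorners on each pair.
void calibrateRgbIrExtrinsics (const vector<vector<Point3f> > &objectPoints, // board corners in checkerboard coords
                               const vector<vector<Point2f> > &rgbCorners,   // corners detected in the RGB images
                               const vector<vector<Point2f> > &irCorners,    // corners detected in the IR images
                               Mat K_rgb, Mat D_rgb, Mat K_ir, Mat D_ir,     // intrinsics from per-camera calibration
                               Size imageSize,
                               Mat &rvec, Mat &T) {
    Mat R, E, F;
    // Default flags keep the given intrinsics fixed, so only R and T are estimated.
    stereoCalibrate (objectPoints, rgbCorners, irCorners,
                     K_rgb, D_rgb, K_ir, D_ir, imageSize,
                     R, T, E, F);
    Rodrigues (R, rvec); // rvec / T could then replace the zero vectors when projecting
                         // points expressed in the IR (depth) frame into the RGB image
}

Whether that actually does better than the factory registration is exactly what I would like to know.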
