Wrong Z-axis value in the translation vector from solvePnP

Hi, I am trying to get both the rotation and translation of a red square relative to a webcam using solvePnP. All the values match my measurements (within ±1 mm of error) except the Z-axis translation, which comes out as exactly half of the expected value.

EDIT:

This is the code that gives me the pose relative to the center of the camera:

void getPose(std::vector<std::vector<cv::Point> > contours)
{
    ///Variables
    cv::Mat rvec;
    cv::Mat tvec;
    std::vector<cv::Point2f> image_points;
    std::vector<cv::Point3f> object_points;
    std::vector<cv::Point> contours_center;
    std::vector<cv::Point2f> contoursf;

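    // Note: centercameraX / centercameraY are assumed to be the image-centre
    // coordinates (the green circle in the linked image), defined outside this function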
    for( int i = 0; i < 4; i++ )
    {
        contours_center.push_back(cv::Point(centercameraX-contours[0][i].x,centercameraY-contours[0][i].y));
    }

    std::cout << contours_center << std::endl;

    cv::Mat(contours_center).copyTo(contoursf);

    ///Model points in 3D
    object_points.push_back(cv::Point3f(25, 25,0));
    object_points.push_back(cv::Point3f(25, -25,0));
    object_points.push_back(cv::Point3f(-25, -25,0));
    object_points.push_back(cv::Point3f(-25, 25,0));

    ///Image points in the 2D image
    image_points.push_back(cv::Point2f(contoursf[0]));
    image_points.push_back(cv::Point2f(contoursf[1]));
    image_points.push_back(cv::Point2f(contoursf[2]));
    image_points.push_back(cv::Point2f(contoursf[3]));

    //Camera matrix obtained from camera calibration
    cv::Mat cam_matrix = cv::Mat(3,3,CV_32FC1,cv::Scalar::all(0));
    cam_matrix.at<float>(0,0) = 338.13;
    cam_matrix.at<float>(1,1) = 320.75;
    cam_matrix.at<float>(2,0) = 162.73;
    cam_matrix.at<float>(2,1) = 96.65;
    cam_matrix.at<float>(2,2) = 1;

    //Distortion coefficients obtained from camera calibration
    cv::Mat dist_matrix = cv::Mat(1,5,CV_32FC1,cv::Scalar::all(0));
    dist_matrix.at<float>(0,0) = 0.0192;
    dist_matrix.at<float>(0,1) = -0.102;
    dist_matrix.at<float>(0,2) = -0.026;
    dist_matrix.at<float>(0,3) = -0.0026;
    dist_matrix.at<float>(0,4) = 0;

    ///Get rotation and translation matrix
    cv::solvePnP(cv::Mat(object_points),cv::Mat(image_points),cam_matrix,dist_matrix,rvec,tvec,false,cv::ITERATIVE);
    std::cout << "Rotation: " << rvec << std::endl << std::endl;
    std::cout << "Translation: " << tvec << std::endl << std::endl << std::endl;
}
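
To narrow this down, a quick sanity check (my own sketch, reusing the variables from the function above) is to reproject object_points with the pose returned by solvePnP and compare against image_points; if the reprojection error is small while Z is still wrong, the intrinsics are a more likely culprit than the point correspondences:

    ///Sanity check: reproject the model points with the estimated pose
    std::vector<cv::Point2f> reprojected;
    cv::projectPoints(object_points, rvec, tvec, cam_matrix, dist_matrix, reprojected);
    double err = 0;
    for( size_t i = 0; i < reprojected.size(); i++ )
        err += cv::norm(image_points[i] - reprojected[i]); //per-point pixel distance
    std::cout << "Mean reprojection error: " << err / reprojected.size() << " px" << std::endl;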

Result: link text The Z distance in the translation vector is 158 mm when it should be 320 mm; the rest of the values are correct.
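
As a back-of-the-envelope cross-check (my own reasoning, using the pinhole model and the values above): with fx ≈ 338.13 and a 50 mm square, the square's side should span roughly 338.13 × 50 / 320 ≈ 53 px at Z = 320 mm, but about 107 px at the reported Z = 158 mm. Measuring the detected square's side in pixels shows which distance the calibrated intrinsics actually agree with.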

The green circle is the center of the image