Hello, I have a depth image and I need to recover the camera pose matrix from it. I suspect the 2D and 3D points I build for solvePnP are wrong.
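For context, the back-projection I use is the standard pinhole model, where (u, v) is the pixel column/row, Z is the depth value, and fx, fy, cx, cy come from the intrinsic matrix:

    X = (u - cx) * Z / fx
    Y = (v - cy) * Z / fy
    Z = depth(u, v)

This is what the function below is meant to implement: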
#include <opencv2/opencv.hpp>
#include <vector>

using namespace std;

void getWorldAndImageCoordinates(cv::Mat& image, vector<cv::Point3d>& world_coords,
                                 vector<cv::Point2d>& pixel_coords, cv::Mat& IntrinsicMatrix)
{
    float FocalInvertedX = 1 / IntrinsicMatrix.at<float>(0, 0); // 1/fx
    float FocalInvertedY = 1 / IntrinsicMatrix.at<float>(1, 1); // 1/fy

    cv::Point3d newPoint3D;
    cv::Point2d newPoint2D;

    for (int i = 0; i < image.rows; i++)
    {
        for (int j = 0; j < image.cols; j++)
        {
            float depthValue = image.at<float>(i, j);

            // skip NaN (x != x), zero and saturated (1.0) depth values
            if ((depthValue == depthValue) && (depthValue != 0) && (depthValue != 1))
            {
                // back-project the pixel (j, i) to a 3D point in camera coordinates
                newPoint3D.z = depthValue;
                newPoint3D.x = (j - IntrinsicMatrix.at<float>(0, 2)) * newPoint3D.z * FocalInvertedX;
                newPoint3D.y = (i - IntrinsicMatrix.at<float>(1, 2)) * newPoint3D.z * FocalInvertedY;
                world_coords.push_back(newPoint3D);

                // 2D point: pixel coordinates shifted by the principal point
                newPoint2D.x = j - IntrinsicMatrix.at<float>(0, 2);
                newPoint2D.y = i - IntrinsicMatrix.at<float>(1, 2);
                pixel_coords.push_back(newPoint2D);
            }
        }
    }
}
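IntrinsicMatrix is a 3x3 CV_32F matrix from my calibration. The numbers below are only placeholders to show the layout, not my real values:

    // [ fx  0  cx ]
    // [  0 fy  cy ]
    // [  0  0   1 ]
    cv::Mat IntrinsicMatrix = (cv::Mat_<float>(3, 3) <<
        525.0f,   0.0f, 319.5f,
          0.0f, 525.0f, 239.5f,
          0.0f,   0.0f,   1.0f);

Then I call it like this: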
// load the depth image without altering its bit depth / channel count
cv::Mat depthImage = cv::imread(name, CV_LOAD_IMAGE_ANYDEPTH | CV_LOAD_IMAGE_ANYCOLOR);

vector<cv::Point3d> world_coords;
vector<cv::Point2d> pixel_coords;
getWorldAndImageCoordinates(depthImage, world_coords, pixel_coords, IntrinsicMatrix);

// no lens distortion
cv::Mat distCoeffs = (cv::Mat_<double>(1, 5) << 0, 0, 0, 0, 0);

cv::Mat rotation_vector, translation_vector;
solvePnP(world_coords, pixel_coords, IntrinsicMatrix, distCoeffs,
         rotation_vector, translation_vector, false, 0); // useExtrinsicGuess = false, iterative flag

// convert the rotation vector to a rotation matrix (world -> camera)
cv::Mat Rt;
cv::Rodrigues(rotation_vector, Rt);

// invert the extrinsics: camera position in world coordinates is -R^T * t
cv::Mat R;
cv::transpose(Rt, R);
cv::Mat coords = -R * translation_vector;
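If it matters, this is how I plan to assemble the full 4x4 camera-to-world pose from R and coords (untested sketch):

    // camera-to-world pose: rotation R = Rt^T, translation = camera centre (-R * tvec)
    cv::Mat pose = cv::Mat::eye(4, 4, CV_64F);
    R.copyTo(pose(cv::Rect(0, 0, 3, 3)));      // top-left 3x3: rotation
    coords.copyTo(pose(cv::Rect(3, 0, 1, 3))); // last column: camera position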