Projecting points using OpenCV
I'm trying to:
perform ray–plane intersection against several points given in world coordinates; for each point that is intersected, I then try to project it from world coordinates into image coordinates — but instead of pixel values I get points in the range 0.4, 0.1, 0.5, etc.
Here is what I'm doing, hope that you spot an error
// Build the 3x3 camera intrinsic matrix K:
//   [ fx  0  cx ]
//   [  0 fy  cy ]
//   [  0  0   1 ]
Mat cameraIntrinsics( 3, 3, CV_32F );
cameraIntrinsics.at<float>( 0, 0 ) = 1.6003814935684204;
cameraIntrinsics.at<float>( 0, 1 ) = 0;
cameraIntrinsics.at<float>( 0, 2 ) = -0.0021958351135253906;
cameraIntrinsics.at<float>( 1, 0 ) = 0;
cameraIntrinsics.at<float>( 1, 1 ) = 1.6003814935684204;
cameraIntrinsics.at<float>( 1, 2 ) = -0.0044271680526435375;
cameraIntrinsics.at<float>( 2, 0 ) = 0;
cameraIntrinsics.at<float>( 2, 1 ) = 0;
cameraIntrinsics.at<float>( 2, 2 ) = 1;
// NOTE(review): fx = fy ~ 1.6 and cx, cy ~ 0 look like NORMALIZED intrinsics
// (focal length in units of the sensor/image size), not pixel units. Feeding
// this K to projectPoints yields coordinates in roughly [-1, 1] / [0, 1],
// which matches the reported "0.4, 0.1, 0.5" outputs. Multiply fx, cx by the
// image width and fy, cy by the image height (in pixels) before projecting —
// TODO confirm against the source of this calibration.
// K^-1 is used below to back-project pixel-space corners into camera rays.
Mat invCameraIntrinsics = cameraIntrinsics.inv();
// Lift each detected 2D corner onto the z = -1 plane (homogeneous image
// coordinates) and build a camera-space viewing ray through it from the
// camera origin.
std::vector<cv::Point3f> points3D;
std::vector<Ray> rays;
for ( const auto& corner : corners )
{
    const cv::Point3f pt( corner.x, corner.y, -1.0f );
    points3D.push_back( pt );

    Ray ray;
    ray.origin    = Vec3f( 0, 0, 0 );
    ray.direction = Vec3f( pt.x, pt.y, pt.z );
    rays.push_back( ray );
}
// Back-project the homogeneous corner points through K^-1 into camera space.
std::vector<cv::Point3f> pointsTransformed3D;
cv::transform( points3D, pointsTransformed3D, invCameraIntrinsics );
// Intersect each viewing ray with the plane of normal (0, 1, 0) passing
// through the corresponding back-projected point; collect the hit points.
std::vector<cv::Vec3f> contacts;
for ( int i = 0; i < pointsTransformed3D.size(); i++ )
{
// A point on the target plane (the back-projected corner itself).
Vec3f pt( pointsTransformed3D[i].x, pointsTransformed3D[i].y, pointsTransformed3D[i].z );
cv::Vec3f contact;
// Presumably returns {hit?, ray parameter t} and may write the hit point
// into 'contact' — linePlaneIntersection is defined elsewhere; TODO confirm.
std::pair<bool, double> test = linePlaneIntersection( contact, rays[i].direction, rays[i].origin, Vec3f( 0, 1, 0 ), pt );
if (test.first == true )
{
// NOTE(review): this declaration SHADOWS the 'contact' above; whatever
// linePlaneIntersection wrote into the outer 'contact' is discarded and the
// point is recomputed here as origin + direction * t. If the two values ever
// differ, only this recomputed one is kept — consider removing one of them.
cv::Vec3f contact( rays[i].origin + ( rays[i].direction) * test.second);
contacts.push_back( contact );
}
}
// Camera rotation (world -> camera), hard-coded from an external calibration.
Mat rotationMatrix( 3, 3, CV_32F );
rotationMatrix.at<float>( 0, 0 ) = 0.9115078799790896;
rotationMatrix.at<float>( 0, 1 ) = -0.1883612409043686;
rotationMatrix.at<float>( 0, 2 ) = -0.3656137684237178;
rotationMatrix.at<float>( 1, 0 ) = -0.3046835686704949;
rotationMatrix.at<float>( 1, 1 ) = 0.2878667580409447;
rotationMatrix.at<float>( 1, 2 ) = -0.9079100465339108;
rotationMatrix.at<float>( 2, 0 ) = 0.2762631132059388;
rotationMatrix.at<float>( 2, 1 ) = 0.9389636694462479;
rotationMatrix.at<float>( 2, 2 ) = 0.2050022432604093;
// Convert the 3x3 rotation matrix to the 3x1 Rodrigues rotation vector that
// cv::projectPoints expects below.
cv::Mat rVec( 3, 1, CV_32F ); // Rotation vector
Rodrigues( rotationMatrix, rVec );
// (Removed the unused debug locals 'norm' and 'theta': neither was read
// anywhere, and 'theta' also divided by an incorrect value of pi,
// 3.14567898726, so its result was wrong anyway.)
cv::Mat tVec( 3, 1, CV_32F ); // Translation vector
tVec.at<float>( 0 ) = 21.408294677734375;
tVec.at<float>( 1 ) = 531.1319580078125;
tVec.at<float>( 2 ) = 705.74224853515625;
cv::Mat distCoeffs( 5, 1, CV_32F ); // Distortion vector
distCoeffs.at<float>( 0 ) = 0;
distCoeffs.at<float>( 1 ) = 0;
distCoeffs.at<float>( 2 ) = 0;
distCoeffs.at<float>( 3 ) = 0;
distCoeffs.at<float>( 4 ) = 0;
std::vector<cv::Point2d> projectedPoints;
std::vector < cv::Point3d> ContactPoints;
for ( int i = 0; i < contacts.size(); i++ )
{
cv::Point3d pt;
pt.x = contacts[i][0];
pt.y = contacts[i][1];
pt.z = contacts[i][2];
ContactPoints.push_back( pt );
}
cv::projectPoints( ContactPoints, rVec, tVec, cameraIntrinsics, distCoeffs, projectedPoints );
// Mark every projected point with a filled magenta circle and show the result.
for ( const auto& projected : projectedPoints )
{
    cv::circle( src, projected, 10, cv::Scalar( 255, 0, 255 ), -1 );
}
imshow( "My window", src );
}
}
// Block until a key is pressed so the result window stays on screen.
// (Fixed typo: 'cv:waitKey(0)' parsed as a goto label named 'cv' followed by
// an unqualified waitKey call; the intended call is cv::waitKey.)
cv::waitKey( 0 );
return 0;
}
Got some sample input that we can run through?
@Tetragramm here is the full code: https://pastebin.com/9MKx0uCT — and use this image: https://imgur.com/a/eFzbM