Convert Pose from ARToolkit into OpenCV Screen Coordinates
I have modified the ARToolkit nftSimple application to work with an OpenCV Mat and without OpenGL dependencies. I got the tracking to work successfully, and I am able to extract the pose matrix ARPose.
Code for project :- https://github.com/njanirudh/OpenCV-ARTK
Output
The ARPose is of the format (16 x 1) :
Documentation: `// Position and orientation, column-major order. (position(x,y,z) = {T[12], T[13], T[14]})`
T = 0.833453 0.148213 0.532342 0.000000 -0.055585 0.980959 -0.186091 0.000000
-0.549787 0.125508 0.825822 0.000000 60.233962 -307.641967 -964.580499 1.000000
Problem
I am trying to use this to draw a pink circle on the OpenCV side using the x, y, z values returned from the pose.
According to the documentation in the code
T[12] = 60.233962 , T[13] = -307.641967 , T[14] = -964.580499
The coordinates obtained from the pose matrix do not give the expected screen position. How do I transform the pose matrix from ARToolkit into OpenCV screen coordinates?
static void runDetection(Mat &input_mat)
{ ARUint8 *gARTImage = NULL; gARTImage = (uchar *)input_mat.data; float trackingTrans[3][4] = {0,0,0,0,0,0,0,0,0,0,0,0}; cv::Mat rot_vec = Mat::zeros(1,3,CV_64F), trn_vec = Mat::zeros(1,3,CV_64F) , rot_mat = Mat::zeros(3,3,CV_64F);
if(true){
// NFT results.
static int detectedPage = -2; // -2 Tracking not inited, -1 tracking inited OK, >= 0 tracking online on page.
int i, j, k;
// Grab a video frame.
if (gARTImage != NULL) {
gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
// Run marker detection on frame
if (threadHandle) {
// Perform NFT tracking.
float err;
int ret;
int pageNo;
if( detectedPage == -2 ) {
trackingInitStart( threadHandle, gARTImage );
detectedPage = -1;
}
if( detectedPage == -1 ) {
ret = trackingInitGetResult( threadHandle, trackingTrans, &pageNo);
if( ret == 1 ) {
if (pageNo >= 0 && pageNo < surfaceSetCount) {
ARLOGd("Detected page %d.\n", pageNo);
detectedPage = pageNo;
ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
} else {
ARLOGe("Detected bad page %d.\n", pageNo);
detectedPage = -2;
}
} else if( ret < 0 ) {
ARLOGd("No page detected.\n");
detectedPage = -2;
}
}
if( detectedPage >= 0 && detectedPage < surfaceSetCount) {
if( ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0 ) {
ARLOGd("Tracking lost.\n");
detectedPage = -2;
} else {
ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
}
}
} else {
ARLOGe("Error: threadHandle\n");
detectedPage = -2;
}
// Update markers.
for (i = 0; i < markersNFTCount; i++) {
markersNFT[i].validPrev = markersNFT[i].valid;
if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
markersNFT[i].valid = TRUE;
for (j = 0; j < 3; j++)
for (k = 0; k < 4; k++){
markersNFT[i].trans[j][k] = trackingTrans[j][k];
}
}
else markersNFT[i].valid = FALSE;
if (markersNFT[i].valid) {
// Filter the pose estimate.
if (markersNFT[i].ftmi) {
if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
ARLOGe("arFilterTransMat error with marker %d.\n", i);
}
}
if (!markersNFT[i].validPrev) {
// Marker has become visible, tell any dependent objects.
// --->
}
// We have a new pose, so set that.
arglCameraViewRH((const ARdouble (*)[4])markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR ...