Hello, I'm trying to get a head pose in real time using OpenCV on android according to this tutorial: http://www.learnopencv.com/head-pose-estimation-using-opencv-and-dlib/
This is the source code of the program the author uses in that tutorial: https://github.com/spmallick/dlib/blob/master/examples/webcam_head_pose.cpp
This is my parameter initialization:
public MatOfPoint3f get_3d_model_points()
{
    // 3D reference points of a generic face model (arbitrary model units),
    // in the same order as the 2D landmarks picked in get_2d_image_points:
    // nose tip, chin, left/right eye corner, left/right mouth corner
    // (per the linked tutorial).
    Point3[] faceModel = {
        new Point3(0.0f, 0.0f, 0.0f),          // nose tip (model origin)
        new Point3(0.0f, -330.0f, -65.0f),     // chin
        new Point3(-225.0f, 170.0f, -135.0f),  // left eye outer corner
        new Point3(225.0f, 170.0f, -135.0f),   // right eye outer corner
        new Point3(-150.0f, -150.0f, -125.0f), // left mouth corner
        new Point3(150.0f, -150.0f, -125.0f)   // right mouth corner
    };
    MatOfPoint3f modelPoints = new MatOfPoint3f();
    modelPoints.fromArray(faceModel);
    return modelPoints;
}
public MatOfPoint2f get_2d_image_points(ArrayList<Point> d)
{
    // Landmark indices of the six reference points (dlib 68-landmark
    // numbering, matching the order of get_3d_model_points):
    // 30 nose tip, 8 chin, 36/45 outer eye corners, 48/54 mouth corners.
    final int[] landmarkIndices = {30, 8, 36, 45, 48, 54};
    List<org.opencv.core.Point> imagePointsList =
            new ArrayList<org.opencv.core.Point>(landmarkIndices.length);
    for (int idx : landmarkIndices) {
        Point lm = d.get(idx);
        imagePointsList.add(new org.opencv.core.Point(lm.x, lm.y));
    }
    MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(imagePointsList);
    return imagePoints;
}
Mat get_camera_matrix(float focal_length, Point center)
{
    // Approximate pinhole intrinsics with fx = fy = focal_length, the
    // principal point at 'center', and zero skew:
    //   [ f  0  cx ]
    //   [ 0  f  cy ]
    //   [ 0  0  1  ]
    Mat camera_matrix = Mat.eye(3, 3, CvType.CV_32F);
    // Fill all nine entries row-major in a single put() call.
    camera_matrix.put(0, 0,
            focal_length, 0.0,          center.x,
            0.0,          focal_length, center.y,
            0.0,          0.0,          1.0);
    return camera_matrix;
}
This is my code:
// Build the 3D model / 2D landmark correspondences and camera intrinsics.
MatOfPoint3f model_points = get_3d_model_points();
MatOfPoint2f image_points = get_2d_image_points(landmarks);
// Common approximation: focal length ~ image height in pixels,
// principal point at the image centre.
int focal_length = mCroppedBitmap.getHeight();
Mat camera_matrix = get_camera_matrix(focal_length,
        new Point(mCroppedBitmap.getWidth() / 2, mCroppedBitmap.getHeight() / 2));
// Assume no lens distortion (four zero coefficients).
MatOfDouble dist_coeffs = new MatOfDouble(Mat.zeros(4, 1, CvType.CV_64FC1));
Mat rvec = new Mat();
Mat tvec = new Mat();
// FIX: pass the same dist_coeffs to solvePnP instead of a fresh empty
// MatOfDouble, so the pose and the later projection use one camera model.
Calib3d.solvePnP(model_points, image_points, camera_matrix, dist_coeffs, rvec, tvec);
// Project a point 1000 model units in front of the nose tip (along +Z)
// to draw the head-direction ray.
List<Point3> noseEndPointList = new ArrayList<Point3>(1);
noseEndPointList.add(new Point3(0, 0, 1000.0));
MatOfPoint3f nose = new MatOfPoint3f();
nose.fromList(noseEndPointList);
MatOfPoint2f nose2 = new MatOfPoint2f();
// FIX: in this OpenCV Android build the 6-argument projectPoints binding
// (projectPoints_11 in the stack trace) dispatches to
// cv::fisheye::projectPoints, whose parameter order is
// (objectPoints, imagePoints, rvec, tvec, K, D) — so camera_matrix (3x3,
// 9 elements) lands in the tvec slot and trips the
// "_tvec.total() * _tvec.channels() == 3" assertion. Calling the
// 8-argument overload (explicit jacobian and aspectRatio) selects the
// standard cv::projectPoints instead.
Calib3d.projectPoints(nose, rvec, tvec, camera_matrix, dist_coeffs, nose2, new Mat(), 0);
This is the error I get:
OpenCV Error: Assertion failed (_tvec.total() * _tvec.channels() == 3 && (_tvec.depth() == CV_32F || _tvec.depth() == CV_64F)) in void cv::fisheye::projectPoints(cv::InputArray, cv::OutputArray, cv::InputArray, cv::InputArray, cv::InputArray, cv::InputArray, double, cv::OutputArray), file /home/maksim/workspace/android-pack/opencv/modules/calib3d/src/fisheye.cpp, line 77
11-02 22:50:28.403 26972-27078/com.tzutalin.dlibtest E/org.opencv.calib3d: calib3d::projectPoints_11() caught cv::Exception: /home/maksim/workspace/android-pack/opencv/modules/calib3d/src/fisheye.cpp:77: error: (-215) _tvec.total() * _tvec.channels() == 3 && (_tvec.depth() == CV_32F || _tvec.depth() == CV_64F) in function void cv::fisheye::projectPoints(cv::InputArray, cv::OutputArray, cv::InputArray, cv::InputArray, cv::InputArray, cv::InputArray, double, cv::OutputArray)
Thanks.