I'm researching the field of video stabilization, and I implemented an application using OpenCV.
My progress so far:
Surf points extraction
Matching
estimateRigidTransform
warpAffine
But the resulting video is not stable. Can anyone help me with this problem or provide links to some source code I could use to improve it?
Here is my code:
// --- Capture / feature pipeline setup -----------------------------------
// Opens the input video, grabs one frame to learn the geometry, takes the
// SECOND frame as the fixed registration reference, precomputes its SURF
// keypoints/descriptors, and opens the output writer and display windows.
VideoCapture cap ("test.avi");
Mat currImg, colorImg, outImg, grayImg, backupColorImg;
// NOTE(review): the five parameters below look like leftovers from a
// goodFeaturesToTrack/KLT experiment — nothing in the visible code uses
// them. Kept in case later (unseen) code references them.
int winSize = 20;
int maxCorners = 200;
double qualityLevel = 0.01;
double minDistance = 5.0;
int blockSize = 3;
int frameW, frameH;
// First frame: used only to learn the frame size (and seed the buffers).
cap.read(colorImg);
cvtColor(colorImg, grayImg, CV_BGR2GRAY);
currImg = grayImg.clone();
outImg = colorImg.clone();
int fps = 25;
frameW = grayImg.cols;
frameH = grayImg.rows;
// Second frame: the fixed reference every subsequent frame is warped onto.
Mat ref;
cap.read(ref);
cvtColor(ref, ref, CV_BGR2GRAY);
SurfDescriptorExtractor extractor;
int minHessian = 400;
vector<KeyPoint> keyPointRef;
SurfFeatureDetector detector(minHessian);
Mat descriptorRef;
// FIX: ref(Range(0,ref.rows), Range::all()) selected the full image — a
// no-op ROI — so detect on `ref` directly.
detector.detect(ref, keyPointRef);
extractor.compute(ref, keyPointRef, descriptorRef);
// FIX: the writer was opened with isColor=false while the loop writes the
// color `outImg`; that mismatch produces a broken/unreadable output file
// with most backends. Also use cv::Size instead of the legacy C cvSize.
// (fourcc 0 = uncompressed / backend prompt; consider CV_FOURCC('M','J','P','G').)
VideoWriter writeVideo ("result.avi", 0, fps, Size(frameW, frameH), true);
namedWindow("Stabilize", 0);
namedWindow("GoodMatches", 0);
// Per-frame stabilization loop: detect SURF features on each new frame,
// match them against the fixed reference frame, estimate a global motion
// model from the matched points, and warp the frame onto the reference.
// NOTE(review): registering every frame against a single reference frame
// only works while the scene still overlaps that frame; production
// stabilizers estimate frame-to-frame motion and smooth the accumulated
// trajectory instead — likely the root cause of the "not stable" result.
while(1)
{
// Grab the next color frame; stop cleanly at end of stream.
bool bScuccess = cap.read(colorImg);
if (!bScuccess)
{
cout<< "Cannot read the frame form video file";
break;
}
cvtColor(colorImg,grayImg,CV_BGR2GRAY);
currImg = grayImg.clone();
backupColorImg = colorImg.clone();
// Detect keypoints and compute SURF descriptors on the current gray frame.
vector<KeyPoint> keyPointCurr;
Mat descriptorCurr;
detector.detect(currImg,keyPointCurr);
extractor.compute(currImg,keyPointCurr,descriptorCurr);
// Nearest-neighbour matching: query = current frame, train = reference.
FlannBasedMatcher matcher;
vector<DMatch> matches;
matcher.match(descriptorCurr,descriptorRef,matches);
// Scan descriptor distances; minDist feeds the (commented-out) adaptive
// threshold below, maxDist is computed but otherwise unused.
double maxDist = 0, minDist = 100;
for(int i = 0; i < descriptorCurr.rows; i++)
{
double dist = matches[i].distance;
if(dist < minDist) minDist = dist;
if(dist > maxDist) maxDist = dist;
}
// Keep only matches below a fixed descriptor-distance threshold (0.2).
// NOTE(review): a fixed absolute threshold can leave good_matches almost
// empty on low-texture frames; the adaptive 2*minDist rule was tried once.
vector<DMatch>good_matches;
for (int i = 0; i < descriptorCurr.rows; i++)
{
//if (matches[i].distance <= max(2*minDist, 0.02))
if(matches[i].distance < 0.2)
{
good_matches.push_back(matches[i]);
}
}
// Collect matched point pairs: queryIdx indexes the current frame's
// keypoints, trainIdx indexes the reference frame's keypoints.
vector<Point2f>curPoint;
vector<Point2f>refPoint;
for (int i = 0; i < good_matches.size(); i++)
{
curPoint.push_back(keyPointCurr[good_matches[i].queryIdx].pt);
refPoint.push_back(keyPointRef[good_matches[i].trainIdx].pt);
}
// Visualize the surviving matches for debugging.
Mat imgMatches;
drawMatches(currImg,keyPointCurr,ref,keyPointRef,good_matches,imgMatches,Scalar::all(-1),Scalar::all(-1),vector<char>(),DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
imshow("GoodMatches",imgMatches);
//Mat transformMatrix = estimateGlobalMotionRobust(curPoint,refPoint, 3);
Mat transformMatrix = estimateRigidTransform(curPoint,refPoint ,false); // false = similarity (rotation + translation + uniform scale), NOT shear-free "rigid only"
// NOTE(review): estimateRigidTransform returns an EMPTY Mat when it cannot
// find a model (e.g. fewer than ~3 usable point pairs); warpAffine on an
// empty matrix will assert/crash — transformMatrix.empty() should be
// checked before warping. TODO confirm against the OpenCV docs in use.
cout << transformMatrix << endl;
// Warp the current color frame onto the reference frame's coordinates.
warpAffine(colorImg,outImg,transformMatrix,Size(frameW,frameH));
//warpPerspective(colorImg,outImg,transformMatrix,Size(frameW,frameH));
writeVideo.write(outImg);
imshow("Input",colorImg);
imshow("Stabilize",outImg);
// ESC exits the loop.
if(waitKey(20) == 27)
{
cout<<"ESC key is pressed by user" <<endl;
break;
}