I am working on video stabilization. Right now I compute the optical flow between two consecutive frames with calcOpticalFlowPyrLK in OpenCV, then use estimateGlobalMotionRobust to get the global motion parameters and warp the previous frame onto the new frame. The result is not good: the output is not as stable as I want. Several documents I have read say that for stabilization I need to remove the jitter from the global motion parameters, but I do not know how to do that. Some say I should use a Kalman filter, others a low-pass filter, and I do not know how to apply either of them (I put my rough understanding of the low-pass approach as a sketch after my code below). Can you help me?
Here is my code:
VideoWriter writeOutputVideo("out.avi", 0, fps, Size(frameW, frameH), true);
for (;;)
{
    // Grab the next frame and convert it to grayscale.
    if (!capture.read(colorImg))
        break;
    cvtColor(colorImg, grayImg, CV_BGR2GRAY);
    prevImg = currImg.clone();
    currImg = grayImg.clone();
    backupColorImg = colorImg.clone();
    if (prevImg.empty())   // nothing to track against on the very first frame
        continue;

    // Detect good features in the previous frame and refine them to sub-pixel accuracy.
    vector<Point2f> cornerPrev, cornerCur;
    cornerPrev.reserve(maxCorners);
    cornerCur.reserve(maxCorners);
    TermCriteria opticalFlowTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.3);
    goodFeaturesToTrack(prevImg, cornerPrev, maxCorners, qualityLevel, minDistance, Mat());
    cornerSubPix(prevImg, cornerPrev, Size(winSize, winSize), Size(-1, -1), opticalFlowTermCriteria);

    // Track the features into the current frame with pyramidal Lucas-Kanade optical flow.
    vector<uchar> features_found;
    vector<float> feature_errors;
    features_found.reserve(maxCorners);
    feature_errors.reserve(maxCorners);
    calcOpticalFlowPyrLK(prevImg, currImg, cornerPrev, cornerCur, features_found, feature_errors,
                         Size(winSize, winSize), 5, opticalFlowTermCriteria, 0);

    // Estimate the global (camera) motion between the two frames; motion model 3 is AFFINE.
    //Mat transformMatrix = estimateGlobalMotionLeastSquares(cornerPrev, cornerCur, AFFINE, 0);
    //Mat transformMatrix = findHomography(cornerPrev, cornerCur, 0, 3.0, noArray());
    Mat transformMatrix = estimateGlobalMotionRobust(cornerPrev, cornerCur, 3, RansacParams::affine2dMotionStd(), 0, 0);

    // Warp the current color frame with the inverse of the estimated motion and write it out.
    warpPerspective(backupColorImg, outImg, transformMatrix, Size(frameW, frameH),
                    INTER_NEAREST | WARP_INVERSE_MAP, BORDER_CONSTANT, 0);
    writeOutputVideo.write(outImg);

    imshow("Input", colorImg);
    imshow("Optical Flow", outImg);
    if (waitKey(1) == 27)   // Esc stops the loop
        break;
}
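From reading around, my rough understanding of the low-pass approach is something like the sketch below: reduce each frame-to-frame affine to (dx, dy, da), integrate these into a camera trajectory, smooth the trajectory with a moving average, and then warp each frame by the difference between the raw and smoothed paths. This is only my own untested sketch, not code from OpenCV: Motion, Trajectory, buildTrajectory, smoothTrajectory, correctionWarp and the radius parameter are all names I made up, and I am just assuming dx, dy, da can be read from the matrix returned by estimateGlobalMotionRobust as shown in the comment.

// Rough sketch only: moving-average (low-pass) smoothing of the camera trajectory.
// Assumption: the affine matrix M from estimateGlobalMotionRobust can be reduced to
//   dx = M.at<float>(0,2);  dy = M.at<float>(1,2);  da = atan2(M.at<float>(1,0), M.at<float>(0,0));
#include <opencv2/opencv.hpp>
#include <cmath>
#include <vector>

struct Motion     { double dx, dy, da; };   // frame-to-frame translation and rotation
struct Trajectory { double x,  y,  a;  };   // accumulated camera path

// Integrate the per-frame motions into a camera trajectory.
std::vector<Trajectory> buildTrajectory(const std::vector<Motion>& motions)
{
    std::vector<Trajectory> traj;
    double x = 0, y = 0, a = 0;
    for (const Motion& m : motions)
    {
        x += m.dx;  y += m.dy;  a += m.da;
        traj.push_back({x, y, a});
    }
    return traj;
}

// Moving-average smoothing over a window of (2*radius + 1) frames -- the "low-pass filter" step.
std::vector<Trajectory> smoothTrajectory(const std::vector<Trajectory>& traj, int radius)
{
    std::vector<Trajectory> smoothed(traj.size());
    for (int i = 0; i < (int)traj.size(); ++i)
    {
        double sx = 0, sy = 0, sa = 0;
        int n = 0;
        for (int j = -radius; j <= radius; ++j)
        {
            int k = i + j;
            if (k < 0 || k >= (int)traj.size()) continue;
            sx += traj[k].x;  sy += traj[k].y;  sa += traj[k].a;
            ++n;
        }
        smoothed[i] = { sx / n, sy / n, sa / n };
    }
    return smoothed;
}

// Warp that moves frame i from the raw camera path onto the smoothed path.
cv::Mat correctionWarp(const Motion& m, const Trajectory& raw, const Trajectory& smooth)
{
    double dx = m.dx + (smooth.x - raw.x);
    double dy = m.dy + (smooth.y - raw.y);
    double da = m.da + (smooth.a - raw.a);
    cv::Mat T = (cv::Mat_<double>(2, 3) <<
                 std::cos(da), -std::sin(da), dx,
                 std::sin(da),  std::cos(da), dy);
    return T;   // apply with: warpAffine(frame, stabilizedFrame, T, frame.size());
}

If I understand correctly, this needs either a second pass over the video or a delay of radius frames, and a Kalman filter would be the one-pass alternative that filters dx, dy, da as they arrive. Is that the right idea, and how would I plug either of them into my loop above?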