c++ - Video stabilization pipeline. I certainly missed something

I'm trying to implement video stabilization. I'm a beginner in this field, but after reviewing lots of posts, I chose to follow these steps:

Each time, I work with two frames: the current one and another one, separated by one frame (frames 1 and 3, then 3 and 5, and so on). A condensed sketch of these steps follows the list below.

  • Compute goodFeaturesToTrack()
  • Compute the optical flow using calcOpticalFlowPyrLK()
  • Keep only the good points
  • Compute the homography using findHomography()
  • Warp the frame using warpPerspective()
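
In condensed form, the core of these steps looks roughly like this (a trimmed-down sketch of my full code further down; frame reading, cropping and drawing are left out):

// Condensed per-pair sketch: refFrame and currentFrame are grayscale frames
// taken two apart, outImg receives the warped result
vector<Point2f> prevPts, currPts;
goodFeaturesToTrack(refFrame, prevPts, 400, 0.01, 5.0);        // 1. detect corners

vector<uchar> status;
vector<float> err;
calcOpticalFlowPyrLK(refFrame, currentFrame, prevPts, currPts, status, err); // 2. track them

vector<Point2f> srcPts, dstPts;                                // 3. keep only the good points
for (size_t i = 0; i < currPts.size(); i++) {
    float dx = prevPts[i].x - currPts[i].x;
    float dy = prevPts[i].y - currPts[i].y;
    if (status[i] && sqrt(dx*dx + dy*dy) < 20) {
        srcPts.push_back(prevPts[i]);
        dstPts.push_back(currPts[i]);
    }
}

if (srcPts.size() > 4) {
    Mat H = findHomography(srcPts, dstPts, CV_RANSAC, 3);      // 4. estimate the homography
    warpPerspective(refFrame, outImg, H, refFrame.size(),      // 5. warp using the inverse map
                    INTER_LINEAR | WARP_INVERSE_MAP, BORDER_CONSTANT, 0);
}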

I certainly missed something, because my video is still not stable. I searched the internet but didn't find a solution. I also looked at different Stack Overflow posts such as "video stabilization using opencv" and "Video Stabilization with OpenCV".

Here is my code:

// OpenCV 2.x-style constants are used below; VideoUtil and Debug are my own helper classes, not shown here
#include <opencv2/opencv.hpp>
using namespace cv;
using namespace std;

//Read the video
VideoCapture cap(path1);
Mat currImg, colorImg, outImg, grayImg, backupColorImg;


cap.read(colorImg);
VideoUtil::geometricalCrop(colorImg,70,0);//Crop the picture
cvtColor(colorImg,grayImg,CV_BGR2GRAY);
currImg = grayImg.clone();// Current picture


Mat refFrame;
cap.read(refFrame);//Frame +1
VideoUtil::geometricalCrop(refFrame,70,0);
cvtColor(refFrame,refFrame,CV_BGR2GRAY); // Frame +1


namedWindow("Stabilize");
namedWindow("GoodMatches");
Mat temp;
Mat currentFrame=refFrame;
for (;;){
    int nbreCurrentFrame=cap.get(CV_CAP_PROP_POS_FRAMES);//Get the number of current frame
    cap.read(colorImg);

    VideoUtil::geometricalCrop(colorImg,70,0);// Crop the video
    Debug::trace("Current frame: " + to_string(nbreCurrentFrame));
    currentFrame.copyTo(refFrame);//Get the reference frame


    cap.read(colorImg);
    VideoUtil::geometricalCrop(colorImg,70,0);
    cvtColor(colorImg,grayImg,CV_BGR2GRAY);
    currentFrame =  grayImg.clone();//Get the current frame


    vector<Point2f> cornersPrevious;// Stores the features of the reference frame
    cornersPrevious.reserve(400);

    vector<Point2f> cornersCurr;// Stores the features of the current frame
    cornersCurr.reserve(400);

    goodFeaturesToTrack(refFrame,cornersPrevious,400,0.01,5.0);
    Debug::trace("Size of feature track : " + to_string(cornersPrevious.size()));


    vector<uchar> featureFound; // status of tracked features
    featureFound.reserve(400);

    vector<float> featureErrors; // error in tracking
    featureErrors.reserve(400);


    // Pyramidal Lucas-Kanade: 20x20 search window, 3 pyramid levels
    calcOpticalFlowPyrLK(refFrame,currentFrame,cornersPrevious,cornersCurr,featureFound,featureErrors,Size(20,20),3,
                         cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.3),0,0.0001);



    // keep the good points
    std::vector<cv::Point2f> initialPoints;
    std::vector<cv::Point2f> trackedPoints;
    for (size_t i = 0; i < cornersCurr.size(); i++){
        double motion = sqrt(pow(cornersPrevious.at(i).x-cornersCurr.at(i).x,2)+pow(cornersPrevious.at(i).y-cornersCurr.at(i).y,2));
        std::cout << "Motion: " << motion << std::endl;
        if (featureFound[i] && motion < 20 ){
            //Keep this point in vector
            initialPoints.push_back(cornersPrevious.at(i));
            trackedPoints.push_back(cornersCurr.at(i));
        }
    }
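    // initialPoints / trackedPoints now hold only the matches that were found
    // and that moved less than 20 pixels between the two frames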


    // draw the tracking effect
    cv::Mat finalImage;
    currentFrame.copyTo(finalImage);

    // for all tracked points
    for (size_t i = 0; i < initialPoints.size(); i++) {
        // draw line and circle
        cv::line(finalImage,
                 initialPoints[i], // initial position
                 trackedPoints[i], // new position
                 cv::Scalar(255,255,255));
        cv::circle(finalImage, trackedPoints[i], 3, cv::Scalar(0,0,255),-1);
    }

    //Compute the homography
    if (initialPoints.size() >4 && trackedPoints.size()>4){
        cv::Mat mask;
        cv::Mat transformMatrix = findHomography(initialPoints,trackedPoints,CV_RANSAC,3);


        warpPerspective(refFrame,outImg,transformMatrix,refFrame.size(), INTER_LINEAR |WARP_INVERSE_MAP,BORDER_CONSTANT ,0);
        namedWindow("Stabilized");
       imshow("stabilizedVideo",outImg);
    }


    namedWindow("Tracking features");
    imshow("tracking",finalImage);




    if (waitKey(27) >= 0) break; // wait 27 ms between frames; stop on any key press

}

Thanks.
