Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

bad results with findCirclesGrid

Hello everybody,

I'm trying to write a program for a camera-calibration using the 4x11 asymmetric circle grid. I use a camera with 3840x2750 pixels. Although the pattern is recognized by the program most of the time, I'm not satisfied with the result of the calibration. Very often the undistorted picture seems to be more distorted than the original one. Nevertheless, Accuracy reaches values between 1 and 5 pixels. Also I'm interested in what is the best flag-setting for the calibrateCamera() command I added a simplified version of my code (especially the "while Camera is grabbing" part, but this is not the problem).

I'm grateful for every hint. Kind regards!

int main(int argv, char** argc)
{

    vector<Mat> savedImages;

    vector<vector<Point2f>> markerCorners, rejectedCandidates;

    patternDimensions = Size(4, 11);
    calibrationSquareDimension = 0.0177f;

    vector<Vec2f> foundPoints;

    // while Camera is grabbing
    {
        switch (character)
        {
        case ' ':                                           
        {
            Mat temp;
            frame.copyTo(temp);

            savedImages.push_back(temp);
            cout << savedImages.size() << ". Bild gespeichert" << endl;
        }
        break;

        case 13:                
            if (savedImages.size() > 9)
            {
                string SaveFileName;
                cameraCalibration(savedImages, CalibrationType, patternDimensions, calibrationSquareDimension, cameraMatrix, distanceCoefficients);
                cin >> SaveFileName;
                saveCameraCalibration(SaveFileName, cameraMatrix, distanceCoefficients);    
                mode = 0;
                return;
            }
            break;

        case 27:                

            return;
            break;

        }
    }

}

void cameraCalibration(vector<Mat> CalibrationImages, int CalibrationMode, Size boardSize, float squareEdgeLength, Mat& cameraMatrix, Mat& distanceCoefficients)
{
    vector<vector<Point2f>> checkerboardImageSpacePoints;
    getPatternCorners(CalibrationImages, CalibrationMode, checkerboardImageSpacePoints, PostFoundPattern);

    vector<vector<Point3f>> worldSpaceCornerPoints(1);

    createKnownBoardPositions(boardSize, CalibrationMode, squareEdgeLength, worldSpaceCornerPoints[0]);
    worldSpaceCornerPoints.resize(checkerboardImageSpacePoints.size(), worldSpaceCornerPoints[0]);

    vector<Mat> rVectors, tVectors;
    distanceCoefficients = Mat::zeros(8, 1, CV_64F);

    double AccuracyOfCalibration = calibrateCamera(worldSpaceCornerPoints, checkerboardImageSpacePoints, boardSize, cameraMatrix, distanceCoefficients, rVectors, tVectors);
}

void getPatternCorners(vector<Mat> images, int CalibrationMode, vector<vector<Point2f>>& allFoundCorners, bool showResults = false)
{
    SimpleBlobDetector::Params parameters;
    parameters.maxArea = 100000;
    parameters.minArea = 500;
    parameters.filterByColor = false;
    parameters.minDistBetweenBlobs = 25;
    Ptr<FeatureDetector> blobDetector = SimpleBlobDetector::create(parameters);

    for (vector<Mat>::iterator iter = images.begin(); iter != images.end(); iter++)
    {
        bool found = false;
        vector<Point2f> pointBuf;

        found = findCirclesGrid(*iter, patternDimensions, pointBuf, CALIB_CB_ASYMMETRIC_GRID | CALIB_CB_CLUSTERING, blobDetector);

        if (found)
        {
            allFoundCorners.push_back(pointBuf);
        }
    }
}

void createKnownBoardPositions(Size boardSize, int CalibrationMode, float squareEdgeLength, vector<Point3f>& corners)
{
        for (int i = 0; i < boardSize.height; i++)
        {
            for (int j = 0; j < boardSize.width; j++)
            {
                corners.push_back(Point3f((2 * j + i % 2)*squareEdgeLength, i*squareEdgeLength, 0.0f));
            }
        }
}

bool saveCameraCalibration(string FileName, Mat cameraMatrix, Mat distanceCoefficients)
{
    ofstream outStream(FileName);
    if (outStream)
    {
        int rows = cameraMatrix.rows;
        int columns = cameraMatrix.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        double value;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                value = cameraMatrix.at<double>(r, c);
                outStream << value << endl;
            }
        }

        rows = distanceCoefficients.rows;
        columns = distanceCoefficients.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                double value = distanceCoefficients.at<double>(r, c);
                outStream << value << endl;
            }
        }

        outStream.close();
        return true;
    }
    return false;
}

bad results with findCirclesGrid

Hello everybody,

I'm trying to write a program for a camera-calibration using the 4x11 asymmetric circle grid. I use a camera with 3840x2750 pixels. Although the pattern is recognized by the program most of the time, I'm not satisfied with the result of the calibration. Very often the undistorted picture seems to be more distorted than the original one. Nevertheless, Accuracy reaches values between 1 and 5 pixels. Also I'm interested in what is the best flag-setting for the calibrateCamera() command I added a simplified version of my code (especially the "while Camera is grabbing" part, but this is not the problem).

To explain my calibration procedure: I printed out the pattern on thick paper and stuck it to the wall. I know that is not perfectly plain, but in my opinion it shouldn't have such an enormous negative effect. For the calibration I take many pictures, over 50, from different angles and covering the whole sensor area by moving the camera as far away from the pattern as the working distance will be. Are there any possible improvements for the code or my method?

original picture: image description

undistorted picture: image description

I'm grateful for every hint. Kind regards!

int main(int argv, char** argc)
{

    vector<Mat> savedImages;

    vector<vector<Point2f>> markerCorners, rejectedCandidates;

    patternDimensions = Size(4, 11);
    calibrationSquareDimension = 0.0177f;

    vector<Vec2f> foundPoints;

    // while Camera is grabbing
    {
        switch (character)
        {
        case ' ':                                           
        {
            Mat temp;
            frame.copyTo(temp);

            savedImages.push_back(temp);
            cout << savedImages.size() << ". Bild gespeichert" << endl;
        }
        break;

        case 13:                
            if (savedImages.size() > 9)
            {
                string SaveFileName;
                cameraCalibration(savedImages, CalibrationType, patternDimensions, calibrationSquareDimension, cameraMatrix, distanceCoefficients);
                cin >> SaveFileName;
                saveCameraCalibration(SaveFileName, cameraMatrix, distanceCoefficients);    
                mode = 0;
                return;
            }
            break;

        case 27:                

            return;
            break;

        }
    }

}

void cameraCalibration(vector<Mat> CalibrationImages, int CalibrationMode, Size boardSize, float squareEdgeLength, Mat& cameraMatrix, Mat& distanceCoefficients)
{
    vector<vector<Point2f>> checkerboardImageSpacePoints;
    getPatternCorners(CalibrationImages, CalibrationMode, checkerboardImageSpacePoints, PostFoundPattern);

    vector<vector<Point3f>> worldSpaceCornerPoints(1);

    createKnownBoardPositions(boardSize, CalibrationMode, squareEdgeLength, worldSpaceCornerPoints[0]);
    worldSpaceCornerPoints.resize(checkerboardImageSpacePoints.size(), worldSpaceCornerPoints[0]);

    vector<Mat> rVectors, tVectors;
    distanceCoefficients = Mat::zeros(8, 1, CV_64F);

    double AccuracyOfCalibration = calibrateCamera(worldSpaceCornerPoints, checkerboardImageSpacePoints, boardSize, cameraMatrix, distanceCoefficients, rVectors, tVectors);
}

void getPatternCorners(vector<Mat> images, int CalibrationMode, vector<vector<Point2f>>& allFoundCorners, bool showResults = false)
{
    SimpleBlobDetector::Params parameters;
    parameters.maxArea = 100000;
    parameters.minArea = 500;
    parameters.filterByColor = false;
    parameters.minDistBetweenBlobs = 25;
    Ptr<FeatureDetector> blobDetector = SimpleBlobDetector::create(parameters);

    for (vector<Mat>::iterator iter = images.begin(); iter != images.end(); iter++)
    {
        bool found = false;
        vector<Point2f> pointBuf;

        found = findCirclesGrid(*iter, patternDimensions, pointBuf, CALIB_CB_ASYMMETRIC_GRID | CALIB_CB_CLUSTERING, blobDetector);

        if (found)
        {
            allFoundCorners.push_back(pointBuf);
        }
    }
}

void createKnownBoardPositions(Size boardSize, int CalibrationMode, float squareEdgeLength, vector<Point3f>& corners)
{
        for (int i = 0; i < boardSize.height; i++)
        {
            for (int j = 0; j < boardSize.width; j++)
            {
                corners.push_back(Point3f((2 * j + i % 2)*squareEdgeLength, i*squareEdgeLength, 0.0f));
            }
        }
}

bool saveCameraCalibration(string FileName, Mat cameraMatrix, Mat distanceCoefficients)
{
    ofstream outStream(FileName);
    if (outStream)
    {
        int rows = cameraMatrix.rows;
        int columns = cameraMatrix.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        double value;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                value = cameraMatrix.at<double>(r, c);
                outStream << value << endl;
            }
        }

        rows = distanceCoefficients.rows;
        columns = distanceCoefficients.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                double value = distanceCoefficients.at<double>(r, c);
                outStream << value << endl;
            }
        }

        outStream.close();
        return true;
    }
    return false;
}

bad How to improve results with findCirclesGrid

Hello everybody,

I'm trying to write a program for a camera-calibration using the 4x11 asymmetric circle grid. I use a camera with 3840x2750 pixels. Although the pattern is recognized by the program most of the time, I'm not satisfied with the result of the calibration. Very often the undistorted picture seems to be more distorted than the original one. Nevertheless, Accuracy reaches values between 1 and 5 pixels. Also I'm interested in what is the best flag-setting for the calibrateCamera() command I added a simplified version of my code (especially the "while Camera is grabbing" part, but this is not the problem).

To explain my calibration procedure: I printed out the pattern on thick paper and stuck it to the wall. I know that is not perfectly plain, but in my opinion it shouldn't have such an enormous negative effect. For the calibration I take many pictures, over 50, from different angles and covering the whole sensor area by moving the camera as far away from the pattern as the working distance will be. Are there any possible improvements for the code or my method?

original picture: image description

undistorted picture: image description

I'm grateful for every hint. Kind regards!

int main(int argv, char** argc)
{

    vector<Mat> savedImages;

    vector<vector<Point2f>> markerCorners, rejectedCandidates;

    patternDimensions = Size(4, 11);
    calibrationSquareDimension = 0.0177f;

    vector<Vec2f> foundPoints;

    // while Camera is grabbing
    {
        switch (character)
        {
        case ' ':                                           
        {
            Mat temp;
            frame.copyTo(temp);

            savedImages.push_back(temp);
            cout << savedImages.size() << ". Bild gespeichert" << endl;
        }
        break;

        case 13:                
            if (savedImages.size() > 9)
            {
                string SaveFileName;
                cameraCalibration(savedImages, CalibrationType, patternDimensions, calibrationSquareDimension, cameraMatrix, distanceCoefficients);
                cin >> SaveFileName;
                saveCameraCalibration(SaveFileName, cameraMatrix, distanceCoefficients);    
                mode = 0;
                return;
            }
            break;

        case 27:                

            return;
            break;

        }
    }

}

void cameraCalibration(vector<Mat> CalibrationImages, int CalibrationMode, Size boardSize, float squareEdgeLength, Mat& cameraMatrix, Mat& distanceCoefficients)
{
    vector<vector<Point2f>> checkerboardImageSpacePoints;
    getPatternCorners(CalibrationImages, CalibrationMode, checkerboardImageSpacePoints, PostFoundPattern);

    vector<vector<Point3f>> worldSpaceCornerPoints(1);

    createKnownBoardPositions(boardSize, CalibrationMode, squareEdgeLength, worldSpaceCornerPoints[0]);
    worldSpaceCornerPoints.resize(checkerboardImageSpacePoints.size(), worldSpaceCornerPoints[0]);

    vector<Mat> rVectors, tVectors;
    distanceCoefficients = Mat::zeros(8, 1, CV_64F);

    double AccuracyOfCalibration = calibrateCamera(worldSpaceCornerPoints, checkerboardImageSpacePoints, boardSize, cameraMatrix, distanceCoefficients, rVectors, tVectors);
}

void getPatternCorners(vector<Mat> images, int CalibrationMode, vector<vector<Point2f>>& allFoundCorners, bool showResults = false)
{
    SimpleBlobDetector::Params parameters;
    parameters.maxArea = 100000;
    parameters.minArea = 500;
    parameters.filterByColor = false;
    parameters.minDistBetweenBlobs = 25;
    Ptr<FeatureDetector> blobDetector = SimpleBlobDetector::create(parameters);

    for (vector<Mat>::iterator iter = images.begin(); iter != images.end(); iter++)
    {
        bool found = false;
        vector<Point2f> pointBuf;

        found = findCirclesGrid(*iter, patternDimensions, pointBuf, CALIB_CB_ASYMMETRIC_GRID | CALIB_CB_CLUSTERING, blobDetector);

        if (found)
        {
            allFoundCorners.push_back(pointBuf);
        }
    }
}

void createKnownBoardPositions(Size boardSize, int CalibrationMode, float squareEdgeLength, vector<Point3f>& corners)
{
        for (int i = 0; i < boardSize.height; i++)
        {
            for (int j = 0; j < boardSize.width; j++)
            {
                corners.push_back(Point3f((2 * j + i % 2)*squareEdgeLength, i*squareEdgeLength, 0.0f));
            }
        }
}

bool saveCameraCalibration(string FileName, Mat cameraMatrix, Mat distanceCoefficients)
{
    ofstream outStream(FileName);
    if (outStream)
    {
        int rows = cameraMatrix.rows;
        int columns = cameraMatrix.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        double value;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                value = cameraMatrix.at<double>(r, c);
                outStream << value << endl;
            }
        }

        rows = distanceCoefficients.rows;
        columns = distanceCoefficients.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                double value = distanceCoefficients.at<double>(r, c);
                outStream << value << endl;
            }
        }

        outStream.close();
        return true;
    }
    return false;
}

How to improve calibration results with findCirclesGrid

Hello everybody,

I'm trying to write a program for a camera-calibration using the 4x11 asymmetric circle grid. I use a camera with 3840x2750 pixels. Although the pattern is recognized by the program most of the time, I'm not satisfied with the result of the calibration. Very often the undistorted picture seems to be more distorted than the original one. Nevertheless, Accuracy reaches values between 1 and 5 pixels. Also I'm interested in what is the best flag-setting for the calibrateCamera() command I added a simplified version of my code (especially the "while Camera is grabbing" part, but this is not the problem).

To explain my calibration procedure: I printed out the pattern on thick paper and stuck it to the wall. I know that is not perfectly plain, but in my opinion it shouldn't have such an enormous negative effect. For the calibration I take many pictures, over 50, from different angles and covering the whole sensor area by moving the camera as far away from the pattern as the working distance will be. Are there any possible improvements for the code or my method?

original picture: image description

undistorted picture: image description

I'm grateful for every hint. Kind regards!

int main(int argv, char** argc)
{

    vector<Mat> savedImages;

    vector<vector<Point2f>> markerCorners, rejectedCandidates;

    patternDimensions = Size(4, 11);
    calibrationSquareDimension = 0.0177f;

    vector<Vec2f> foundPoints;

    // while Camera is grabbing
    {
        switch (character)
        {
        case ' ':                                           
        {
            Mat temp;
            frame.copyTo(temp);

            savedImages.push_back(temp);
            cout << savedImages.size() << ". Bild gespeichert" << endl;
        }
        break;

        case 13:                
            if (savedImages.size() > 9)
            {
                string SaveFileName;
                cameraCalibration(savedImages, CalibrationType, patternDimensions, calibrationSquareDimension, cameraMatrix, distanceCoefficients);
                cin >> SaveFileName;
                saveCameraCalibration(SaveFileName, cameraMatrix, distanceCoefficients);    
                mode = 0;
                return;
            }
            break;

        case 27:                

            return;
            break;

        }
    }

}

void cameraCalibration(vector<Mat> CalibrationImages, int CalibrationMode, Size boardSize, float squareEdgeLength, Mat& cameraMatrix, Mat& distanceCoefficients)
{
    vector<vector<Point2f>> checkerboardImageSpacePoints;
    getPatternCorners(CalibrationImages, CalibrationMode, checkerboardImageSpacePoints, PostFoundPattern);

    vector<vector<Point3f>> worldSpaceCornerPoints(1);

    createKnownBoardPositions(boardSize, CalibrationMode, squareEdgeLength, worldSpaceCornerPoints[0]);
    worldSpaceCornerPoints.resize(checkerboardImageSpacePoints.size(), worldSpaceCornerPoints[0]);

    vector<Mat> rVectors, tVectors;
    distanceCoefficients = Mat::zeros(8, 1, CV_64F);

    double AccuracyOfCalibration = calibrateCamera(worldSpaceCornerPoints, checkerboardImageSpacePoints, boardSize, cameraMatrix, distanceCoefficients, rVectors, tVectors);
}

void getPatternCorners(vector<Mat> images, int CalibrationMode, vector<vector<Point2f>>& allFoundCorners, bool showResults = false)
{
    SimpleBlobDetector::Params parameters;
    parameters.maxArea = 100000;
    parameters.minArea = 500;
    parameters.filterByColor = false;
    parameters.minDistBetweenBlobs = 25;
    Ptr<FeatureDetector> blobDetector = SimpleBlobDetector::create(parameters);

    for (vector<Mat>::iterator iter = images.begin(); iter != images.end(); iter++)
    {
        bool found = false;
        vector<Point2f> pointBuf;

        found = findCirclesGrid(*iter, patternDimensions, pointBuf, CALIB_CB_ASYMMETRIC_GRID | CALIB_CB_CLUSTERING, blobDetector);

        if (found)
        {
            allFoundCorners.push_back(pointBuf);
        }
    }
}

void createKnownBoardPositions(Size boardSize, int CalibrationMode, float squareEdgeLength, vector<Point3f>& corners)
{
        for (int i = 0; i < boardSize.height; i++)
        {
            for (int j = 0; j < boardSize.width; j++)
            {
                corners.push_back(Point3f((2 * j + i % 2)*squareEdgeLength, i*squareEdgeLength, 0.0f));
            }
        }
}

bool saveCameraCalibration(string FileName, Mat cameraMatrix, Mat distanceCoefficients)
{
    ofstream outStream(FileName);
    if (outStream)
    {
        int rows = cameraMatrix.rows;
        int columns = cameraMatrix.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        double value;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                value = cameraMatrix.at<double>(r, c);
                outStream << value << endl;
            }
        }

        rows = distanceCoefficients.rows;
        columns = distanceCoefficients.cols;

        outStream << rows << endl;
        outStream << columns << endl;

        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < columns; c++)
            {
                double value = distanceCoefficients.at<double>(r, c);
                outStream << value << endl;
            }
        }

        outStream.close();
        return true;
    }
    return false;
}