Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

chamfer Matching error in implementation

I found an implementation of chamfer matching here, which seems to have an error in this line:

Point& new_point = Point(model_column,model_row);

The error says the following:

"cannot bind non-const lvalue reference of type ‘cv::Point& {aka cv::Point_<int>&}’ to an rvalue of type ‘cv::Point {aka cv::Point_<int>}’ Point& new_point = Point(model_column,model_row);

If I change this line to

Point new_point = Point(model_column,model_row);

The program runs, but the results are not as I expected.

Can someone give me a hint, what the problem is?

Full code:

// Chamfer matching: slides the sparse edge model over a distance-transform
// ("chamfer") image and, for each placement, sums the distance values under
// the model's edge points. Lower scores mean better alignment.
//
// chamfer_image  - float distance transform of the (inverted) query edge map.
// model          - 8-bit edge image of the template; non-zero pixels are model points.
// matching_image - output CV_32FC1 score map of size
//                  (chamfer.rows - model.rows + 1) x (chamfer.cols - model.cols + 1).
//                  The model must fit inside the chamfer image; if both have the
//                  same size the score map is 1x1 and the best match is always (0,0).
void ChamferMatching( Mat& chamfer_image, Mat& model, Mat& matching_image )
{
    // Extract the model points (as they are sparse).
    vector<Point> model_points;
    int image_channels = model.channels();
    for (int model_row=0; (model_row < model.rows); model_row++)
    {
        uchar *curr_point = model.ptr<uchar>(model_row);
        for (int model_column=0; (model_column < model.cols); model_column++)
        {
            if (*curr_point > 0)
            {
                // Store the point directly. The original
                //   Point& new_point = Point(...);
                // tried to bind a non-const lvalue reference to a temporary,
                // which is ill-formed C++ (the reported compile error).
                model_points.push_back(Point(model_column,model_row));
            }
            curr_point += image_channels;
        }
    }
    int num_model_points = (int)model_points.size();
    image_channels = chamfer_image.channels();
    // Try the model in every possible position.
    matching_image = Mat(chamfer_image.rows-model.rows+1, chamfer_image.cols-model.cols+1, CV_32FC1);
    for (int search_row=0; (search_row <= chamfer_image.rows-model.rows); search_row++)
    {
        float *output_point = matching_image.ptr<float>(search_row);
        for (int search_column=0; (search_column <= chamfer_image.cols-model.cols); search_column++)
        {
            float matching_score = 0.0f;
            // Sum the distance-transform values under every model point for this
            // placement. Pixel (y, x) of a C-channel float row lives at
            // row_ptr[x * C]; the original computed search_column + x*C, which
            // is only correct for C == 1 (it happens to work because
            // distanceTransform outputs a single-channel image).
            for (int point_count=0; (point_count < num_model_points); point_count++)
                matching_score += *(chamfer_image.ptr<float>(model_points[point_count].y+search_row)
                                    + (search_column + model_points[point_count].x)*image_channels);
            *output_point = matching_score;
            output_point++;
        }
    }
}

for chamfer image i do:

// Edge detection on the query image (edges become 255, background 0).
Canny( queryImage, edge_image, 100, 200, 3);
// Invert the edge map so edge pixels become 0 and everything else 255 —
// distanceTransform measures the distance to the nearest ZERO pixel.
threshold( edge_image, edge_image, 127, 255, THRESH_BINARY_INV );
// Each pixel of chamfer_image now holds its L2 distance to the nearest edge.
distanceTransform( edge_image, chamfer_image, CV_DIST_L2, 3);

for the model image a simple canny-routine is used:

// The model is the raw Canny edge map of the template (non-zero = model point).
Canny( templateImage, model_edge, 100, 200, 3);

Thank you for your help!

chamfer Matching error in implementation

I found an implementation of chamfer matching here, which seems to have an error in this line:

Point& new_point = Point(model_column,model_row);

The error says the following: -> see beraks comment - thank you!

"cannot bind non-const lvalue reference of type ‘cv::Point& {aka cv::Point_<int>&}’ to an rvalue of type ‘cv::Point {aka cv::Point_<int>}’ Point& new_point = Point(model_column,model_row);

If I change this line to

Point new_point = Point(model_column,model_row);

The program runs, but the results are not as I expected. I translated the image 7 pixels in each direction and still get (0,0) as a match.

Can someone give me a hint, what the problem is?

Full code:

// Returns a copy of src shifted towards the top-left: the region starting at
// column dy / row dx is moved to the origin, and the uncovered bottom/right
// border keeps the fill value `col`.
// NOTE(review): dx appears to act as the ROW offset and dy as the COLUMN
// offset (they are passed as Rect(dy, dx, ...)) — confirm the intended naming.
cv::Mat imgTranslate(cv::Mat src, int col,  int dx, int dy)
{
    const int copied_width  = src.cols - dy;
    const int copied_height = src.rows - dx;
    cv::Mat shifted(src.size(), src.type(), cv::Scalar::all(col));
    const cv::Rect source_roi(dy, dx, copied_width, copied_height);
    const cv::Rect target_roi(0, 0, copied_width, copied_height);
    src(source_roi).copyTo(shifted(target_roi));
    return shifted;
}

// Chamfer matching: slides the sparse edge model over a distance-transform
// ("chamfer") image and, for each placement, sums the distance values under
// the model's edge points. Lower scores mean better alignment.
//
// chamfer_image  - float distance transform of the (inverted) query edge map.
// model          - 8-bit edge image of the template; non-zero pixels are model points.
// matching_image - output CV_32FC1 score map of size
//                  (chamfer.rows - model.rows + 1) x (chamfer.cols - model.cols + 1).
//                  The model must fit inside the chamfer image; if both have the
//                  same size the score map is 1x1 and the best match is always (0,0).
void ChamferMatching( Mat& chamfer_image, Mat& model, Mat& matching_image )
{
    // Extract the model points (as they are sparse).
    vector<Point> model_points;
    int image_channels = model.channels();
    for (int model_row=0; (model_row < model.rows); model_row++)
    {
        uchar *curr_point = model.ptr<uchar>(model_row);
        for (int model_column=0; (model_column < model.cols); model_column++)
        {
            if (*curr_point > 0)
            {
                // Push the point directly; binding a const reference to a
                // temporary and then copying it adds nothing.
                model_points.push_back(Point(model_column,model_row));
            }
            curr_point += image_channels;
        }
    }
    int num_model_points = (int)model_points.size();
    image_channels = chamfer_image.channels();
    // Try the model in every possible position.
    matching_image = Mat(chamfer_image.rows-model.rows+1, chamfer_image.cols-model.cols+1, CV_32FC1);
    for (int search_row=0; (search_row <= chamfer_image.rows-model.rows); search_row++)
    {
        float *output_point = matching_image.ptr<float>(search_row);
        for (int search_column=0; (search_column <= chamfer_image.cols-model.cols); search_column++)
        {
            float matching_score = 0.0f;
            // Sum the distance-transform values under every model point for this
            // placement. Pixel (y, x) of a C-channel float row lives at
            // row_ptr[x * C]; the original computed search_column + x*C, which
            // is only correct for C == 1 (it happens to work because
            // distanceTransform outputs a single-channel image).
            for (int point_count=0; (point_count < num_model_points); point_count++)
                matching_score += *(chamfer_image.ptr<float>(model_points[point_count].y+search_row)
                                    + (search_column + model_points[point_count].x)*image_channels);
            *output_point = matching_score;
            output_point++;
        }
    }
}

for chamfer image i do:


// Demo driver: translates the template by 7 px, builds a chamfer image from
// the translated query, and matches the original template edges against it.
// NOTE(review): this revision's code block is corrupted — a line of the post's
// prose ("for the model image ...") was rendered inside the function body, so
// this snippet does not compile as shown. See the later, clean revision.
int main()
{
    // `img1` is presumably a file path defined elsewhere — TODO confirm.
    Mat templateImage = imread(img1, IMREAD_GRAYSCALE);
    Mat queryImage = imgTranslate(templateImage, 255, 7, 7);

    Mat edge_image, chamfer_image, model_edge;
    Canny( queryImage, edge_image, 100, 200, 3);
 threshold( edge_image, edge_image, 127, 255, THRESH_BINARY_INV );
 distanceTransform( edge_image, chamfer_image, CV_DIST_L2, 3);

for the model image a simple canny-routine is used:

 Canny( templateImage, model_edge, 100, 200, 3);

    Mat resultImage;
    ChamferMatching(chamfer_image, model_edge, resultImage);

    // Query and model have identical sizes, so resultImage is 1x1 and
    // min_loc is necessarily (0,0).
    double min, max;
    cv::Point min_loc, max_loc;
    cv::minMaxLoc(resultImage, &min, &max, &min_loc, &max_loc);

    cout << min_loc << endl;
    return 0;
}

Thank you for your help!

chamfer Matching error in implementation

I found an implementation of chamfer matching here, which seems to have an error in this line:

Point& new_point = Point(model_column,model_row);

-> see beraks comment - thank you!

The program runs, but the results are not as I expected. I translated the image 7 pixels in each direction and still get (0,0) as a match, because the matching image is just 1x1 px. Can someone give me a hint about what the problem is?

// Returns a copy of src shifted towards the top-left: the region starting at
// column dy / row dx is moved to the origin, and the uncovered bottom/right
// border keeps the fill value `col`.
// NOTE(review): dx appears to act as the ROW offset and dy as the COLUMN
// offset (they are passed as Rect(dy, dx, ...)) — confirm the intended naming.
cv::Mat imgTranslate(cv::Mat src, int col,  int dx, int dy)
{
    const int copied_width  = src.cols - dy;
    const int copied_height = src.rows - dx;
    cv::Mat shifted(src.size(), src.type(), cv::Scalar::all(col));
    const cv::Rect source_roi(dy, dx, copied_width, copied_height);
    const cv::Rect target_roi(0, 0, copied_width, copied_height);
    src(source_roi).copyTo(shifted(target_roi));
    return shifted;
}

// Chamfer matching: slides the sparse edge model over a distance-transform
// ("chamfer") image and, for each placement, sums the distance values under
// the model's edge points. Lower scores mean better alignment.
//
// chamfer_image  - float distance transform of the (inverted) query edge map.
// model          - 8-bit edge image of the template; non-zero pixels are model points.
// matching_image - output CV_32FC1 score map of size
//                  (chamfer.rows - model.rows + 1) x (chamfer.cols - model.cols + 1).
//                  The model must fit inside the chamfer image; if both have the
//                  same size the score map is 1x1 and the best match is always (0,0).
void ChamferMatching( Mat& chamfer_image, Mat& model, Mat& matching_image )
{
    // Extract the model points (as they are sparse).
    vector<Point> model_points;
    int image_channels = model.channels();
    for (int model_row=0; (model_row < model.rows); model_row++)
    {
        uchar *curr_point = model.ptr<uchar>(model_row);
        for (int model_column=0; (model_column < model.cols); model_column++)
        {
            if (*curr_point > 0)
            {
                // Push the point directly; binding a const reference to a
                // temporary and then copying it adds nothing.
                model_points.push_back(Point(model_column,model_row));
            }
            curr_point += image_channels;
        }
    }
    int num_model_points = (int)model_points.size();
    image_channels = chamfer_image.channels();
    // Try the model in every possible position.
    matching_image = Mat(chamfer_image.rows-model.rows+1, chamfer_image.cols-model.cols+1, CV_32FC1);
    for (int search_row=0; (search_row <= chamfer_image.rows-model.rows); search_row++)
    {
        float *output_point = matching_image.ptr<float>(search_row);
        for (int search_column=0; (search_column <= chamfer_image.cols-model.cols); search_column++)
        {
            float matching_score = 0.0f;
            // Sum the distance-transform values under every model point for this
            // placement. Pixel (y, x) of a C-channel float row lives at
            // row_ptr[x * C]; the original computed search_column + x*C, which
            // is only correct for C == 1 (it happens to work because
            // distanceTransform outputs a single-channel image).
            for (int point_count=0; (point_count < num_model_points); point_count++)
                matching_score += *(chamfer_image.ptr<float>(model_points[point_count].y+search_row)
                                    + (search_column + model_points[point_count].x)*image_channels);
            *output_point = matching_score;
            output_point++;
        }
    }
}

// Demo driver: translates the template by 7 px, builds a chamfer
// (distance-transform) image from the translated query, and matches the
// original template edges against it.
int main()
{
    // `img1` is presumably a file path defined elsewhere — TODO confirm.
    Mat templateImage = imread(img1, IMREAD_GRAYSCALE);
    // imread returns an empty Mat on failure; Canny would crash on it.
    if (templateImage.empty())
    {
        cout << "Could not read input image" << endl;
        return -1;
    }
    Mat queryImage = imgTranslate(templateImage, 255, 7, 7);

    Mat edge_image, chamfer_image, model_edge;
    Canny( queryImage, edge_image, 100, 200, 3);
    // Invert so edge pixels become 0; distanceTransform then gives, for every
    // pixel, the distance to the nearest edge (the "chamfer" image).
    threshold( edge_image, edge_image, 127, 255, THRESH_BINARY_INV );
    distanceTransform( edge_image, chamfer_image, CV_DIST_L2, 3);

    Canny( templateImage, model_edge, 100, 200, 3);

    Mat resultImage;
    ChamferMatching(chamfer_image, model_edge, resultImage);

    // NOTE: query and model have identical sizes, so resultImage is 1x1 and
    // min_loc is necessarily (0,0). Pad the query (or crop the model) to get
    // a meaningful search range.
    double min, max;
    cv::Point min_loc, max_loc;
    cv::minMaxLoc(resultImage, &min, &max, &min_loc, &max_loc);

    cout << min_loc << endl;
    return 0;
}

chamfer Matching error in implementation

I found an implementation of chamfer matching here, which seems to have an error in this line:

Point& new_point = Point(model_column,model_row);

-> see beraks comment - thank you!

The program runs, but the results are not as i expected. I translated the image 7 Pixels in each direction and still get (0,0) as a match, because the matching image is just 1x1 px. Can someone give me a hint, what the problem is?

I especially don't understand the following line:

        matching_score += (float) *(chamfer_image.ptr<float>(model_points[point_count].y+search_row) +
        search_column + model_points[point_count].x*image_channels);

whole code:

// Returns a copy of src shifted towards the top-left: the region starting at
// column dy / row dx is moved to the origin, and the uncovered bottom/right
// border keeps the fill value `col`.
// NOTE(review): dx appears to act as the ROW offset and dy as the COLUMN
// offset (they are passed as Rect(dy, dx, ...)) — confirm the intended naming.
cv::Mat imgTranslate(cv::Mat src, int col,  int dx, int dy)
{
    const int copied_width  = src.cols - dy;
    const int copied_height = src.rows - dx;
    cv::Mat shifted(src.size(), src.type(), cv::Scalar::all(col));
    const cv::Rect source_roi(dy, dx, copied_width, copied_height);
    const cv::Rect target_roi(0, 0, copied_width, copied_height);
    src(source_roi).copyTo(shifted(target_roi));
    return shifted;
}

// Chamfer matching: slides the sparse edge model over a distance-transform
// ("chamfer") image and, for each placement, sums the distance values under
// the model's edge points. Lower scores mean better alignment.
//
// chamfer_image  - float distance transform of the (inverted) query edge map.
// model          - 8-bit edge image of the template; non-zero pixels are model points.
// matching_image - output CV_32FC1 score map of size
//                  (chamfer.rows - model.rows + 1) x (chamfer.cols - model.cols + 1).
//                  The model must fit inside the chamfer image; if both have the
//                  same size the score map is 1x1 and the best match is always (0,0).
void ChamferMatching( Mat& chamfer_image, Mat& model, Mat& matching_image )
{
    // Extract the model points (as they are sparse).
    vector<Point> model_points;
    int image_channels = model.channels();
    for (int model_row=0; (model_row < model.rows); model_row++)
    {
        uchar *curr_point = model.ptr<uchar>(model_row);
        for (int model_column=0; (model_column < model.cols); model_column++)
        {
            if (*curr_point > 0)
            {
                // Push the point directly; binding a const reference to a
                // temporary and then copying it adds nothing.
                model_points.push_back(Point(model_column,model_row));
            }
            curr_point += image_channels;
        }
    }
    int num_model_points = (int)model_points.size();
    image_channels = chamfer_image.channels();
    // Try the model in every possible position.
    matching_image = Mat(chamfer_image.rows-model.rows+1, chamfer_image.cols-model.cols+1, CV_32FC1);
    for (int search_row=0; (search_row <= chamfer_image.rows-model.rows); search_row++)
    {
        float *output_point = matching_image.ptr<float>(search_row);
        for (int search_column=0; (search_column <= chamfer_image.cols-model.cols); search_column++)
        {
            float matching_score = 0.0f;
            // Sum the distance-transform values under every model point for this
            // placement. Pixel (y, x) of a C-channel float row lives at
            // row_ptr[x * C]; the original computed search_column + x*C, which
            // is only correct for C == 1 (it happens to work because
            // distanceTransform outputs a single-channel image).
            for (int point_count=0; (point_count < num_model_points); point_count++)
                matching_score += *(chamfer_image.ptr<float>(model_points[point_count].y+search_row)
                                    + (search_column + model_points[point_count].x)*image_channels);
            *output_point = matching_score;
            output_point++;
        }
    }
}

// Demo driver: translates the template by 7 px, builds a chamfer
// (distance-transform) image from the translated query, and matches the
// original template edges against it.
int main()
{
    // `img1` is presumably a file path defined elsewhere — TODO confirm.
    Mat templateImage = imread(img1, IMREAD_GRAYSCALE);
    // imread returns an empty Mat on failure; Canny would crash on it.
    if (templateImage.empty())
    {
        cout << "Could not read input image" << endl;
        return -1;
    }
    Mat queryImage = imgTranslate(templateImage, 255, 7, 7);

    Mat edge_image, chamfer_image, model_edge;
    Canny( queryImage, edge_image, 100, 200, 3);
    // Invert so edge pixels become 0; distanceTransform then gives, for every
    // pixel, the distance to the nearest edge (the "chamfer" image).
    threshold( edge_image, edge_image, 127, 255, THRESH_BINARY_INV );
    distanceTransform( edge_image, chamfer_image, CV_DIST_L2, 3);

    Canny( templateImage, model_edge, 100, 200, 3);

    Mat resultImage;
    ChamferMatching(chamfer_image, model_edge, resultImage);

    // NOTE: query and model have identical sizes, so resultImage is 1x1 and
    // min_loc is necessarily (0,0). Pad the query (or crop the model) to get
    // a meaningful search range.
    double min, max;
    cv::Point min_loc, max_loc;
    cv::minMaxLoc(resultImage, &min, &max, &min_loc, &max_loc);

    cout << min_loc << endl;
    return 0;
}

chamfer Matching error in implementation

I found an implementation of chamfer matching here, which seems to have an error in this line:

Point& new_point = Point(model_column,model_row);

-> see beraks comment - thank you!

The program runs, but the results are not as I expected. I translated the image 7 pixels in each direction and still get (0,0) as a match, because the matching image is just 1x1 px.

I would divide the matching part into the following steps: 1. Model points from the Canny output are stored in a vector. 2. A matching space is created — if the model dimensions are subtracted, does this mean that the template has to fit on the image? 3. For every template point the value of the distance transform is added to a matching score. This is where I especially don't understand the following line:

        matching_score += (float) *(chamfer_image.ptr<float>(model_points[point_count].y+search_row) +
        search_column + model_points[point_count].x*image_channels);

Thank you for your help:

whole code:

// Returns a copy of src shifted towards the top-left: the region starting at
// column dy / row dx is moved to the origin, and the uncovered bottom/right
// border keeps the fill value `col`.
// NOTE(review): dx appears to act as the ROW offset and dy as the COLUMN
// offset (they are passed as Rect(dy, dx, ...)) — confirm the intended naming.
cv::Mat imgTranslate(cv::Mat src, int col,  int dx, int dy)
{
    const int copied_width  = src.cols - dy;
    const int copied_height = src.rows - dx;
    cv::Mat shifted(src.size(), src.type(), cv::Scalar::all(col));
    const cv::Rect source_roi(dy, dx, copied_width, copied_height);
    const cv::Rect target_roi(0, 0, copied_width, copied_height);
    src(source_roi).copyTo(shifted(target_roi));
    return shifted;
}

// Chamfer matching: slides the sparse edge model over a distance-transform
// ("chamfer") image and, for each placement, sums the distance values under
// the model's edge points. Lower scores mean better alignment.
//
// chamfer_image  - float distance transform of the (inverted) query edge map.
// model          - 8-bit edge image of the template; non-zero pixels are model points.
// matching_image - output CV_32FC1 score map of size
//                  (chamfer.rows - model.rows + 1) x (chamfer.cols - model.cols + 1).
//                  The model must fit inside the chamfer image; if both have the
//                  same size the score map is 1x1 and the best match is always (0,0).
void ChamferMatching( Mat& chamfer_image, Mat& model, Mat& matching_image )
{
    // Extract the model points (as they are sparse).
    vector<Point> model_points;
    int image_channels = model.channels();
    for (int model_row=0; (model_row < model.rows); model_row++)
    {
        uchar *curr_point = model.ptr<uchar>(model_row);
        for (int model_column=0; (model_column < model.cols); model_column++)
        {
            if (*curr_point > 0)
            {
                // Push the point directly; binding a const reference to a
                // temporary and then copying it adds nothing.
                model_points.push_back(Point(model_column,model_row));
            }
            curr_point += image_channels;
        }
    }
    int num_model_points = (int)model_points.size();
    image_channels = chamfer_image.channels();
    // Try the model in every possible position.
    matching_image = Mat(chamfer_image.rows-model.rows+1, chamfer_image.cols-model.cols+1, CV_32FC1);
    for (int search_row=0; (search_row <= chamfer_image.rows-model.rows); search_row++)
    {
        float *output_point = matching_image.ptr<float>(search_row);
        for (int search_column=0; (search_column <= chamfer_image.cols-model.cols); search_column++)
        {
            float matching_score = 0.0f;
            // Sum the distance-transform values under every model point for this
            // placement. Pixel (y, x) of a C-channel float row lives at
            // row_ptr[x * C]; the original computed search_column + x*C, which
            // is only correct for C == 1 (it happens to work because
            // distanceTransform outputs a single-channel image).
            for (int point_count=0; (point_count < num_model_points); point_count++)
                matching_score += *(chamfer_image.ptr<float>(model_points[point_count].y+search_row)
                                    + (search_column + model_points[point_count].x)*image_channels);
            *output_point = matching_score;
            output_point++;
        }
    }
}

// Demo driver: translates the template by 7 px, builds a chamfer
// (distance-transform) image from the translated query, and matches the
// original template edges against it.
int main()
{
    // `img1` is presumably a file path defined elsewhere — TODO confirm.
    Mat templateImage = imread(img1, IMREAD_GRAYSCALE);
    // imread returns an empty Mat on failure; Canny would crash on it.
    if (templateImage.empty())
    {
        cout << "Could not read input image" << endl;
        return -1;
    }
    Mat queryImage = imgTranslate(templateImage, 255, 7, 7);

    Mat edge_image, chamfer_image, model_edge;
    Canny( queryImage, edge_image, 100, 200, 3);
    // Invert so edge pixels become 0; distanceTransform then gives, for every
    // pixel, the distance to the nearest edge (the "chamfer" image).
    threshold( edge_image, edge_image, 127, 255, THRESH_BINARY_INV );
    distanceTransform( edge_image, chamfer_image, CV_DIST_L2, 3);

    Canny( templateImage, model_edge, 100, 200, 3);

    Mat resultImage;
    ChamferMatching(chamfer_image, model_edge, resultImage);

    // NOTE: query and model have identical sizes, so resultImage is 1x1 and
    // min_loc is necessarily (0,0). Pad the query (or crop the model) to get
    // a meaningful search range.
    double min, max;
    cv::Point min_loc, max_loc;
    cv::minMaxLoc(resultImage, &min, &max, &min_loc, &max_loc);

    cout << min_loc << endl;
    return 0;
}

chamfer Matching error in implementation

I found an implementation of chamfer matching here, which seems to have an error in this line:

Point& new_point = Point(model_column,model_row);

-> see beraks comment - thank you!

The program runs, but the results are not as i expected. I translated the image 7 Pixels in each direction and still get (0,0) as a match, because the matching image is just 1x1 px.

I would divide the matching part into the following steps: 1. Model points from the Canny output are stored in a vector. 2. A matching space is created — if the model dimensions are subtracted, does this mean that the template has to fit on the image? 3. For every template point the value of the distance transform is added to a matching score. This is where I especially don't understand the following line:

        matching_score += (float) *(chamfer_image.ptr<float>(model_points[point_count].y+search_row) +
        search_column + model_points[point_count].x*image_channels);

Thank you for your help:

whole code:

// Returns a copy of src shifted towards the top-left: the region starting at
// column dy / row dx is moved to the origin, and the uncovered bottom/right
// border keeps the fill value `col`.
// NOTE(review): dx appears to act as the ROW offset and dy as the COLUMN
// offset (they are passed as Rect(dy, dx, ...)) — confirm the intended naming.
cv::Mat imgTranslate(cv::Mat src, int col,  int dx, int dy)
{
    const int copied_width  = src.cols - dy;
    const int copied_height = src.rows - dx;
    cv::Mat shifted(src.size(), src.type(), cv::Scalar::all(col));
    const cv::Rect source_roi(dy, dx, copied_width, copied_height);
    const cv::Rect target_roi(0, 0, copied_width, copied_height);
    src(source_roi).copyTo(shifted(target_roi));
    return shifted;
}

// Chamfer matching: slides the sparse edge model over a distance-transform
// ("chamfer") image and, for each placement, sums the distance values under
// the model's edge points. Lower scores mean better alignment.
//
// chamfer_image  - float distance transform of the (inverted) query edge map.
// model          - 8-bit edge image of the template; non-zero pixels are model points.
// matching_image - output CV_32FC1 score map of size
//                  (chamfer.rows - model.rows + 1) x (chamfer.cols - model.cols + 1).
//                  The model must fit inside the chamfer image; if both have the
//                  same size the score map is 1x1 and the best match is always (0,0).
void ChamferMatching( Mat& chamfer_image, Mat& model, Mat& matching_image )
{
    // Extract the model points (as they are sparse).
    vector<Point> model_points;
    int image_channels = model.channels();
    for (int model_row=0; (model_row < model.rows); model_row++)
    {
        uchar *curr_point = model.ptr<uchar>(model_row);
        for (int model_column=0; (model_column < model.cols); model_column++)
        {
            if (*curr_point > 0)
            {
                // Push the point directly; binding a const reference to a
                // temporary and then copying it adds nothing.
                model_points.push_back(Point(model_column,model_row));
            }
            curr_point += image_channels;
        }
    }
    int num_model_points = (int)model_points.size();
    image_channels = chamfer_image.channels();
    // Try the model in every possible position.
    matching_image = Mat(chamfer_image.rows-model.rows+1, chamfer_image.cols-model.cols+1, CV_32FC1);
    for (int search_row=0; (search_row <= chamfer_image.rows-model.rows); search_row++)
    {
        float *output_point = matching_image.ptr<float>(search_row);
        for (int search_column=0; (search_column <= chamfer_image.cols-model.cols); search_column++)
        {
            float matching_score = 0.0f;
            // Sum the distance-transform values under every model point for this
            // placement. Pixel (y, x) of a C-channel float row lives at
            // row_ptr[x * C]; the original computed search_column + x*C, which
            // is only correct for C == 1 (it happens to work because
            // distanceTransform outputs a single-channel image).
            for (int point_count=0; (point_count < num_model_points); point_count++)
                matching_score += *(chamfer_image.ptr<float>(model_points[point_count].y+search_row)
                                    + (search_column + model_points[point_count].x)*image_channels);
            *output_point = matching_score;
            output_point++;
        }
    }
}

// Demo driver: translates the template by 7 px, builds a chamfer
// (distance-transform) image from the translated query, and matches the
// original template edges against it.
int main()
{
    // `img1` is presumably a file path defined elsewhere — TODO confirm.
    Mat templateImage = imread(img1, IMREAD_GRAYSCALE);
    // imread returns an empty Mat on failure; Canny would crash on it.
    if (templateImage.empty())
    {
        cout << "Could not read input image" << endl;
        return -1;
    }
    Mat queryImage = imgTranslate(templateImage, 255, 7, 7);

    Mat edge_image, chamfer_image, model_edge;
    Canny( queryImage, edge_image, 100, 200, 3);
    // Invert so edge pixels become 0; distanceTransform then gives, for every
    // pixel, the distance to the nearest edge (the "chamfer" image).
    threshold( edge_image, edge_image, 127, 255, THRESH_BINARY_INV );
    distanceTransform( edge_image, chamfer_image, CV_DIST_L2, 3);

    Canny( templateImage, model_edge, 100, 200, 3);

    Mat resultImage;
    ChamferMatching(chamfer_image, model_edge, resultImage);

    // NOTE: query and model have identical sizes, so resultImage is 1x1 and
    // min_loc is necessarily (0,0). Pad the query (or crop the model) to get
    // a meaningful search range.
    double min, max;
    cv::Point min_loc, max_loc;
    cv::minMaxLoc(resultImage, &min, &max, &min_loc, &max_loc);

    cout << min_loc << endl;
    return 0;
}