Revision history

with two images

#include <opencv2/opencv.hpp>
#include <iostream>

using namespace cv;
using namespace std;

int main( int argc, char** argv )
{

    Mat img1 = imread( "pano3.png", IMREAD_GRAYSCALE );
    Mat img2 = imread( "pano2.png", IMREAD_GRAYSCALE );

    std::vector<cv::KeyPoint> kOrb1;        /*<! ORB keypoints */
    std::vector<cv::KeyPoint> kOrb2;        /*<! ORB keypoints */
    cv::Mat descORB1;                   /*<! Descriptor associated with one of the ORB keypoints */
    cv::Mat descORB2;                   /*<! Descriptor associated with one of the ORB keypoints */
    vector<std::vector<cv::DMatch> > matches;   /*<! Matched descriptors */

    cv::Ptr<cv::Feature2D> b;
    b = cv::ORB::create();


    b->detectAndCompute(img1, Mat(), kOrb1, descORB1);
    b->detectAndCompute(img2, Mat(), kOrb2, descORB2);


    cv::Ptr<cv::DescriptorMatcher> descriptorMatcher = cv::DescriptorMatcher::create("BruteForce");
    matches.clear();
    descriptorMatcher->knnMatch(descORB1, descORB2, matches, 1,Mat());
    vector<Point> p,q;
    Point2f dt;
    int nb=0;
    for (int i=0;i<matches.size();i++)
    {

        if (matches[i][0].distance<100)
            {
            cout << cv::norm(kOrb1[matches[i][0].queryIdx].pt-kOrb2[matches[i][0].trainIdx].pt)<<"\t"<<matches[i][0].distance<<"\n";
            p.push_back(kOrb1[matches[i][0].queryIdx].pt);
            q.push_back(kOrb2[matches[i][0].trainIdx].pt);
            dt+=kOrb1[matches[i][0].queryIdx].pt-kOrb2[matches[i][0].trainIdx].pt;
            nb++;
            }
    }
    Mat_<double> v(3,3);//=findHomography(p, q);// q=v*p; img2=v*img1
    cout<<v<<endl;
    v.at<double>(0,0)=1;v.at<double>(0,1)=0;
    v.at<double>(1,0)=0;v.at<double>(1,1)=1;
    v.at<double>(2,0)=0;v.at<double>(2,1)=0;
    v.at<double>(0,2)=-dt.x/nb;
    v.at<double>(1,2)=-dt.y/nb;
    v.at<double>(2,2)=1;
    vector<Point> ref(4);
    ref[0]=Point(0,0);
    ref[1]=Point(0,img1.rows);
    ref[2]=Point(img1.cols,img1.rows);
    ref[3]=Point(img1.cols,0);
    Mat_<double> pha(3, 1);
    Mat_<double> phb(3, 1);
    double minx=img1.cols,maxx=0;
    double miny=img1.rows,maxy=0;

    for (int i = 0; i < ref.size(); i++)
    {
        cout<<"Point ref"<<i<<"\n";
        pha(0, 0) = ref[i].x; pha(1, 0) = ref[i].y; pha(2, 0) = 1;
        phb=v*pha ;
        if (phb(0,0)<=minx)
            minx=phb(0,0);
        if (phb(1,0)<=miny)
            miny=phb(1,0);
        if (phb(0,0)>=maxx)
            maxx=phb(0,0);
        if (phb(1,0)>=maxy)
            maxy=phb(1,0);

    }
    cout<<minx<<"\t"<<miny<<"\t"<<maxx<<"\t"<<maxy<<"\n";
Mat res(max(img1.rows+int(maxy-miny),img2.rows),max(img1.cols+int(maxx-minx),img1.cols),img1.type());
warpPerspective(img2, res, v,Size(res.cols,res.rows),WARP_INVERSE_MAP);
for (int i=0;i<img1.rows;i++)
{
    unsigned char *ptrSrc=img1.ptr(i);
    unsigned char *ptrDst=res.ptr(i);
    for (int j=0;j<img1.cols;j++,ptrSrc++,ptrDst++)
        *ptrDst=*ptrSrc;
}


imshow("Res",res);
imshow("img1",img1);
imshow("img2",img2);
    waitKey();
return 0;
}
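The double loop at the end just pastes img1, unchanged, into the top-left corner of res (res is always at least as large as img1, given how it is constructed above). Under that same assumption, the paste can also be written with a ROI and copyTo; a minimal sketch:

    // paste img1 into the top-left corner of res, same effect as the pixel loop above
    img1.copyTo(res(Rect(0, 0, img1.cols, img1.rows)));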

with two images

int main( int argc, char** argv )
{

    Mat img1 = imread( "pano3.png", CV_LOAD_IMAGE_GRAYSCALE );
    Mat img2 = imread( "pano2.png", CV_LOAD_IMAGE_GRAYSCALE );

    std::vector<cv::KeyPoint> kOrb1;        /*<! ORB keypoints */
    std::vector<cv::KeyPoint> kOrb2;        /*<! ORB keypoints */
    cv::Mat descORB1;                   /*<! Descriptor associated with one of the ORB keypoints */
    cv::Mat descORB2;                   /*<! Descriptor associated with one of the ORB keypoints */
    vector<std::vector<cv::DMatch> > matches;   /*<! Matched descriptors */

    cv::Ptr<cv::Feature2D> b;
    b = cv::ORB::create(2000);


    b->detectAndCompute(img1, Mat(), kOrb1, descORB1);
    b->detectAndCompute(img2, Mat(), kOrb2, descORB2);


    cv::Ptr<cv::DescriptorMatcher> descriptorMatcher = cv::DescriptorMatcher::create("BruteForce");
    matches.clear();
    descriptorMatcher->knnMatch(descORB1, descORB2, matches, 1, Mat());
    vector<Point> p, q;
    Point2f u;
    int nb = 0;
    double seuil = 0;
    for (int i = 0; i < matches.size(); i++)
        seuil += matches[i][0].distance;

    seuil = seuil / matches.size() * 0.5;
    for (int i = 0; i < matches.size(); i++)
    {
        if (matches[i][0].distance < seuil)
        {
            u += kOrb1[matches[i][0].queryIdx].pt - kOrb2[matches[i][0].trainIdx].pt;
            nb++;
        }
    }
    u = u / nb;
    for (int i = 0; i < matches.size(); i++)
    {
        if (matches[i][0].distance < seuil)
        {
            p.push_back(kOrb1[matches[i][0].queryIdx].pt);
            cout << "Avant " << cv::norm(kOrb1[matches[i][0].queryIdx].pt - kOrb2[matches[i][0].trainIdx].pt) << "\t";
            kOrb2[matches[i][0].trainIdx].pt += u;
            q.push_back(kOrb2[matches[i][0].trainIdx].pt);
            cout << "Apres " << cv::norm(kOrb1[matches[i][0].queryIdx].pt - kOrb2[matches[i][0].trainIdx].pt) << "\t" << matches[i][0].distance << "\n";
        }
    }
    Mat_<double> v(3, 3);//=findHomography(p, q);// q=v*p; img2=v*img1
    Mat h = findHomography(p, q);// q=v*p; img2=v*img1
    cout << h << endl;
    v.at<double>(0, 0) = 1; v.at<double>(0, 1) = 0;
    v.at<double>(1, 0) = 0; v.at<double>(1, 1) = 1;
    v.at<double>(2, 0) = 0; v.at<double>(2, 1) = 0;
    v.at<double>(0, 2) = -u.x;
    v.at<double>(1, 2) = -u.y;
    v.at<double>(2, 2) = 1;
    cout << v << endl;
    vector<Point> ref(4);
    ref[0] = Point(0, 0);
    ref[1] = Point(0, img2.rows);
    ref[2] = Point(img2.cols, img2.rows);
    ref[3] = Point(img2.cols, 0);
    Mat_<double> pha(3, 1);
    Mat_<double> phb(3, 1);
    double minx = img1.cols, maxx = 0;
    double miny = img1.rows, maxy = 0;

    for (int i = 0; i < ref.size(); i++)
    {
        pha(0, 0) = ref[i].x; pha(1, 0) = ref[i].y; pha(2, 0) = 1;
        phb = v * pha;
        cout << "Point ref" << i << "\t" << phb << "\n";
        if (phb(0, 0) <= minx)
            minx = phb(0, 0);
        if (phb(1, 0) <= miny)
            miny = phb(1, 0);
        if (phb(0, 0) >= maxx)
            maxx = phb(0, 0);
        if (phb(1, 0) >= maxy)
            maxy = phb(1, 0);
    }
    cout << minx << "\t" << miny << "\t" << maxx << "\t" << maxy << "\n";
    Mat res(max(img1.rows + int(maxy - miny), img2.rows), max(img1.cols + int(maxx - minx), img1.cols), img1.type());
    if (miny < 0)
    {
        warpPerspective(img2, res, h*v, Size(res.cols, res.rows), WARP_INVERSE_MAP);
        for (int i = 0; i < img1.rows; i++)
        {
            unsigned char *ptrSrc = img1.ptr(i);
            unsigned char *ptrDst = res.ptr(i);
            for (int j = 0; j < img1.cols; j++, ptrSrc++, ptrDst++)
                *ptrDst = *ptrSrc;
        }
    }
    else
    {
        warpPerspective(img1, res, h*v, Size(res.cols, res.rows));
        for (int i = 0; i < img2.rows; i++)
        {
            unsigned char *ptrSrc = img2.ptr(i);
            unsigned char *ptrDst = res.ptr(i);
            for (int j = 0; j < img2.cols; j++, ptrSrc++, ptrDst++)
                *ptrDst = *ptrSrc;
        }
    }

    imshow("Res", res);
    imshow("img1", img1);
    imshow("img2", img2);
    waitKey();
    return 0;
}
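In this version the matches are kept when their distance is below seuil, i.e. half the mean descriptor distance, and the img2 keypoints are shifted by the mean translation u before findHomography. Another common way to filter ORB matches is Lowe's ratio test; a sketch, assuming knnMatch is called with k = 2 instead of the k = 1 used above:

    // ratio-test filtering (assumes k = 2 in knnMatch, unlike the code above)
    vector<std::vector<cv::DMatch> > knn;
    descriptorMatcher->knnMatch(descORB1, descORB2, knn, 2);
    vector<cv::DMatch> good;
    for (size_t i = 0; i < knn.size(); i++)
        if (knn[i].size() == 2 && knn[i][0].distance < 0.75f * knn[i][1].distance)
            good.push_back(knn[i][0]);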

with two images

int main(int argc, char** argv)
{

    Mat img1 = imread("mosaic_3.jpg", CV_LOAD_IMAGE_GRAYSCALE);
    Mat img2 = imread("mosaic_2.jpg", CV_LOAD_IMAGE_GRAYSCALE);

std::vector<cv::KeyPoint> kOrb1;        /*<! ORB keypoints */
std::vector<cv::KeyPoint> kOrb2;        /*<! ORB keypoints */
cv::Mat descORB1;                   /*<! Descriptor associated with one of the ORB keypoints */
cv::Mat descORB2;                   /*<! Descriptor associated with one of the ORB keypoints */
vector<std::vector<cv::DMatch> > matches;   /*<! Matched descriptors */

cv::Ptr<cv::Feature2D> b;
b = cv::ORB::create(2000);


b->detectAndCompute(img1, Mat(), kOrb1, descORB1);
b->detectAndCompute(img2, Mat(), kOrb2, descORB2);


cv::Ptr<cv::DescriptorMatcher> descriptorMatcher = cv::DescriptorMatcher::create("BruteForce");
matches.clear();
descriptorMatcher->knnMatch(descORB1, descORB2, matches, 1, Mat());
vector<Point> p, q;
Point2f u;
int nb = 0;
double seuil = 0;
for (int i = 0; i<matches.size(); i++)
    seuil += matches[i][0].distance;

seuil = seuil / matches.size()*0.5;
for (int i = 0; i<matches.size(); i++)
{

    if (matches[i][0].distance<seuil)
    {
        u += kOrb1[matches[i][0].queryIdx].pt - kOrb2[matches[i][0].trainIdx].pt;
        nb++;
    }
}
u = u / nb;
for (int i = 0; i<matches.size(); i++)
{

    if (matches[i][0].distance<seuil)
    {
        p.push_back(kOrb1[matches[i][0].queryIdx].pt);
        cout << "Avant "<<cv::norm(kOrb1[matches[i][0].queryIdx].pt - kOrb2[matches[i][0].trainIdx].pt) << "\t" ;
        kOrb2[matches[i][0].trainIdx].pt += u ;
        q.push_back(kOrb2[matches[i][0].trainIdx].pt);
        cout << "Apres "<<cv::norm(kOrb1[matches[i][0].queryIdx].pt - kOrb2[matches[i][0].trainIdx].pt) << "\t" << matches[i][0].distance << "\n";
    }
}
Mat_<double> v(3, 3);//=findHomography(p, q);// q=v*p; img2=v*img1
//Mat h=findHomography(p, q);// q=v*p; img2=v*img1
Mat h = Mat::eye(3, 3, CV_64F);
cout << h << endl;
v.at<double>(0, 0) = 1; v.at<double>(0, 1) = 0;
v.at<double>(1, 0) = 0; v.at<double>(1, 1) = 1;
v.at<double>(2, 0) = 0; v.at<double>(2, 1) = 0;
v.at<double>(0, 2) = -u.x ;
v.at<double>(1, 2) = -u.y;
v.at<double>(2, 2) = 1;
cout << v << endl;
vector<Point> ref(4);
ref[0] = Point(0, 0);
ref[1] = Point(0, img2.rows);
ref[2] = Point(img2.cols, img2.rows);
ref[3] = Point(img2.cols, 0);
Mat_<double> pha(3, 1);
Mat_<double> phb(3, 1);
double minx = img1.cols, maxx = 0;
double miny = img1.rows, maxy = 0;

for (int i = 0; i < ref.size(); i++)
{
    pha(0, 0) = ref[i].x; pha(1, 0) = ref[i].y; pha(2, 0) = 1;
    phb = v*pha;
     cout << "Point ref" << i << "\t"<<phb<<"\n";
   if (phb(0, 0) <= minx)
        minx = phb(0, 0);
    if (phb(1, 0) <= miny)
        miny = phb(1, 0);
    if (phb(0, 0) >= maxx)
        maxx = phb(0, 0);
    if (phb(1, 0) >= maxy)
        maxy = phb(1, 0);

}
cout << minx << "\t" << miny << "\t" << maxx << "\t" << maxy << "\n";
Mat res(max(img1.rows + int(maxy - miny), img2.rows), max(img1.cols + int(maxx - minx), img1.cols), img1.type());
if (miny < 0)
{
    warpPerspective(img2, res, v*h, Size(res.cols, res.rows), WARP_INVERSE_MAP);
    for (int i = 0; i<img1.rows; i++)
    {
        unsigned char *ptrSrc = img1.ptr(i);
        unsigned char *ptrDst = res.ptr(i);
        for (int j = 0; j<img1.cols; j++, ptrSrc++, ptrDst++)
            *ptrDst = *ptrSrc;
    }

}
else
{
    warpPerspective(img1, res, h*v, Size(res.cols, res.rows));
    for (int i = 0; i<img2.rows; i++)
    {
        unsigned char *ptrSrc = img2.ptr(i);
        unsigned char *ptrDst = res.ptr(i);
        for (int j = 0; j<img2.cols; j++, ptrSrc++, ptrDst++)
            *ptrDst = *ptrSrc;
    }

}

imshow("Res", res);
imshow("img1", img1);
imshow("img2", img2);
waitKey();
return 0;
}
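A detail that matters here: with WARP_INVERSE_MAP, warpPerspective treats the matrix you pass as the map from destination pixels back to source pixels. So the call above is equivalent (up to interpolation) to warping with the inverse matrix and no flag, assuming v*h is invertible:

    // explicit inverse instead of WARP_INVERSE_MAP, same output as the call above
    Mat m = v*h;                                                   // forward map, as passed above
    warpPerspective(img2, res, m.inv(), Size(res.cols, res.rows));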

with two images

 

int main(int argc, char** argv)
{
    Mat img1 = imread("mosaic_3.jpg", CV_LOAD_IMAGE_GRAYSCALE);
    Mat img2 = imread("mosaic_2.jpg", CV_LOAD_IMAGE_GRAYSCALE);

    std::vector<cv::KeyPoint> kOrb1;        /*<! ORB keypoints */
    std::vector<cv::KeyPoint> kOrb2;        /*<! ORB keypoints */
    cv::Mat descORB1;                   /*<! Descriptor associated with one of the ORB keypoints */
    cv::Mat descORB2;                   /*<! Descriptor associated with one of the ORB keypoints */
    vector<std::vector<cv::DMatch> > matches;   /*<! Matched descriptors */

    cv::Ptr<cv::Feature2D> b;
    b = cv::ORB::create(2000);


    b->detectAndCompute(img1, Mat(), kOrb1, descORB1);
    b->detectAndCompute(img2, Mat(), kOrb2, descORB2);


    cv::Ptr<cv::DescriptorMatcher> descriptorMatcher = cv::DescriptorMatcher::create("BruteForce");
    matches.clear();
    descriptorMatcher->knnMatch(descORB1, descORB2, matches, 1, Mat());
    vector<Point> p, q;
    Point2f u;
    int nb = 0;
    double seuil = 0;
    for (int i = 0; i < matches.size(); i++)
        seuil += matches[i][0].distance;

    seuil = seuil / matches.size() * 0.5;
    double d = 0;
    for (int i = 0; i < matches.size(); i++)
    {
        if (matches[i][0].distance < seuil)
        {
            u += kOrb1[matches[i][0].queryIdx].pt - kOrb2[matches[i][0].trainIdx].pt;
            p.push_back(kOrb1[matches[i][0].queryIdx].pt);
            q.push_back(kOrb2[matches[i][0].trainIdx].pt);
            nb++;
            if (nb > 1)
                d += norm(p[nb - 1] - p[nb - 2]) / norm(q[nb - 1] - q[nb - 2]);
        }
    }
    u = u / nb;
    d = d / (nb - 1);
    Mat img;
    resize(img2, img, Size(0, 0), 1, 1);
    kOrb2.clear();
    Mat descORB;
    b->detectAndCompute(img, Mat(), kOrb2, descORB);
    matches.clear();
    descriptorMatcher->knnMatch(descORB1, descORB, matches, 1, Mat());
    p.clear(); q.clear();
    seuil = 0;
    for (int i = 0; i < matches.size(); i++)
        seuil += matches[i][0].distance;
    nb = 0;
    seuil = seuil / matches.size() * 0.5;
    d = 0;
    for (int i = 0; i < matches.size(); i++)
    {
        if (matches[i][0].distance < seuil)
        {
            p.push_back(kOrb1[matches[i][0].queryIdx].pt);
            q.push_back(kOrb2[matches[i][0].trainIdx].pt);
            nb++;
            if (nb > 1)
                d += norm(p[nb - 1] - p[nb - 2]) / norm(q[nb - 1] - q[nb - 2]);
        }
    }
    Mat_<double> v(3, 3);
    v = estimateRigidTransform(p, q, false);    // q=v*p; img2=v*img1
    //Mat h=findHomography(p, q);// q=v*p; img2=v*img1
    Mat_<double> h(3, 3);
    h = Mat::eye(2, 3, CV_64F);
    Mat id = Mat::eye(3, 3, CV_64F);
    cout << h << endl;
    for (int i = 0; i < v.rows; i++)            // copy the estimated 2x3 transform into h
        for (int j = 0; j < v.cols; j++)
            h(i, j) = v(i, j);
    cout << v << endl;
    vector<Point> ref(4);
    ref[0] = Point(0, 0);
    ref[1] = Point(0, img2.rows);
    ref[2] = Point(img2.cols, img2.rows);
    ref[3] = Point(img2.cols, 0);
    Mat_<double> pha(3, 1);
    Mat_<double> phb(3, 1);
    double minx = img1.cols, maxx = 0;
    double miny = img1.rows, maxy = 0;

    for (int i = 0; i < ref.size(); i++)
    {
        pha(0, 0) = ref[i].x; pha(1, 0) = ref[i].y; pha(2, 0) = 1;
        phb = v * pha;
        cout << "Point ref" << i << "\t" << phb << "\n";
        if (phb(0, 0) <= minx)
            minx = phb(0, 0);
        if (phb(1, 0) <= miny)
            miny = phb(1, 0);
        if (phb(0, 0) >= maxx)
            maxx = phb(0, 0);
        if (phb(1, 0) >= maxy)
            maxy = phb(1, 0);
    }
    cout << minx << "\t" << miny << "\t" << maxx << "\t" << maxy << "\n";
    Mat res1((img1.rows + img2.rows), (img1.cols + img2.cols), img1.type());
    Mat res2((img1.rows + img2.rows), (img1.cols + img2.cols), img1.type());
    //warpPerspective(img1, img1, id, Size(res.cols, res.rows), WARP_INVERSE_MAP);
    warpAffine(img, res2, v, res2.size(), WARP_INVERSE_MAP);
    warpAffine(img1, res1, h, res1.size(), WARP_INVERSE_MAP);
    imshow("Res", max(res1, res2));
    imshow("img1", img1);
    imshow("img2", img2);
    waitKey();
    return 0;
}
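estimateRigidTransform comes from the video module and has been deprecated in more recent OpenCV releases. Assuming OpenCV 3.2 or newer, estimateAffinePartial2D (calib3d) returns the same kind of 2x3 rotation + translation + scale matrix, with RANSAC outlier rejection built in; a sketch using the p and q point sets from the code above:

    // sketch, assuming OpenCV >= 3.2: RANSAC-based replacement for estimateRigidTransform
    std::vector<cv::Point2f> pf(p.begin(), p.end()), qf(q.begin(), q.end());
    Mat v2 = estimateAffinePartial2D(pf, qf);        // 2x3 CV_64F, empty if estimation failed
    if (!v2.empty())
        warpAffine(img, res2, v2, res2.size(), WARP_INVERSE_MAP);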