Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

How to detect rectangle from image crop that rectangle and remove other background area ?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Here is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect rectangle from image crop that rectangle and remove other background area ?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect rectangle from image crop that rectangle and remove other background area ?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect rectangle from image crop that rectangle and remove other background area ?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect rectangle from image crop that rectangle and remove other background area ?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect rectangle from image and crop that rectangle and remove other background area ?from it's four edges?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here following is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect rectangle from image and crop from it's four edges?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here following is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect rectangle from image and crop rectangle and apply transformation from it's four edges?image?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here following is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect and crop rectangle and apply transformation from image?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here following is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect and crop rectangle and apply transformation from an image?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using openCV. OpenCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

Here following is my code snippet

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect and crop rectangle and apply transformation from an image?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using OpenCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}

How to detect and crop rectangle and apply transformation from an image?

Hello all,

I am developing an application for detect driving license and capture image of driving license using surface view and detect driving license and crop from those it's four corner using OpenCV.

so right now i am using canny edge detection and find the edges but i am not able to crop the image because canny edge detection return black and white image i am crop my original license image from it's edges.

Please suggest any best solution.

public Bitmap findEdges(Bitmap img)
{

    Mat rgba = new Mat();
    Utils.bitmapToMat(img, rgba);


    Mat edges = new Mat(rgba.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, edges, Imgproc.COLOR_RGB2GRAY, 4);
    Imgproc.Canny(edges, edges, 40, 40);

    Bitmap resultBitmap = Bitmap.createBitmap(edges.cols(), edges.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(edges, resultBitmap);

    Mat rgbMat = new Mat();
    Mat grayMat = new Mat();
    Mat cannyMat;
    Mat linesMat = new Mat();
    BitmapFactory.Options o = new BitmapFactory.Options();

    // define the destination image size: A4 - 200 PPI
    int w_a4 = 1654, h_a4 = 2339;

    // TODO: 29/08/2016  May need to check sample size https://developer.android.com/training/displaying-bitmaps/load-bitmap.html
    o.inSampleSize = 4;
    o.inDither = false;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    resultBitmap.compress(Bitmap.CompressFormat.PNG, 75, baos);
    byte[] b = baos.toByteArray();


    resultBitmap = BitmapFactory.decodeByteArray(b, 0, b.length);

    int w = resultBitmap.getWidth();
    int h = resultBitmap.getHeight();
    int min_w = 800;
    double scale = Math.min(10.0, w * 1.0 / min_w);
    int w_proc = (int) (w * 1.0 / scale);
    int h_proc = (int) (h * 1.0 / scale);
    Bitmap srcBitmap = Bitmap.createScaledBitmap(resultBitmap, w_proc, h_proc, false);
    Bitmap grayBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap cannyBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);
    Bitmap linesBitmap = Bitmap.createBitmap(w_proc, h_proc, Bitmap.Config.RGB_565);


    Utils.bitmapToMat(srcBitmap, rgbMat);//convert original bitmap to Mat, R G B.


    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);//rgbMat to gray grayMat


    cannyMat = getCanny(grayMat);


    Imgproc.HoughLinesP(cannyMat, linesMat, 1, Math.PI / 180, w_proc / 12, w_proc / 12, 20);

    // Calculate horizontal lines and vertical lines
    Log.e("opencv", "lines.cols " + linesMat.cols() + " w_proc/3: " + w_proc / 3);
    Log.e("opencv", "lines.rows" + linesMat.rows() + " w_proc/3: " + w_proc / 3);

    List<EdgesLine> horizontals = new ArrayList<>();
    List<EdgesLine> verticals = new ArrayList<>();
    for (int x = 0; x < linesMat.rows(); x++) {
        double[] vec = linesMat.get(x, 0);
        double x1 = vec[0],
                y1 = vec[1],
                x2 = vec[2],
                y2 = vec[3];
        Point start = new Point(x1, y1);
        Point end = new Point(x2, y2);

        EdgesLine line = new EdgesLine(start, end);
        if (Math.abs(x1 - x2) > Math.abs(y1 - y2)) {
            horizontals.add(line);
        } else {
            verticals.add(line);
        }

        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("HoughLines", "completed HoughLines");
    Log.e("HoughLines", "linesMat size: " + linesMat.size());
    Log.e("HoughLines", "linesBitmap size: " + Integer.toString(linesBitmap.getHeight()) + " x " + Integer.toString(linesBitmap.getWidth()));
    Log.e("Lines Detected", Integer.toString(linesMat.rows()));

    if (linesMat.rows() > 400) {
        Context context = getApplicationContext();
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, "Please use a cleaner background", duration);
        toast.show();
    }

    if (horizontals.size() < 2) {
        if (horizontals.size() == 0 || horizontals.get(0)._center.y > h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, 0), new Point(w_proc - 1, 0)));
        }
        if (horizontals.size() == 0 || horizontals.get(0)._center.y <= h_proc / 2) {
            horizontals.add(new EdgesLine(new Point(0, h_proc - 1), new Point(w_proc - 1, h_proc - 1)));
        }
    }
    if (verticals.size() < 2) {
        if (verticals.size() == 0 || verticals.get(0)._center.x > w_proc / 2) {
            verticals.add(new EdgesLine(new Point(0, 0), new Point(h_proc - 1, 0)));
        }
        if (verticals.size() == 0 || verticals.get(0)._center.x <= w_proc / 2) {
            verticals.add(new EdgesLine(new Point(w_proc - 1, 0), new Point(w_proc - 1, h_proc - 1)));
        }
    }

    Collections.sort(horizontals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.y - rhs._center.y);
        }
    });

    Collections.sort(verticals, new Comparator<EdgesLine>() {
        @Override
        public int compare(EdgesLine lhs, EdgesLine rhs) {
            return (int) (lhs._center.x - rhs._center.x);
        }
    });

    if (BuildConfig.DEBUG) {
    }

    // compute intersections
    List<Point> intersections = new ArrayList<>();
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(0), verticals.get(verticals.size() - 1)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(0)));
    intersections.add(computeIntersection(horizontals.get(horizontals.size() - 1), verticals.get(verticals.size() - 1)));

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    for (Point point : intersections) {
        if (BuildConfig.DEBUG) {
        }
    }

    Log.e("Intersections", Double.toString(intersections.get(0).x));

    double w1 = Math.sqrt(Math.pow(intersections.get(3).x - intersections.get(2).x, 2) + Math.pow(intersections.get(3).x - intersections.get(2).x, 2));
    double w2 = Math.sqrt(Math.pow(intersections.get(1).x - intersections.get(0).x, 2) + Math.pow(intersections.get(1).x - intersections.get(0).x, 2));
    double h1 = Math.sqrt(Math.pow(intersections.get(1).y - intersections.get(3).y, 2) + Math.pow(intersections.get(1).y - intersections.get(3).y, 2));
    double h2 = Math.sqrt(Math.pow(intersections.get(0).y - intersections.get(2).y, 2) + Math.pow(intersections.get(0).y - intersections.get(2).y, 2));

    double maxWidth = (w1 < w2) ? w1 : w2;
    double maxHeight = (h1 < h2) ? h1 : h2;

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, intersections.get(0).x, intersections.get(0).y, intersections.get(1).x, intersections.get(1).y, intersections.get(2).x, intersections.get(2).y, intersections.get(3).x, intersections.get(3).y);

    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth - 1, 0.0, 0.0, maxHeight - 1, maxWidth - 1, maxHeight - 1);

    Log.e("FinalDisplay", "srcMat: " + srcMat.size());
    Log.e("FinalDisplay", "dstMat: " + dstMat.size());

    Mat transformMatrix = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    finalMat = Mat.zeros((int) maxHeight, (int) maxWidth, CvType.CV_32FC2);
    Imgproc.warpPerspective(rgbMat, finalMat, transformMatrix, finalMat.size());
    Log.e("FinalDisplay", "finalMat: " + finalMat.size());

    // display final results
    Bitmap dstBitmap = Bitmap.createBitmap(finalMat.width(), finalMat.height(), Bitmap.Config.RGB_565);
    Log.e("FinalDisplay", "dstBitmap: " + img.getWidth() + " x " + img.getHeight());
    Utils.matToBitmap(finalMat, dstBitmap); //convert mat to bitmap
        try {

        Bitmap crop = Bitmap.createBitmap(rotateandscalebitmap, 0, 0, dstBitmap.getWidth(), dstBitmap.getHeight());
         croppedbitmap = crop;
        doRecognize(croppedbitmap);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return croppedbitmap;
}


protected Mat getCanny(Mat gray) {
    Mat threshold = new Mat();
    Mat canny = new Mat();
    // last paramter 8 is using OTSU algorithm
    double high_threshold = Imgproc.threshold(gray, threshold, 0, 255, 8);
    double low_threshold = high_threshold * 0.5;
    Imgproc.Canny(gray, canny, low_threshold, high_threshold);
    return canny;
}

protected Point computeIntersection(EdgesLine l1, EdgesLine l2) {
    double x1 = l1._p1.x, x2 = l1._p2.x, y1 = l1._p1.y, y2 = l1._p2.y;
    double x3 = l2._p1.x, x4 = l2._p2.x, y3 = l2._p1.y, y4 = l2._p2.y;
    double d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    Point pt = new Point();
    pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return pt;
}

class EdgesLine {
    Point _p1;
    Point _p2;
    Point _center;

    EdgesLine(Point p1, Point p2) {
        _p1 = p1;
        _p2 = p2;
        _center = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
    }
}