Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

Iris Detection

Hi, I'am using the following code for Iris Detection. Apparently the code for hough circles thatI picked up from this website http://opencvlover.blogspot.in/2012/07/hough-circle-in-javacv.html for cvsmooth, cvthreshold, cvhoughcircles is not working. The code that I'am using is the following. It doesn't give a compilation error. But it doesn't detect the iris/circle either. Could someone please help me out

package com.googlecode.javacv.eyepreview;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_core.CvSeq;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_objdetect;

import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
import static com.googlecode.javacv.cpp.opencv_objdetect.*;
import static com.googlecode.javacv.cpp.opencv_highgui.*;

// ----------------------------------------------------------------------

public class EyePreview extends Activity {
    private FrameLayout layout;
    private EyeView eyeView;
    private Preview mPreview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

        // Hide the window title.
        requestWindowFeature(Window.FEATURE_NO_TITLE);

        // Create our Preview view and set it as the content of our activity.
        try {
            layout = new FrameLayout(this);
            eyeView = new EyeView(this);
            mPreview = new Preview(this, eyeView);
            layout.addView(mPreview);
            layout.addView(eyeView);
            setContentView(layout);
        } catch (IOException e) {
            e.printStackTrace();
            new AlertDialog.Builder(this).setMessage(e.getMessage()).create().show();
        }
    }
}

// ----------------------------------------------------------------------

class EyeView extends View implements Camera.PreviewCallback {
    public static final int SUBSAMPLING_FACTOR = 4;

    private IplImage grayImage, grayImage1;
    private CvHaarClassifierCascade classifier;
    private CvMemStorage storage, mem;
    private CvSeq eyes;

    public EyeView(EyePreview context) throws IOException {
        super(context);

        // Load the classifier file from Java resources.
        File classifierFile = Loader.extractResource(getClass(),
            "/com/googlecode/javacv/eyepreview/haarcascade_eye.xml",
            context.getCacheDir(), "classifier", ".xml");
        if (classifierFile == null || classifierFile.length() <= 0) {
            throw new IOException("Could not extract the classifier file from Java resource.");
        }

        // Preload the opencv_objdetect module to work around a known bug.
        Loader.load(opencv_objdetect.class);
        classifier = new CvHaarClassifierCascade(cvLoad(classifierFile.getAbsolutePath()));
        classifierFile.delete();
        if (classifier.isNull()) {
            throw new IOException("Could not load the classifier file.");
        }
        storage = CvMemStorage.create();

    }

    public void onPreviewFrame(final byte[] data, final Camera camera) {
        try {
            Camera.Size size = camera.getParameters().getPreviewSize();
            processImage(data, size.width, size.height);
            camera.addCallbackBuffer(data);
        } catch (RuntimeException e) {
            // The camera has probably just been released, ignore.
        }
    }

    protected void processImage(byte[] data, int width, int height) {
        // First, downsample our image and convert it into a grayscale IplImage
        int f = SUBSAMPLING_FACTOR;
       /* if (grayImage == null || grayImage.width() != width/f || grayImage.height() != height/f) {
            grayImage = IplImage.create(width/f, height/f, IPL_DEPTH_8U, 1);
        }
        int imageWidth  = grayImage.width();
        int imageHeight = grayImage.height();
        int dataStride = f*width;
        int imageStride = grayImage.widthStep();
        ByteBuffer imageBuffer = grayImage.getByteBuffer();
        for (int y = 0; y < imageHeight; y++) {
            int dataLine = y*dataStride;
            int imageLine = y*imageStride;
            for (int x = 0; x < imageWidth; x++) {
                imageBuffer.put(imageLine + x, data[dataLine + f*x]);
            }
        }

        eyes = cvHaarDetectObjects(grayImage, classifier, storage, 1.1, 3, CV_HAAR_DO_CANNY_PRUNING);

        cvClearMemStorage(storage);*/

          if (grayImage1 == null || grayImage1.width() != width/f || grayImage1.height() != height/f) 
                grayImage1 = IplImage.create(width/f, height/f, IPL_DEPTH_8U, 1);

            int imageWidth1 = grayImage1.width();
            int imageHeight1 = grayImage1.height();
            int dataStride1 = f*width;
            int imageStride1 = grayImage1.widthStep();
            ByteBuffer imageBuffer1 = grayImage1.getByteBuffer();
            for (int y = 0; y < imageHeight1; y++) {
                int dataLine1 = y*dataStride1;
                int imageLine1 = y*imageStride1;
                for (int x = 0; x < imageWidth1; x++) {
                    imageBuffer1.put(imageLine1 + x, data[dataLine1 + f*x]);

                   // if (data[dataLine1 + f*x]<(byte)127)
                     // imageBuffer1.put(imageLine1 + x, (byte)0x00);
                     //if (data[dataLine1 + f*x] >=(byte)127  &&  data[dataLine1+ f*x] <= (byte)255)
                      // imageBuffer1.put(imageLine1 + x, (byte)0xFF);



              }
            }

        //cvcanny----------------------------------------
        mem = CvMemStorage.create(); 
        cvSmooth( grayImage1,grayImage1, CV_GAUSSIAN, 3, 3,0,0);
        cvThreshold(grayImage1,grayImage1,70,/*70 is the lower cut off*/
                150,/*this is the higher cut off*/
                CV_THRESH_BINARY

            /*The type of thresholding,more description in the documentation*/
                ); 
       // cvCanny( grayImage1, grayImage1, 10, 100, 3 ); 
        cvSmooth( grayImage1,grayImage1, CV_GAUSSIAN, 3, 3,0,0);

     // check cvhoughcirclea for correct parameters-------------------   

        CvSeq circles = cvHoughCircles(     
            grayImage1, //Input image   
            mem, //Memory Storage    
            CV_HOUGH_GRADIENT, //Detection method    
            1, //Inverse ratio    
            100, //Minimum distance between the centers of the detected circles    
            100, //Higher threshold for canny edge detector    
            100, //Threshold at the center detection stage    
            15, //min radius   
            500 //max radius    
            );     
            for(int i = 0; i < circles.total(); i++){      
                CvPoint3D32f circle = new CvPoint3D32f(cvGetSeqElem(circles, i));      
                CvPoint center = cvPointFrom32f(new CvPoint2D32f(circle.x(), circle.y()));      
                int radius = Math.round(circle.z());            
                cvCircle(grayImage1, center, radius, CvScalar.GREEN, 6, CV_AA, 0);         
                }
        postInvalidate();
        cvClearMemStorage(mem);

    }

    @Override
    protected void onDraw(Canvas canvas) {
        Paint paint = new Paint();
        paint.setColor(Color.RED);
        paint.setTextSize(20);

        String s = "EyePreview - This side up.";
        float textWidth = paint.measureText(s);
        canvas.drawText(s, (getWidth()-textWidth)/2, 20, paint);

        if (eyes != null) {
            paint.setStrokeWidth(2);
            paint.setStyle(Paint.Style.STROKE);
            float scaleX = (float)getWidth()/grayImage.width();
            float scaleY = (float)getHeight()/grayImage.height();
            int total = eyes.total();
            for (int i = 0; i < total; i++) {
                CvRect r = new CvRect(cvGetSeqElem(eyes, i));
                int x = r.x(), y = r.y(), w = r.width(), h = r.height();
                canvas.drawRect(x*scaleX, y*scaleY, (x+w)*scaleX, (y+h)*scaleY, paint);
            }
        }
    }
}

// ----------------------------------------------------------------------

class Preview extends SurfaceView implements SurfaceHolder.Callback {
    SurfaceHolder mHolder;
    Camera mCamera;
    Camera.PreviewCallback previewCallback;

    Preview(Context context, Camera.PreviewCallback previewCallback) {
        super(context);
        this.previewCallback = previewCallback;

        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, acquire the camera and tell it where
        // to draw.
        mCamera = Camera.open();
        try {
           mCamera.setPreviewDisplay(holder);
        } catch (IOException exception) {
            mCamera.release();
            mCamera = null;
            // TODO: add more exception handling logic here
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        mCamera.stopPreview();
        mCamera.release();
        mCamera = null;
    }


    private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.05;
        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Try to find an size match aspect ratio and size
        for (Size size : sizes) {
            double ratio = (double) size.width / size.height;
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Cannot find the one match the aspect ratio, ignore the requirement
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        return optimalSize;
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        Camera.Parameters parameters = mCamera.getParameters();

        List<Size> sizes = parameters.getSupportedPreviewSizes();
        Size optimalSize = getOptimalPreviewSize(sizes, w, h);
        parameters.setPreviewSize(optimalSize.width, optimalSize.height);

        mCamera.setParameters(parameters);
        if (previewCallback != null) {
            mCamera.setPreviewCallbackWithBuffer(previewCallback);
            Camera.Size size = parameters.getPreviewSize();
            byte[] data = new byte[size.width*size.height*
                    ImageFormat.getBitsPerPixel(parameters.getPreviewFormat())/8];
            mCamera.addCallbackBuffer(data);
        }
        mCamera.startPreview();
    }

    }

Iris Detection

Hi, I'am using the following code for Iris Detection. Apparently the code for hough circles thatI picked up from this website http://opencvlover.blogspot.in/2012/07/hough-circle-in-javacv.html for cvsmooth, cvthreshold, cvhoughcircles is not working. The code that I'am using is the following. It doesn't give a compilation error. But it doesn't detect the iris/circle either. Could someone please help me out

package com.googlecode.javacv.eyepreview;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_core.CvSeq;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_objdetect;

import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
import static com.googlecode.javacv.cpp.opencv_objdetect.*;
import static com.googlecode.javacv.cpp.opencv_highgui.*;

// ----------------------------------------------------------------------

public class EyePreview extends Activity {
    private FrameLayout layout;
    private EyeView eyeView;
    private Preview mPreview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

        // Hide the window title.
        requestWindowFeature(Window.FEATURE_NO_TITLE);

        // Create our Preview view and set it as the content of our activity.
        try {
            layout = new FrameLayout(this);
            eyeView = new EyeView(this);
            mPreview = new Preview(this, eyeView);
            layout.addView(mPreview);
            layout.addView(eyeView);
            setContentView(layout);
        } catch (IOException e) {
            e.printStackTrace();
            new AlertDialog.Builder(this).setMessage(e.getMessage()).create().show();
        }
    }
}

// ----------------------------------------------------------------------

class EyeView extends View implements Camera.PreviewCallback {
    public static final int SUBSAMPLING_FACTOR = 4;

    private IplImage grayImage, grayImage1;
    private CvHaarClassifierCascade classifier;
    private CvMemStorage storage, mem;
    private CvSeq eyes;

    public EyeView(EyePreview context) throws IOException {
        super(context);

        // Load the classifier file from Java resources.
        File classifierFile = Loader.extractResource(getClass(),
            "/com/googlecode/javacv/eyepreview/haarcascade_eye.xml",
            context.getCacheDir(), "classifier", ".xml");
        if (classifierFile == null || classifierFile.length() <= 0) {
            throw new IOException("Could not extract the classifier file from Java resource.");
        }

        // Preload the opencv_objdetect module to work around a known bug.
        Loader.load(opencv_objdetect.class);
        classifier = new CvHaarClassifierCascade(cvLoad(classifierFile.getAbsolutePath()));
        classifierFile.delete();
        if (classifier.isNull()) {
            throw new IOException("Could not load the classifier file.");
        }
        storage = CvMemStorage.create();

    }

    public void onPreviewFrame(final byte[] data, final Camera camera) {
        try {
            Camera.Size size = camera.getParameters().getPreviewSize();
            processImage(data, size.width, size.height);
            camera.addCallbackBuffer(data);
        } catch (RuntimeException e) {
            // The camera has probably just been released, ignore.
        }
    }

    protected void processImage(byte[] data, int width, int height) {
        // First, downsample our image and convert it into a grayscale IplImage
        int f = SUBSAMPLING_FACTOR;
       /* if (grayImage == null || grayImage.width() != width/f || grayImage.height() != height/f) {
            grayImage = IplImage.create(width/f, height/f, IPL_DEPTH_8U, 1);
        }
        int imageWidth  = grayImage.width();
        int imageHeight = grayImage.height();
        int dataStride = f*width;
        int imageStride = grayImage.widthStep();
        ByteBuffer imageBuffer = grayImage.getByteBuffer();
        for (int y = 0; y < imageHeight; y++) {
            int dataLine = y*dataStride;
            int imageLine = y*imageStride;
            for (int x = 0; x < imageWidth; x++) {
                imageBuffer.put(imageLine + x, data[dataLine + f*x]);
            }
        }

        eyes = cvHaarDetectObjects(grayImage, classifier, storage, 1.1, 3, CV_HAAR_DO_CANNY_PRUNING);

        cvClearMemStorage(storage);*/

          if (grayImage1 == null || grayImage1.width() != width/f || grayImage1.height() != height/f) 
                grayImage1 = IplImage.create(width/f, height/f, IPL_DEPTH_8U, 1);

            int imageWidth1 = grayImage1.width();
            int imageHeight1 = grayImage1.height();
            int dataStride1 = f*width;
            int imageStride1 = grayImage1.widthStep();
            ByteBuffer imageBuffer1 = grayImage1.getByteBuffer();
            for (int y = 0; y < imageHeight1; y++) {
                int dataLine1 = y*dataStride1;
                int imageLine1 = y*imageStride1;
                for (int x = 0; x < imageWidth1; x++) {
                    imageBuffer1.put(imageLine1 + x, data[dataLine1 + f*x]);

                   // if (data[dataLine1 + f*x]<(byte)127)
                     // imageBuffer1.put(imageLine1 + x, (byte)0x00);
                     //if (data[dataLine1 + f*x] >=(byte)127  &&  data[dataLine1+ f*x] <= (byte)255)
                      // imageBuffer1.put(imageLine1 + x, (byte)0xFF);



              }
            }

        //cvcanny----------------------------------------
        mem = CvMemStorage.create(); 
        cvSmooth( grayImage1,grayImage1, CV_GAUSSIAN, 3, 3,0,0);
        cvThreshold(grayImage1,grayImage1,70,/*70 is the lower cut off*/
                150,/*this is the higher cut off*/
                CV_THRESH_BINARY

            /*The type of thresholding,more description in the documentation*/
                ); 
       // cvCanny( grayImage1, grayImage1, 10, 100, 3 ); 
        // cvSmooth( grayImage1,grayImage1, CV_GAUSSIAN, 3, 3,0,0);

     // check cvhoughcirclea for correct parameters-------------------   

        CvSeq circles = cvHoughCircles(     
            grayImage1, //Input image   
            mem, //Memory Storage    
            CV_HOUGH_GRADIENT, //Detection method    
            1, //Inverse ratio    
            100, //Minimum distance between the centers of the detected circles    
            100, //Higher threshold for canny edge detector    
            100, //Threshold at the center detection stage    
            15, //min radius   
            500 //max radius    
            );     
            for(int i = 0; i < circles.total(); i++){      
                CvPoint3D32f circle = new CvPoint3D32f(cvGetSeqElem(circles, i));      
                CvPoint center = cvPointFrom32f(new CvPoint2D32f(circle.x(), circle.y()));      
                int radius = Math.round(circle.z());            
                cvCircle(grayImage1, center, radius, CvScalar.GREEN, 6, CV_AA, 0);         
                }
        postInvalidate();
        cvClearMemStorage(mem);

    }

    @Override
    protected void onDraw(Canvas canvas) {
        Paint paint = new Paint();
        paint.setColor(Color.RED);
        paint.setTextSize(20);

        String s = "EyePreview - This side up.";
        float textWidth = paint.measureText(s);
        canvas.drawText(s, (getWidth()-textWidth)/2, 20, paint);

        if (eyes != null) {
            paint.setStrokeWidth(2);
            paint.setStyle(Paint.Style.STROKE);
            float scaleX = (float)getWidth()/grayImage.width();
            float scaleY = (float)getHeight()/grayImage.height();
            int total = eyes.total();
            for (int i = 0; i < total; i++) {
                CvRect r = new CvRect(cvGetSeqElem(eyes, i));
                int x = r.x(), y = r.y(), w = r.width(), h = r.height();
                canvas.drawRect(x*scaleX, y*scaleY, (x+w)*scaleX, (y+h)*scaleY, paint);
            }
        }
    }
}

// ----------------------------------------------------------------------

class Preview extends SurfaceView implements SurfaceHolder.Callback {
    SurfaceHolder mHolder;
    Camera mCamera;
    Camera.PreviewCallback previewCallback;

    Preview(Context context, Camera.PreviewCallback previewCallback) {
        super(context);
        this.previewCallback = previewCallback;

        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, acquire the camera and tell it where
        // to draw.
        mCamera = Camera.open();
        try {
           mCamera.setPreviewDisplay(holder);
        } catch (IOException exception) {
            mCamera.release();
            mCamera = null;
            // TODO: add more exception handling logic here
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        mCamera.stopPreview();
        mCamera.release();
        mCamera = null;
    }


    private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.05;
        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Try to find an size match aspect ratio and size
        for (Size size : sizes) {
            double ratio = (double) size.width / size.height;
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Cannot find the one match the aspect ratio, ignore the requirement
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        return optimalSize;
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        Camera.Parameters parameters = mCamera.getParameters();

        List<Size> sizes = parameters.getSupportedPreviewSizes();
        Size optimalSize = getOptimalPreviewSize(sizes, w, h);
        parameters.setPreviewSize(optimalSize.width, optimalSize.height);

        mCamera.setParameters(parameters);
        if (previewCallback != null) {
            mCamera.setPreviewCallbackWithBuffer(previewCallback);
            Camera.Size size = parameters.getPreviewSize();
            byte[] data = new byte[size.width*size.height*
                    ImageFormat.getBitsPerPixel(parameters.getPreviewFormat())/8];
            mCamera.addCallbackBuffer(data);
        }
        mCamera.startPreview();
    }

    }