Save an image of the eyes detected

I have an OpenCV4Android project working that detects eyes in the front-camera feed on the phone. Rectangles are drawn around the detected eyes, and I would like to take what is inside those rectangles and save it to an image file. Any ideas on how to do this?

Here is the code so far. It is based on the face-detection tutorial from the OpenCV4Android samples:

public class FdActivity extends Activity implements CvCameraViewListener {

private static final String    TAG                 = "EyeDetection::Activity"; // Tag used for logging
// FACE_RECT_COLOR - color of the rectangles drawn around detections (RGBA; the fourth value is alpha)
private static final Scalar    FACE_RECT_COLOR     = new Scalar(255, 0, 0, 255);
public static final int        JAVA_DETECTOR       = 0;


private Mat                    mRgba;
private Mat                    mGray;
private File                   mCascadeFile;
private File                   mCascadeFile_eyes;   
private CascadeClassifier      mJavaDetector; // Class used to detect objects in video stream
private CascadeClassifier      mJavaDetector_eyes;

private int                    mDetectorType       = JAVA_DETECTOR; // Only the Java detector is used
private String[]               mDetectorName;

private float                  mRelativeFaceSize   = 0.2f; // Minimum detection size, as a fraction of the frame height
private int                    mAbsoluteFaceSize   = 0;

private CameraBridgeViewBase   mOpenCvCameraView; // Bridge between the camera and OpenCV

private BaseLoaderCallback  mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        switch (status) {
            case LoaderCallbackInterface.SUCCESS:
            {
                Log.i(TAG, "OpenCV loaded successfully");

                // Load native library after(!) OpenCV initialization
                System.loadLibrary("detection_based_tracker");

                try {
                    // load cascade file from application resources
                    InputStream is = getResources().openRawResource(R.raw.haarcascade_eye);
                    File cascadeDir = getDir("cascade", Context.MODE_PRIVATE); //find or make new directory
                    mCascadeFile = new File(cascadeDir, "haarcascade_eye.xml");

                   // InputStream is_eyes = getResources().openRawResource(R.raw.haarcascade_eye);
                 //   mCascadeFile_eyes = new File(cascadeDir, "haarcascade_eye.xml");

                    FileOutputStream os = new FileOutputStream(mCascadeFile);
                  //  FileOutputStream os_eyes = new FileOutputStream(mCascadeFile_eyes);

                    byte[] buffer = new byte[4096];
                    int bytesRead;
                    while ((bytesRead = is.read(buffer)) != -1) {
                        os.write(buffer, 0, bytesRead);
                    }

                   // byte[] buffer_eyes = new byte[4096];
                   // while ((bytesRead = is_eyes.read(buffer_eyes)) != -1) {
                    //  os_eyes.write(buffer_eyes, 0, bytesRead);
                    //}
                    is.close();
                   // is_eyes.close();
                    os.close();
                   // os_eyes.close();

                    mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                   // mJavaDetector_eyes = new CascadeClassifier(mCascadeFile_eyes.getAbsolutePath());
                    if (mJavaDetector.empty()) {
                        Log.e(TAG, "Failed to load cascade classifier");
                        mJavaDetector = null;
                        //mJavaDetector_eyes = null;
                    } else
                        Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());

                   // mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);

                    cascadeDir.delete();

                } catch (IOException e) {
                    e.printStackTrace();
                    Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
                }

                mOpenCvCameraView.enableView();
            } break;
            default:
            {
                super.onManagerConnected(status);
            } break;
        }
    }
};

public FdActivity() {
    //mDetectorName = new String[2];
   // mDetectorName[JAVA_DETECTOR] = "Java";
    //mDetectorName[NATIVE_DETECTOR] = "Native (tracking)";

    Log.i(TAG, "Instantiated new " + this.getClass());
}

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.face_detect_surface_view);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}

@Override
public void onPause()
{
    if (mOpenCvCameraView != null)
        mOpenCvCameraView.disableView();
    super.onPause();
}

@Override
public void onResume()
{
    super.onResume();
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}

public void onDestroy() {
    super.onDestroy();
    mOpenCvCameraView.disableView();
}

public void onCameraViewStarted(int width, int height) {
    mGray = new Mat();
    mRgba = new Mat();
}

public void onCameraViewStopped() {
    mGray.release();
    mRgba.release();
}

public Mat onCameraFrame(Mat inputFrame) {

    inputFrame.copyTo(mRgba);
    Imgproc.cvtColor(inputFrame, mGray, Imgproc.COLOR_RGBA2GRAY); // Convert the frame to grayscale for detection

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
       // mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();
    //MatOfRect eyes = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    }
    /*else if (mDetectorType == NATIVE_DETECTOR) {
        if (mNativeDetector != null)
            mNativeDetector.detect(mGray, faces);
    }*/
    else {
        Log.e(TAG, "Detection method is not selected!");
    }
    // Debug dump of the detection result. Note: MatOfRect.toString() only
    // prints the Mat header (size and type), not the rectangle coordinates.
    try {
        FileOutputStream out = openFileOutput("sample.txt", Context.MODE_WORLD_READABLE);
        OutputStreamWriter outStream = new OutputStreamWriter(out);
        outStream.write(faces.toString());
        outStream.flush();
        outStream.close();
    } catch (Exception e) {
        Log.e(TAG, "Failed to write sample.txt", e);
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
        Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
    }

    return mRgba;
}
}

This is the area of interest. The rectangles are drawn here, and this is where I would like to grab what is inside each rectangle and save it:

Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
    Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
    // I want to save the pixels inside facesArray[i] to an image file here.
}
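
For what it's worth, here is a minimal sketch of one way that loop could do the crop and save. It assumes OpenCV 2.4's Highgui.imwrite (import org.opencv.highgui.Highgui), android.os.Environment for the output path, and the WRITE_EXTERNAL_STORAGE permission in the manifest; the "eye_i.png" naming is made up for illustration:

Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
    // Crop before drawing, so the red border is not baked into the saved image.
    // submat() is only a view into mRgba, so clone() to get an independent copy.
    Mat eye = mRgba.submat(facesArray[i]).clone();

    Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    // imwrite expects BGR channel order, while the camera frame is RGBA.
    Mat eyeBgr = new Mat();
    Imgproc.cvtColor(eye, eyeBgr, Imgproc.COLOR_RGBA2BGR);

    // Illustrative output path; pick whatever naming scheme suits you.
    String filename = new File(Environment.getExternalStorageDirectory(),
            "eye_" + i + ".png").getAbsolutePath();
    if (!Highgui.imwrite(filename, eyeBgr))
        Log.e(TAG, "Failed to write " + filename);

    eye.release();
    eyeBgr.release();
}

Taking the submat before Core.rectangle() keeps the painted border out of the crop, and the clone() matters because a submat shares memory with mRgba, which is overwritten when the next camera frame arrives.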