How to display an image during online processing

asked 2014-05-19 08:30:57 -0600

Nabeel Ahmed gravatar image

updated 2019-12-09 08:02:18 -0600

Akhil Patel gravatar image

Hello OpenCV,

Once again I need your help :)

I want to show an image above my detected face. I have searched everywhere but could not find a way to do it.

My code is below; please help me figure out how I can display the image.

class FdView extends SampleCvViewBase { private static final String TAG = "Sample::FdView"; private Mat mRgba; private Mat mGray;

private Mat                 mZoomCorner;
private Mat                 mZoomWindow;
private Mat                 mZoomWindow2;
private Mat                 mResult;
private Mat                 teplateR;
private Mat                 teplateL;
private File                mCascadeFile;
private CascadeClassifier   mJavaDetector;
private CascadeClassifier   mCascadeER;
private CascadeClassifier   mCascadeEL;
private DetectionBasedTracker mNativeDetector;


private static final Scalar   FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);

public static final int       JAVA_DETECTOR     = 0;
public static final int       NATIVE_DETECTOR   = 1;

private static final int TM_SQDIFF          = 0;
private static final int TM_SQDIFF_NORMED   = 1;
private static final int TM_CCOEFF          = 2;
private static final int TM_CCOEFF_NORMED   = 3;
private static final int TM_CCORR           = 4;
private static final int TM_CCORR_NORMED    = 5;

private int                   mDetectorType     = JAVA_DETECTOR;

private float                 mRelativeFaceSize = 0;
private int                   mAbsoluteFaceSize = 0;
private int                   learn_frames = 0;
private double                match_value;

// private Rect eyearea = new Rect();

public void setMinFaceSize(float faceSize)
{
    mRelativeFaceSize = faceSize;
    mAbsoluteFaceSize = 0;
}

public void setDetectorType(int type)
{
    if (mDetectorType != type)
    {
        mDetectorType = type;

        if (type == NATIVE_DETECTOR)
        {
            Log.i(TAG, "Detection Based Tracker enabled");
            mNativeDetector.start();
        }
        else
        {
            Log.i(TAG, "Cascade detector enabled");
            mNativeDetector.stop();
        }
    }
}

public void resetLearFramesCount()
{
    learn_frames = 0;
}

public FdView(Context context) {
    super(context);

    try {
        InputStream is = context.getResources().openRawResource(R.raw.lbpcascade_frontalface);
        File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
        mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
        FileOutputStream os = new FileOutputStream(mCascadeFile);

        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();


        // --------------------------------- load left eye classificator -----------------------------------
        InputStream iser = context.getResources().openRawResource(R.raw.haarcascade_lefteye_2splits);
        File cascadeDirER = context.getDir("cascadeER", Context.MODE_PRIVATE);
        File cascadeFileER = new File(cascadeDirER, "haarcascade_eye_right.xml");
        FileOutputStream oser = new FileOutputStream(cascadeFileER);

        byte[] bufferER = new byte[4096];
        int bytesReadER;
        while ((bytesReadER = iser.read(bufferER)) != -1) {
            oser.write(bufferER, 0, bytesReadER);
        }
        iser.close();
        oser.close();
        //----------------------------------------------------------------------------------------------------


        // --------------------------------- load right eye classificator ------------------------------------
        InputStream isel = context.getResources().openRawResource(R.raw.haarcascade_lefteye_2splits);
        File cascadeDirEL = context.getDir("cascadeEL", Context.MODE_PRIVATE);
        File cascadeFileEL = new File(cascadeDirEL, "haarcascade_eye_left.xml");
        FileOutputStream osel = new FileOutputStream(cascadeFileEL);

        byte[] bufferEL = new byte[4096];
        int bytesReadEL;
        while ((bytesReadEL = isel.read(bufferEL)) != -1) {
            osel.write(bufferEL, 0, bytesReadEL);
        }
        isel.close();
        osel.close();

        // ------------------------------------------------------------------------------------------------------


        mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
        mCascadeER = new CascadeClassifier(cascadeFileER.getAbsolutePath());
        mCascadeEL = new CascadeClassifier(cascadeFileER.getAbsolutePath());
        if (mJavaDetector.empty()|| mCascadeER.empty() || mCascadeEL.empty()) {
            Log.e(TAG, "Failed to load cascade classifier");
            mJavaDetector = null;
            mCascadeER=null;
            mCascadeEL=null;
        } else
            Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());

        mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);

        cascadeDir.delete();
        cascadeFileER.delete();
        cascadeDirER.delete();
        cascadeFileEL.delete();
        cascadeDirEL.delete();

    } catch (IOException e) {
        e.printStackTrace();
        Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
    }
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    synchronized (this) {
        // initialize Mats before usage
        mGray = new Mat();
        mRgba = new Mat();
        //images = new Mat();
    }

    super.surfaceCreated(holder);
}

@Override
protected Bitmap processFrame(VideoCapture capture) {
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);


    if (mAbsoluteFaceSize ...
(more)
edit retag flag offensive close merge delete