
JVJCJOHN's profile - activity

2017-09-15 17:07:35 -0600 received badge  Popular Question
2015-01-12 20:56:02 -0600 asked a question Face Detection Capture and Store in Mobile Storage

I am getting an error when storing the captured detected face in my face detection app. Here is the face detection code from the tutorials; I also added a Bitmap field, aww, to hold the captured frame.

Face detection code:

protected Bitmap processFrame(VideoCapture capture) {
        capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
        capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

        if (mAbsoluteFaceSize == 0)
        {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0)
            {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR)
        {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        , new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());


             if (mZoomCorner == null || mZoomWindow == null)
                    CreateAuxiliaryMats();


             Rect[] facesArray = faces.toArray();

             for (int i = 0; i < facesArray.length; i++){
                Rect r = facesArray[i];
                 Core.rectangle(mGray, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
                 Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);

                 eyearea = new Rect(r.x +r.width/8,(int)(r.y + (r.height/4.5)),r.width - 2*r.width/8,(int)( r.height/3.0));
                 Core.rectangle(mRgba,eyearea.tl(),eyearea.br() , new Scalar(255,0, 0, 255), 2); 
                 Rect eyearea_right = new Rect(r.x +r.width/16,(int)(r.y + (r.height/4.5)),(r.width - 2*r.width/16)/2,(int)( r.height/3.0));
                 Rect eyearea_left = new Rect(r.x +r.width/16 +(r.width - 2*r.width/16)/2,(int)(r.y + (r.height/4.5)),(r.width - 2*r.width/16)/2,(int)( r.height/3.0));
                 Core.rectangle(mRgba,eyearea_left.tl(),eyearea_left.br() , new Scalar(255,0, 0, 255), 2); 
                 Core.rectangle(mRgba,eyearea_right.tl(),eyearea_right.br() , new Scalar(255, 0, 0, 255), 2);

                 if (learn_frames < 5) {
                     teplateR = get_template(mCascadeER, eyearea_right, 24);
                     teplateL = get_template(mCascadeEL, eyearea_left, 24);
                     learn_frames++;
                 } else {
                     match_value = match_eye(eyearea_right, teplateR, FdActivity.method);
                     match_value = match_eye(eyearea_left, teplateL, FdActivity.method);
                 }
                 Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
                 Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
             }
        }
        else if (mDetectorType == NATIVE_DETECTOR)
        {
            if (mNativeDetector != null)
                mNativeDetector.detect(mGray, faces);
        }
        else
        {
            Log.e(TAG, "Detection method is not selected!");
        }

        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++)
            Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

        try {
            Utils.matToBitmap(mRgba, bmp);
        } catch(Exception e) {
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmp.recycle();
            bmp = null;
        }
        aww = bmp;
        return bmp;
    }
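
If the goal is to store just the detected face rather than the whole frame, one option is to crop the face rectangle out of mRgba before the conversion. Below is a minimal sketch, assuming the same OpenCV 2.4 Java API used above; cropFace is a hypothetical helper name, not part of the tutorial code:

// Hypothetical helper (not in the tutorial): crops one detected face out of
// the RGBA frame and converts it to a Bitmap.
private Bitmap cropFace(Mat rgba, Rect face) {
    Mat faceMat = rgba.submat(face);        // view onto the face region only
    Bitmap faceBmp = Bitmap.createBitmap(faceMat.cols(), faceMat.rows(),
            Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(faceMat, faceBmp);    // copy the cropped pixels
    faceMat.release();                      // drop the submat header
    return faceBmp;
}

After the detection loop, something like if (facesArray.length > 0) aww = cropFace(mRgba, facesArray[0]); would store only the first face instead of the full frame. Cropping before the rectangles are drawn would keep the green border out of the saved image.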

After that, I made an onClick listener to capture the detected face. Here is the code:

private void storeImage(Bitmap image,Context l) {
        File pictureFile = getOutputMediaFile(l);
        if (pictureFile == null) {
            Log.d(TAG,
                    "Error creating media file, check storage permissions");
            return;
        }
        try {
            FileOutputStream fos = new FileOutputStream(pictureFile);
            image.compress(Bitmap.CompressFormat.JPEG, 40, fos);
            fos.close();
        } catch (FileNotFoundException e) {
            Log.d(TAG, "File not found: " + e.getMessage());
        } catch (IOException e) {
            Log.d(TAG, "Error accessing file: " + e.getMessage());
        }  
    }
    private  File getOutputMediaFile(Context l){
        // To be safe, you should check that the SDCard is ...
(more)
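
The helper is cut off above. For reference, a typical getOutputMediaFile implementation (a sketch following the standard Android camera guide, not the poster's actual code; it assumes WRITE_EXTERNAL_STORAGE is granted and imports of android.os.Environment, java.text.SimpleDateFormat, and java.util.Date) looks like this:

// Sketch only -- the poster's actual helper is truncated above.
private File getOutputMediaFile(Context l) {
    File mediaStorageDir = new File(
            Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
            "FaceDetection");               // hypothetical album name
    if (!mediaStorageDir.exists() && !mediaStorageDir.mkdirs()) {
        Log.d(TAG, "failed to create directory");
        return null;
    }
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
    return new File(mediaStorageDir, "FACE_" + timeStamp + ".jpg");
}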
2015-01-12 01:02:20 -0600 asked a question how to capture and save the face detected in OpenCV tutorial face Detection

I got the code from the link http://romanhosek.cz/android-eye-dete... for face detection and eye detection. Can anyone help me add a capture button that captures and crops the detected face? Here is the code:

package org.opencv.samples.fd;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.graphics.Color;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.RelativeLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.ToggleButton;
import android.widget.SeekBar.OnSeekBarChangeListener;

public class FdActivity extends Activity {
    private static final String TAG = "Sample::Activity";

    private MenuItem mItemFace50;
    private MenuItem mItemFace40;
    private MenuItem mItemFace30;
    private MenuItem mItemFace20;
    private MenuItem mItemType;

    private FdView mView;
    private TextView matching_method;
    public static int method = 1;

    private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS: {
                Log.i(TAG, "OpenCV loaded successfully");

                // Load native libs after OpenCV initialization
                // System.loadLibrary("detection_based_tracker");

                // Create and set View
                mView = new FdView(mAppContext);
                mView.setDetectorType(mDetectorType);
                mView.setMinFaceSize(0.2f);

                VerticalSeekBar VerticalseekBar = new VerticalSeekBar(
                        getApplicationContext());
                VerticalseekBar.setMax(5);
                VerticalseekBar.setPadding(20, 20, 20, 20);
                RelativeLayout.LayoutParams vsek = new RelativeLayout.LayoutParams(
                        RelativeLayout.LayoutParams.WRAP_CONTENT, 400);
                vsek.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
                VerticalseekBar.setId(1);
                VerticalseekBar
                        .setOnSeekBarChangeListener(new OnSeekBarChangeListener() {

                            public void onProgressChanged(SeekBar seekBar,
                                    int progress, boolean fromUser) {

                                method = progress;
                                switch (method) {
                                case 0:
                                    matching_method.setText("TM_SQDIFF");
                                    break;
                                case 1:
                                    matching_method.setText("TM_SQDIFF_NORMED");
                                    break;
                                case 2:
                                    matching_method.setText("TM_CCOEFF");
                                    break;
                                case 3:
                                    matching_method.setText("TM_CCOEFF_NORMED");
                                    break;
                                case 4:
                                    matching_method.setText("TM_CCORR");
                                    break;
                                case 5:
                                    matching_method.setText("TM_CCORR_NORMED");
                                    break;
                                }

                            }

                            public void onStartTrackingTouch(SeekBar seekBar) {
                            }

                            public void onStopTrackingTouch(SeekBar seekBar) {
                            }
                        });

                matching_method = new TextView(getApplicationContext());
                matching_method.setText("TM_SQDIFF");
                matching_method.setTextColor(Color.YELLOW);
                RelativeLayout.LayoutParams matching_method_param = new RelativeLayout.LayoutParams(
                        RelativeLayout.LayoutParams.WRAP_CONTENT,
                        RelativeLayout.LayoutParams.WRAP_CONTENT);
                matching_method_param
                        .addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
                matching_method_param.addRule(RelativeLayout.BELOW,
                        VerticalseekBar.getId());

                Button btn = new Button(getApplicationContext());
                btn.setText("Capture");
                RelativeLayout.LayoutParams btnp = new RelativeLayout.LayoutParams(
                        RelativeLayout.LayoutParams.WRAP_CONTENT,
                        RelativeLayout.LayoutParams.WRAP_CONTENT);
                btnp.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
                btn.setId(2);

                btn.setOnClickListener(new OnClickListener() {
                    public void onClick(View v) {
                        mView.resetLearFramesCount();
                    }
                });

                RelativeLayout frameLayout = new RelativeLayout(
                        getApplicationContext());
                frameLayout.addView(mView, 0);
                frameLayout.addView(btn, btnp);

                frameLayout.addView(VerticalseekBar, vsek);
                frameLayout.addView(matching_method, matching_method_param);

                setContentView(frameLayout);

                // Check native OpenCV camera
                if (!mView.openCamera()) {
                    AlertDialog ad = new AlertDialog.Builder(mAppContext)
                            .create();
                    ad.setCancelable(false); // This blocks the 'BACK' button
                    ad.setMessage("Fatal error: can't open camera!");
                    ad.setButton("OK", new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss();
                            finish();
                        }
                    });
                    ad.show();
                }
            }
                break;
            default: {
                super.onManagerConnected(status);
            }
                break;
            }
        }
    };

    private int mDetectorType = 0;
    private String[] mDetectorName;

    public FdActivity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
        mDetectorName = new String[2];
        mDetectorName[FdView.JAVA_DETECTOR] = "Java";
        mDetectorName[FdView.NATIVE_DETECTOR] = "Native (tracking)";
    }

    @Override
    protected void onPause() {
        Log.i(TAG, "onPause");
        super.onPause();
        if (mView != null)
            mView.releaseCamera ...
(more)
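
To make the Capture button save the face rather than only reset the template frames, its onClick handler could hand the last detected-face Bitmap to the storeImage() helper from the first question. A sketch, assuming FdView exposes that Bitmap through a hypothetical getLastFaceBitmap() getter and that android.graphics.Bitmap is imported:

btn.setOnClickListener(new OnClickListener() {
    public void onClick(View v) {
        // getLastFaceBitmap() is a hypothetical accessor on FdView that would
        // return the Bitmap produced in processFrame() (e.g. the `aww` field).
        Bitmap face = mView.getLastFaceBitmap();
        if (face != null) {
            storeImage(face, getApplicationContext());
        }
        mView.resetLearFramesCount();
    }
});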