Exception using OpenCV and JNI in an Android project

I am working on a lip-reading application for Android, using OpenCV and JNI. I want to use the OpenCV native camera, but when I run my code I get this exception:

E/cv::error()(13512): OpenCV Error: Assertion failed (src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols) in void Java_org_opencv_android_Utils_nMatToBitmap2(JNIEnv*, jclass, jlong, jobject, jboolean), file /home/reports/ci/slave/50-SDK/opencv/modules/java/generator/src/cpp/utils.cpp, line 97
E/org.opencv.android.Utils(13512): nMatToBitmap catched cv::Exception: /home/reports/ci/slave/50-SDK/opencv/modules/java/generator/src/cpp/utils.cpp:97: error: (-215) src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols in function void Java_org_opencv_android_Utils_nMatToBitmap2(JNIEnv*, jclass, jlong, jobject, jboolean)
E/CameraBridge(13512): Mat type: Mat [ 352*288*CV_8UC4, isCont=true, isSubmat=false, nativeObj=0x5a4132a8, dataAddr=0x5f4c4010 ]
E/CameraBridge(13512): Bitmap type: 352*288
E/CameraBridge(13512): Utils.matToBitmap() throws an exception: /home/reports/ci/slave/50-SDK/opencv/modules/java/generator/src/cpp/utils.cpp:97: error: (-215) src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols in function void Java_org_opencv_android_Utils_nMatToBitmap2(JNIEnv*, jclass, jlong, jobject, jboolean)
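
From what I understand, this assertion comes from Utils.matToBitmap(): the Mat must be 2-dimensional and its rows/cols must match the Bitmap's height/width exactly. A minimal standalone illustration of that contract (not code from my project, and it assumes OpenCV has already been initialized):

    import android.graphics.Bitmap;
    import org.opencv.android.Utils;
    import org.opencv.core.CvType;
    import org.opencv.core.Mat;

    // A 352x288 (width x height) camera frame is a Mat with 288 rows and 352 cols.
    Mat frame = new Mat(288, 352, CvType.CV_8UC4);
    Bitmap bmp = Bitmap.createBitmap(frame.cols(), frame.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(frame, bmp);   // OK: bitmap width == cols, bitmap height == rows
    // If the Mat's rows/cols stop matching the Bitmap (for example after a transpose),
    // this call throws exactly the (-215) assertion shown above.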

Below is my code:

private BaseLoaderCallback loaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        switch (status) {
        case LoaderCallbackInterface.SUCCESS:
        {
            Log.i("adi", "OpenCV loaded successfully");

            // Load native library after(!) OpenCV initialization
            try {
                System.loadLibrary("feature_extractor");
                Log.i("adi", " Success System.loadLibrary(\"feature_extractor\")");
            } catch (Exception ex) {
                Log.i("adi", " Exception System.loadLibrary(\"feature_extractor\")");
                ex.printStackTrace();
            }

            try {
                // load cascade file from application resources
                InputStream is = getResources().openRawResource(R.raw.haarcascade_mcs_mouth);
                File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                cascadeFile = new File(cascadeDir, "haarcascade_mcs_mouth.xml");
                FileOutputStream os = new FileOutputStream(cascadeFile);
                byte[] buffer = new byte[4096];
                int bytesRead;
                while ((bytesRead = is.read(buffer)) != -1) {
                    os.write(buffer, 0, bytesRead);
                }
                is.close();
                os.close();

                //openCvCameraView.setMaxFrameSize(800, 800);
                fe = new FeatureExtractor(cascadeFile.getAbsolutePath(), openCvCameraView.getHeight(), openCvCameraView.getWidth());
                cascadeFile.delete();
            } catch (IOException e) {
                Log.e("adi", "Failed to load cascade. Exception thrown: " + e);
            }

            openCvCameraView.enableView();
        } break;
        default:
        {
            super.onManagerConnected(status);
        } break;
        }
    }
};

public LipReadingActivity() {
    Log.i("adi", "Instantiated new " + this.getClass());
}

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i("adi", "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.main);



    openCvCameraView = (CameraBridgeViewBase) findViewById(R.id.lip_reading_surface_view);
    openCvCameraView.setCvCameraViewListener(this);
    client = AndroidHttpClient.newInstance("lipreading-android");
    tts = new TextToSpeech(this, this);
    preferences = getPreferences(MODE_PRIVATE);
    isTrainingMode = preferences.getBoolean(getString(R.string.trainingModePref), false);
    uri = preferences.getString(getString(R.string.serverPref), getString(R.string.serverDef));
    settingsFragment = new SettingsFragment().setContext(this);
    output = (TextView) findViewById(R.id.output);
    recordButton = (ImageButton) findViewById(R.id.recordButton);
    recordButton.setOnClickListener(
            new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    onRecordButtonPressed();
                }
            });
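
(The rest of onCreate and the camera callbacks are not shown here. For context, CameraBridgeViewBase converts whatever Mat the frame callback returns with Utils.matToBitmap(), so the returned Mat has to keep the original frame size. A generic CvCameraViewListener2 callback, just as a sketch and not my exact code, would look roughly like this:)

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();   // same width/height as the preview frame
        Mat gray = inputFrame.gray();
        int[] points = new int[12];     // nativeDetect fills 12 values
        if (fe != null) {
            fe.detect(gray, rgba, points);
        }
        // CameraBridgeViewBase passes the Mat returned here to Utils.matToBitmap(),
        // so its rows/cols must still match the cached Bitmap's height/width.
        return rgba;
    }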

And here is my FeatureExtractor.java class definition:

public class FeatureExtractor {
    private static native long nativeCreateObject(String cascadeName, int w, int h);
    private static native void nativeDestroyObject(long thiz);
    private static native void nativeDetect(long thiz, long grayImage, long rgbaImage, int[] points);

    public FeatureExtractor(String cascadeName, int w, int h) {
        mNativeObj = nativeCreateObject(cascadeName, w, h);
    }

    public void detect(Mat grayImage, Mat rgbaImage, int[] points) {
        nativeDetect(mNativeObj, grayImage.getNativeObjAddr(), rgbaImage.getNativeObjAddr(), points);
    }

    public void release() {
        nativeDestroyObject(mNativeObj);
        mNativeObj = 0;
    }

    private long mNativeObj = 0;
}
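
(The wrapper holds a raw native pointer, so release() should be called when the camera view is torn down; a hypothetical teardown sketch, this exact method is not in the code above:)

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (openCvCameraView != null)
            openCvCameraView.disableView();  // stop delivering frames
        if (fe != null) {
            fe.release();                    // frees the native FeatureExtractor object
            fe = null;
        }
    }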

And here is my FeatureExtractor_jni.cpp file:

#include <FeatureExtractor_jni.h>
#include <FeatureExtractor.h>
#include <string>

#include <android/log.h>

#define LOG_TAG "FaceDetection/DetectionBasedTracker"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))

using namespace std;
using namespace cv;

JNIEXPORT jlong JNICALL Java_edu_lipreading_android_FeatureExtractor_nativeCreateObject
  (JNIEnv * jenv, jclass, jstring jFileName, jint w, jint h)
{
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject enter");
    const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
    string stdFileName(jnamestr);
    jlong result = 0;

    try
    {
        FeatureExtractor* fe = new FeatureExtractor(stdFileName, w, h);
        result = (jlong)fe;
    }
    catch(cv::Exception& e)
    {
        LOGD("nativeCreateObject caught cv::Exception: %s", e.what());
        jclass je = jenv->FindClass("org/opencv/core/CvException");
        if(!je)
            je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, e.what());
    }
    catch (...)
    {
        LOGD("nativeCreateObject caught unknown exception");
        jclass je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
        return 0;
    }

    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject exit");
    return result;
}

JNIEXPORT void JNICALL Java_edu_lipreading_android_FeatureExtractor_nativeDestroyObject
  (JNIEnv * jenv, jclass, jlong thiz)
{
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject enter");
    try
    {
        if(thiz != 0)
        {
            delete (FeatureExtractor*)thiz;
        }
    }
    catch(cv::Exception& e)
    {
        LOGD("nativeDestroyObject caught cv::Exception: %s", e.what());
        jclass je = jenv->FindClass("org/opencv/core/CvException");
        if(!je)
            je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, e.what());
    }
    catch (...)
    {
        LOGD("nativeDestroyObject caught unknown exception");
        jclass je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
    }
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject exit");
}

JNIEXPORT void JNICALL Java_edu_lipreading_android_FeatureExtractor_nativeDetect
  (JNIEnv * jenv, jclass, jlong thiz, jlong grayImage, jlong rgbaImage, jintArray points)
{
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect enter");
    try
    {
        jint* rpoints = jenv->GetIntArrayElements(points, NULL);
        int* npoints = new int[12];
        for(int i = 0; i < 12; i++)
        {
            npoints[i] = 0;
        }

        cv::Mat* mat = (Mat*) (rgbaImage);
        cv::Mat* gmat = (Mat*) (grayImage);
        *mat = mat->t();
        *gmat = gmat->t();
        flip(*mat, *mat, 0);
        flip(*gmat, *gmat, 0);
        ((FeatureExtractor*) thiz)->detect(*gmat, *mat, npoints);
        flip(*mat, *mat, 1);
        for(int i = 0; i < 12; i++)
        {
            rpoints[i] = npoints[i];
        }
        jenv->ReleaseIntArrayElements(points, rpoints, 0);
        delete[] npoints;
    }
    catch(cv::Exception& e)
    {
        LOGD("nativeDetect caught cv::Exception: %s", e.what());
        jclass je = jenv->FindClass("org/opencv/core/CvException");
        if(!je)
            je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, e.what());
    }
    catch (...)
    {
        LOGD("nativeDetect caught unknown exception");
        jclass je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
    }
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect exit");
}