I am implementing facial expression recognition (FER) in OpenCV, entirely in C++.
Now that my program is up and running, I plan to port it to Android using JNI so that I do not have to change my C++ code.
My program has three stages:

1. Face detection using a cascade classifier
2. Local Binary Patterns (LBP) to compute the feature vector
3. Expression classification using a Support Vector Machine (SVM)

It also has two modes:

- Training mode, where the program asks the user to mimic different expressions, processes the captured faces into feature vectors, and then trains the SVM.
- 'Live' mode, where the program predicts the expression for each face it is given.

My problem lies in training mode. In the desktop executable I use getchar() to wait for the user (so they can change their expression and press Enter to mimic the next one), then take 100 face images, and so on until I have images for every expression. This way the user can get ready for the next expression.
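For reference, the desktop loop looks roughly like this (captureFaceImage, computeLBPFeatures, and the expression names are just placeholders for my real code; I am on the OpenCV 2.4 CvSVM API):

    #include <cstdio>
    #include <opencv2/core/core.hpp>
    #include <opencv2/ml/ml.hpp>

    // Placeholders for the real detection / feature-extraction code:
    cv::Mat captureFaceImage();                  // grab a frame, detect and crop the face
    cv::Mat computeLBPFeatures(const cv::Mat&);  // 1xN CV_32F LBP histogram

    int main() {
        const char* expressions[] = { "neutral", "happy", "sad", "angry" };
        const int NUM_EXPRESSIONS = 4;
        const int SAMPLES_PER_EXPRESSION = 100;

        cv::Mat trainData, labels;
        for (int e = 0; e < NUM_EXPRESSIONS; e++) {
            printf("Mimic '%s', then press Enter...\n", expressions[e]);
            getchar(); // block until the user is ready

            for (int i = 0; i < SAMPLES_PER_EXPRESSION; i++) {
                trainData.push_back(computeLBPFeatures(captureFaceImage()));
                labels.push_back(e); // label each row with the expression index
            }
        }

        CvSVM svm;
        svm.train(trainData, labels); // default CvSVMParams for brevity
        svm.save("svmClassifier.yml");
        return 0;
    }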
I do not know how to do the equivalent in Android, since I am really not used to Android programming.
What I want is something like: take the first 100 face images for one expression, wait for the user to touch the screen, take another 100 face images for the next expression, and so on.
So what I am really asking is: how do I structure the program so that it waits for user input before proceeding to the next step?
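The only idea I have so far is to stop blocking altogether: arm a counter from a touch event and let onCameraFrame capture one sample per frame while the counter is positive. Something like the sketch below, using the fields from my code further down (mSamplesLeft and mCurrentExpression are hypothetical fields that do not exist in my code yet):

    // Hypothetical additions to the activity; none of these fields exist yet.
    // volatile because touches arrive on the UI thread while onCameraFrame
    // runs on the camera thread.
    private volatile int mSamplesLeft = 0; // frames still to capture for the current expression
    private int mCurrentExpression = 0;    // index of the expression being collected

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_DOWN
                && mViewMode == VIEW_MODE_TRAINING_MODE
                && mSamplesLeft == 0) {
            mSamplesLeft = 100; // user is ready: capture the next 100 frames
        }
        return true;
    }

    // Replacement for the empty VIEW_MODE_TRAINING_MODE case in onCameraFrame:
    if (mViewMode == VIEW_MODE_TRAINING_MODE && mSamplesLeft > 0) {
        trainingMode(mCascadeFileEyes.getAbsolutePath(),
                     mCascadeFileClassifier.getAbsolutePath(),
                     mGray.getNativeObjAddr(),
                     mRgba.getNativeObjAddr());
        if (--mSamplesLeft == 0) {
            mCurrentExpression++; // batch done; wait for the next touch
        }
    }

Is restructuring the flow around callbacks like this the right way, or is there a cleaner mechanism for pausing between steps?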
Sorry for my bad English; it is not my first language, so I hope you don't mind.
Here is a snippet of my code (only the Java part) so you have some idea of the structure:
public class Tutorial2Activity extends Activity implements CvCameraViewListener2 {
    private static final String TAG = "OCVSample::Activity";

    private static final int VIEW_MODE_LIVE_MODE = 0;
    private static final int VIEW_MODE_TRAINING_MODE = 1;

    private int mViewMode;
    private Mat mRgba;
    private Mat mIntermediateMat;
    private Mat mGray;
    private File mCascadeFileEyes;       // holds the face cascade (lbpcascade_frontalface.xml)
    private File mCascadeFileClassifier; // holds the trained SVM model (svmClassifier.yml)
    private MenuItem mItemLiveMode;
    private MenuItem mItemTrainingMode;
    private int mCameraId;

    private CameraBridgeViewBase mOpenCvCameraView;
    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    Log.i(TAG, "OpenCV loaded successfully");

                    // Load native library after(!) OpenCV initialization
                    System.loadLibrary("mixed_sample");

                    mCameraId = 1; // camera index 1 (usually the front camera)
                    mOpenCvCameraView.setCameraIndex(mCameraId);

                    InputStream is, isClassifier;
                    FileOutputStream os, osClassifier;

                    // Copy the face cascade from the assets to internal storage,
                    // so the native code can open it by absolute path.
                    try {
                        is = getResources().getAssets().open("lbpcascade_frontalface.xml");
                        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                        mCascadeFileEyes = new File(cascadeDir, "lbpcascade_frontalface.xml");
                        os = new FileOutputStream(mCascadeFileEyes);

                        byte[] buffer = new byte[16384];
                        int bytesRead;
                        while ((bytesRead = is.read(buffer)) != -1) {
                            os.write(buffer, 0, bytesRead);
                        }
                        is.close();
                        os.close();
                        Log.i(TAG, "face cascade found");
                    } catch (IOException e) {
                        Log.i(TAG, "face cascade not found");
                    }

                    // Copy the trained SVM model from the assets the same way.
                    try {
                        isClassifier = getResources().getAssets().open("svmClassifier.yml");
                        File cascadeDirClassifier = getDir("cascade", Context.MODE_PRIVATE);
                        mCascadeFileClassifier = new File(cascadeDirClassifier, "svmClassifier.yml");
                        osClassifier = new FileOutputStream(mCascadeFileClassifier);

                        byte[] bufferClassifier = new byte[16384];
                        int bytesReadClassifier;
                        while ((bytesReadClassifier = isClassifier.read(bufferClassifier)) != -1) {
                            osClassifier.write(bufferClassifier, 0, bytesReadClassifier);
                        }
                        isClassifier.close();
                        osClassifier.close();
                        Log.i(TAG, "svmClassifier found");
                    } catch (IOException e) {
                        Log.i(TAG, "svmClassifier not found");
                    }

                    mOpenCvCameraView.enableView();
                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };
    public Tutorial2Activity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.tutorial2_surface_view);

        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial2_activity_surface_view);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        Log.i(TAG, "called onCreateOptionsMenu");
        mItemLiveMode = menu.add("Live Mode");
        mItemTrainingMode = menu.add("Training Mode");
        //mItemAbout = menu.add("Monash University Malaysia 2014");
        return true;
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }
    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat(height, width, CvType.CV_8UC4);
        mGray = new Mat(height, width, CvType.CV_8UC1);
    }

    public void onCameraViewStopped() {
        mRgba.release();
        mGray.release();
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        final int viewMode = mViewMode;
        switch (viewMode) {
            case VIEW_MODE_LIVE_MODE:
                liveMode(mCascadeFileEyes.getAbsolutePath(),
                         mCascadeFileClassifier.getAbsolutePath(),
                         mGray.getNativeObjAddr(),
                         mRgba.getNativeObjAddr());
                break;
            case VIEW_MODE_TRAINING_MODE:
                // Still empty; this is where I need to wait for the user
                // before capturing each batch of 100 images.
                break;
        }

        return mRgba;
    }
    public boolean onOptionsItemSelected(MenuItem item) {
        Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);

        if (item == mItemLiveMode) {
            mViewMode = VIEW_MODE_LIVE_MODE;
        } else if (item == mItemTrainingMode) {
            mViewMode = VIEW_MODE_TRAINING_MODE;
        }

        return true;
    }

    public native void liveMode(String EyesClassifier, String svmClassifier, long matAddrGr, long matAddrRgba);
    public native void trainingMode(String EyesClassifier, String svmClassifier, long matAddrGr, long matAddrRgba);
}
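For completeness, the native functions those last two declarations bind to look roughly like this on the C++ side (I am assuming the package org.opencv.samples.tutorial2 from the OpenCV sample this activity is based on; the function names must match your actual package):

    #include <jni.h>
    #include <opencv2/core/core.hpp>

    extern "C" {

    // JNI naming convention: Java_<package, dots as underscores>_<class>_<method>
    JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_liveMode(
            JNIEnv* env, jobject, jstring eyesClassifier, jstring svmClassifier,
            jlong matAddrGr, jlong matAddrRgba)
    {
        const char* cascadePath = env->GetStringUTFChars(eyesClassifier, NULL);
        const char* svmPath     = env->GetStringUTFChars(svmClassifier, NULL);

        // The jlong values are pointers to the Java-side Mats, so the native
        // code works directly on the camera frames without copying.
        cv::Mat& gray = *(cv::Mat*) matAddrGr;
        cv::Mat& rgba = *(cv::Mat*) matAddrRgba;

        // ... run the existing C++ detection/LBP/SVM code on gray, draw on rgba ...

        env->ReleaseStringUTFChars(eyesClassifier, cascadePath);
        env->ReleaseStringUTFChars(svmClassifier, svmPath);
    }

    } // extern "C"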