I originally asked this question on StackOverflow here, but I haven't received any replies or answers.
I'm trying to do two things:
- Use OpenCV and the JavaCameraView to process frames from the phone's camera feed
- Enable recording of that processed video as it happens
I have both of them working, but the way I had to implement number 2 is ridiculous:
- For each frame, write the processed Mat as an image file.
- When the recording stops, use JCodec's Android library to stitch them together into a video file (a condensed sketch follows this list; the full classes are further down).
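In condensed form, that recording path boils down to the two steps below. This is just the MatVideoWriter class from the bottom of the post stripped to its essentials; the FramesToVideo name and the static methods are only for illustration:

package com.example.videotest;

import java.io.File;

import org.jcodec.api.android.SequenceEncoder;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

//condensed sketch of the current approach, not the real class
public class FramesToVideo {

    //step 1: called from the camera callback for every processed frame
    static void writeFrame(File dir, Mat frame, int index) {
        Highgui.imwrite(new File(dir, String.format("img%05d.png", index)).toString(), frame);
    }

    //step 2: called once when recording stops; frameFiles must already be in frame order
    static void stitch(File outputFile, File[] frameFiles) throws Exception {
        SequenceEncoder encoder = new SequenceEncoder(outputFile);
        for (File f : frameFiles) {
            Bitmap bitmap = BitmapFactory.decodeFile(f.getAbsolutePath()); //PNG back into memory...
            encoder.encodeImage(bitmap);                                   //...and into the encoder
        }
        encoder.finish();
    }
}

Step 1 runs on the camera thread for every single frame, and step 2 decodes every PNG back into a Bitmap before handing it to JCodec, which is where the time and memory go.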
That works, but it comes with a ton of drawbacks: the frame rate drops unbearably low while recording, the stitching step takes about half a second per frame, and it runs out of memory for videos longer than a couple of seconds, and that's after lowering the camera resolution to keep the images as small as possible. Even then, the output video's frame rate is completely out of step with reality, so the result looks insanely sped up.
This whole approach feels wrong for a lot of reasons, so my question is: is there a better way to do it?
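The most direct alternative I can think of, using the same libraries, would be to skip the intermediate PNGs entirely and push each processed frame straight into JCodec's SequenceEncoder as it arrives. A rough, untested sketch of that idea (DirectMatVideoWriter is just a name I made up; only the OpenCV and JCodec calls are real):

package com.example.videotest;

import java.io.File;
import java.io.IOException;

import org.jcodec.api.android.SequenceEncoder;
import org.opencv.android.Utils;
import org.opencv.core.Mat;

import android.graphics.Bitmap;

//hypothetical: encode frames as they arrive instead of stitching image files afterwards
public class DirectMatVideoWriter {

    private SequenceEncoder encoder;
    private Bitmap reusableBitmap;

    public void start(File outputFile) throws IOException {
        encoder = new SequenceEncoder(outputFile);
    }

    //called from onCameraFrame() with the processed RGBA Mat
    public void write(Mat rgba) throws IOException {
        if (reusableBitmap == null) {
            reusableBitmap = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
        }
        Utils.matToBitmap(rgba, reusableBitmap); //RGBA Mat -> ARGB_8888 Bitmap
        encoder.encodeImage(reusableBitmap);     //encode one video frame
    }

    public void stop() throws IOException {
        encoder.finish();
    }
}

I suspect encodeImage() is far too slow to call once per frame on the camera thread, but if there's a sane way to do something along these lines (with JCodec or anything else), that's exactly the kind of answer I'm hoping for.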
Here's a small example if anybody wants to run it. It requires the OpenCV Android project available here and the JCodec Android project available here.
AndroidManifest.xml:
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.videotest"
    android:versionCode="1"
    android:versionName="1.0" >

    <uses-sdk
        android:minSdkVersion="8"
        android:targetSdkVersion="22" />

    <application
        android:allowBackup="true"
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >
        <activity
            android:name=".MainActivity"
            android:screenOrientation="landscape"
            android:configChanges="orientation|keyboardHidden|screenSize"
            android:label="@string/app_name" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

    <uses-permission android:name="android.permission.CAMERA"/>
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

</manifest>
MainActivity:
package com.example.videotest;
import java.io.File;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;
public class MainActivity extends Activity implements CvCameraViewListener2 {

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0,255,0);
    private int resolutionIndex = 0;
    private MatVideoWriter matVideoWriter = new MatVideoWriter();

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i("VideoTest", "OpenCV loaded successfully");
                    cameraView.enableView();
                } break;
                default: {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);

        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (cameraView != null) {
            cameraView.disableView();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();
        edgesMat = null;
    }
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        //get sub-image (the center 3/4 of the frame)
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        //run Canny edge detection on the sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);

        //paint the detected edges green and copy them back into the preview frame
        Mat colorEdges = new Mat();
        //setTo() returns a new Mat wrapper, so keep a handle to this one to release it explicitly
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);
        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();
        rgbaInnerWindow.release();

        //this is the part that hurts: dump the processed frame to disk if we're recording
        if (matVideoWriter.isRecording()) {
            matVideoWriter.write(rgba);
        }

        return rgba;
    }
    public void changeResolution(View v) {
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if (resolutionIndex >= cameraResolutionList.size()) {
            resolutionIndex = 0;
        }

        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();

        String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
    }

    public void startVideo(View v) {
        if (matVideoWriter.isRecording()) {
            matVideoWriter.stop();

            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            for (String img : file.list()) {
                String scanMe = new File(file, img).getAbsolutePath();
                MediaScannerConnection.scanFile(this, new String[]{scanMe}, null, null);
                Log.i("VideoTest", "Scanning: " + scanMe);
            }

            file = new File(file, "video.mp4");
            MediaScannerConnection.scanFile(this, new String[]{file.getAbsolutePath()}, null, null);
        }
        else {
            String state = Environment.getExternalStorageState();
            Log.i("VideoTest", "state: " + state);

            File ext = getExternalFilesDir(null);
            Log.i("VideoTest", "ext: " + ext.getAbsolutePath());

            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            if (!file.exists()) {
                boolean success = file.mkdirs();
                Log.i("VideoTest", "mkdirs: " + success);
            }
            else {
                Log.i("VideoTest", "file exists.");
            }

            Log.i("VideoTest", "starting recording: " + file.getAbsolutePath());
            matVideoWriter.start(file);
        }
    }
}
CameraView:
package com.example.videotest;
import java.io.FileOutputStream;
import java.util.List;
import org.opencv.android.JavaCameraView;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.util.AttributeSet;
import android.util.Log;
public class CameraView extends JavaCameraView {

    private String mPictureFileName;

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public List<String> getEffectList() {
        return mCamera.getParameters().getSupportedColorEffects();
    }

    public boolean isEffectSupported() {
        return (mCamera.getParameters().getColorEffect() != null);
    }

    public String getEffect() {
        return mCamera.getParameters().getColorEffect();
    }

    public void setEffect(String effect) {
        Camera.Parameters params = mCamera.getParameters();
        params.setColorEffect(effect);
        mCamera.setParameters(params);
    }

    public List<android.hardware.Camera.Size> getResolutionList() {
        return mCamera.getParameters().getSupportedPreviewSizes();
    }

    public void setResolution(int width, int height) {
        disconnectCamera();
        mMaxHeight = height;
        mMaxWidth = width;
        connectCamera(getWidth(), getHeight());
    }

    public android.hardware.Camera.Size getResolution() {
        return mCamera.getParameters().getPreviewSize();
    }
}
MatVideoWriter:
package com.example.videotest;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.jcodec.api.android.SequenceEncoder;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;
public class MatVideoWriter {

    boolean recording;
    File dir;
    int imageIndex = 0;

    public void start(File dir) {
        this.dir = dir;
        recording = true;
    }

    public void stop() {
        recording = false;
        try {
            File file = new File(dir, "video.mp4");
            SequenceEncoder encoder = new SequenceEncoder(file);

            List<File> files = Arrays.asList(dir.listFiles());
            Collections.sort(files, new Comparator<File>() {
                @Override
                public int compare(File lhs, File rhs) {
                    return lhs.getName().compareTo(rhs.getName());
                }
            });

            for (File f : files) {
                //skip video.mp4 itself (it is created in this same directory) and anything else that isn't a frame
                if (!f.getName().endsWith(".png")) {
                    continue;
                }
                Log.i("VideoTest", "Encoding image: " + f.getAbsolutePath());
                try {
                    Bitmap frame = BitmapFactory.decodeFile(f.getAbsolutePath());
                    encoder.encodeImage(frame);
                }
                catch (Exception e) {
                    e.printStackTrace();
                }
            }
            encoder.finish();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void write(Mat mat) {
        //the frame from onCameraFrame() is RGBA; swap R and B (and drop alpha) so that
        //Highgui.imwrite, which expects BGR order, saves the colors correctly
        Mat rgbMat = new Mat();
        Imgproc.cvtColor(mat, rgbMat, Imgproc.COLOR_BGR2RGB);

        //zero-pad the index so the name-based sort in stop() keeps the frames in order
        //(otherwise "img10.png" would sort before "img2.png")
        File file = new File(dir, String.format("img%05d.png", imageIndex));
        boolean success = Highgui.imwrite(file.toString(), rgbMat);
        Log.i("VideoTest", "Success writing " + file.getName() + ": " + success);

        rgbMat.release(); //release the temporary Mat so native memory doesn't pile up every frame
        imageIndex++;
    }

    public boolean isRecording() {
        return recording;
    }
}