1 | initial version |
When you build the cumulative buffer, you should take into account the stride info from android.media.Image
. Luckily, for Y you are guaranteed to have pixelStride = 1. So, if you only need grayscale, you can work with Y.getBuffer() directly.
If you need color info, you must copy the three buffers into a single byte array, respecting both pixelStride and rowStride for each plane. It may be overkill to invoke OpenCV for this: the conversion can be performed in pure Java, and usually has affordable performance:
// One packed ARGB int per pixel (the name is historical; these are ints, not bytes).
// NOTE: the original had `new int[...)` — mismatched bracket, would not compile.
int[] rgbBytes = new int[image.getHeight() * image.getWidth()];
int idx = 0;

// Plane 0 = Y (luma). For YUV_420_888 the Y plane's pixelStride is guaranteed to be 1,
// but we read the reported strides anyway so the code stays correct for any layout.
ByteBuffer yBuffer = Y.getBuffer();
int yPixelStride = Y.getPixelStride();
int yRowStride = Y.getRowStride();
// Planes 1 and 2 = U and V (chroma), subsampled 2x2 relative to luma.
ByteBuffer uBuffer = U.getBuffer();
int uPixelStride = U.getPixelStride();
int uRowStride = U.getRowStride();
ByteBuffer vBuffer = V.getBuffer();
int vPixelStride = V.getPixelStride();
int vRowStride = V.getRowStride();

for (int row = 0; row < image.getHeight(); row++) {
    for (int col = 0; col < image.getWidth(); col++) {
        int y = yBuffer.get(col * yPixelStride + row * yRowStride) & 0xff;
        // Chroma planes are half resolution: one U/V sample covers a 2x2 block of Y samples.
        int u = uBuffer.get(col / 2 * uPixelStride + row / 2 * uRowStride) & 0xff;
        int v = vBuffer.get(col / 2 * vPixelStride + row / 2 * vRowStride) & 0xff;

        // Fixed-point YUV -> RGB. Use (coeff * sample) >> 8 rather than the original
        // ((coeff << 8) * sample) >> 16: the two are identical for nonnegative operands,
        // but the original overflows a 32-bit int on the blue term —
        // (33050 << 8) * 255 = 2,157,504,000 > Integer.MAX_VALUE — corrupting blue
        // whenever u is near 255. The rewritten products stay well below 2^31.
        int y1 = (19077 * y) >> 8;
        int r = (y1 + ((26149 * v) >> 8) - 14234) >> 6;
        int g = (y1 - ((6419 * u) >> 8) - ((13320 * v) >> 8) + 8708) >> 6;
        int b = (y1 + ((33050 * u) >> 8) - 17685) >> 6;

        // Clamp each channel to [0, 255].
        if (r < 0) r = 0; else if (r > 255) r = 255;
        if (g < 0) g = 0; else if (g > 255) g = 255;
        if (b < 0) b = 0; else if (b > 255) b = 255;

        // Pack as 0xAARRGGBB, the int layout Bitmap.Config.ARGB_8888 expects.
        rgbBytes[idx++] = 0xff000000 | (r << 16) | (g << 8) | b;
    }
}
final Bitmap bit = Bitmap.createBitmap(rgbBytes, image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888);
PS note that your code cannot work at all: ByteBuffer.allocateDirect() creates a direct buffer that has no accessible backing array, so calling buffer.array() on it throws UnsupportedOperationException.