2

I've got some code below .. it converts YUV_420_888 (android camera2 api) image format to ARGB8888 and saves it to a jpeg .. however the result is almost completely green .. any idea how to get this to work as expected??

private ByteBuffer image2ByteBuffer(Image image) {
    Image.Plane Y = image.getPlanes()[0];
    Image.Plane U = image.getPlanes()[1];
    Image.Plane V = image.getPlanes()[2];

    int yRowStride = Y.getRowStride();
    int yPixelStride = Y.getPixelStride();
    int uRowStride = U.getRowStride();
    int uPixelStride = U.getPixelStride();
    int vRowStride = V.getRowStride();
    int vPixelStride = V.getPixelStride();


    ByteBuffer YBuffer = Y.getBuffer();
    ByteBuffer UBuffer = U.getBuffer();
    ByteBuffer VBuffer = V.getBuffer();

    YBuffer.rewind();
    UBuffer.rewind();
    VBuffer.rewind();

    int yb, ub, vb;
    yb = YBuffer.capacity();
    ub = UBuffer.capacity();
    vb = VBuffer.capacity();

    byte[] ybb, ubb,vbb;
    ybb = new byte[yb];
    ubb = new byte[ub];
    vbb = new byte[vb];

    YBuffer.get(ybb);
    UBuffer.get(ubb);
    VBuffer.get(vbb);

    byte[] yData = ybb;
    byte[] uData = new byte[yb/4];
    byte[] vData = new byte[yb/4];

    int index = 0;
    for(int i = 0; (i < (640 * 480 * 0.5)) && (index < (640 * 480 * 0.25)); i += uPixelStride){
        uData[index] = ubb[i];
        vData[index] = vbb[i];
        index++;
    }

    byte[] yuv420sp = new byte[(int)(640 * 480 * 1.5)];

    // Y DATA FIRST

    for (int i = 0 ; i < 640 * 480; i++)
        yuv420sp[i] = yData[i];

    //UV INTERLEAVED DATA NEXT

    index = 640 * 480;
    for (int i = 0; i < (640 * 480 * 0.25); i++) {
        yuv420sp[index] = uData[i];
        index++;
        yuv420sp[index] = vData[i];
        index++;
    }



    int[] decoded = new int[640 * 480];
    YUV420Converter.decodeYUV420888(decoded, yuv420sp, 640, 480);
    Bitmap bitmap = Bitmap.createBitmap(decoded, 640, 480, Bitmap.Config.ARGB_8888);
    ByteBuffer byteBuffer = ByteBuffer.allocate(640 * 480 * 4);
    bitmap.copyPixelsToBuffer(byteBuffer);
    bitmap.recycle();
    return byteBuffer;
}

The next bit of code is from the imagereader

    public void onImageAvailable(ImageReader reader) {

        Image image = reader.acquireLatestImage();

        ByteBuffer byteBuffer = image2ByteBuffer(image);

        saveImage(byteBuffer);
        byteBuffer.clear();
        image.close();
    }

    public void saveImage(ByteBuffer imageByteBuffer) {
        try {
            imageByteBuffer.rewind();
            Bitmap imageBitmap = Bitmap.createBitmap(WIDTH, HEIGHT, Bitmap.Config.ARGB_8888);
            imageBitmap.copyPixelsFromBuffer(imageByteBuffer);
            String filePath = Environment.getExternalStorageDirectory().getPath()
                    + "/pic" + mImagesCount + ".png";
            FileOutputStream fileOutputStream = new FileOutputStream(filePath);

            imageBitmap.compress(Bitmap.CompressFormat.PNG, 100, fileOutputStream);

            //fileOutputStream.write(imageByteArray);
            fileOutputStream.flush();
            fileOutputStream.close();
            imageBitmap.recycle();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

Used to decode to ARGB once we have a proper formatted YUV

public static void decodeYUV420888(int[] rgb, byte[] yuv420sp, int width, int height) {

    final int frameSize = width * height;

    for (int j = 0, yp = 0; j < height; j++) {       int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0)
                y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }

            int y1192 = 1192 * y;
            int b = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int r = (y1192 + 2066 * u);

            if (r < 0)                  r = 0;               else if (r > 262143)
                r = 262143;
            if (g < 0)                  g = 0;               else if (g > 262143)
                g = 262143;
            if (b < 0)                  b = 0;               else if (b > 262143)
                b = 262143;

            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}
4

2 回答 2

1

显然 API 21 存在问题,但这在 API 22 中得到了解决。
我在以下帖子中找到了这一点:yuv_420_888-to-rgb
关于这个主题的另一篇优秀帖子可以在这里找到:解决一些 android 问题

希望这可以帮助 !

于 2016-07-12T12:17:14.353 回答
0

只是为了结束这个..我正在使用三星 Note 5 并在下面的链接中实施了解决方案

三星 Galaxy S7 (Camera2) 上的 YUV_420_888 解读

更新:

以防有人偶然发现这个......这是一个将 yuv420sp (NV21) 图像转换为 RGBA 的渲染脚本示例。

https://www.rounds.com/blog/unleash-power-gpu-renderscript/

结合起来,它们都解决了 Camera API 级别 1/2 YUV_XX -> RGBA 问题。快乐编码!

于 2016-10-17T17:05:56.587 回答