I am trying to convert a series of JPEGs into an MP4 using MediaCodec and MediaMuxer.

No matter what I do, the output MP4 is always a static green screen.

Here is the code:

 public class AvcEncoder
{
    public bool CanEncode = true;

    MediaCodec codec;
    MediaMuxer muxer;
    MediaFormat format;
    public AvcEncoder()
    {
        codec = MediaCodec.CreateEncoderByType("video/avc");
        format = MediaFormat.CreateVideoFormat("video/avc", 720, 480);
        format.SetInteger(MediaFormat.KeyBitRate, 700000);
        format.SetInteger(MediaFormat.KeyFrameRate, 10);
        format.SetInteger(MediaFormat.KeyColorFormat, (int)Android.Media.MediaCodecCapabilities.Formatyuv420planar);
        format.SetInteger(MediaFormat.KeyIFrameInterval, 5);
        codec.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        codec.Start();
        Java.IO.File f = new Java.IO.File(Android.OS.Environment.ExternalStorageDirectory, "Parkingdom");
        if (!f.Exists())
        {
            f.Mkdirs();
        }
        muxer = new MediaMuxer(f.ToString() + "/test.mp4", MuxerOutputType.Mpeg4);
    }

    public void EncodeFrame(Bitmap image)
    {
        int mWidth = image.Width;
        int mHeight = image.Height;

        int[] mIntArray = new int[mWidth * mHeight];

        // Copy pixel data from the Bitmap into mIntArray
        image.GetPixels(mIntArray, 0, mWidth, 0, 0, mWidth, mHeight);
        byte[] byteArray = new byte[mWidth * mHeight * 3 / 2];
        // Convert the ARGB pixels in mIntArray to YUV420 planar bytes
        EncodeYUV420P(byteArray, mIntArray, mWidth, mHeight);


        using (var stream = new MemoryStream())
        {
            image.Compress(Bitmap.CompressFormat.Png, 100, stream);
            byteArray = stream.ToArray();
        }

        int inputBufferIndex = codec.DequeueInputBuffer(-1);
        if (inputBufferIndex >= 0)
        {
            ByteBuffer buffer = codec.GetInputBuffer(inputBufferIndex);
            buffer.Clear();
            buffer.Put(byteArray);
            codec.QueueInputBuffer(inputBufferIndex, 0, byteArray.Length, 0, 0);
        }
    }

    public void SaveMp4()
    {

        CanEncode = false;
        bool running = true;
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int track = -1;

        while (running)
        {
            int index = codec.DequeueOutputBuffer(bufferInfo, 10000);
            if (index == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                MediaFormat format = codec.OutputFormat;
                track = muxer.AddTrack(format);
                muxer.Start();
            }
            else if (index == (int)MediaCodecInfoState.TryAgainLater)
            {
                break;
            }
            else if (index >= 0)
            {
                if ((bufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
                {
                    bufferInfo.Size = 0;
                }


                if (track != -1)
                {
                    ByteBuffer outBuffer = codec.GetOutputBuffer(index);
                    outBuffer.Position(bufferInfo.Offset);
                    outBuffer.Limit(bufferInfo.Offset + bufferInfo.Size);
                    muxer.WriteSampleData(track, outBuffer, bufferInfo);
                    codec.ReleaseOutputBuffer(index, false);
                }
            }
        }

        codec.Stop();
        codec.Release();
        muxer.Stop();
        muxer.Release();

        CanEncode = true;

    }

    void EncodeYUV420P(byte[] yuv420p, int[] argb, int width, int height)
    {
        int frameSize = width * height;
        int chromasize = frameSize / 4;


        int yIndex = 0;
        int uIndex = frameSize;
        int vIndex = frameSize + chromasize;

        int a, R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++)
        {
            for (int i = 0; i < width; i++)
            {

                a = (int)(argb[index] & 0xff000000) >> 24; // alpha is extracted but not used
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = (argb[index] & 0xff) >> 0;

                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;


                yuv420p[yIndex++] = (byte)((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && index % 2 == 0)
                {
                    yuv420p[uIndex++] = (byte)((U < 0) ? 0 : ((U > 255) ? 255 : U));
                    yuv420p[vIndex++] = (byte)((V < 0) ? 0 : ((V > 255) ? 255 : V));
                }

                index++;
            }
        }
    }
}

"EncodeFrame" is called every time a new JPEG is produced; it is supposed to convert the image into the YUV420Planar format the media codec expects. The codec I am testing against does not support the semi-planar format.
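
For context, a minimal sketch of how the class above might be driven, assuming the JPEGs land in a directory; the method name, path handling, and file filter are illustrative, not from the original post:

    // Hypothetical driver (not from the original post): feed each JPEG to the
    // encoder as it is produced, then finalize the MP4.
    void EncodeAllFrames(string framesDir)
    {
        var encoder = new AvcEncoder();
        foreach (string jpegPath in System.IO.Directory.GetFiles(framesDir, "*.jpg"))
        {
            using (Android.Graphics.Bitmap frame = Android.Graphics.BitmapFactory.DecodeFile(jpegPath))
            {
                encoder.EncodeFrame(frame);   // ARGB -> YUV420Planar conversion + queue to the codec
            }
        }
        encoder.SaveMp4();                    // drain the encoder and write test.mp4
    }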

1 Answer

In case anyone runs into this later: I changed EncodeFrame to use a Surface and just call DrawBitmap() instead of copying bytes.

It is slower than the byte copy, but it works for my purposes.
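
For reference, a rough sketch of what that Surface-based variant might look like in Xamarin.Android C#. This is not the exact code from the answer: the class name, the Finish() helper, and the hard-coded 720x480 size are assumptions, and drawing with a software Canvas onto an encoder input surface is not guaranteed to work on every device. The output-draining and muxing side stays the same as SaveMp4() above.

using Android.Graphics;
using Android.Media;
using Android.Views;

// Rough sketch of the Surface-based input path described above.
public class SurfaceAvcEncoder
{
    MediaCodec codec;
    Surface inputSurface;

    public SurfaceAvcEncoder()
    {
        codec = MediaCodec.CreateEncoderByType("video/avc");
        MediaFormat format = MediaFormat.CreateVideoFormat("video/avc", 720, 480);
        format.SetInteger(MediaFormat.KeyBitRate, 700000);
        format.SetInteger(MediaFormat.KeyFrameRate, 10);
        // Surface input replaces the manual ARGB -> YUV conversion.
        format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        format.SetInteger(MediaFormat.KeyIFrameInterval, 5);
        codec.Configure(format, null, null, MediaCodecConfigFlags.Encode);

        // The input surface must be created after Configure() and before Start().
        inputSurface = codec.CreateInputSurface();
        codec.Start();
    }

    public void EncodeFrame(Bitmap image)
    {
        // Draw the bitmap onto the encoder's input surface; the codec pulls the
        // frame from the surface, so no input ByteBuffer handling is needed.
        Canvas canvas = inputSurface.LockCanvas(null);
        canvas.DrawBitmap(image, null, new Rect(0, 0, 720, 480), null);
        inputSurface.UnlockCanvasAndPost(canvas);
    }

    public void Finish()
    {
        // With Surface input, end-of-stream is signalled here rather than by
        // queueing an empty input buffer; drain outputs into the muxer as before.
        codec.SignalEndOfInputStream();
    }
}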

answered 2017-07-11T19:41:29.477