
I'm trying to stream a bitmap over WebRTC. My capturer class looks roughly like this:

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.opengl.GLES20;
import android.opengl.GLUtils;

import androidx.annotation.Nullable;

import org.webrtc.CapturerObserver;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.TextureBufferImpl;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;
import org.webrtc.YuvConverter;

public class BitmapCapturer implements VideoCapturer, VideoSink {
    // Capturer, ScreenConfig and CapturerCallback are my own classes that
    // deliver bitmaps of the captured content.
    private Capturer capturer;

    private int width;
    private int height;

    private SurfaceTextureHelper textureHelper;
    private Context appContext;
    @Nullable
    private CapturerObserver capturerObserver;

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper,
                           Context context, CapturerObserver capturerObserver) {
        if (capturerObserver == null) {
            throw new RuntimeException("capturerObserver not set.");
        }
        this.appContext = context;
        this.textureHelper = surfaceTextureHelper;
        this.capturerObserver = capturerObserver;
        this.capturer = new Capturer();
        this.textureHelper.startListening(this);
    }

    @Override
    public void startCapture(int width, int height, int fps) {
        this.width = width;
        this.height = height;

        long start = System.nanoTime();
        textureHelper.setTextureSize(width, height);

        // Create the GL texture that incoming bitmaps are uploaded into.
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

        // Flip vertically around the center: Bitmap rows are top-down,
        // GL texture coordinates are bottom-up.
        Matrix matrix = new Matrix();
        matrix.preTranslate(0.5f, 0.5f);
        matrix.preScale(1f, -1f);
        matrix.preTranslate(-0.5f, -0.5f);

        YuvConverter yuvConverter = new YuvConverter();
        TextureBufferImpl buffer = new TextureBufferImpl(width, height,
                VideoFrame.TextureBuffer.Type.RGB, textures[0], matrix,
                textureHelper.getHandler(), yuvConverter, null);

        this.capturerObserver.onCapturerStarted(true);
        this.capturer.startCapture(new ScreenConfig(width, height),
                new CapturerCallback() {
                    @Override
                    public void onFrame(Bitmap bitmap) {
                        // Upload the bitmap and hand the frame off on the GL thread.
                        textureHelper.getHandler().post(() -> {
                            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

                            long frameTime = System.nanoTime() - start;
                            VideoFrame videoFrame =
                                    new VideoFrame(buffer.toI420(), 0, frameTime);
                            capturerObserver.onFrameCaptured(videoFrame);
                            videoFrame.release();
                        });
                    }
                });
    }

    @Override
    public void onFrame(VideoFrame videoFrame) {
        capturerObserver.onFrameCaptured(videoFrame);
    }

    @Override
    public void stopCapture() throws InterruptedException {
    }

    @Override
    public void changeCaptureFormat(int width, int height, int fps) {
    }

    @Override
    public void dispose() {
    }

    @Override
    public boolean isScreencast() {
        return true;
    }
}
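In case the surrounding setup matters, the capturer is hooked up in the standard org.webrtc way, roughly like this (a sketch; eglBase, factory and context come from initialization code I've omitted):

    SurfaceTextureHelper textureHelper = SurfaceTextureHelper.create(
            "CaptureThread", eglBase.getEglBaseContext());
    VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ true);
    BitmapCapturer capturer = new BitmapCapturer();
    capturer.initialize(textureHelper, context, videoSource.getCapturerObserver());
    VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);
    capturer.startCapture(720, 1280, 30); // portrait dimensions, for example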

The resulting stream looks like this:

(screenshot: corrupted frames)

Below I'll try to lay out the results of my experiments so far.

  1. If the frame rotation is set to 90 degrees, the stream looks normal (see the note after this list):

    VideoFrame videoFrame = new VideoFrame(buffer.toI420(), 90, frameTime);
    

(screenshot: rotated frame)

  2. Tried swapping the TextureBuffer dimensions:

    TextureBufferImpl buffer = new TextureBufferImpl(height, width,
        VideoFrame.TextureBuffer.Type.RGB, textures[0],  matrix,
        textureHelper.getHandler(), yuvConverter, null);
    

(screenshot: swapped frame)

  3. Also tried passing the height as both the width and the height:

    TextureBufferImpl buffer = new TextureBufferImpl(height, height,
        VideoFrame.TextureBuffer.Type.RGB, textures[0],  matrix,
        textureHelper.getHandler(), yuvConverter, null);
    

(screenshot: stretched frame)
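A side note on experiment 1: setting the rotation to 90 doesn't just turn the picture, it also changes the dimensions that downstream consumers derive from the frame. The accessors below are from the standard org.webrtc.VideoFrame API:

    // With a portrait 720x1280 buffer and rotation = 90:
    VideoFrame frame = new VideoFrame(buffer.toI420(), 90, frameTime);
    frame.getBuffer().getWidth();  // 720  - buffer width, unchanged
    frame.getRotatedWidth();       // 1280 - what renderers/encoders treat as width
    frame.getRotatedHeight();      // 720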

I'm stumped here. Even though every dimension is set up for a portrait frame, WebRTC somehow seems to expect a landscape one. I logged all the frame buffer sizes inside the WebRTC library, all the way down to the video encoder, and they are correct. The problem doesn't seem to be in the conversion method either: besides toI420() I also tried converting with ARGBToI420 from libyuv, and the result is the same.
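For reference, that alternative CPU path looks roughly like this. It's a sketch: the real code calls libyuv's ARGBToI420 through JNI, which I've replaced here with an equivalent pure-Java loop over org.webrtc.JavaI420Buffer so the snippet is self-contained; the BT.601 coefficients match libyuv's defaults.

    import java.nio.ByteBuffer;
    import org.webrtc.JavaI420Buffer;

    // Pure-Java stand-in for libyuv's ARGBToI420 (BT.601, studio swing).
    // Chroma is taken from the top-left pixel of each 2x2 block.
    static VideoFrame bitmapToI420Frame(Bitmap bitmap, long timestampNs) {
        int w = bitmap.getWidth();
        int h = bitmap.getHeight();
        int[] argb = new int[w * h];
        bitmap.getPixels(argb, 0, w, 0, 0, w, h);

        JavaI420Buffer i420 = JavaI420Buffer.allocate(w, h);
        ByteBuffer dataY = i420.getDataY();
        ByteBuffer dataU = i420.getDataU();
        ByteBuffer dataV = i420.getDataV();

        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++) {
                int c = argb[y * w + x];
                int r = (c >> 16) & 0xFF, g = (c >> 8) & 0xFF, b = c & 0xFF;
                int luma = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
                dataY.put(y * i420.getStrideY() + x, (byte) luma);
                if ((y & 1) == 0 && (x & 1) == 0) {
                    int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
                    int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
                    dataU.put((y / 2) * i420.getStrideU() + x / 2, (byte) u);
                    dataV.put((y / 2) * i420.getStrideV() + x / 2, (byte) v);
                }
            }
        }
        return new VideoFrame(i420, 0, timestampNs);
    }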

Any help is greatly appreciated.

