
I'm developing a video-conferencing application with OpenVidu, and we are trying to join a Wikitude AR session during a call. The problem is that both need access to the camera, which leads to the following scenario: if I instantiate the local participant's video first, I can't start the Wikitude AR session because its video doesn't load; if I instantiate the Wikitude session first, the other participants in the call can't see the device's video. I was able to create a custom video capturer for OpenVidu that mimics a camera; it just needs to be fed each frame to work.

package org.webrtc;

import android.content.Context;
import android.graphics.Bitmap;

import java.util.concurrent.atomic.AtomicReference;

public class CustomVideoCapturer implements VideoCapturer {
    private final static String TAG = "FileVideoCapturer";

    //private final FileVideoCapturer.VideoReader videoReader;
    private final Timer timer = new Timer();
    private CapturerObserver capturerObserver;

    private AtomicReference<Bitmap> image = new AtomicReference<Bitmap>();

    private final TimerTask tickTask = new TimerTask() {
        @Override
        public void run() {
            tick();
        }
    };

    public CustomVideoCapturer() {

    }

    // Converts the pending bitmap to NV21 and delivers it to WebRTC as a VideoFrame.
    public void tick() {
        Bitmap frame = image.get();
        if (frame != null && !frame.isRecycled()) {
            NV21Buffer nv21Buffer = new NV21Buffer(getNV21(frame), frame.getWidth(), frame.getHeight(), null);
            VideoFrame videoFrame = new VideoFrame(nv21Buffer, 0, System.nanoTime());
            capturerObserver.onFrameCaptured(videoFrame);
        }
    }

    // ARGB Bitmap -> NV21 byte array; the bitmap is recycled so it is only converted once.
    byte[] getNV21(Bitmap image) {
        int [] argb = new int[image.getWidth() * image.getHeight()];

        image.getPixels(argb, 0, image.getWidth(), 0, 0, image.getWidth(), image.getHeight());

        byte [] yuv = new byte[image.getWidth()*image.getHeight()*3/2];
        encodeYUV420SP(yuv, argb, image.getWidth(), image.getHeight());

        image.recycle();

        return yuv;
    }

    void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
        final int frameSize = width * height;

        int yIndex = 0;
        int uvIndex = frameSize;

        int a, R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {

                a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = (argb[index] & 0xff) >> 0;

                // well known RGB to YUV algorithm
                Y = ( (  66 * R + 129 * G +  25 * B + 128) >> 8) +  16;
                U = ( ( -38 * R -  74 * G + 112 * B + 128) >> 8) + 128;
                V = ( ( 112 * R -  94 * G -  18 * B + 128) >> 8) + 128;

                // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
                //    meaning for every 4 Y pixels there are 1 V and 1 U.  Note the sampling is every other
                //    pixel AND every other scanline.
                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420sp[uvIndex++] = (byte)((V<0) ? 0 : ((V > 255) ? 255 : V));
                    yuv420sp[uvIndex++] = (byte)((U<0) ? 0 : ((U > 255) ? 255 : U));
                }

                index ++;
            }
        }
    }

    // Called by the app (e.g. from the AR view) to queue the next frame to publish.
    public void sendFrame(Bitmap bitmap) {
        image.set(bitmap);
    }

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
                           CapturerObserver capturerObserver) {
        this.capturerObserver = capturerObserver;
    }

    @Override
    public void startCapture(int width, int height, int framerate) {
        capturing = true;
        threadCV().start();
    }

    // Frame-delivery loop: re-sends the most recent bitmap roughly every 10 ms until stopCapture().
    Thread threadCV() {
        return new Thread() {
            @Override
            public void run() {
                while (capturing) {
                    if (image.get() != null) {
                        tick();
                    }

                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        };
    }

    @Override
    public void stopCapture() throws InterruptedException {
        capturing = false;
    }

    @Override
    public void changeCaptureFormat(int width, int height, int framerate) {
        // Empty on purpose
    }

    @Override
    public void dispose() {
        // Nothing to release.
    }

    @Override
    public boolean isScreencast() {
        return false;
    }

}

On the local participant I use this method to send frames:

    public void sendFrame(Bitmap frame) {
        customVideoCapturer.sendFrame(frame);
    }
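
For completeness, this is roughly how such a custom capturer is attached to a video source and track. It is only a minimal sketch assuming a standard libwebrtc setup on Android; the PeerConnectionFactory, EglBase, the "video0" track id and the 640x480/30 format are placeholders, and OpenVidu's Android client may wrap these calls differently:

VideoTrack createCustomTrack(Context context, EglBase eglBase, PeerConnectionFactory factory,
                             CustomVideoCapturer customVideoCapturer) {
    // The helper provides the capture thread/GL context WebRTC expects, even though
    // this capturer never renders to the surface texture itself.
    SurfaceTextureHelper helper =
            SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
    VideoSource videoSource = factory.createVideoSource(customVideoCapturer.isScreencast());
    customVideoCapturer.initialize(helper, context, videoSource.getCapturerObserver());
    customVideoCapturer.startCapture(640, 480, 30); // nominal format; frames keep their own size
    return factory.createVideoTrack("video0", videoSource);
}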

But I can't get the frames out of the Wikitude camera. Is there a way to access the frames and re-send them?


1 Answer


For example, with the Wikitude Native API SDK, version 9.10.0: according to the reply from Wikitude support (https://support.wikitude.com/support/discussions/topics/5000096719?page=1), a custom plugin has to be created in order to access the camera frames: https://www.wikitude.com/external/doc/documentation/latest/androidnative/pluginsapi.html#plugins-api
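
As a rough illustration of that last step on the Java side, assuming the native Wikitude plugin forwards each camera frame over JNI as an NV21 byte array plus its dimensions (the onWikitudeCameraFrame callback and sendNv21Frame helper below are hypothetical names, not part of the Wikitude or WebRTC APIs), the frame can be pushed straight into the capturer without any Bitmap conversion:

// Hypothetical addition to CustomVideoCapturer: publishes an NV21 frame directly,
// skipping the Bitmap/ARGB round trip used by sendFrame(Bitmap).
public void sendNv21Frame(byte[] nv21, int width, int height) {
    NV21Buffer buffer = new NV21Buffer(nv21, width, height, null);
    VideoFrame videoFrame = new VideoFrame(buffer, 0, System.nanoTime());
    capturerObserver.onFrameCaptured(videoFrame);
}

// Hypothetical JNI entry point that the native Wikitude plugin would call from its
// camera-frame callback.
public void onWikitudeCameraFrame(byte[] nv21, int width, int height) {
    customVideoCapturer.sendNv21Frame(nv21, width, height);
}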

Answered 2021-11-10T08:37:37.983