
I am trying to build a video chat implementation on Android with the WebRTC SDK, using Firebase Firestore as the signaling mechanism. I followed a few tutorials and successfully created a peer-to-peer connection.

Initially I only sent a video stream from one peer to the other, and it worked. Then, when I tried to add an audio track to the stream, the initiating peer could no longer create an offer (which I discovered through debugging). As a result, the WebRTC connection could no longer be established. I am confused about how this can happen.

Later I also found that if I add only one track, either the audio track or the video track, my app works fine, but if I try to add both the audio and the video track, the initiating peer can no longer create an offer.

Here is the complete code of my CallActivity.java, which contains the whole WebRTC implementation. I have removed a few statements from it that I believe are not relevant.

public class CallActivity extends AppCompatActivity {
    private String userUid, friendUid;
    private FirebaseFirestore db = FirebaseFirestore.getInstance();
    private boolean isInitiator = false;

    //request codes
    private int CAMERA_PERMISSION_CODE = 0;

    //views
    SurfaceViewRenderer localVideoView, friendVideoView;

    //webrtc
    private EglBase rootEglBase;
    private PeerConnectionFactory factory;
    private PeerConnection peerConnection;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_call);

        userUid = getIntent().getStringExtra("userUid");
        friendUid = getIntent().getStringExtra("friendUid");
        isInitiator = getIntent().getBooleanExtra("initiator", false);

        checkPermissions();
        initialize();
        setupFireStoreListeners();
    }

    @Override
    public void onBackPressed() {
        hangup();
    }

    private void initialize()
    {
        //initialize views
        rootEglBase = EglBase.create();

        localVideoView = findViewById(R.id.localVideo);
        localVideoView.init(rootEglBase.getEglBaseContext(), null);
        localVideoView.setEnableHardwareScaler(true);
        localVideoView.setMirror(true);

        friendVideoView = findViewById(R.id.friendVideo);
        friendVideoView.init(rootEglBase.getEglBaseContext(), null);
        friendVideoView.setEnableHardwareScaler(true);
        friendVideoView.setMirror(true);

        //initialize peer connection factory
        PeerConnectionFactory.InitializationOptions initializationOptions = PeerConnectionFactory.InitializationOptions.builder(this)
                .setEnableInternalTracer(true)
                .setFieldTrials("WebRTC-H264HighProfile/Enabled/")
                .createInitializationOptions();
        PeerConnectionFactory.initialize(initializationOptions);

        PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
        options.disableEncryption = true;
        options.disableNetworkMonitor = true;

        factory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setVideoDecoderFactory(new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext()))
                .setVideoEncoderFactory(new DefaultVideoEncoderFactory(rootEglBase.getEglBaseContext(), true, true))
                .createPeerConnectionFactory();

        //create video track form camera and show it
        VideoCapturer videoCapturer = createVideoCapturer();
        if(videoCapturer == null){
            finish();
            return;
        }
        VideoSource videoSource = factory.createVideoSource(false);
        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(Thread.currentThread().getName(), rootEglBase.getEglBaseContext());
        videoCapturer.initialize(surfaceTextureHelper, localVideoView.getContext(), videoSource.getCapturerObserver());
        videoCapturer.startCapture(1240, 720, 30);
        VideoTrack localVideoTrack = factory.createVideoTrack("local", videoSource);
        localVideoTrack.addSink(localVideoView);

        //set ice candidates to null
        db.document("users/" + userUid).update("ice", null);
        db.document("users/" + friendUid).update("ice", null);

        //create peer connection
        ArrayList<PeerConnection.IceServer> iceServers = new ArrayList<>();
        iceServers.add(PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
        PeerConnection.Observer pcObserver = new SimplePeerConnectionObserver() {
            @Override
            public void onIceCandidate(IceCandidate iceCandidate) {
                Log.d("WEBRTCD", "Ice");

                db.runTransaction(new Transaction.Function<Void>() {
                    @Nullable
                    @Override
                    public Void apply(@NonNull Transaction transaction) throws FirebaseFirestoreException {
                        List<Map> iceList = (List<Map>) transaction.get(db.document("users/" + friendUid)).get("ice");
                        if(iceList == null) iceList = new ArrayList<>();

                        Map<String, Object> ice = new HashMap<>();
                        ice.put("label", iceCandidate.sdpMLineIndex);
                        ice.put("id", iceCandidate.sdpMid);
                        ice.put("sdp", iceCandidate.sdp);

                        iceList.add(0, ice);
                        transaction.update(db.document("users/" + friendUid), "ice", iceList);

                        return null;
                    }
                });
            }

            @Override
            public void onAddStream(MediaStream mediaStream) {
                VideoTrack remoteVideoTrack = mediaStream.videoTracks.get(0);
                if(mediaStream.audioTracks.size() > 0) {
                    AudioTrack remoteAudioTrack = mediaStream.audioTracks.get(0);
                    remoteAudioTrack.setEnabled(true);
                }
                remoteVideoTrack.setEnabled(true);
                remoteVideoTrack.addSink(friendVideoView);
            }
        };
        peerConnection = factory.createPeerConnection(iceServers, pcObserver);

        //create audio track
        MediaConstraints audioConstraints = new MediaConstraints();
        AudioSource audioSource = factory.createAudioSource(audioConstraints);
        AudioTrack localAudioTrack = factory.createAudioTrack("local", audioSource);
        peerConnection.setAudioRecording(true);
        peerConnection.setAudioPlayout(true);

        //add stream to peer connection
        MediaStream mediaStream = factory.createLocalMediaStream("local");
        //mediaStream.addTrack(localAudioTrack);
        mediaStream.addTrack(localVideoTrack);
        peerConnection.addStream(mediaStream);

        if(isInitiator) doCall();
        else doAnswer();
    }

    private void doCall()
    {
        db.document("users/" + friendUid).update("call", userUid);

        MediaConstraints mediaConstraints = new MediaConstraints();
        mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
        mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
        peerConnection.createOffer(new SimpleSdpObserver() {
            @Override
            public void onCreateSuccess(SessionDescription sessionDescription) {
                peerConnection.setLocalDescription(new SimpleSdpObserver(), sessionDescription);
                Map<String, String> sdp = new HashMap<>();
                sdp.put("type", "offer");
                sdp.put("desc", sessionDescription.description);
                db.document("users/" + friendUid).update("sdp", sdp);
            }
        }, mediaConstraints);
    }

    private void doAnswer()
    {
        db.document("users/" + friendUid).update("call", userUid);

        db.document("users/" + userUid).get().addOnCompleteListener(new OnCompleteListener<DocumentSnapshot>() {
            @Override
            public void onComplete(@NonNull Task<DocumentSnapshot> task) {
                if(task.isSuccessful()  &&  task.getResult() != null)
                {
                    Map sdpData = (Map) task.getResult().get("sdp");
                    MediaConstraints mediaConstraints = new MediaConstraints();
                    mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
                    mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
                    peerConnection.setRemoteDescription(new SimpleSdpObserver(), new SessionDescription(SessionDescription.Type.OFFER, (String) sdpData.get("desc")));
                    peerConnection.createAnswer(new SimpleSdpObserver(){
                        @Override
                        public void onCreateSuccess(SessionDescription sessionDescription) {
                            peerConnection.setLocalDescription(new SimpleSdpObserver(), sessionDescription);
                            Map<String, String> sdp = new HashMap<>();
                            sdp.put("type", "answer");
                            sdp.put("desc", sessionDescription.description);
                            db.document("users/" + friendUid).update("sdp", sdp);
                        }
                    }, mediaConstraints);
                }
            }
        });
    }

    private void hangup()
    {
        db.document("users/" + friendUid).update("call", "hangup", "ice", null, "sdp", null);
        db.document("users/" + userUid).update("call", "hangup", "ice", null, "sdp", null);
    }

    private void setupFireStoreListeners()
    {
        //listen for ice candidates
        db.document("users/" + userUid).addSnapshotListener(this, new EventListener<DocumentSnapshot>() {
            @Override
            public void onEvent(@Nullable DocumentSnapshot value, @Nullable FirebaseFirestoreException error) {
                if(value != null  &&  value.get("ice") != null)
                {
                    List<Map> iceList = (List<Map>) value.get("ice");
                    if(iceList == null) iceList = new ArrayList<>();
                    for(Map iceCandidate : iceList) {
                        Log.d("WEBRTCD", "Ice added");
                        peerConnection.addIceCandidate(new IceCandidate((String) iceCandidate.get("id"), Integer.parseInt(iceCandidate.get("label") + ""), (String) iceCandidate.get("sdp")));
                    }
                    //db.document("users/" + userUid).update("ice", null);
                }
            }
        });

        //listen for hangup
        db.document("users/" + userUid).addSnapshotListener(this, new EventListener<DocumentSnapshot>() {
            @Override
            public void onEvent(@Nullable DocumentSnapshot value, @Nullable FirebaseFirestoreException error) {
                if(value != null  &&  value.get("call") != null  &&  value.get("call").equals("hangup"))
                {
                    db.document("users/" + userUid).update("call", null);
                    endCall();
                }
            }
        });

        //listen for answer if initiator
        if(!isInitiator) return;
        db.document("users/" + userUid).addSnapshotListener(this, new EventListener<DocumentSnapshot>() {
            @Override
            public void onEvent(@Nullable DocumentSnapshot value, @Nullable FirebaseFirestoreException error) {
                if(value != null  &&  value.get("sdp") != null) {
                    peerConnection.setRemoteDescription(new SimpleSdpObserver(), new SessionDescription(SessionDescription.Type.ANSWER, (String) ((Map) value.get("sdp")).get("desc")));
                    db.document("users/" + userUid).update("sdp", null);
                }
            }
        });
    }

    private void endCall(){
        peerConnection.close();
        super.onBackPressed();
    }

    private VideoCapturer createVideoCapturer() {
        VideoCapturer videoCapturer;

        CameraEnumerator enumerator;
        if(Camera2Enumerator.isSupported(this))
            enumerator = new Camera2Enumerator(this);
        else
            enumerator = new Camera1Enumerator(true);

        for (String device : enumerator.getDeviceNames()) {
            if(enumerator.isFrontFacing(device)) {
                videoCapturer = enumerator.createCapturer(device, null);
                if(videoCapturer != null)
                    return videoCapturer;
            }
        }

        for (String device : enumerator.getDeviceNames()) {
            if(!enumerator.isFrontFacing(device)) {
                videoCapturer = enumerator.createCapturer(device, null);
                if(videoCapturer != null)
                    return videoCapturer;
            }
        }
        return null;
    }

}

Please note this line

//mediaStream.addTrack(localAudioTrack);

near the end of the initialize() function. With this line commented out everything works, but if I uncomment it, WebRTC cannot establish a connection because the offer is never created successfully, and I don't know how to fix that. I tried googling and only found unrelated questions about the WebRTC browser API. I don't understand how this single line can stop the entire WebRTC connection from working. (I know WebRTC fails to create the offer because the onCreateSuccess() callback passed to peerConnection.createOffer() is never invoked when this line is uncommented.) Since everything works with this line commented out, I don't believe my Firestore signaling code is the problem.
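For anyone debugging the same symptom: a minimal sketch of how the failure reason could be surfaced, assuming SimpleSdpObserver is a no-op implementation of WebRTC's SdpObserver, is to also override onCreateFailure() on the observer passed to createOffer():

```java
peerConnection.createOffer(new SimpleSdpObserver() {
    @Override
    public void onCreateSuccess(SessionDescription sessionDescription) {
        peerConnection.setLocalDescription(new SimpleSdpObserver(), sessionDescription);
        // ... publish the offer to Firestore as before
    }

    @Override
    public void onCreateFailure(String error) {
        // Logs why createOffer failed instead of failing silently
        Log.e("WEBRTCD", "createOffer failed: " + error);
    }
}, mediaConstraints);
```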

Here is the build.gradle file:

apply plugin: 'com.android.application'
apply plugin: 'com.google.gms.google-services'

android {
    compileSdkVersion 29
    buildToolsVersion "29.0.3"

    defaultConfig {
        applicationId "com.example.myapplication"
        minSdkVersion 19
        targetSdkVersion 29
        versionCode 1
        versionName "1.0"
        multiDexEnabled true
        vectorDrawables.useSupportLibrary true
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])

    implementation 'androidx.appcompat:appcompat:1.2.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    implementation 'com.google.firebase:firebase-firestore:21.5.0'
    implementation 'com.android.support:multidex:1.0.3'
    implementation 'com.google.firebase:firebase-auth:19.3.2'
    implementation 'com.google.firebase:firebase-storage:19.1.1'
    implementation 'androidx.navigation:navigation-fragment:2.3.0'
    implementation 'androidx.navigation:navigation-ui:2.3.0'
    implementation 'androidx.legacy:legacy-support-v4:1.0.0'
    implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0'
    implementation 'com.google.firebase:firebase-messaging:20.2.4'
    testImplementation 'junit:junit:4.13'
    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
    implementation 'com.google.android.material:material:1.2.0'
    implementation 'androidx.documentfile:documentfile:1.0.1'
    implementation "com.mikepenz:materialdrawer:6.1.2"
    implementation 'org.webrtc:google-webrtc:1.0.30039'
}

I have followed several online tutorials about WebRTC on Android, and all of them only stream video (which works in my app). Since there is no official WebRTC documentation for this, I don't know how to solve the problem. Please help!


1 Answer


OK, so I figured it out: the audio and video tracks must have unique IDs. In my case both the audio track and the video track had the ID "local", which confused WebRTC; they have to have different IDs. In addition, the two clients in a call should use different stream IDs.

In my case I changed the audio and video track IDs to _audio and _video respectively, and it works.
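For reference, a minimal sketch of what the track/stream creation in the question's initialize() could look like with unique IDs. It reuses the variables from the question (factory, videoSource, localVideoView, peerConnection), and prefixing the IDs with userUid is my own assumption; any strings that differ between the two tracks (and between the two clients' streams) should do:

```java
// Give the audio and video tracks distinct IDs instead of both being "local",
// and make the local stream ID unique per client (userUid prefix is an assumption).
VideoTrack localVideoTrack = factory.createVideoTrack(userUid + "_video", videoSource);
localVideoTrack.addSink(localVideoView);

AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
AudioTrack localAudioTrack = factory.createAudioTrack(userUid + "_audio", audioSource);

MediaStream mediaStream = factory.createLocalMediaStream(userUid + "_stream");
mediaStream.addTrack(localAudioTrack);
mediaStream.addTrack(localVideoTrack);
peerConnection.addStream(mediaStream);
```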

Answered on 2020-08-25T04:10:57.470