当用户进入通话界面时,视频轨道的渲染器已经设置好。用户可能在不结束视频通话会话的情况下跳转到另一个 Activity 再返回;当用户返回通话 Activity 时,我需要重新设置远程媒体流,但一直没有成功。我尝试了几种不同的方法,可是每当我把渲染器设置到远程流上时,应用程序就会崩溃。
第一次设置远程媒体流是在 onAddRemoteStream() 回调中完成的,运行良好。
/**
 * Called when a remote [MediaStream] is added for the peer connection [pcId].
 * Binds a renderer to the stream's first video track, stores it on the peer
 * connection for later detachment, and lays out both remote and local views.
 */
override fun onAddRemoteStream(remoteStream: MediaStream, pcId: String, endPoint: Int) {
    // Wrap the shared remote render callbacks in a renderer and attach it
    // to the incoming stream's first video track.
    val remoteVideoRenderer = VideoRenderer(remoteRender)
    remoteStream.videoTracks[0].addRenderer(remoteVideoRenderer)

    // Keep a reference on the peer connection so the renderer can be
    // removed again later (e.g. when the activity is re-entered).
    WebrtcClient.getInstance().peerConnections[pcId]?.videoRenderer = remoteVideoRenderer

    // Remote video takes the main area; local video moves to its
    // "connected" (picture-in-picture) position.
    VideoRendererGui.update(
        remoteRender,
        REMOTE_X, REMOTE_Y,
        REMOTE_WIDTH, REMOTE_HEIGHT,
        scalingType, true
    )
    VideoRendererGui.update(
        localRender,
        LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
        LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
        scalingType, true
    )
}
初始化 WebRTC 客户端:
/**
 * Builds [PeerConnectionParameters] from the current display size and hands
 * them to the singleton WebRTC client together with the EGL context owned
 * by [VideoRendererGui].
 */
private fun init() {
    // Query the physical display size; it drives the capture resolution.
    val screenSize = Point().also { windowManager.defaultDisplay.getSize(it) }
    val peerParams = PeerConnectionParameters(
        true, false, screenSize.x, screenSize.y, 30, 1,
        VIDEO_CODEC_VP9, true, 1, AUDIO_CODEC_OPUS, true,
    )
    WebrtcClient.getInstance().webRtcClient(this, peerParams, VideoRendererGui.getEGLContext())
}
准备 WebRTC 客户端:
/**
 * Prepares the WebRTC client: stores the listener and call parameters,
 * initializes the Android peer-connection globals, creates the
 * {@link PeerConnectionFactory}, and builds the default media constraints
 * used for every offer/answer exchange.
 *
 * @param listener    receiver of RTC events
 * @param params      negotiated call parameters (codec, resolution, fps, ...)
 * @param mEGLcontext shared EGL context used for hardware video decoding
 */
public void webRtcClient(RtcListener listener, PeerConnectionParameters params, EGLContext mEGLcontext) {
    this.mListener = listener;
    this.pcParams = params;

    // The Android globals must be initialized before the factory is built.
    PeerConnectionFactory.initializeAndroidGlobals(listener, true, true,
            params.videoCodecHwAcceleration, mEGLcontext);
    factory = new PeerConnectionFactory();

    // Receive both audio and video; key agreement via DTLS-SRTP.
    pcConstraints = new MediaConstraints();
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
}
第二次(恢复通话时)我尝试在 VideoRendererGui.setView(glview_call) { ... } 的回调里重新设置,但是应用程序一次又一次地崩溃。
/**
 * Sets up the shared GLSurfaceView and both VideoRendererGui renderers.
 * On a fresh call it initializes the WebRTC client; when resuming an
 * ongoing call it re-attaches the local and remote media streams to
 * renderers on the (re)created GL surface.
 *
 * NOTE(review): `remoteRender` / `localRender` are created at the BOTTOM of
 * this function, after setView registers the GL-context-ready callback. If
 * that callback can run before the create(...) calls finish, the resume
 * branch uses renderer callbacks from the previous surface — a plausible
 * source of the reported crash. TODO confirm VideoRendererGui's ordering
 * guarantee between setView's runnable and subsequent create() calls.
 */
private fun initWebrtc() {
// Keep the EGL context alive across pause/resume, and keep the screen on
// for the duration of the call.
glview_call.preserveEGLContextOnPause = true
glview_call.keepScreenOn = true
// Video Calling: the lambda runs once the GL context is ready.
VideoRendererGui.setView(glview_call) {
if (!WebrtcClient.getInstance().onResumingCalling) {
// First entry: initialize the WebRTC client (see init()).
init()
} else {
// Resuming an existing call after returning to this activity.
// Re-attach the local media stream to a fresh renderer.
val localStream = WebrtcClient.getInstance().localMediaStream
if (localStream.videoTracks.size > 0) {
localStream.videoTracks[0].addRenderer(VideoRenderer(localRender))
VideoRendererGui.update(localRender,
LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
scalingType, true)
}
// Re-attach the remote stream of every peer in the call.
WebrtcClient.getInstance().callingInfo.users.forEach {
val peer = WebrtcClient.getInstance().peerConnections[it.user?.id]
// 'peer' is the stored peer connection for this user (null if absent).
peer?.let { p ->
p.remoteMediaStream?.let { mediaStream ->
// Only proceed if the stream actually carries a video track.
if (mediaStream.videoTracks.size > 0) {
// Detach the renderer attached before the activity was left.
// NOTE(review): p.videoRenderer was created against the OLD GL
// surface; removing/replacing a renderer whose native resources
// were released with that surface is a likely crash point —
// TODO confirm by disposing/clearing p.videoRenderer when the
// surface is destroyed instead of here.
mediaStream.videoTracks.first.removeRenderer(p.videoRenderer)
// New renderer to show the remote media stream on the new surface.
val renderer = VideoRenderer(remoteRender)
mediaStream.videoTracks[0].addRenderer(renderer)
p.videoRenderer = renderer
VideoRendererGui.update(remoteRender,
REMOTE_X,
REMOTE_Y, REMOTE_WIDTH,
REMOTE_HEIGHT, scalingType, true
)
}
}
}
}
}
}
// Renderer callbacks: created after setView returns, but used inside the
// callback above — see the ordering NOTE in the function header.
remoteRender = VideoRendererGui.create(
REMOTE_X,
REMOTE_Y, REMOTE_WIDTH,
REMOTE_HEIGHT, scalingType, true)
localRender = VideoRendererGui.create(
LOCAL_X_CONNECTING,
LOCAL_Y_CONNECTING,
LOCAL_WIDTH_CONNECTING,
LOCAL_HEIGHT_CONNECTING, scalingType, true)
}
如果我注释掉以下几行:
val renderer = VideoRenderer(remoteRender)
mediaStream.videoTracks[0].addRenderer(renderer)
VideoRendererGui.update(remoteRender, REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, true )
应用程序就不再崩溃,但远程画面显示为黑屏。