
I am using interactive video broadcasting in my application, and I am attaching the class in which I do the live streaming. When I go back from the live-stream screen to the previous screen I have an audio issue: I can still hear the host's voice. Previously I called the leave-channel method and destroyed the RTC client object, but after I implemented that on returning from the streaming class, the leave-channel call closed the streaming screen for every user of the app, so I removed it from my onDestroy method. Now I disable the audio with the disable-audio method, but when I open the live-streaming class again the audio is not enabled; the enable-audio method does not work. I have also tried the mute-local-audio-stream method and the RTC handler's on-user-mute-audio callback.

I am getting this error:

"LiveStreamingActivity has leaked IntentReceiver io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver@101a7a7 that was originally registered here. Are you missing a call to unregisterReceiver()? android.app.IntentReceiverLeaked: Activity com.allin.activities.home.homeActivities.LiveStreamingActivity has leaked IntentReceiver io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver@101a7a7 that was originally registered here. Are you missing a call to unregisterReceiver()?"

The receiver is registered inside the SDK, and the exception is thrown from inside the SDK, which is a jar file I cannot edit.

Please help me resolve this issue, as I need to publish the app on the Play Store.

// First I tried this, but it automatically stops the stream on other users' devices as well.

    override fun onDestroy() {
        /* if (mRtcEngine != null) {
            leaveChannel()
            RtcEngine.destroy(mRtcEngine)
            mRtcEngine = null
        } */

        // Second, I tried disabling the audio so that the user will not hear the host's voice.
        if (mRtcEngine != null) {
            mRtcEngine!!.disableAudio()
        }
        super.onDestroy()
    }

// Then, when I come back to the live-streaming activity from the previous screen, everything is re-initialized, but the audio cannot be heard.

    override fun onResume() {
        super.onResume()
        Log.e("resume", "resume")
        if (mRtcEngine != null) {
            mRtcEngine!!.enableAudio()
            // mRtcEngine!!.resumeAudio()
        }
    }
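
For reference, here is a minimal sketch of the mute-based variant of the same idea (a sketch only, not code from my app; I am assuming the same mRtcEngine instance is reused between screens, and that muteAllRemoteAudioStreams() only changes what this device plays without affecting any other user):

    // Sketch: stop/resume the local playback of the host's audio instead of
    // toggling the whole audio module with disableAudio()/enableAudio().
    override fun onDestroy() {
        // Other users stay in the channel; only this device stops hearing the host.
        mRtcEngine?.muteAllRemoteAudioStreams(true)
        super.onDestroy()
    }

    override fun onResume() {
        super.onResume()
        // Hear the host again when this screen is shown.
        mRtcEngine?.muteAllRemoteAudioStreams(false)
    }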

The code I am using:

//agora rtc engine and handler initialization-----------------

private var mRtcEngine: RtcEngine? = null
private var mRtcEventHandler = object : IRtcEngineEventHandler() {
    @SuppressLint("LongLogTag")
    override fun onFirstRemoteVideoDecoded(uid: Int, width: Int, height: Int, elapsed: Int) {
    }

    override fun onUserOffline(uid: Int, reason: Int) {
        runOnUiThread {
            val a = reason // reason == 0 means the user went offline (quit)
            try {

                if (mUid == uid) {
                    if (surfaceView?.parent != null)
                        (surfaceView?.parent as ViewGroup).removeAllViews()

                    if (mRtcEngine != null) {
                        leaveChannel()
                        RtcEngine.destroy(mRtcEngine)
                        mRtcEngine = null
                    }

            setResult(IntentConstants.REQUEST_CODE_LIVE_STREAMING)
                    finish()
                }

            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }

    override fun onUserMuteVideo(uid: Int, muted: Boolean) {
        runOnUiThread {
            // onRemoteUserVideoMuted(uid, muted);
            Log.e("video","muted")
        }
    }

    override fun onAudioQuality(uid: Int, quality: Int, delay: Short, lost: Short) {
        super.onAudioQuality(uid, quality, delay, lost)

        Log.e("", "")
    }


    override fun onUserJoined(uid: Int, elapsed: Int) {
        //  super.onUserJoined(uid, elapsed)
        mUid = uid
        runOnUiThread {
            try {
                setupRemoteVideo(mUid!!)
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
        Log.e("differnt_uid----", mUid.toString())
    }


}

private fun initAgoraEngineAndJoinChannel() {
    if (mRtcEngine == null) {
        initializeAgoraEngine()
        setupVideoProfile()
    }
}

//initializing rtc engine class
@Throws(Exception::class)
private fun initializeAgoraEngine() {
    try {
        var s = RtcEngine.getSdkVersion()
        mRtcEngine = RtcEngine.create(baseContext, AgoraConstants.APPLICATION_ID, mRtcEventHandler)
    } catch (e: Exception) {
        // Log.e(LOG_TAG, Log.getStackTraceString(e));

        throw RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e))
    }

}

@Throws(Exception::class)
private fun setupVideoProfile() {


    //mRtcEngine?.muteAllRemoteAudioStreams(true)
    // mLogger.log("channelName  account = " + channelName + ",uid = " + 0);
    mRtcEngine?.enableVideo()
    //mRtcEngine.clearVideoCompositingLayout();
    mRtcEngine?.enableLocalVideo(false)
    mRtcEngine?.setEnableSpeakerphone(false)
    mRtcEngine?.muteLocalAudioStream(true)
    joinChannel()

   mRtcEngine?.setVideoProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING, true)
    mRtcEngine?.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING)
    mRtcEngine?.setClientRole(Constants.CLIENT_ROLE_AUDIENCE,"")
    val speaker = mRtcEngine?.isSpeakerphoneEnabled
    val camerafocus = mRtcEngine?.isCameraAutoFocusFaceModeSupported

    Log.e("", "")
}

@Throws(Exception::class)
private fun setupRemoteVideo(uid: Int) {
    val container = findViewById<FrameLayout>(R.id.fl_video_container)

    if (container.childCount >= 1) {
        return
    }

    surfaceView = RtcEngine.CreateRendererView(baseContext)
    container.addView(surfaceView)
    mRtcEngine?.setupRemoteVideo(VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid))
    mRtcEngine?.setRemoteVideoStreamType(uid, 1)
    mRtcEngine?.setCameraAutoFocusFaceModeEnabled(false)
    mRtcEngine?.muteRemoteAudioStream(uid, false)
    mRtcEngine?.adjustPlaybackSignalVolume(0)
    // mRtcEngine.setVideoProfile(Constants.VIDEO_PROFILE_180P, false); // Earlier than 2.3.0
    surfaceView?.tag = uid // for mark purpose

    val audioManager: AudioManager =
        this@LiveStreamingActivity.getSystemService(Context.AUDIO_SERVICE) as AudioManager
    //audioManager.mode = AudioManager.MODE_IN_CALL
    val isConnected: Boolean = audioManager.isWiredHeadsetOn
    if (isConnected) {
        /* audioManager.isSpeakerphoneOn = false
         audioManager.isWiredHeadsetOn = true*/

        mRtcEngine?.setEnableSpeakerphone(false)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(false)
        mRtcEngine?.setSpeakerphoneVolume(0)
        mRtcEngine?.enableInEarMonitoring(true)
        // Raises the in-ear monitoring and playback volume (100 = original volume).
        mRtcEngine?.setInEarMonitoringVolume(200)
        mRtcEngine?.adjustPlaybackSignalVolume(200)
    } else {
        /* audioManager.isSpeakerphoneOn = true
         audioManager.isWiredHeadsetOn = false*/

        mRtcEngine?.setEnableSpeakerphone(true)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(true)
        mRtcEngine?.setSpeakerphoneVolume(50)
        mRtcEngine?.adjustPlaybackSignalVolume(50)
        mRtcEngine?.enableInEarMonitoring(false)
        // Silences in-ear monitoring (it is disabled above anyway).
        mRtcEngine?.setInEarMonitoringVolume(0)


    }


    Log.e("", "")

}

@Throws(Exception::class)
private fun joinChannel() {
    mRtcEngine?.joinChannel(
        null,
        AgoraConstants.CHANNEL_NAME,
        "Extra Optional Data",
        0
    ) // if you do not specify the uid, we will generate the uid for you
}
@Throws(Exception::class)
private fun leaveChannel() {
    mRtcEngine!!.leaveChannel()
}

1 Answer


I think, first of all, you want to put setupRemoteVideo in the onFirstRemoteVideoDecoded callback instead of the onUserJoined callback. Also, in the onDestroy callback you should call RtcEngine.destroy() instead of RtcEngine.destroy(mRtcEngine).
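
Roughly, those two changes would look like this against the code you posted (just a sketch; I am keeping your mUid, setupRemoteVideo() and mRtcEngine as they are):

    // 1. Attach the remote view once the first remote video frame is decoded,
    //    instead of in onUserJoined.
    override fun onFirstRemoteVideoDecoded(uid: Int, width: Int, height: Int, elapsed: Int) {
        mUid = uid
        runOnUiThread {
            try {
                setupRemoteVideo(uid)
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }

    // 2. In onDestroy, use the static, no-argument RtcEngine.destroy()
    //    rather than RtcEngine.destroy(mRtcEngine).
    override fun onDestroy() {
        if (mRtcEngine != null) {
            leaveChannel()
            RtcEngine.destroy()
            mRtcEngine = null
        }
        super.onDestroy()
    }

Destroying the engine before the activity finishes should also clear the IntentReceiverLeaked message, since the HeadsetBroadcastReceiver is registered and unregistered inside the engine itself.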

answered 2020-03-12T19:00:25.437