enableAudio method issue in the Android Agora RTC SDK

Asked: 2019-06-27 05:43:42

Tags: android, broadcasting, agora.io

I am using interactive video broadcasting in my app, and I am attaching the class in which I use live streaming. I get an audio issue when I go back from the live streaming screen to the previous screen: I can still hear the host's audio. Previously I called the leaveChannel method and destroyed the RtcEngine object, but after implementing this, going back from the streaming class closed the stream for every user of the app because of the leaveChannel call, so I removed it from the onDestroy method. Now I am calling disableAudio, which does disable the audio, but when I open the live streaming class again, enableAudio does not turn the audio back on; the enableAudio method is not working. I have also tried the muteLocalAudioStream method and the onUserMuteAudio handler of the RTC event handler.

I am getting this error:

"LiveStreamingActivity has leaked IntentReceiver io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver@101a7a7 that was originally registered here. Are you missing a call to unregisterReceiver()? android.app.IntentReceiverLeaked: Activity com.allin.activities.home.homeActivities.LiveStreamingActivity has leaked IntentReceiver io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver@101a7a7 that was originally registered here. Are you missing a call to unregisterReceiver()?"

The receiver is registered inside the SDK and the exception is thrown from inside the SDK, which is a jar file that I cannot edit.
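One thing I am not sure about: I create the engine with the activity's baseContext, so the SDK's headset receiver may end up tied to this activity. Below is a minimal sketch of creating the engine with the application context instead; this is only an assumption on my side and I have not verified that it removes the warning.

// Hypothetical variant, not verified: pass the application context so that any
// receiver the SDK registers internally is not bound to this activity's lifecycle.
private fun initializeAgoraEngine() {
    try {
        mRtcEngine = RtcEngine.create(applicationContext, AgoraConstants.APPLICATION_ID, mRtcEventHandler)
    } catch (e: Exception) {
        throw RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e))
    }
}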

Please help me resolve this issue, as I have to keep the app running on the Play Store.

// First I tried this, but it automatically stops streaming on other devices.

override fun onDestroy() {
    /* if (mRtcEngine != null) {
        leaveChannel()
        RtcEngine.destroy(mRtcEngine)
        mRtcEngine = null
    } */

    // Second, I tried disabling the audio so that the user will not hear the host's voice.
    if (mRtcEngine != null) {
        mRtcEngine!!.disableAudio()
    }
    super.onDestroy()
}

// Then, when I come back to the live streaming activity from the previous screen, everything is re-initialized but no audio can be heard.

override fun onResume() {
    super.onResume()
    Log.e("resume", "resume")
    if (mRtcEngine != null) {
        mRtcEngine!!.enableAudio()
        // mRtcEngine!!.resumeAudio()
    }
}
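As an alternative to disableAudio()/enableAudio(), I am also considering just muting all remote audio while this screen is in the background. A minimal sketch of that approach is below; I have not confirmed that it reliably restores the audio in my case.

// Sketch: mute remote playback when leaving the screen and unmute it when coming back,
// without touching the engine itself, so other users in the channel are not affected.
override fun onPause() {
    super.onPause()
    mRtcEngine?.muteAllRemoteAudioStreams(true)   // stop hearing the host locally
}

override fun onResume() {
    super.onResume()
    mRtcEngine?.muteAllRemoteAudioStreams(false)  // hear the host again
}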

The code I am using:

//agora rtc engine and handler initialization-----------------

private var mRtcEngine: RtcEngine? = null
private var mRtcEventHandler = object : IRtcEngineEventHandler() {
    @SuppressLint("LongLogTag")
    override fun onFirstRemoteVideoDecoded(uid: Int, width: Int, height: Int, elapsed: Int) {
    }

    override fun onUserOffline(uid: Int, reason: Int) {
        runOnUiThread {
            val a = reason // reason == 0 (USER_OFFLINE_QUIT) means the user left the channel
            try {

                if (mUid == uid) {
                    if (surfaceView?.parent != null)
                        (surfaceView?.parent as ViewGroup).removeAllViews()

                    if (mRtcEngine != null) {
                        leaveChannel()
                        RtcEngine.destroy(mRtcEngine)
                        mRtcEngine = null
                    }

            setResult(IntentConstants.REQUEST_CODE_LIVE_STREAMING)
                    finish()
                }

            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }

    override fun onUserMuteVideo(uid: Int, muted: Boolean) {
        runOnUiThread {
            // onRemoteUserVideoMuted(uid, muted);
            Log.e("video","muted")
        }
    }

    override fun onAudioQuality(uid: Int, quality: Int, delay: Short, lost: Short) {
        super.onAudioQuality(uid, quality, delay, lost)

        Log.e("", "")
    }


    override fun onUserJoined(uid: Int, elapsed: Int) {
        //  super.onUserJoined(uid, elapsed)
        mUid = uid
        runOnUiThread {
            try {
                setupRemoteVideo(mUid!!)
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
        Log.e("differnt_uid----", mUid.toString())
    }


}

    private fun initAgoraEngineAndJoinChannel() {
    if(mRtcEngine==null)
    {
        initializeAgoraEngine()
        setupVideoProfile()
    }


}

//initializing rtc engine class
@Throws(Exception::class)
private fun initializeAgoraEngine() {
    try {
        var s = RtcEngine.getSdkVersion()
        mRtcEngine = RtcEngine.create(baseContext, AgoraConstants.APPLICATION_ID, mRtcEventHandler)
    } catch (e: Exception) {
        // Log.e(LOG_TAG, Log.getStackTraceString(e));

        throw RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e))
    }

}

@Throws(Exception::class)
private fun setupVideoProfile() {


    //mRtcEngine?.muteAllRemoteAudioStreams(true)
    // mLogger.log("channelName  account = " + channelName + ",uid = " + 0);
    mRtcEngine?.enableVideo()
    //mRtcEngine.clearVideoCompositingLayout();
    mRtcEngine?.enableLocalVideo(false)
    mRtcEngine?.setEnableSpeakerphone(false)
    mRtcEngine?.muteLocalAudioStream(true)
    joinChannel()

   mRtcEngine?.setVideoProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING, true)
    mRtcEngine?.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING)
    mRtcEngine?.setClientRole(Constants.CLIENT_ROLE_AUDIENCE,"")
    val speaker = mRtcEngine?.isSpeakerphoneEnabled
    val camerafocus = mRtcEngine?.isCameraAutoFocusFaceModeSupported

    Log.e("", "")
}

@Throws(Exception::class)
private fun setupRemoteVideo(uid: Int) {
    val container = findViewById<FrameLayout>(R.id.fl_video_container)

    if (container.childCount >= 1) {
        return
    }

    surfaceView = RtcEngine.CreateRendererView(baseContext)
    container.addView(surfaceView)
    mRtcEngine?.setupRemoteVideo(VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid))
    mRtcEngine?.setRemoteVideoStreamType(uid, 1)
    mRtcEngine?.setCameraAutoFocusFaceModeEnabled(false)
    mRtcEngine?.muteRemoteAudioStream(uid, false)
    mRtcEngine?.adjustPlaybackSignalVolume(0)
    // mRtcEngine.setVideoProfile(Constants.VIDEO_PROFILE_180P, false); // Earlier than 2.3.0
    surfaceView?.tag = uid // for mark purpose

    val audioManager: AudioManager =
        this@LiveStreamingActivity.getSystemService(Context.AUDIO_SERVICE) as AudioManager
    //audioManager.mode = AudioManager.MODE_IN_CALL
    val isConnected: Boolean = audioManager.isWiredHeadsetOn
    if (isConnected) {
        /* audioManager.isSpeakerphoneOn = false
         audioManager.isWiredHeadsetOn = true*/

        mRtcEngine?.setEnableSpeakerphone(false)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(false)
        mRtcEngine?.setSpeakerphoneVolume(0)
        mRtcEngine?.enableInEarMonitoring(true)
        // Sets the in-ear monitoring volume to 200% of the original volume.
        mRtcEngine?.setInEarMonitoringVolume(200)
        mRtcEngine?.adjustPlaybackSignalVolume(200)
    } else {
        /* audioManager.isSpeakerphoneOn = true
         audioManager.isWiredHeadsetOn = false*/

        mRtcEngine?.setEnableSpeakerphone(true)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(true)
        mRtcEngine?.setSpeakerphoneVolume(50)
        mRtcEngine?.adjustPlaybackSignalVolume(50)
        mRtcEngine?.enableInEarMonitoring(false)
        // Mutes the in-ear monitoring volume.
        mRtcEngine?.setInEarMonitoringVolume(0)


    }


    Log.e("", "")

}

@Throws(Exception::class)
private fun joinChannel() {
    mRtcEngine?.joinChannel(
        null,
        AgoraConstants.CHANNEL_NAME,
        "Extra Optional Data",
        0
    ) // if you do not specify the uid, we will generate the uid for you
}
@Throws(Exception::class)
private fun leaveChannel() {
    mRtcEngine!!.leaveChannel()
}

1 Answer:

Answer 0 (score: 1):

I think first you want to put setupRemoteVideo in the onFirstRemoteVideoDecoded callback rather than in the onUserJoined callback. Also, in the onDestroy callback you should call RtcEngine.destroy() instead of RtcEngine.destroy(mRtcEngine).
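A minimal sketch of what I mean, reusing the fields from your own activity; only setupRemoteVideo has been moved and the destroy call changed, everything else stays as you posted it.

// Attach the remote view once the first remote video frame has been decoded.
override fun onFirstRemoteVideoDecoded(uid: Int, width: Int, height: Int, elapsed: Int) {
    runOnUiThread {
        try {
            setupRemoteVideo(uid)
        } catch (e: Exception) {
            e.printStackTrace()
        }
    }
}

override fun onDestroy() {
    if (mRtcEngine != null) {
        leaveChannel()
        RtcEngine.destroy()   // static call, takes no argument
        mRtcEngine = null
    }
    super.onDestroy()
}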