当用户进行呼叫时,屏幕视频轨迹渲染器已经设置好。用户要么继续进行另一项活动,要么退出活动而没有结束视频通话会话,但是当用户返回通话活动时,我必须设置一个无法成功实现的远程媒体流。因此,我实现了不同的方法,但是当我将渲染器设置为远程流时,应用程序崩溃了。
我第一次在onAddRemoteStream()
中设置远程媒体流,效果很好。
override fun onAddRemoteStream(remoteStream: MediaStream, pcId: String, endPoint: Int) {
    // Wrap the remote GUI callbacks in a renderer, attach it to the first
    // remote video track, and remember it on the peer so it can be detached later.
    val remoteVideoRenderer = VideoRenderer(remoteRender)
    remoteStream.videoTracks[0].addRenderer(remoteVideoRenderer)
    WebrtcClient.getInstance().peerConnections[pcId]?.videoRenderer = remoteVideoRenderer
    // Re-layout both views for the connected state: remote stream plus the
    // local preview in its "connected" position.
    VideoRendererGui.update(
        remoteRender,
        REMOTE_X, REMOTE_Y,
        REMOTE_WIDTH, REMOTE_HEIGHT,
        scalingType, true
    )
    VideoRendererGui.update(
        localRender,
        LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
        LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
        scalingType, true
    )
}
初始化webrtc客户端
private fun init() {
    // Derive the capture resolution from the device screen size.
    val screenSize = Point().also { windowManager.defaultDisplay.getSize(it) }
    val peerParams = PeerConnectionParameters(
        true, false, screenSize.x, screenSize.y, 30, 1,
        VIDEO_CODEC_VP9, true, 1, AUDIO_CODEC_OPUS, true
    )
    // Hand the parameters and the current EGL context to the WebRTC client.
    WebrtcClient.getInstance().webRtcClient(this, peerParams, VideoRendererGui.getEGLContext())
}
准备webrtc客户端
// Prepares the WebRTC client: stores the listener and parameters, initializes
// the native Android globals (this MUST happen before the PeerConnectionFactory
// is constructed), and builds the session constraints for a bidirectional
// audio/video call with DTLS-SRTP key agreement.
public void webRtcClient(RtcListener listener, PeerConnectionParameters params, EGLContext mEGLcontext) {
mListener = listener;
pcParams = params;
pcConstraints = new MediaConstraints();
// NOTE(review): initializeAndroidGlobals expects an Android Context as its
// first argument; passing the listener only works if the listener is itself
// the Activity/Context — confirm at the call site.
PeerConnectionFactory.initializeAndroidGlobals(listener, true, true,
params.videoCodecHwAcceleration, mEGLcontext);
factory = new PeerConnectionFactory();
// Offer to receive both audio and video from the remote side.
pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
}
第二次,我尝试在 VideoRendererGui.setView(glview_call) { ... } 的回调中进行设置,但是应用一次又一次崩溃。
private fun initWebrtc() {
    glview_call.preserveEGLContextOnPause = true
    glview_call.keepScreenOn = true
    // Video Calling.
    //
    // BUG FIX: the lambda passed to setView runs asynchronously on the GL
    // thread once the EGL context is ready. Previously localRender/remoteRender
    // were created AFTER setView() returned, so on resume the callback could
    // run first and attach VideoRenderer instances that still wrapped the
    // previous, already-destroyed GL surface — crashing when the remote stream
    // was re-attached. Creating both renderer callbacks at the top of the
    // callback guarantees they always belong to the current surface.
    VideoRendererGui.setView(glview_call) {
        // Remote render (full-screen position).
        remoteRender = VideoRendererGui.create(
            REMOTE_X,
            REMOTE_Y, REMOTE_WIDTH,
            REMOTE_HEIGHT, scalingType, true)
        // Local render ("connecting" position).
        localRender = VideoRendererGui.create(
            LOCAL_X_CONNECTING,
            LOCAL_Y_CONNECTING,
            LOCAL_WIDTH_CONNECTING,
            LOCAL_HEIGHT_CONNECTING, scalingType, true)
        if (!WebrtcClient.getInstance().onResumingCalling) {
            // Initializing Call: first entry, create the WebRTC client.
            init()
        } else {
            // On call resuming: re-attach the local media stream to the
            // freshly created renderer.
            val localStream = WebrtcClient.getInstance().localMediaStream
            if (localStream.videoTracks.size > 0) {
                localStream.videoTracks[0].addRenderer(VideoRenderer(localRender))
                VideoRendererGui.update(localRender,
                    LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                    LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                    scalingType, true)
            }
            // Re-attach every remote peer's media stream.
            WebrtcClient.getInstance().callingInfo.users.forEach {
                // 'peer' returns each user's peer connection.
                val peer = WebrtcClient.getInstance().peerConnections[it.user?.id]
                peer?.let { p ->
                    p.remoteMediaStream?.let { mediaStream ->
                        // Checking video track linkedList length.
                        if (mediaStream.videoTracks.size > 0) {
                            // Detach the renderer that belonged to the old GL
                            // surface before attaching one for the new surface.
                            mediaStream.videoTracks.first.removeRenderer(p.videoRenderer)
                            // New renderer to show the remote media stream.
                            val renderer = VideoRenderer(remoteRender)
                            mediaStream.videoTracks[0].addRenderer(renderer)
                            p.videoRenderer = renderer
                            VideoRendererGui.update(remoteRender,
                                REMOTE_X,
                                REMOTE_Y, REMOTE_WIDTH,
                                REMOTE_HEIGHT, scalingType, true
                            )
                        }
                    }
                }
            }
        }
    }
}
如果我发表评论:
val renderer = VideoRenderer(remoteRender)
mediaStream.videoTracks[0].addRenderer(renderer)
VideoRendererGui.update(remoteRender, REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, true)
应用运行正常,出现黑屏。