我在Cocoapods中使用GoogleWebRTC版本1.1.22075创建了我的iOS WebRTC应用程序。另一个客户端是一个Web应用程序。我们已经建立了两个WebRTC客户端之间的连接。音频很好。 WebRTC客户端都可以接收和发送音频,但是当涉及到视频时,iOS本地视频轨道根本没有发送字节。在iOS应用程序的RTCLegacyStatsReport中,用于视频的bytesSent为0。
以下是视频统计信息:
reportId: ssrc_2385920718_send
type: ssrc
googAdaptationChanges: 0
ssrc: 2385920718
googFrameWidthSent: 0
googBandwidthLimitedResolution: false
bytesSent: 0
googCodecName: VP8
codecImplementationName: unknown
googFirsReceived: 0
packetsLost: 0
googHasEnteredLowResolution: false
googCpuLimitedResolution: false
googTrackId: ios_local_video_stream
framesEncoded: 0
googPlisReceived: 0
mediaType: video
packetsSent: 0
googFrameRateSent: 0
googContentType: realtime
googFrameHeightSent: 0
googNacksReceived: 0
googFrameRateInput: 0
transportId: Channel-audio-1
googAvgEncodeMs: 0
googRtt: 0
googEncodeUsagePercent: 0
以下是我如何获取捕获设备,创建本地视频轨道以及渲染视频以在本地查看的代码。
func onLocalStreamReadyForRender() {
print("onLocalStreamReadyForRender")
let frame = localVideoView!.frame
let rtcVideoView = RTCCameraPreviewView.init(frame: CGRect.init())
rtcVideoView.frame = frame
rtcVideoView.frame.origin.x = 0
rtcVideoView.frame.origin.y = 0
self.localVideoView?.addSubview(rtcVideoView)
currVideoCaptureSession!.startRunning()
if let _ = currVideoCaptureSession {
rtcVideoView.captureSession = currVideoCaptureSession!
}
}
fileprivate func onVideoTrackCreated(_ videoTrack: RTCVideoTrack?, _ videoSource: RTCVideoSource?, _ videoCaptureSession: AVCaptureSession?) {
print("onVideoTrackCreated")
currVideoTrack = videoTrack
currVideoSource = videoSource
currVideoCaptureSession = videoCaptureSession
if let _ = currVideoTrack {
localStream!.addVideoTrack(videoTrack!)
print("localstream added video track with track id \(String(describing: currVideoTrack?.trackId))")
onLocalStreamReadyForRender()
}
for stream in peerConn!.localStreams {
if stream.streamId == Config.mediaTrackLabel {
peerConn!.remove(localStream!)
}
}
peerConn!.add(localStream!)
}
func createLocalVideoTrack(position: AVCaptureDevice.Position) {
if let captureDevice = getCaptureDevice(position: position) {
print("createLocalVideoTrack")
let videoSource = peerConnFactory.videoSource()
let rtcCamVidCapturer = RTCCameraVideoCapturer(delegate: videoSource)
rtcCamVidCapturer.startCapture(
with: captureDevice.device,
format: captureDevice.format,
fps: captureDevice.fps,
completionHandler:
{
(error: Error?) in
if let captureError = error {
print("Error RTCCameraVideoCapturer startCapture \(captureError)")
}
else {
print("RTCCameraVideoCapturer startCapture success")
let videoTrack = self.peerConnFactory.videoTrack(with: videoSource, trackId: Config.mediaTrackVideoLabel)
self.mediaTrackDelegate?.onVideoTrackCreated(videoTrack, videoSource, rtcCamVidCapturer.captureSession)
}
})
}
}
func getCaptureDevice(position: AVCaptureDevice.Position) -> (device: AVCaptureDevice, format: AVCaptureDevice.Format, fps: Int)? {
for avCaptureDevice in RTCCameraVideoCapturer.captureDevices() {
if avCaptureDevice.position == position {
let supportedActiveFormat = avCaptureDevice.activeFormat
let maxFrameRate = supportedActiveFormat.videoSupportedFrameRateRanges[0].maxFrameRate
let minFrameRate = supportedActiveFormat.videoSupportedFrameRateRanges[0].minFrameRate
//let midFrameRate = minFrameRate + ((maxFrameRate - minFrameRate)/2)
let fps = maxFrameRate
print("getCaptureDevice uniqueId:\(avCaptureDevice.uniqueID), supportedActiveFormat:\(supportedActiveFormat), fps:\(fps), deviceType:\(avCaptureDevice.deviceType), isConnected:\(avCaptureDevice.isConnected), localizedName:\(avCaptureDevice.localizedName)")
return (device: avCaptureDevice, format: supportedActiveFormat, fps: Int(fps))
}
}
print("Error no capture device returned")
return nil
}
顺便说一句,我在iPad mini第二代操作系统版本11.2.6上测试它
非常感谢任何帮助。谢谢!
答案 0 :(得分:0)
我设法解决了这个问题。它是由RTCCameraVideoCapturer在创建后立即销毁造成的。