我还是Gstreamer和Python-GST的新手。我不知道是否有可能将多个源原始视频流数据收集到一个接收器中。
让我进一步解释一下情况:
我现在有2个视频源服务器,一个是我的网络摄像头,另一个是一个mp4文件,我确实使用以下命令打开了这些源
Source1 :
gst-launch-1.0 v4l2src device=/dev/video0 ! 'video/x-raw,width=640,height=480' ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
Source2 :
gst-launch-1.0 filesrc location=file_name.mp4 ! 'video/x-raw,width=640,height=480' ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
在尝试使用 H264 编码后,我试图将两个流数据都发送到本地主机 5000 端口。
对于接收,我有一个像这样的Python Sink服务器:
import os

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstApp', '1.0')
from gi.repository import GObject, Gst, GstApp
# NOTE: GObject.threads_init() has been a no-op since PyGObject 3.11; it is
# kept only for compatibility with older installations.
GObject.threads_init()
# BUG FIX: assigning to a Python variable named GST_DEBUG has no effect.
# GStreamer reads the GST_DEBUG *environment variable*, and it must be set
# before Gst.init() is called for the debug level to apply.
os.environ.setdefault("GST_DEBUG", "6")
Gst.init(None)
class Example:
    """Receive an RTP/H264 stream on UDP port 5000, decode it, and hand raw
    frames to an appsink callback.

    Pipeline: udpsrc -> queue -> rtph264depay -> h264parse -> avdec_h264
              -> queue -> appsink
    """

    def __init__(self):
        self.mainloop = GObject.MainLoop()
        self.pipeline = Gst.Pipeline.new("Pipeline")
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect("message", self.on_message)

        # Create the elements.
        self.source = Gst.ElementFactory.make("udpsrc", None)
        self.queue1 = Gst.ElementFactory.make("queue", None)
        self.depay = Gst.ElementFactory.make("rtph264depay", None)
        self.parser = Gst.ElementFactory.make("h264parse", None)
        self.decoder = Gst.ElementFactory.make("avdec_h264", None)
        self.queue2 = Gst.ElementFactory.make("queue", None)
        self.sink = Gst.ElementFactory.make("appsink", None)

        # Add all elements to the pipeline.
        for element in (self.source, self.queue1, self.depay, self.parser,
                        self.decoder, self.queue2, self.sink):
            self.pipeline.add(element)

        # BUG FIX: the original code added the elements but never linked
        # them, so no buffer ever reached the appsink and the 'new-sample'
        # callback never fired. Link them in streaming order.
        self.source.link(self.queue1)
        self.queue1.link(self.depay)
        self.depay.link(self.parser)
        self.parser.link(self.decoder)
        self.decoder.link(self.queue2)
        self.queue2.link(self.sink)

        # Configure the source: listen on UDP 5000 and tell the RTP stack
        # what payload to expect (caps cannot be auto-detected on udpsrc).
        self.source.set_property('port', 5000)
        self.source.set_property('caps', Gst.caps_from_string(
            "application/x-rtp, encoding-name=H264,payload=96"))

        # Configure the appsink: emit 'new-sample' signals, and turn off
        # sync to make decoding as fast as possible.
        self.sink.set_property('emit-signals', True)
        self.sink.set_property('sync', False)
        self.sink.connect('new-sample', self.on_new_buffer, self.sink)

    def on_new_buffer(self, appsink, data):
        """'new-sample' callback: pull the decoded sample and print its size.

        BUG FIX: a signal handler for 'new-sample' must return a
        Gst.FlowReturn value, not a bare bool, so downstream flow control
        behaves as intended.
        """
        sample = appsink.pull_sample()
        buff = sample.get_buffer()
        size, offset, maxsize = buff.get_sizes()
        # extract_dup copies the mapped bytes; useful for writing frames out.
        frame_data = buff.extract_dup(offset, size)
        print(size)
        return Gst.FlowReturn.OK

    def run(self):
        """Start the pipeline and block in the GLib main loop."""
        ret = self.pipeline.set_state(Gst.State.PLAYING)
        if ret == Gst.StateChangeReturn.FAILURE:
            print("Unable to set the pipeline to the playing state.")
            exit(-1)
        self.mainloop.run()

    def kill(self):
        """Stop the pipeline and quit the main loop."""
        self.pipeline.set_state(Gst.State.NULL)
        self.mainloop.quit()

    def on_eos(self, bus, msg):
        print('on_eos()')
        self.kill()

    def on_error(self, bus, msg):
        print('on_error():', msg.parse_error())
        self.kill()

    def on_message(self, bus, message):
        """Generic bus watch: shut down cleanly on EOS or ERROR."""
        t = message.type
        if t == Gst.MessageType.EOS:
            print("End of Stream :(")
            self.kill()
        elif t == Gst.MessageType.ERROR:
            err, debug = message.parse_error()
            print("Error: %s" % err, debug)
            self.kill()

    def on_sync_message(self, bus, message):
        print(message.src)
# Guard the entry point so the module can be imported without side effects,
# and make sure the pipeline is torn down on Ctrl-C.
if __name__ == "__main__":
    example = Example()
    try:
        example.run()
    except KeyboardInterrupt:
        example.kill()
首先,AppSink 的回调不起作用,我不知道为什么。我认为必须在 Python 代码中进行正确的配置才行。有人可以帮忙弄清楚吗?
第二,当我尝试使用 FFmpeg 和 FFplay 播放该流时,我遇到了很多 H264 解码错误(错误日志此处省略)。
主要的困惑是GStreamer能否将多个源数据处理到一个接收器中(我需要区分每个视频帧)。
非常感谢。