问题
如何确定如何解码具有未知属性的流视频数据?
问题:
几周以来,我一直试图让我的相机(HC-V770)流式传输到YouTube。
经过大量的 Wireshark 抓包分析并确定相机的能力后,我设法让相机将视频数据发送到我的电脑。
它以UDP数据的形式出现,但我似乎无法弄清楚数据究竟是什么。 在发送HTTP请求后,摄像机将开始将UDP流数据发送到指定端口上的请求者设备。
例如:调用http://CAMERAIP/cam.cgi?mode=startstream&value=49152会导致相机通过端口49152将视频数据发送到我的设备
尝试过的解决方案:VLC 无法直接识别它。使用 udp://@0.0.0.0:49152 时它能检测到一些传入的数据,但似乎不知道该如何处理这些数据。
我找到了这个脚本(如下所示,经过略微修改),据说可以用于其他/较旧的松下相机,但不适用于这一台。
该脚本显示一个黑色窗口,并响应“一些传入数据”,但似乎不知道如何处理它。
当数据流停止时,它也将停止运行,确认它实际上正在拾取某些东西
## Network configuration — update these values for your own setup.
MY_IP = "192.168.0.149"     # local IP address of the machine running this script
THEIR_IP = "192.168.0.163"  # IP address of the camera
# YouTube RTMP ingest URL; replace KEYHERE with your stream key.
# (Original line was missing the closing quote — a SyntaxError.)
RTMP_OUT = "rtmp://a.rtmp.youtube.com/live2/KEYHERE"
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
import numpy as np
import socket
import cv2
import binascii
import threading
import time
import sys
## NOTE: HTTP requests alone are not enough — the stream has to be started and kept alive:
# do this to start the stream
#http://192.168.0.163/cam.cgi?mode=startstream&value=49152
#do this to refresh & check status
#http://192.168.0.163/cam.cgi?mode=getstate
# then run the program
## --- Module-level setup (runs at import time): GStreamer init + sockets ---
GObject.threads_init()  # NOTE(review): deprecated no-op in PyGObject >= 3.11 — confirm installed version
Gst.init(None)
UDP_PORT = 49152  # must match the "value=" port in the startstream HTTP request above
# JPEG frame delimiters: SOI (FF D8) and EOI (FF D9) markers, used by the main
# loop below to cut a single JPEG frame out of each raw UDP payload.
start = binascii.unhexlify(''.join('FF D8'.split()))
end = binascii.unhexlify(''.join('FF D9'.split()))
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.bind((MY_IP, UDP_PORT))  # receive the camera's UDP video stream here
# TCP socket reused by keepalive() to poke the camera's HTTP interface on port 80.
tcpsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
## 4K Video Mode = 640x360, but photo mode is 640x480 .. adjust the appsrc caps below accordingly.
## NOTE(review): the active CLI below declares 1920x1080@50 for appsrc — confirm against the
## actual decoded frame size; min-threshold-time=1140000000 can be adjusted to align audio/video.
#CLI='appsrc name=mysource format=TIME do-timestamp=TRUE is-live=TRUE caps="video/x-raw,format=BGR,width=640,height=360,framerate=(fraction)30/1,pixel-aspect-ratio=(fraction)1/1" ! videoconvert ! videoscale ! videorate ! capsfilter caps="video/x-raw,format=I420,width=1280,height=720,framerate=(fraction)30/1" ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! tee name=RAW RAW. ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! autovideosink sync=false RAW. ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! x264enc cabac=true aud=true tune=zerolatency byte-stream=false sliced-threads=true threads=4 speed-preset=1 bitrate=2000 key-int-max=20 bframes=0 ! h264parse ! video/x-h264,profile=main ! mux. autoaudiosrc ! audioconvert ! voaacenc bitrate=128000 ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! aacparse ! audio/mpeg,mpegversion=4,stream-format=raw ! queue max-size-buffers=1 max-size-time=0 max-size-bytes=0 min-threshold-time=1140000000 ! flvmux streamable=true name=mux ! queue max-size-buffers=3 max-size-time=0 max-size-bytes=0 ! rtmpsink location="'+RTMP_OUT+'" sync=false'
# Active pipeline: appsrc (raw BGR frames pushed from the main loop) -> convert/scale
# to 1280x720 I420 -> tee into a local preview (autovideosink) and an x264+AAC encode
# -> FLV mux -> RTMP push to YouTube.
CLI='appsrc name=mysource format=TIME do-timestamp=TRUE is-live=TRUE caps="video/x-raw,format=BGR,width=1920,height=1080,framerate=(fraction)50/1,pixel-aspect-ratio=(fraction)1/1" ! videoconvert ! videoscale ! videorate ! capsfilter caps="video/x-raw,format=I420,width=1280,height=720,framerate=(fraction)30/1" ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! tee name=RAW RAW. ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! autovideosink sync=false RAW. ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! x264enc cabac=true aud=true tune=zerolatency byte-stream=false sliced-threads=true threads=4 speed-preset=1 bitrate=2000 key-int-max=20 bframes=0 ! h264parse ! video/x-h264,profile=main ! mux. autoaudiosrc ! audioconvert ! voaacenc bitrate=128000 ! queue max-size-time=0 max-size-bytes=0 max-size-buffers=0 ! aacparse ! audio/mpeg,mpegversion=4,stream-format=raw ! queue max-size-buffers=1 max-size-time=0 max-size-bytes=0 min-threshold-time=1140000000 ! flvmux streamable=true name=mux ! queue max-size-buffers=3 max-size-time=0 max-size-bytes=0 ! rtmpsink location="'+RTMP_OUT+'" sync=false'
pipline=Gst.parse_launch(CLI)  # sic: "pipline" misspelling kept to avoid touching code
appsrc=pipline.get_by_name("mysource")  # element the main loop pushes decoded frames into
#appsink=pipline.get_by_name("sink")
appsrc.set_property('emit-signals',True) #tell sink to emit signals
pipline.set_state(Gst.State.PLAYING)  # streaming begins immediately at import time
def keepalive(MY_IP, THEIR_IP):
    """Periodically re-issue the startstream request so the camera keeps streaming.

    Runs forever; intended for a daemon thread.  Uses the module-level
    ``tcpsock``: on any socket error (including the very first pass, when the
    socket is not yet connected) it (re)connects to the camera's HTTP port and
    retries on the next iteration.

    Args:
        MY_IP: local IP address (kept for interface compatibility; no longer
            used in the request — the Host header must name the camera).
        THEIR_IP: IP address of the camera.
    """
    # Original bugs fixed here:
    #  - sendto() was called with a str (TypeError on Python 3) and aimed at
    #    MY_IP:80 instead of the camera; a connected TCP socket wants send(bytes).
    #  - The Host header named the requester, and User-Agent appeared *after*
    #    the blank line that terminates the header block.
    #  - The reconnect path hard-coded "192.168.0.163" instead of THEIR_IP.
    request = (
        "GET /cam.cgi?mode=startstream&value=49152 HTTP/1.1\r\n"
        "Host: " + THEIR_IP + "\r\n"
        "User-Agent: Mozilla 5.0\r\n"
        "\r\n"
    )
    while True:
        try:
            tcpsock.send(request.encode("ascii"))
            response = tcpsock.recv(1024)
            time.sleep(8)
            print("keep alive")
        except OSError:
            # Narrowed from a bare except: all socket failures are OSError.
            tcpsock.connect((THEIR_IP, 80))
# Run the keepalive loop in the background; daemon=True means the interpreter
# will not wait for it on exit (equivalent to setting thread.daemon before start).
thread = threading.Thread(
    target=keepalive,
    args=(MY_IP, THEIR_IP),
    daemon=True,
)
thread.start()
total = 0
# Main receive loop: pull one UDP datagram (expected to hold one JPEG frame),
# decode it with OpenCV, show a preview, and push the raw BGR frame into the
# GStreamer appsrc for encoding/streaming.
while True:
    data, addr = sock.recvfrom(999999)  # 65507 is the UDP max, so this is generous
    # Cut the JPEG out of the datagram via the SOI/EOI markers.  The original
    # `data.split(start)[1]` raised IndexError on datagrams with no SOI marker;
    # skip incomplete datagrams instead of crashing.
    soi = data.find(start)
    if soi == -1:
        continue
    eoi = data.find(end, soi + len(start))
    if eoi == -1:
        continue
    jpeg = data[soi:eoi + len(end)]
    # cv2.CV_LOAD_IMAGE_COLOR was removed in OpenCV 3; IMREAD_COLOR is the
    # portable constant.  imdecode returns None for a corrupt JPEG — drop it.
    frame = cv2.imdecode(np.frombuffer(jpeg, np.uint8), cv2.IMREAD_COLOR)
    if frame is None:
        continue
    # print(np.shape(frame))  ## uncomment to see resolution of video
    cv2.imshow("img", frame)  ## 4K Video Mode = 640x360, photo mode is 640x480
    # Without a waitKey pump the HighGUI window never repaints (black window).
    cv2.waitKey(1)
    raw = frame.tobytes()  # ndarray.tostring() is deprecated
    buf = Gst.Buffer.new_allocate(None, len(raw), None)
    buf.fill(0, raw)
    appsrc.emit("push-buffer", buf)
# Unreachable under normal operation (the loop above never breaks); kept so an
# exception-driven exit path remains visible.
print("EXIT")
sys.exit()