我正在尝试使用 GStreamer 缩小 MPEG-2 传输流视频的分辨率。我先用 tsdemux 把流分离成音频和视频两部分,插入 capsfilter(caps 过滤器)来缩小视频帧尺寸,然后用 tsmux 把音频流和视频流重新合并。管道运行失败并报出如下错误:
“从元素 udpsrc 收到:内部数据流错误。调试信息:gstbasesrc.c(2948): gst_base_src_loop(): /GstPipeline:hdhomerun-pipeline/GstUDPSrc:udpsrc: 流处理任务已暂停,原因:not-negotiated(协商失败,-4)”
即使不插入 capsfilter,管道也同样失败。
问题似乎出在 tsmux 的 caps 协商上:无法从 tsmux 获取兼容的 sink pad,因为出现了错误 “tsmux_pad_added handler: get sink pad failed”。如果有专家能提供关于如何使用 tsmux 的帮助信息,或者关于如何更改传输流帧尺寸的信息,我将不胜感激。
以下是代码:
/* Callback for tsdemux "pad-added": classifies the new source pad by its
 * caps name ("audio/..." or "video/...") and links it to the matching
 * queue's sink pad.  On a successful link the pipeline is (re)asked to go
 * PLAYING and a dot graph is dumped for debugging.
 *
 * src     - the tsdemux element that emitted the signal
 * new_pad - the freshly created source pad on tsdemux
 * data    - application state holding the queues and the pipeline
 */
void demux_pad_added_handler (GstElement *src, GstPad *new_pad, HDHRElements *data) {
GstPadLinkReturn ret;
GstPad *sink_pad = NULL;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
GstStateChangeReturn scret;
g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* Check the new pad's type.
 * BUG FIX: the original called gst_pad_get_current_caps() twice in a row
 * (leaking one GstCaps ref per pad) and then repeated the whole
 * classify-and-lookup block a second time.  Done once here. */
new_pad_caps = gst_pad_get_current_caps (new_pad);
if (new_pad_caps == NULL) {
    /* Caps may not be set yet; bail out instead of crashing in
     * gst_caps_get_structure(). */
    g_printerr("New pad '%s' has no caps yet, ignoring.\n", GST_PAD_NAME (new_pad));
    return;
}
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (g_str_has_prefix(new_pad_type, "audio")) {
    sink_pad = gst_element_get_static_pad(data->audioqueue1, "sink");
} else if (g_str_has_prefix(new_pad_type, "video")) {
    sink_pad = gst_element_get_static_pad(data->videoqueue1, "sink");
}
if (sink_pad != NULL) {
    if (gst_pad_is_linked(sink_pad)) {
        g_print("tsdemux pad ('%s') sink pad already linked (ignoring)\n", new_pad_type);
    } else {
        ret = gst_pad_link(new_pad, sink_pad);
        if (GST_PAD_LINK_FAILED(ret)) {
            g_print("tsdemux pad ('%s') but link failed.\n", new_pad_type);
        } else {
            g_print("tsdemux pad ('%s') link succeeded.\n", new_pad_type);
            scret = gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
            if (scret == GST_STATE_CHANGE_FAILURE) {
                g_printerr ("Unable to set the pipeline to the playing state.\n");
            }
            GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS((GstBin*) data->pipeline, GST_DEBUG_GRAPH_SHOW_ALL, "newpad");
        }
    }
    /* BUG FIX: the original only unreffed sink_pad in the not-yet-linked
     * branch, leaking it whenever the pad was already linked. */
    gst_object_unref(sink_pad);
}
gst_caps_unref(new_pad_caps);
}
/* Callback for tsmux "pad-added": tries to find a compatible pad on the mux
 * for the pad that just appeared and link them.
 *
 * NOTE(review): gst_element_get_compatible_pad() is called on `mux` itself,
 * i.e. the very element that just grew `pad`.  mpegtsmux's sink pads are
 * request pads that gst_element_link() from the queues already requests and
 * links, so this lookup normally fails with "get sink pad failed" — this
 * handler is most likely unnecessary; verify whether it can be removed.
 *
 * BUG FIX over the original: gst_pad_get_name()/gst_element_get_name()
 * return newly-allocated strings that must be g_free()d; the original
 * leaked them on every invocation.
 */
void tsmux_pad_added_handler (GstElement *mux, GstPad *pad, HDHRElements *data) {
GstPad* sinkpad;
gchar *pad_name  = gst_pad_get_name(pad);
gchar *elem_name = gst_element_get_name(mux);
g_print ("tsmux %s [linking] %s ", pad_name, elem_name);
if((sinkpad = gst_element_get_compatible_pad (mux, pad, NULL)) == NULL) {
    g_printerr("tsmux_pad_added handler: get sink pad failed\n");
} else {
    g_print("tsmux_pad_added handler: get sink pad succeeded\n");
    if (gst_pad_is_linked(sinkpad)) {
        gchar *sink_name = gst_pad_get_name(sinkpad);
        g_print("tsmux sink pad %s already linked (ignoring)\n", sink_name);
        g_free(sink_name);
    } else {
        GstPadLinkReturn ret = gst_pad_link (pad, sinkpad);
        if(ret != GST_PAD_LINK_OK){
            g_printerr(" tsmux link failed [return code]:%d\n", ret);
        }else{
            gchar *sink_name = gst_pad_get_name(sinkpad);
            g_print("tsmux link successful: %s to %s\n", pad_name, sink_name);
            g_free(sink_name);
        }
    }
    gst_object_unref (sinkpad);
}
g_free(pad_name);
g_free(elem_name);
}
int main(int argc, char *argv[]) {
HDHRElements myelements;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn scret;
unsigned short gstdport;
char *end;
GMainLoop *loop;
struct in_addr remote;
GstCaps *caps;
inet_aton(arg[1], &remote);
gstdport = strtol(argv[2], &end, 10);
/* Initialize GStreamer */
gst_init (&argc, &argv);
myelements.udpsrc = gst_element_factory_make("udpsrc", "udpsrc");
myelements.rtpmp2tdepay = gst_element_factory_make("rtpmp2tdepay", "rtpmp2tdepay”);
myelements.tsdemux = gst_element_factory_make("tsdemux", "tsdemux");");
myelements.rtpmp2tpay = gst_element_factory_make("rtpmp2tpay", "rtpmp2tpay");
myelements.tsmux = gst_element_factory_make("mpegtsmux", "tsmux");
myelements.audioqueue1 = gst_element_factory_make("queue", "audioqueue1");
myelements.videoqueue1 = gst_element_factory_make("queue", "videoqueue1");
myelements.rtpmp2tpay = gst_element_factory_make("rtpmp2tpay", "rtpmp2tpay");
myelements.udpsink = gst_element_factory_make("udpsink", "udpsink");
myelements.pipeline = gst_pipeline_new ("hdhomerun-pipeline");
gst_bin_add_many (GST_BIN (myelements.pipeline),
myelements.udpsrc,
myelements.udpsink,
myelements.rtpmp2tdepay,
myelements.rtpmp2tpay,
myelements.tsdemux,
myelements.tsmux,
myelements.videoqueue1,
NULL);
g_object_set(G_OBJECT(myelements.udpsrc), "port", 1234, NULL);
GstCaps *udpcaps = gst_caps_from_string("application/x-rtp, media=video, payload=33, clock-rate=90000");
g_object_set(G_OBJECT(myelements.udpsrc), "caps", udpcaps, NULL);
g_object_set(G_OBJECT(myelements.udpsink), "host", inet_ntoa(remote), NULL);
g_object_set(G_OBJECT(myelements.udpsink), "port", gstdport, NULL);
g_object_set(G_OBJECT(myelements.tsdemux), "program-number", 3, NULL);
g_object_set(G_OBJECT(myelements.tsdemux), "name", "demux", NULL);
g_signal_connect(G_OBJECT(myelements.tsdemux), "pad-added", G_CALLBACK(demux_pad_added_handler), &myelements);
g_object_set(G_OBJECT(myelements.tsmux), "name", "tsmux", NULL);
g_signal_connect(G_OBJECT(myelements.tsmux), "pad-added", G_CALLBACK(tsmux_pad_added_handler), &myelements);
gst_element_link(myelements.udpsrc, myelements.rtpmp2tdepay);
gst_element_link(myelements.rtpmp2tdepay, myelements.tsdemux);
gst_element_link(myelements.audioqueue1, myelements.tsmux);
gst_element_link(myelements.videoqueue1, myelements.tsmux);
gst_element_link(myelements.tsmux, myelements.rtpmp2tpay);
gst_element_link(myelements.rtpmp2tpay, myelements.udpsink);
gst_element_set_state (myelements.pipeline, GST_STATE_PLAYING);
return 0;
}