以下代码是使用updsink元素在网络上流式传输mpeg4视频(使用以下功能的v4l2src元素:width = 640,height = 480,帧率= 30/1),并且功能值应更改为(在键盘上按“1”后,宽度= 352,高度= 288,帧速率= 15/1)。我尝试通过在按下“1”后暂停管道,然后将大写字母重新设置为新值然后再次将管道置于播放状态,但这对我不起作用。
我在暂停管道之前打印了大写字母值,并且在做出更改之后,我可以看到大写字母的更改已完成,但在管道再次进入播放状态时未应用()
请帮助.......
您可以在运行以下代码之前在新终端中输入以下命令来播放此流:
gst-launch udpsrc multicast-group = 127.0.0.1 port = 8999! mpeg4videoparse! ffdec_mpeg4! ffmpegcolorspace! autovideosink
======================
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <glib-object.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default: break;
}
return TRUE;
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *filter, *vrate, *encoder, *conv, *sink;
GstBus *bus;
GstCaps *filtercaps;
gint width, height, num, denom;
const GstStructure *str;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
if (argc != 2) {
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;
}
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("video-player");
source = gst_element_factory_make ("v4l2src", "file-source");
vrate = gst_element_factory_make ("videorate", "video-rate");
filter = gst_element_factory_make ("capsfilter", "filter");
conv = gst_element_factory_make ("ffmpegcolorspace","converter");
encoder = gst_element_factory_make ("ffenc_mpeg4","mpeg-decoder");
sink = gst_element_factory_make ("udpsink","audio-output");
if (!pipeline || !source || !filter || !vrate || !conv || !encoder || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* Set up the pipeline */
/* we set the input filename to the source element */
filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
g_object_set (G_OBJECT (encoder), "bitrate" , 384 , NULL);
g_object_set (G_OBJECT (sink), "host" , argv[1] , NULL);
g_object_set (G_OBJECT (sink), "port" , 8999 , NULL);
g_object_set (G_OBJECT (sink), "async" , FALSE , NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
/* file-source | ogg-demuxer | vorbis-decoder | converter | alsa-output */
gst_bin_add_many (GST_BIN (pipeline), source, vrate, filter, conv, encoder, sink, NULL);
/* we link the elements together */
/* file-source -> ogg-demuxer ~> vorbis-decoder -> converter -> alsa-output */
gst_element_link_many (source, vrate, filter, conv, encoder, sink, NULL);
/* Set the pipeline to "playing" state*/
g_print ("Now playing: \n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Print out the frame size and rate */
str = gst_caps_get_structure (filtercaps, 0);
if (!gst_structure_get_int (str, "width", &width) || !gst_structure_get_int (str, "height", &height) ||
!gst_structure_get_fraction (str, "framerate", &num, &denom))
g_print ("No width/height available\n");
g_print ("The video size of this set of capabilities is %dx%d and the frame rate is %d/%d\n", width, height, num, denom);
/* Pausing the streame */
int in;
if (scanf ("%d", &in) == 1){
g_print ("Now pausing: \n");
gst_element_set_state (pipeline, GST_STATE_PAUSED );
g_assert (GST_STATE (pipeline) == GST_STATE_PAUSED);
g_assert (GST_STATE (source) == GST_STATE_PAUSED);
g_assert (GST_STATE (filter) == GST_STATE_PAUSED);
g_assert (GST_STATE (vrate) == GST_STATE_PAUSED);
g_assert (GST_STATE (encoder) == GST_STATE_PAUSED);
g_assert (GST_STATE (conv) == GST_STATE_PAUSED);
g_assert (GST_STATE (sink) == GST_STATE_PAUSED);
/* apply the alterations to the caps now */
gst_caps_set_simple (filtercaps, "width", G_TYPE_INT, 352, "height", G_TYPE_INT, 288, "framerate", GST_TYPE_FRACTION, 15, 1, NULL);
/* Print out the frame size and rate after alteration*/
str = gst_caps_get_structure (filtercaps, 0);
if (!gst_structure_get_int (str, "width", &width) || !gst_structure_get_int (str, "height", &height) ||
!gst_structure_get_fraction (str, "framerate", &num, &denom))
g_print ("No width/height available\n");
g_print ("The video size of this set of capabilities is %dx%d and the frame rate is %d/%d\n", width, height, num, denom);
/* set back to playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_assert (GST_STATE (pipeline) == GST_STATE_PLAYING);
g_assert (GST_STATE (source) == GST_STATE_PLAYING);
g_assert (GST_STATE (filter) == GST_STATE_PLAYING);
g_assert (GST_STATE (vrate) == GST_STATE_PLAYING);
g_assert (GST_STATE (encoder) == GST_STATE_PLAYING);
g_assert (GST_STATE (conv) == GST_STATE_PLAYING);
g_assert (GST_STATE (sink) == GST_STATE_PLAYING);
}
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
干杯。
伊布拉
答案 0 :(得分:3)
你错过了g_object_set(G_OBJECT(filter), "caps", filtercaps, NULL);