我在做 GUI 开发工作。我用 Glade 设计界面并使用 GTK,用 GStreamer 播放视频(GStreamer 基础教程 5)。我想在视频上叠加一张半透明图片,但一直没有成功。我使用了 Glade 的 GtkOverlay
对象来实现透明叠加,但没有任何效果。我的屏幕截图如下:
我在同一个容器(视频所在位置)里放了一张皮卡丘图片(视频上 20x20 像素),但无论小图还是大图都被挤到了视频下方——它们和视频各自占据了不同的位置,同时视频区域被压缩了。我不清楚问题出在哪里:是 Glade、GTK 还是 GStreamer?该如何解决?Glade 代码如下:
<?xml version="1.0" encoding="UTF-8"?>
<!-- Generated with glade 3.16.1 -->
<!-- Main window layout: a 480x800 fixed-position container holding an image
     box, an arrow image, a label, and a nested GtkOverlay/GtkRevealer stack.
     NOTE(review): a GtkOverlay only stacks widgets that are added as
     *overlay* children (gtk_overlay_add_overlay() in code, or
     <child type="overlay"> in the UI file).  Both overlays below contain a
     single regular child, so nothing is actually drawn on top of anything;
     the children are laid out like ordinary bin contents, which matches the
     "images get pushed below the video" symptom described above. -->
<interface>
<requires lib="gtk+" version="3.10"/>
<object class="GtkWindow" id="main_window">
<property name="width_request">480</property>
<property name="height_request">800</property>
<property name="can_focus">False</property>
<property name="opacity">0.98999999999999999</property>
<property name="resizable">False</property>
<property name="default_width">480</property>
<property name="default_height">800</property>
<property name="deletable">False</property>
<property name="gravity">north</property>
<property name="has_resize_grip">False</property>
<child>
<!-- GtkFixed positions every child at absolute x/y coordinates; children
     never overlap the video drawing area unless explicitly placed there. -->
<object class="GtkFixed" id="fixed1">
<property name="width_request">480</property>
<property name="height_request">400</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkBox" id="box1">
<property name="width_request">480</property>
<property name="height_request">400</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="orientation">vertical</property>
<child>
<object class="GtkImage" id="image2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="opacity">0.5</property>
<!-- Relative path: resolved against the process working directory, not the
     .glade file's location - presumably why the code reloads it by absolute
     path at runtime. -->
<property name="pixbuf">index.jpeg</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
</object>
<packing>
<!-- y=400 places this box in the lower half of the 480x800 window. -->
<property name="y">400</property>
</packing>
</child>
<child>
<object class="GtkImage" id="image1">
<property name="width_request">240</property>
<property name="height_request">400</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="pixbuf">upArrow2.png</property>
</object>
</child>
<child>
<object class="GtkLabel" id="label1">
<property name="width_request">240</property>
<property name="height_request">400</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">label</property>
</object>
<packing>
<property name="x">240</property>
</packing>
</child>
<child>
<!-- NOTE(review): overlay1 holds only one ordinary child (revealer1), so it
     behaves like a plain bin.  To float this content over the video, the
     video widget would have to be the overlay's main (first) child and this
     subtree added as an overlay child. -->
<object class="GtkOverlay" id="overlay1">
<property name="width_request">100</property>
<property name="height_request">80</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="opacity">0.29999999999999999</property>
<child>
<object class="GtkRevealer" id="revealer1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="opacity">0.69999999999999996</property>
<property name="transition_type">none</property>
<property name="reveal_child">True</property>
<child>
<object class="GtkBox" id="box3">
<property name="width_request">100</property>
<property name="height_request">100</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="opacity">0.5</property>
<property name="orientation">vertical</property>
<child>
<!-- overlay2 likewise has a single regular child - no stacking occurs. -->
<object class="GtkOverlay" id="overlay2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkRevealer" id="revealer2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="transition_type">none</property>
<property name="reveal_child">True</property>
<child>
<object class="GtkImage" id="image3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="pixbuf">pik.png</property>
</object>
</child>
</object>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
</object>
</child>
</object>
</child>
</object>
<packing>
<property name="x">75</property>
<property name="y">600</property>
</packing>
</child>
</object>
</child>
</object>
</interface>
和 gtk代码:
#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gdk/gdk.h>
#if defined (GDK_WINDOWING_X11)
#include <gdk/gdkx.h>
#elif defined (GDK_WINDOWING_WIN32)
#include <gdk/gdkwin32.h>
#elif defined (GDK_WINDOWING_QUARTZ)
#include <gdk/gdkquartz.h>
#endif
/* Structure to contain all our information, so we can pass it around
 * between the GTK callbacks and the GStreamer bus handlers. */
typedef struct _CustomData {
GstElement *playbin, *pipeline; /* playbin: the video pipeline used by the GUI; pipeline: a second, short-lived audio pipeline played in main() before the GUI starts */
GtkWidget *slider; /* Slider widget to keep track of current position (currently unused - slider code is commented out) */
GtkWidget *streams_list; /* Text widget to display info about the streams */
gulong slider_update_signal_id; /* Signal ID for the slider update signal */
GstState state; /* Current state of the playbin pipeline, updated by state_changed_cb */
gint64 duration; /* Duration of the clip, in nanoseconds (GST_CLOCK_TIME_NONE until queried) */
} CustomData;
/* Called when the GUI toolkit creates the physical window that will hold the
 * video.  At that point we can retrieve its native handle (whose meaning
 * depends on the windowing system) and hand it to GStreamer through the
 * GstVideoOverlay interface so the video sink renders into our widget.
 *
 * Fix: window_handle is now initialized to 0 ("no window").  Previously it
 * was read uninitialized (undefined behavior) whenever none of the backend
 * macros below matched the build configuration. */
static void realize_cb (GtkWidget *widget, CustomData *data) {
  GdkWindow *window = gtk_widget_get_window (widget);
  guintptr window_handle = 0;

  if (!gdk_window_ensure_native (window))
    g_error ("Couldn't create native window needed for GstVideoOverlay!");

  /* Retrieve the platform-specific window handle from GDK */
#if defined (GDK_WINDOWING_WIN32)
  window_handle = (guintptr)GDK_WINDOW_HWND (window);
#elif defined (GDK_WINDOWING_QUARTZ)
  window_handle = gdk_quartz_window_get_nsview (window);
#elif defined (GDK_WINDOWING_X11)
  window_handle = GDK_WINDOW_XID (window);
#endif

  /* playbin implements GstVideoOverlay and forwards the handle to the sink */
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle);
}
/* Callback for the PLAY button: switches the playbin to PLAYING. */
static void play_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PLAYING);
}
/* Callback for the PAUSE button: switches the playbin to PAUSED. */
static void pause_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PAUSED);
}
/* Callback for the STOP button: drops the playbin to READY, which stops
 * playback (and, via state_changed_cb, immediately restarts it). */
static void stop_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* Callback for the main window's "delete-event" (window closed):
 * stop playback and quit the GTK main loop. */
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
stop_cb (NULL, data);
gtk_main_quit ();
}
/* Redraw handler for the video drawing area (damage/exposure, rescaling...).
 * While the pipeline is PAUSED or PLAYING, GStreamer paints the widget
 * itself; in lower states we fill the area with black so no garbage shows. */
static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
  GtkAllocation area;

  /* GStreamer owns the surface once the pipeline reaches PAUSED. */
  if (data->state >= GST_STATE_PAUSED)
    return FALSE;

  /* Paint a black rectangle covering the whole widget allocation.
   * Cairo is already a GStreamer dependency, so it is always available. */
  gtk_widget_get_allocation (widget, &area);
  cairo_set_source_rgb (cr, 0, 0, 0);
  cairo_rectangle (cr, 0, 0, area.width, area.height);
  cairo_fill (cr);
  return FALSE;
}
/* Builds the GTK+ UI from the Glade file and registers all callbacks.
 *
 * Fixes over the previous revision:
 *  - the window created with gtk_window_new() (which carried the
 *    "delete-event" handler) was immediately overwritten by the builder
 *    window, so the handler never fired and the window leaked; the handler
 *    is now connected to the builder window instead.
 *  - every widget fetched from the builder (image1/image2/image3/overlay1)
 *    was immediately overwritten with a brand-new orphan widget created by
 *    gtk_image_new_from_file(), so those images were never shown; the files
 *    are now loaded into the builder's widgets with gtk_image_set_from_file().
 *  - gtk_container_add(main_window, box3) tried to re-parent a widget that
 *    already has a parent inside the builder hierarchy; this triggers Gtk
 *    warnings and shifts the layout (the reported symptom), so it is removed.
 *  - gtk_builder_add_from_file() errors are no longer silently ignored.
 */
static void create_ui (CustomData *data) {
  GtkWidget *main_window;  /* top-level window, loaded from the Glade file */
  GtkWidget *video_window; /* drawing area where the video is rendered */
  GtkWidget *video_box;    /* builder box ("box3") that receives video_window */
  GtkWidget *play_button, *pause_button, *stop_button; /* transport buttons (not packed anywhere yet) */
  GtkWidget *label, *image, *image2, *image3;
  GtkWidget *overlay;
  GtkBuilder *builder;
  GError *error = NULL;

  /* Drawing area the GstVideoOverlay sink paints into.  Double buffering
   * must be off so GStreamer's rendering is not overdrawn by GTK. */
  video_window = gtk_drawing_area_new ();
  gtk_widget_set_double_buffered (video_window, FALSE);
  g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data);
  g_signal_connect (video_window, "draw", G_CALLBACK (draw_cb), data);

  play_button = gtk_button_new_from_icon_name ("media-playback-start", GTK_ICON_SIZE_SMALL_TOOLBAR);
  g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);
  pause_button = gtk_button_new_from_icon_name ("media-playback-pause", GTK_ICON_SIZE_SMALL_TOOLBAR);
  g_signal_connect (G_OBJECT (pause_button), "clicked", G_CALLBACK (pause_cb), data);
  stop_button = gtk_button_new_from_icon_name ("media-playback-stop", GTK_ICON_SIZE_SMALL_TOOLBAR);
  g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);

  data->streams_list = gtk_text_view_new ();
  gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE);

  /* Load the Glade UI description; bail out loudly on failure instead of
   * dereferencing NULL widgets below. */
  builder = gtk_builder_new ();
  if (!gtk_builder_add_from_file (builder, "/home/faruk/Desktop/testcode/c/GUI.glade", &error)) {
    g_printerr ("Could not load UI file: %s\n", error->message);
    g_clear_error (&error);
    g_object_unref (builder);
    return;
  }

  main_window = GTK_WIDGET (gtk_builder_get_object (builder, "main_window"));
  g_signal_connect (G_OBJECT (main_window), "delete-event", G_CALLBACK (delete_event_cb), data);
  gtk_builder_connect_signals (builder, NULL);

  /* Load the image files into the widgets declared in the Glade file
   * (previously new orphan GtkImages were created and never displayed). */
  image = GTK_WIDGET (gtk_builder_get_object (builder, "image1"));
  gtk_image_set_from_file (GTK_IMAGE (image), "/home/faruk/Desktop/guii/Uart/Media/upArrow2.png");
  image2 = GTK_WIDGET (gtk_builder_get_object (builder, "image2"));
  gtk_image_set_from_file (GTK_IMAGE (image2), "/home/faruk/Desktop/testcode/c/index.jpeg");
  image3 = GTK_WIDGET (gtk_builder_get_object (builder, "image3"));
  gtk_image_set_from_file (GTK_IMAGE (image3), "/home/faruk/Desktop/testcode/c/pik.png");

  label = GTK_WIDGET (gtk_builder_get_object (builder, "label1"));

  /* NOTE(review): to draw image3 on top of the video, the video widget must
   * be overlay1's main child and the content added with
   * gtk_overlay_add_overlay(); a regular <child> does not stack. */
  overlay = GTK_WIDGET (gtk_builder_get_object (builder, "overlay1"));
  (void) overlay;

  /* Pack the video drawing area into the builder's box3.  box3 already has a
   * parent inside the builder hierarchy, so it must NOT be added to
   * main_window again. */
  video_box = GTK_WIDGET (gtk_builder_get_object (builder, "box3"));
  gtk_box_pack_start (GTK_BOX (video_box), video_window, TRUE, TRUE, 0);

  gtk_window_set_decorated (GTK_WINDOW (main_window), FALSE); /* hide title bar */

  gtk_label_set_markup (GTK_LABEL (label), "<span foreground=\"white\" font=\"Arial\" size='300100'>1</span>"); //Set number color and size......1234

  /* Set the window background color. */
  GdkRGBA color;
  gdk_rgba_parse (&color, "#2080FF");
  gtk_widget_override_background_color (main_window, GTK_STATE_FLAG_NORMAL, &color);

  g_object_unref (builder);
  gtk_widget_show_all (main_window);
}
/* Called once per second (g_timeout_add_seconds) to refresh the GUI.
 * Returning TRUE keeps the timeout source alive.
 *
 * Fix: the position query previously read "¤t" - an HTML-entity-mangled
 * "&current" - which does not compile; restored to &current. */
static gboolean refresh_ui (CustomData *data) {
  gint64 current = -1;

  /* Nothing to update unless the pipeline is PAUSED or PLAYING. */
  if (data->state < GST_STATE_PAUSED)
    return TRUE;

  /* Query the stream duration once it becomes available. */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
      g_printerr ("Could not query current duration.\n");
    } else {
      /* The slider was removed from this build; previously its range was set
       * here to the clip duration in seconds. */
    }
  }

  if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
    /* Slider position updates (with signal blocking to avoid spurious seeks)
     * used to happen here; the position is still queried so re-enabling the
     * slider only requires restoring the gtk_range_set_value() call. */
  }
  return TRUE;
}
/* Called when new metadata is discovered in the stream.
 * We are possibly in a GStreamer working thread, so instead of touching the
 * GUI directly we notify the main thread by posting an application message
 * ("tags-changed") on the bus; application_cb picks it up. */
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
gst_element_post_message (playbin,
gst_message_new_application (GST_OBJECT (playbin),
gst_structure_new_empty ("tags-changed")));
}
/* Bus callback for error messages: print the error and its debug info,
 * release them, and drop the playbin to READY (which stops playback). */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
/* Print error details on the screen */
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
/* Set the pipeline to READY (which stops playback) */
gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* Bus callback for End-Of-Stream messages: set the playbin to READY
 * (state_changed_cb then restarts playback, effectively looping the video). */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
g_print ("End-Of-Stream reached.\n");
gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* Bus callback for state-changed messages.  Tracks the playbin's current
 * state in data->state (used by draw_cb and refresh_ui). */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
data->state = new_state;
g_print ("State set to %s\n", gst_element_state_get_name (new_state));
/* Restart playback whenever the pipeline drops back to READY
 * (EOS and the STOP button both land here), so the video loops. */
if(new_state == GST_STATE_READY){
gst_element_set_state (data->playbin, GST_STATE_PLAYING); /* start playing again */
}
if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
/* For extra responsiveness, we refresh the GUI as soon as we reach the PAUSED state */
refresh_ui (data);
}
}
}
///* Extract metadata from all the streams and write it to the text widget in the GUI */
//static void analyze_streams (CustomData *data) {
// gint i;
// GstTagList *tags;
// gchar *str, *total_str;
// guint rate;
// gint n_video, n_audio, n_text;
// GtkTextBuffer *text;
//
// /* Clean current contents of the widget */
// text = gtk_text_view_get_buffer (GTK_TEXT_VIEW (data->streams_list));
// gtk_text_buffer_set_text (text, "", -1);
//
// /* Read some properties */
// g_object_get (data->playbin, "n-video", &n_video, NULL);
// g_object_get (data->playbin, "n-audio", &n_audio, NULL);
// g_object_get (data->playbin, "n-text", &n_text, NULL);
//
// for (i = 0; i < n_video; i++) {
// tags = NULL;
// /* Retrieve the stream's video tags */
// g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
// if (tags) {
// total_str = g_strdup_printf ("video stream %d:\n", i);
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
// total_str = g_strdup_printf (" codec: %s\n", str ? str : "unknown");
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// g_free (str);
// gst_tag_list_free (tags);
// }
// }
//
// for (i = 0; i < n_audio; i++) {
// tags = NULL;
// /* Retrieve the stream's audio tags */
// g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
// if (tags) {
// total_str = g_strdup_printf ("\naudio stream %d:\n", i);
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
// total_str = g_strdup_printf (" codec: %s\n", str);
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// g_free (str);
// }
// if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
// total_str = g_strdup_printf (" language: %s\n", str);
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// g_free (str);
// }
// if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
// total_str = g_strdup_printf (" bitrate: %d\n", rate);
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// }
// gst_tag_list_free (tags);
// }
// }
//
// for (i = 0; i < n_text; i++) {
// tags = NULL;
// /* Retrieve the stream's subtitle tags */
// g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
// if (tags) {
// total_str = g_strdup_printf ("\nsubtitle stream %d:\n", i);
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
// total_str = g_strdup_printf (" language: %s\n", str);
// gtk_text_buffer_insert_at_cursor (text, total_str, -1);
// g_free (total_str);
// g_free (str);
// }
// gst_tag_list_free (tags);
// }
// }
//}
/* Bus callback for "application" messages - here we receive the message
 * posted by tags_cb on the main thread, where GUI access is safe. */
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
if (g_strcmp0 (gst_structure_get_name (gst_message_get_structure (msg)), "tags-changed") == 0) {
/* If the message is the "tags-changed" (only one we are currently issuing), update
 * the stream info GUI */
/* analyze_streams(data) disabled: it crashed the application.  If it will
 * not be used, delete it together with the functions it calls. */
}
}
/* Program entry point: plays a short intro audio clip to completion, then
 * builds the GTK UI and runs the video playbin inside the GTK main loop.
 *
 * Fixes over the previous revision:
 *  - data.playbin is now NULL-checked immediately after creation, before it
 *    is used, instead of only after the intro audio finished.
 *  - the gst_parse_launch() result is NULL-checked before use; previously a
 *    parse failure would dereference NULL.
 */
int main(int argc, char *argv[]) {
  CustomData data;
  GstStateChangeReturn ret;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GTK and GStreamer (both consume their own argv options). */
  gtk_init (&argc, &argv);
  gst_init (&argc, &argv);

  /* Initialize our shared data structure. */
  memset (&data, 0, sizeof (data));
  data.duration = GST_CLOCK_TIME_NONE;

  /* Create the video pipeline used by the GUI. */
  data.playbin = gst_element_factory_make ("playbin", "playbin");
  if (!data.playbin) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Play the intro audio clip synchronously (block until error or EOS),
   * then tear the temporary pipeline down before the GUI starts. */
  data.pipeline = gst_parse_launch ("playbin uri=file:///home/faruk/Desktop/Petra_ses_dosyaları/TR/5.wav", NULL);
  if (data.pipeline) {
    gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
    bus = gst_element_get_bus (data.pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (msg != NULL)
      gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (data.pipeline, GST_STATE_NULL);
    gst_object_unref (data.pipeline);
    data.pipeline = NULL;
  } else {
    g_printerr ("Could not create the intro audio pipeline.\n");
  }

  /* Set the URI to play */
  g_object_set (data.playbin, "uri", "file:///home/faruk/Desktop/guii/Uart/Media/sample.mp4", NULL);

  /* Connect to interesting signals in playbin */
  g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
  g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
  g_signal_connect (G_OBJECT (data.playbin), "text-tags-changed", (GCallback) tags_cb, &data);

  /* Create the GUI */
  create_ui (&data);

  /* Instruct the bus to emit signals for each received message, and connect
   * to the interesting ones. */
  bus = gst_element_get_bus (data.playbin);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
  g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, &data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, &data);
  g_signal_connect (G_OBJECT (bus), "message::application", (GCallback)application_cb, &data);
  gst_object_unref (bus);

  /* Start playing */
  ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.playbin);
    return -1;
  }

  /* Register a function that GLib will call every second */
  g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);

  /* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */
  gtk_main ();

  /* Free resources */
  gst_element_set_state (data.playbin, GST_STATE_NULL);
  gst_object_unref (data.playbin);
  return 0;
}
谢谢。