I have integrated the GStreamer library, version 1.12.2, into my iOS application, which supports audio and video streaming from a Raspberry Pi device. The implemented functionality is as follows:

1. Once the application launches and is on the main screen, the video stream must run continuously in my application.
2. There is a button for receiving audio alongside the seamlessly running video stream.

For this I created two pipelines, one for video and one for audio, from the Raspberry Pi device to my iOS application.
I am hitting a crash caused by dereferencing a garbage pointer: toggling the audio (i.e. playing and pausing it), or leaving my application idle for a few minutes, crashes the app.
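The audio button handler is not shown below; here is a minimal sketch of what it does (toggleAudio: and audioPlaying are placeholder names, not the real ones):

-(IBAction) toggleAudio:(id)sender
{
    /* Guard against a pipeline pointer that was never created or was already freed */
    if (!audiopipeline)
        return;
    if (audioPlaying)
        gst_element_set_state (audiopipeline, GST_STATE_PAUSED);
    else
        gst_element_set_state (audiopipeline, GST_STATE_PLAYING);
    audioPlaying = !audioPlaying;
}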
I have attached the code that creates the video and audio pipelines.

/* video and audio, Raspberry Pi to iOS app */
-(void) app_function
{
    GstBus *bus;
    GSource *bus_source;
    GError *error = NULL;
    GstBus *bus1;
    GSource *bus_source1;

    GST_DEBUG ("Creating pipeline");

    /* Create our own GLib Main Context and make it the default one */
    context = g_main_context_new ();
    g_main_context_push_thread_default (context);
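    /* Note: this context is the thread-default for the current thread only;
     * both bus watches attached below are dispatched by the thread that runs
     * g_main_loop_run() at the bottom of this method. */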
    /* udp-to-app audio receive -- working fine on its own */
    audiopipeline = gst_parse_launch ("udpsrc port=5001 caps=\"audio/x-raw, rate=8000, format=S16LE, channels=2\" ! audioconvert ! audioresample ! autoaudiosink sync=false", &error);
    if (error) {
        gchar *message = g_strdup_printf ("Unable to build audio pipeline: %s", error->message);
        g_clear_error (&error);
        [self setUIMessage:message];
        g_free (message);
        return;
    }
    error = NULL;
    pipeline = gst_parse_launch ("udpsrc port=5000 ! application/x-rtp, payload=96 ! rtph264depay ! avdec_h264 ! autovideosink sync=false text-overlay=false", &error);
    if (error) {
        gchar *message = g_strdup_printf ("Unable to build video pipeline: %s", error->message);
        g_clear_error (&error);
        [self setUIMessage:message];
        g_free (message);
        return;
    }
    /**************** for video *****************/
    /* Set the pipeline to READY, so it can already accept a window handle */
    gst_element_set_state (pipeline, GST_STATE_READY);

    video_sink = gst_bin_get_by_interface (GST_BIN (pipeline), GST_TYPE_VIDEO_OVERLAY);
    if (!video_sink) {
        GST_ERROR ("Could not retrieve video sink");
        return;
    }
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (video_sink), (guintptr) (id) ui_video_view);
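    /* The sink stores ui_video_view as a raw pointer; nothing here retains
     * the view, so it must outlive the pipeline. */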
    /* Instruct the bus to emit signals for each received message,
     * and connect to the interesting signals */
    bus = gst_element_get_bus (pipeline);
    bus_source = gst_bus_create_watch (bus);
    g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
    g_source_attach (bus_source, context);
    g_source_unref (bus_source);
    g_signal_connect (G_OBJECT (bus), "message::error", (GCallback) error_cb, (__bridge void *) self);
    g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback) state_changed_cb, (__bridge void *) self);
    gst_object_unref (bus);
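    /* Note: (__bridge void *) self above does not retain self; if this object
     * is deallocated while the main loop is still dispatching bus messages,
     * error_cb / state_changed_cb are called with a dangling pointer -- a
     * plausible source of the garbage-pointer crash described above. */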
    /**************** for audio *****************/
    bus1 = gst_element_get_bus (audiopipeline);
    bus_source1 = gst_bus_create_watch (bus1);
    g_source_set_callback (bus_source1, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
    g_source_attach (bus_source1, context);
    g_source_unref (bus_source1);
    g_signal_connect (G_OBJECT (bus1), "message::error", (GCallback) error_cb_audio, (__bridge void *) self);
    g_signal_connect (G_OBJECT (bus1), "message::state-changed", (GCallback) state_changed_cb_audio, (__bridge void *) self);
    gst_object_unref (bus1);
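    /* The same lifetime caveat applies here: the audio callbacks also
     * receive an unretained pointer to self. */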
    /* Create a GLib Main Loop and set it to run */
    GST_DEBUG ("Entering main loop...");
    main_loop = g_main_loop_new (context, FALSE);
    [self check_initialization_complete];
    g_main_loop_run (main_loop);
    GST_DEBUG ("Exited main loop");
    g_main_loop_unref (main_loop);
    main_loop = NULL;
    /* Free resources; clear the ivars afterwards so no other method can
     * dereference the freed pipelines */
    g_main_context_pop_thread_default (context);
    g_main_context_unref (context);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    pipeline = NULL;
    gst_element_set_state (audiopipeline, GST_STATE_NULL);
    gst_object_unref (audiopipeline);
    audiopipeline = NULL;
    return;
}
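The only teardown is the cleanup at the end of app_function; shutting down looks roughly like this, assuming app_function runs on a dedicated thread as in the GStreamer iOS tutorials (gst_backend_deinit is a placeholder name):

-(void) gst_backend_deinit
{
    /* Quitting the main loop makes g_main_loop_run() in app_function return,
     * so the cleanup above runs on the streaming thread */
    if (main_loop)
        g_main_loop_quit (main_loop);
}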