GStreamer:ximagesink 在嵌入 GTK+ 窗口时不起作用
GStreamer: ximagesink doesn't work when embedded in GTK+ window
我正在学习如何在 Linux 平台上使用 GStreamer。我已经完成了 basic tutorials,我想我明白我在那里做了什么。
我现在正在尝试修改 GTK+ 集成教程 (#5),以便它使用实时视频管道 (v4l2src !videoconvert !ximagesink) 而不是 playbin。
当我 运行 它时,我的 GTK+ 应用程序 window 打开并且流进入播放状态,但我没有看到任何视频。如果我注释掉对 gst_video_overlay_set_window_handle
的调用,则 ximagesink 元素会打开另一个 window,我会在其中看到视频按预期工作。
所以我认为我对管道本身没有任何问题,但是关于如何在 GTK+ 绘图区域小部件中将内容显示为覆盖层,我还没有弄清楚。
这是我目前拥有的应用程序的精简版:
#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>
#include <gdk/gdk.h>
#include <gdk/gdkx.h>
// Application-wide state shared between GTK callbacks and the GStreamer bus handlers.
typedef struct CustomData
{
GstElement *pipeline; // Top-level pipeline holding source -> convert -> sink
GstElement *source; // v4l2src capture element
GstElement *convert; // videoconvert colorspace converter
GstElement *sink; // ximagesink video output element
GstState state; // Current state of the pipeline
} CustomData;
/* "realize" handler: hand the drawing area's native X window to the sink
 * so the video is rendered inside the GTK window. */
static void realize_cb(GtkWidget *widget, CustomData *data)
{
GdkWindow *gdk_win = gtk_widget_get_window(widget);
guintptr xid;

if (!gdk_window_ensure_native(gdk_win))
g_error("Couldn't create native window needed for GstVideoOverlay!");

xid = GDK_WINDOW_XID(gdk_win);
/* If the call below is removed, the sink opens its own window instead. */
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(data->sink), xid);
}
/* "delete-event" handler: quit the GTK main loop when the window is closed. */
static void delete_event_cb(GtkWidget *widget,
GdkEvent *event,
CustomData *data)
{
(void)widget;
(void)event;
(void)data;
gtk_main_quit();
}
/* "draw" handler: paint the widget black while no video is being rendered
 * (pipeline below PAUSED). Returns FALSE so GTK continues default drawing. */
static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
GtkAllocation alloc;

if (data->state >= GST_STATE_PAUSED)
return FALSE; /* the sink is drawing into this window; nothing to do */

gtk_widget_get_allocation(widget, &alloc);
cairo_set_source_rgb(cr, 0, 0, 0);
cairo_rectangle(cr, 0, 0, alloc.width, alloc.height);
cairo_fill(cr);
return FALSE;
}
/* Build the GTK user interface: a top-level window containing a vertical box
 * with the drawing area the video sink will render into.
 *
 * data: application state, passed as user data to the signal callbacks.
 *
 * Fix: removed the unused locals `controls`, `play_button`, `pause_button`,
 * `stop_button` (declared but never used; compilers warn with -Wunused). */
static void create_ui(CustomData *data)
{
GtkWidget *main_window; // The uppermost window, containing all others
GtkWidget *video_window; // The drawing area where the video will be shown
GtkWidget *main_box; // VBox to hold video window and (future) controls

main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
g_signal_connect(G_OBJECT(main_window), "delete-event",
G_CALLBACK(delete_event_cb), data);

video_window = gtk_drawing_area_new();
g_signal_connect(G_OBJECT(video_window), "realize",
G_CALLBACK(realize_cb), data);
g_signal_connect(G_OBJECT(video_window), "draw",
G_CALLBACK(draw_cb), data);

main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE, TRUE, 0);
gtk_container_add(GTK_CONTAINER(main_window), main_box);
gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);
gtk_widget_show_all(main_window);
}
/* Bus "message::error" handler: print the error and its debug info, free
 * both, and quit the main loop.
 *
 * Fixes: removed the stray `;` after the closing brace (an empty file-scope
 * declaration, rejected by strict C compilers); corrected the typo in the
 * printed message ("information;" -> "information:"). */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GError *err;
gchar *debug_info;

gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n",
GST_OBJECT_NAME(msg->src), err->message);
g_printerr("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
gtk_main_quit();
}
/* Bus "message::state-changed" handler: record the pipeline's new state in
 * data->state (used by draw_cb) and log it. State changes from child
 * elements are ignored. */
static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GstState old_state, new_state, pending_state;

gst_message_parse_state_changed(msg, &old_state, &new_state,
&pending_state);
if (GST_MESSAGE_SRC(msg) != GST_OBJECT(data->pipeline))
return; /* only track the pipeline itself */

data->state = new_state;
g_print("State set to %s:\n", gst_element_state_get_name(new_state));
}
int main(int argc, char *argv[])
{
CustomData data = {};
GstBus *bus;
gtk_init(&argc, &argv);
gst_init(&argc, &argv);
data.source = gst_element_factory_make("v4l2src", "source");
data.convert = gst_element_factory_make("videoconvert", "convert");
data.sink = gst_element_factory_make("ximagesink", "sink");
data.pipeline = gst_pipeline_new("pipeline");
gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
data.sink, NULL);
gst_element_link_many(data.source, data.convert, data.sink, NULL);
g_object_set(data.source, "device", "/dev/video0", NULL);
create_ui(&data);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error",
(GCallback)error_cb, &data);
g_signal_connect(G_OBJECT(bus), "message::state-changed",
(GCallback)state_changed_cb, &data);
gst_object_unref(bus);
gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
gtk_main();
gst_element_set_state(data.pipeline, GST_STATE_NULL);
gst_object_unref(data.pipeline);
return 0;
}
非常感谢在此方面的任何帮助。
我后来从一位不在本讨论组的人那里得到了答案,内容如下:在 realize X window 的时刻就把视频接收器绑定到叠加层可能为时过早,而且真正需要绑定的那个 GStreamer 元素也未必是你亲手创建的那个(例如,它可能是由你创建的接收器元素在内部创建的)。
为了解决这个问题,支持叠加层的 GStreamer 接收器会在合适的时机通过总线同步机制发出明确的通知。应用程序应注册一个总线同步处理程序,并在收到相应的 prepare-window-handle 消息时,将 X window 句柄绑定到该消息的来源元素上。
见https://gstreamer.freedesktop.org/documentation/video/gstvideooverlay.html?gi-language=c
这是有效的更新代码(注意对 realize_cb
的更改和新的 bus_sync_handler
函数):
#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>
#include <gdk/gdk.h>
#include <gdk/gdkx.h>
// Application-wide state shared between GTK callbacks and the GStreamer bus handlers.
typedef struct CustomData
{
GstElement *pipeline; // Top-level pipeline holding source -> convert -> sink
GstElement *source; // v4l2src capture element
GstElement *convert; // videoconvert colorspace converter
GstElement *sink; // ximagesink video output element
GstState state; // Current state of the pipeline
guintptr video_window_handle; // XID of the drawing area, set in realize_cb; 0 until realized
} CustomData;
/* "realize" handler: store the drawing area's native X window ID so the bus
 * sync handler can hand it to the overlay when the sink asks for it. */
static void realize_cb(GtkWidget *widget, CustomData *data)
{
GdkWindow *gdk_win = gtk_widget_get_window(widget);

if (!gdk_window_ensure_native(gdk_win))
{
g_error("Couldn't create native window needed for GstVideoOverlay!");
}
data->video_window_handle = GDK_WINDOW_XID(gdk_win);
}
/* Synchronous bus handler, invoked from the streaming thread.
 * When an overlay-capable sink posts its prepare-window-handle message,
 * bind the stored X window ID to the message's source element (which may be
 * an element created internally by the sink, not the one we created).
 *
 * Returns GST_BUS_PASS for messages we do not consume, GST_BUS_DROP once the
 * prepare-window-handle message has been handled.
 *
 * Fix: the original fell off the end of this non-void function without
 * returning a value — undefined behavior. Per the GstVideoOverlay docs, the
 * handled message is unreffed and GST_BUS_DROP is returned so it never
 * reaches the asynchronous bus watchers. */
static GstBusSyncReply bus_sync_handler(GstBus *bus,
GstMessage *message,
CustomData *data)
{
// Ignore all but prepare-window-handle messages
if (!gst_is_video_overlay_prepare_window_handle_message(message))
{
return GST_BUS_PASS;
}
if (data->video_window_handle)
{
g_print("About to assign window to overlay\n");
gst_video_overlay_set_window_handle(
GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(message)),
data->video_window_handle);
}
else
{
g_warning("Should have gotten a video window handle by now\n");
}
gst_message_unref(message);
return GST_BUS_DROP;
}
/* "delete-event" handler: quit the GTK main loop when the window is closed. */
static void delete_event_cb(GtkWidget *widget,
GdkEvent *event,
CustomData *data)
{
(void)widget;
(void)event;
(void)data;
gtk_main_quit();
}
/* "draw" handler: paint the widget black while no video is being rendered
 * (pipeline below PAUSED). Returns FALSE so GTK continues default drawing. */
static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
GtkAllocation alloc;

if (data->state >= GST_STATE_PAUSED)
return FALSE; /* the sink is drawing into this window; nothing to do */

gtk_widget_get_allocation(widget, &alloc);
cairo_set_source_rgb(cr, 0, 0, 0);
cairo_rectangle(cr, 0, 0, alloc.width, alloc.height);
cairo_fill(cr);
return FALSE;
}
/* Build the GTK user interface: a top-level window containing a vertical box
 * with the drawing area the video sink will render into.
 *
 * data: application state, passed as user data to the signal callbacks.
 *
 * Fix: removed the unused locals `controls`, `play_button`, `pause_button`,
 * `stop_button` (declared but never used; compilers warn with -Wunused). */
static void create_ui(CustomData *data)
{
GtkWidget *main_window; // The uppermost window, containing all others
GtkWidget *video_window; // The drawing area where the video will be shown
GtkWidget *main_box; // VBox to hold video window and (future) controls

main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
g_signal_connect(G_OBJECT(main_window), "delete-event",
G_CALLBACK(delete_event_cb), data);

video_window = gtk_drawing_area_new();
g_signal_connect(G_OBJECT(video_window), "realize",
G_CALLBACK(realize_cb), data);
g_signal_connect(G_OBJECT(video_window), "draw",
G_CALLBACK(draw_cb), data);

main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE, TRUE, 0);
gtk_container_add(GTK_CONTAINER(main_window), main_box);
gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);
gtk_widget_show_all(main_window);
}
/* Bus "message::error" handler: print the error and its debug info, free
 * both, and quit the main loop.
 *
 * Fixes: removed the stray `;` after the closing brace (an empty file-scope
 * declaration, rejected by strict C compilers); corrected the typo in the
 * printed message ("information;" -> "information:"). */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GError *err;
gchar *debug_info;

gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n",
GST_OBJECT_NAME(msg->src), err->message);
g_printerr("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
gtk_main_quit();
}
/* Bus "message::state-changed" handler: record the pipeline's new state in
 * data->state (used by draw_cb) and log it. State changes from child
 * elements are ignored. */
static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GstState old_state, new_state, pending_state;

gst_message_parse_state_changed(msg, &old_state, &new_state,
&pending_state);
if (GST_MESSAGE_SRC(msg) != GST_OBJECT(data->pipeline))
return; /* only track the pipeline itself */

data->state = new_state;
g_print("State set to %s:\n", gst_element_state_get_name(new_state));
}
/* Entry point: build the v4l2src ! videoconvert ! ximagesink pipeline,
 * install the bus sync handler (which binds the overlay window on
 * prepare-window-handle), create the UI, and run the GTK main loop.
 *
 * Fixes: `= {}` is only valid in C23, replaced with the portable `{0}`;
 * element creation and linking results are now checked instead of ignored. */
int main(int argc, char *argv[])
{
CustomData data = {0};
GstBus *bus;

gtk_init(&argc, &argv);
gst_init(&argc, &argv);

data.source = gst_element_factory_make("v4l2src", "source");
data.convert = gst_element_factory_make("videoconvert", "convert");
data.sink = gst_element_factory_make("ximagesink", "sink");
data.pipeline = gst_pipeline_new("pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink)
{
g_printerr("Not all elements could be created.\n");
return -1;
}

gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
data.sink, NULL);
if (!gst_element_link_many(data.source, data.convert, data.sink, NULL))
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
return -1;
}
g_object_set(data.source, "device", "/dev/video0", NULL);

create_ui(&data);

bus = gst_element_get_bus(data.pipeline);
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)bus_sync_handler,
&data, NULL);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error",
(GCallback)error_cb, &data);
g_signal_connect(G_OBJECT(bus), "message::state-changed",
(GCallback)state_changed_cb, &data);
gst_object_unref(bus);

gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
gtk_main();

gst_element_set_state(data.pipeline, GST_STATE_NULL);
gst_object_unref(data.pipeline);
return 0;
}
我正在学习如何在 Linux 平台上使用 GStreamer。我已经完成了 basic tutorials,我想我明白我在那里做了什么。
我现在正在尝试修改 GTK+ 集成教程 (#5),以便它使用实时视频管道 (v4l2src !videoconvert !ximagesink) 而不是 playbin。
当我 运行 它时,我的 GTK+ 应用程序 window 打开并且流进入播放状态,但我没有看到任何视频。如果我注释掉对 gst_video_overlay_set_window_handle
的调用,则 ximagesink 元素会打开另一个 window,我会在其中看到视频按预期工作。
所以我认为我对管道本身没有任何问题,但是关于如何在 GTK+ 绘图区域小部件中将内容显示为覆盖层,我还没有弄清楚。
这是我目前拥有的应用程序的精简版:
#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>
#include <gdk/gdk.h>
#include <gdk/gdkx.h>
// Application-wide state shared between GTK callbacks and the GStreamer bus handlers.
typedef struct CustomData
{
GstElement *pipeline; // Top-level pipeline holding source -> convert -> sink
GstElement *source; // v4l2src capture element
GstElement *convert; // videoconvert colorspace converter
GstElement *sink; // ximagesink video output element
GstState state; // Current state of the pipeline
} CustomData;
/* "realize" handler: hand the drawing area's native X window to the sink
 * so the video is rendered inside the GTK window. */
static void realize_cb(GtkWidget *widget, CustomData *data)
{
GdkWindow *gdk_win = gtk_widget_get_window(widget);
guintptr xid;

if (!gdk_window_ensure_native(gdk_win))
g_error("Couldn't create native window needed for GstVideoOverlay!");

xid = GDK_WINDOW_XID(gdk_win);
/* If the call below is removed, the sink opens its own window instead. */
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(data->sink), xid);
}
/* "delete-event" handler: quit the GTK main loop when the window is closed. */
static void delete_event_cb(GtkWidget *widget,
GdkEvent *event,
CustomData *data)
{
(void)widget;
(void)event;
(void)data;
gtk_main_quit();
}
/* "draw" handler: paint the widget black while no video is being rendered
 * (pipeline below PAUSED). Returns FALSE so GTK continues default drawing. */
static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
GtkAllocation alloc;

if (data->state >= GST_STATE_PAUSED)
return FALSE; /* the sink is drawing into this window; nothing to do */

gtk_widget_get_allocation(widget, &alloc);
cairo_set_source_rgb(cr, 0, 0, 0);
cairo_rectangle(cr, 0, 0, alloc.width, alloc.height);
cairo_fill(cr);
return FALSE;
}
/* Build the GTK user interface: a top-level window containing a vertical box
 * with the drawing area the video sink will render into.
 *
 * data: application state, passed as user data to the signal callbacks.
 *
 * Fix: removed the unused locals `controls`, `play_button`, `pause_button`,
 * `stop_button` (declared but never used; compilers warn with -Wunused). */
static void create_ui(CustomData *data)
{
GtkWidget *main_window; // The uppermost window, containing all others
GtkWidget *video_window; // The drawing area where the video will be shown
GtkWidget *main_box; // VBox to hold video window and (future) controls

main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
g_signal_connect(G_OBJECT(main_window), "delete-event",
G_CALLBACK(delete_event_cb), data);

video_window = gtk_drawing_area_new();
g_signal_connect(G_OBJECT(video_window), "realize",
G_CALLBACK(realize_cb), data);
g_signal_connect(G_OBJECT(video_window), "draw",
G_CALLBACK(draw_cb), data);

main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE, TRUE, 0);
gtk_container_add(GTK_CONTAINER(main_window), main_box);
gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);
gtk_widget_show_all(main_window);
}
/* Bus "message::error" handler: print the error and its debug info, free
 * both, and quit the main loop.
 *
 * Fixes: removed the stray `;` after the closing brace (an empty file-scope
 * declaration, rejected by strict C compilers); corrected the typo in the
 * printed message ("information;" -> "information:"). */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GError *err;
gchar *debug_info;

gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n",
GST_OBJECT_NAME(msg->src), err->message);
g_printerr("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
gtk_main_quit();
}
/* Bus "message::state-changed" handler: record the pipeline's new state in
 * data->state (used by draw_cb) and log it. State changes from child
 * elements are ignored. */
static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GstState old_state, new_state, pending_state;

gst_message_parse_state_changed(msg, &old_state, &new_state,
&pending_state);
if (GST_MESSAGE_SRC(msg) != GST_OBJECT(data->pipeline))
return; /* only track the pipeline itself */

data->state = new_state;
g_print("State set to %s:\n", gst_element_state_get_name(new_state));
}
int main(int argc, char *argv[])
{
CustomData data = {};
GstBus *bus;
gtk_init(&argc, &argv);
gst_init(&argc, &argv);
data.source = gst_element_factory_make("v4l2src", "source");
data.convert = gst_element_factory_make("videoconvert", "convert");
data.sink = gst_element_factory_make("ximagesink", "sink");
data.pipeline = gst_pipeline_new("pipeline");
gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
data.sink, NULL);
gst_element_link_many(data.source, data.convert, data.sink, NULL);
g_object_set(data.source, "device", "/dev/video0", NULL);
create_ui(&data);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error",
(GCallback)error_cb, &data);
g_signal_connect(G_OBJECT(bus), "message::state-changed",
(GCallback)state_changed_cb, &data);
gst_object_unref(bus);
gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
gtk_main();
gst_element_set_state(data.pipeline, GST_STATE_NULL);
gst_object_unref(data.pipeline);
return 0;
}
非常感谢在此方面的任何帮助。
我后来从一位不在本讨论组的人那里得到了答案,内容如下:在 realize X window 的时刻就把视频接收器绑定到叠加层可能为时过早,而且真正需要绑定的那个 GStreamer 元素也未必是你亲手创建的那个(例如,它可能是由你创建的接收器元素在内部创建的)。
为了解决这个问题,支持叠加层的 GStreamer 接收器会在合适的时机通过总线同步机制发出明确的通知。应用程序应注册一个总线同步处理程序,并在收到相应的 prepare-window-handle 消息时,将 X window 句柄绑定到该消息的来源元素上。
见https://gstreamer.freedesktop.org/documentation/video/gstvideooverlay.html?gi-language=c
这是有效的更新代码(注意对 realize_cb
的更改和新的 bus_sync_handler
函数):
#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>
#include <gdk/gdk.h>
#include <gdk/gdkx.h>
// Application-wide state shared between GTK callbacks and the GStreamer bus handlers.
typedef struct CustomData
{
GstElement *pipeline; // Top-level pipeline holding source -> convert -> sink
GstElement *source; // v4l2src capture element
GstElement *convert; // videoconvert colorspace converter
GstElement *sink; // ximagesink video output element
GstState state; // Current state of the pipeline
guintptr video_window_handle; // XID of the drawing area, set in realize_cb; 0 until realized
} CustomData;
/* "realize" handler: store the drawing area's native X window ID so the bus
 * sync handler can hand it to the overlay when the sink asks for it. */
static void realize_cb(GtkWidget *widget, CustomData *data)
{
GdkWindow *gdk_win = gtk_widget_get_window(widget);

if (!gdk_window_ensure_native(gdk_win))
{
g_error("Couldn't create native window needed for GstVideoOverlay!");
}
data->video_window_handle = GDK_WINDOW_XID(gdk_win);
}
/* Synchronous bus handler, invoked from the streaming thread.
 * When an overlay-capable sink posts its prepare-window-handle message,
 * bind the stored X window ID to the message's source element (which may be
 * an element created internally by the sink, not the one we created).
 *
 * Returns GST_BUS_PASS for messages we do not consume, GST_BUS_DROP once the
 * prepare-window-handle message has been handled.
 *
 * Fix: the original fell off the end of this non-void function without
 * returning a value — undefined behavior. Per the GstVideoOverlay docs, the
 * handled message is unreffed and GST_BUS_DROP is returned so it never
 * reaches the asynchronous bus watchers. */
static GstBusSyncReply bus_sync_handler(GstBus *bus,
GstMessage *message,
CustomData *data)
{
// Ignore all but prepare-window-handle messages
if (!gst_is_video_overlay_prepare_window_handle_message(message))
{
return GST_BUS_PASS;
}
if (data->video_window_handle)
{
g_print("About to assign window to overlay\n");
gst_video_overlay_set_window_handle(
GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(message)),
data->video_window_handle);
}
else
{
g_warning("Should have gotten a video window handle by now\n");
}
gst_message_unref(message);
return GST_BUS_DROP;
}
/* "delete-event" handler: quit the GTK main loop when the window is closed. */
static void delete_event_cb(GtkWidget *widget,
GdkEvent *event,
CustomData *data)
{
(void)widget;
(void)event;
(void)data;
gtk_main_quit();
}
/* "draw" handler: paint the widget black while no video is being rendered
 * (pipeline below PAUSED). Returns FALSE so GTK continues default drawing. */
static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
GtkAllocation alloc;

if (data->state >= GST_STATE_PAUSED)
return FALSE; /* the sink is drawing into this window; nothing to do */

gtk_widget_get_allocation(widget, &alloc);
cairo_set_source_rgb(cr, 0, 0, 0);
cairo_rectangle(cr, 0, 0, alloc.width, alloc.height);
cairo_fill(cr);
return FALSE;
}
/* Build the GTK user interface: a top-level window containing a vertical box
 * with the drawing area the video sink will render into.
 *
 * data: application state, passed as user data to the signal callbacks.
 *
 * Fix: removed the unused locals `controls`, `play_button`, `pause_button`,
 * `stop_button` (declared but never used; compilers warn with -Wunused). */
static void create_ui(CustomData *data)
{
GtkWidget *main_window; // The uppermost window, containing all others
GtkWidget *video_window; // The drawing area where the video will be shown
GtkWidget *main_box; // VBox to hold video window and (future) controls

main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
g_signal_connect(G_OBJECT(main_window), "delete-event",
G_CALLBACK(delete_event_cb), data);

video_window = gtk_drawing_area_new();
g_signal_connect(G_OBJECT(video_window), "realize",
G_CALLBACK(realize_cb), data);
g_signal_connect(G_OBJECT(video_window), "draw",
G_CALLBACK(draw_cb), data);

main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE, TRUE, 0);
gtk_container_add(GTK_CONTAINER(main_window), main_box);
gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);
gtk_widget_show_all(main_window);
}
/* Bus "message::error" handler: print the error and its debug info, free
 * both, and quit the main loop.
 *
 * Fixes: removed the stray `;` after the closing brace (an empty file-scope
 * declaration, rejected by strict C compilers); corrected the typo in the
 * printed message ("information;" -> "information:"). */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GError *err;
gchar *debug_info;

gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n",
GST_OBJECT_NAME(msg->src), err->message);
g_printerr("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
gtk_main_quit();
}
/* Bus "message::state-changed" handler: record the pipeline's new state in
 * data->state (used by draw_cb) and log it. State changes from child
 * elements are ignored. */
static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
GstState old_state, new_state, pending_state;

gst_message_parse_state_changed(msg, &old_state, &new_state,
&pending_state);
if (GST_MESSAGE_SRC(msg) != GST_OBJECT(data->pipeline))
return; /* only track the pipeline itself */

data->state = new_state;
g_print("State set to %s:\n", gst_element_state_get_name(new_state));
}
/* Entry point: build the v4l2src ! videoconvert ! ximagesink pipeline,
 * install the bus sync handler (which binds the overlay window on
 * prepare-window-handle), create the UI, and run the GTK main loop.
 *
 * Fixes: `= {}` is only valid in C23, replaced with the portable `{0}`;
 * element creation and linking results are now checked instead of ignored. */
int main(int argc, char *argv[])
{
CustomData data = {0};
GstBus *bus;

gtk_init(&argc, &argv);
gst_init(&argc, &argv);

data.source = gst_element_factory_make("v4l2src", "source");
data.convert = gst_element_factory_make("videoconvert", "convert");
data.sink = gst_element_factory_make("ximagesink", "sink");
data.pipeline = gst_pipeline_new("pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink)
{
g_printerr("Not all elements could be created.\n");
return -1;
}

gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
data.sink, NULL);
if (!gst_element_link_many(data.source, data.convert, data.sink, NULL))
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
return -1;
}
g_object_set(data.source, "device", "/dev/video0", NULL);

create_ui(&data);

bus = gst_element_get_bus(data.pipeline);
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)bus_sync_handler,
&data, NULL);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error",
(GCallback)error_cb, &data);
g_signal_connect(G_OBJECT(bus), "message::state-changed",
(GCallback)state_changed_cb, &data);
gst_object_unref(bus);

gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
gtk_main();

gst_element_set_state(data.pipeline, GST_STATE_NULL);
gst_object_unref(data.pipeline);
return 0;
}