Using qtgstreamer with Qt and Raspberry Pi
I am having trouble decoding a video stream from a raspberry pi to a laptop running a Qt GUI.
My pipeline on the pi is (using the adafruit raspberry pi camera):
raspivid -t 999999 -h 480 -w 640 -fps 25 -hf -b 2000000 -o - | gst-launch-1.0 -v fdsrc ! h264parse ! rtph264pay config-interval=1 pt=96 ! gdppay ! tcpserversink host=10.0.0.128 port=5000
and simply using a viewer on the laptop with the pipeline:
gst-launch-1.0 -v tcpclientsrc host=10.0.0.128 port=5000 ! gdpdepay ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink sync=false
While I haven't measured the frame rate, it delivers very nice color video at a decent rate.
When I use qtgstreamer in my GUI application (source width = 640, height = 480, so I assumed an 8-bit RGB image), I get a buffer size of 460800 in the code below, where I expected 921600 (640 × 480 × 3 bytes). If I use QImage::Format_RGB888, the program crashes because the image buffer is too small. If I use QImage::Format_Index8, it runs fine and displays video in my GUI, except that it is black and white. Does anyone have an idea? Here is my relevant code:
bool CameraStreamer::initStreamer()
{
    gst_init (NULL, NULL);

    // Equivalent of:
    // gst-launch-1.0 -v tcpclientsrc host=10.0.0.128 port=5000 ! gdpdepay ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink sync=false
    pipeline     = gst_pipeline_new ("Camera");
    source       = gst_element_factory_make ("tcpclientsrc", "cam-source");
    depay        = gst_element_factory_make ("gdpdepay",     "depay");
    rtpdepay     = gst_element_factory_make ("rtph264depay", "rtp-depay");
    decoder      = gst_element_factory_make ("avdec_h264",   "videodecoder");
    videoconvert = gst_element_factory_make ("videoconvert", "video-convert");
    sink         = gst_element_factory_make ("appsink",      "video-output");

    if (!pipeline || !source || !depay || !rtpdepay || !decoder || !videoconvert || !sink) {
        qDebug() << "One element could not be created. Exiting.";
        return false;
    }

    // Receive decoded frames through the appsink's new_sample callback.
    callbacks.eos         = NULL;
    callbacks.new_sample  = newBufferCallback;
    callbacks.new_preroll = NULL;
    gst_app_sink_set_callbacks ((GstAppSink *) sink, &callbacks, this, NULL);

    g_object_set (G_OBJECT (source), "port", 5001, NULL);
    g_object_set (G_OBJECT (source), "host", "10.0.0.128", NULL);

    gst_bin_add_many (GST_BIN (pipeline),
                      source, depay, rtpdepay, decoder, videoconvert, sink, NULL);
    if (!gst_element_link_many (source, depay, rtpdepay, decoder, videoconvert, sink, NULL))
        g_warning ("Failed to link the main pipeline elements.");

    GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_printerr ("Unable to set the pipeline to the playing state.");
        gst_object_unref (pipeline);
        return false;
    }
    return true;
}
GstFlowReturn CameraStreamer::newBufferCallback(GstAppSink *app_sink, void *obj)
{
    if (app_sink == NULL)
    {
        qDebug() << "app_sink is NULL";
        return GST_FLOW_ERROR;
    }

    GstSample* sample = gst_app_sink_pull_sample(app_sink);
    if (!sample)
    {
        qDebug() << "Error retrieving buffer...";
        return GST_FLOW_ERROR;
    }

    // Read the frame dimensions from the sample caps.
    GstCaps* caps = gst_sample_get_caps (sample);
    if (!caps) {
        qDebug() << "could not get snapshot format";
        exit (-1);
    }
    gint width, height;
    GstStructure* s = gst_caps_get_structure (caps, 0);
    int res = gst_structure_get_int (s, "width", &width)
              && gst_structure_get_int (s, "height", &height);
    if (!res) {
        qDebug() << "could not get snapshot dimension";
        exit (-1);
    }

    GstMapInfo map;
    GstBuffer *buffer = gst_sample_get_buffer (sample);
    qDebug() << "size: " << gst_buffer_get_size(buffer);  // prints 460800, not the expected 921600
    gst_buffer_map (buffer, &map, GST_MAP_READ);

    // Crashes with Format_RGB888: the mapped buffer is too small for 24-bit RGB.
    QImage img(map.data, width, height, QImage::Format_RGB888);
    img = img.copy();
    ((CameraStreamer*)obj)->emitNewImage(img);

    gst_buffer_unmap (buffer, &map);
    gst_sample_unref (sample);
    return GST_FLOW_OK;
}
If it is I420, the layout is:
460800 = 640 * 480 + 320 * 240 + 320 * 240
The luma plane Y is 640 * 480, and the chroma planes U and V are each 320 * 240. The U and V planes therefore have a smaller resolution; take that into account when looping over these arrays.
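As a minimal sketch (my illustration, not part of the original answer), assuming a tightly packed I420 buffer with no row padding (which holds for a 640-pixel-wide frame), the three planes can be located like this:

// Hypothetical helper: splits a packed I420 buffer into its three planes.
// Assumes no stride padding; real GStreamer buffers may carry stride and
// offset information (GstVideoInfo) that should be honored instead.
struct I420Planes {
    const unsigned char *y;  // width   x height    luma
    const unsigned char *u;  // width/2 x height/2  chroma
    const unsigned char *v;  // width/2 x height/2  chroma
};

static I420Planes splitI420(const unsigned char *data, int width, int height)
{
    I420Planes p;
    p.y = data;                              // 640 * 480 = 307200 bytes
    p.u = p.y + width * height;              // 320 * 240 =  76800 bytes
    p.v = p.u + (width / 2) * (height / 2);  // 320 * 240 =  76800 bytes
    return p;
}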
The color conversion formulas, from Wikipedia:
R = Y + 1.140 * V
G = Y - 0.395 * U - 0.581 * V
B = Y + 2.032 * U
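For reference, a naive per-pixel conversion along those lines might look like the following (an unoptimized sketch of mine using the hypothetical splitI420 helper above, not the code from the repository). Note that for 8-bit data, U and V must first be re-centered around zero by subtracting 128, and the results clamped to [0, 255]:

#include <algorithm>  // std::min, std::max

static inline unsigned char clamp8(float v)
{
    return (unsigned char) std::max(0.0f, std::min(255.0f, v));
}

// Convert the pixel at (x, y) of an I420 frame to RGB with the formulas above.
// The chroma planes are subsampled 2x2, hence the halved indices.
static void i420PixelToRgb(const I420Planes &p, int width, int x, int y,
                           unsigned char rgb[3])
{
    float Y = p.y[y * width + x];
    float U = p.u[(y / 2) * (width / 2) + (x / 2)] - 128.0f;
    float V = p.v[(y / 2) * (width / 2) + (x / 2)] - 128.0f;

    rgb[0] = clamp8(Y + 1.140f * V);               // R
    rgb[1] = clamp8(Y - 0.395f * U - 0.581f * V);  // G
    rgb[2] = clamp8(Y + 2.032f * U);               // B
}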
So, after a lot of time and googling, I found the answer. I ended up using opencv to do the actual color conversion. Here is my approach (continuing from the code above):
GstBuffer *buffer = gst_sample_get_buffer (sample);
gst_buffer_map (buffer, &map, GST_MAP_READ);

// Wrap the raw I420 buffer: one 8-bit channel, height * 1.5 rows
// (the Y plane with the two half-size chroma planes stacked underneath).
cv::Mat temp_mat(cv::Size(width, height + height / 2), CV_8UC1, (char*)map.data);

// Let OpenCV do the I420 -> RGB conversion.
cv::Mat result(height, width, CV_8UC3);
cv::cvtColor(temp_mat, result, CV_YUV2RGB_I420, 3);

// Copy into a QImage. This flat memcpy assumes no scanline padding, which
// holds here because a 640-pixel RGB888 row is already 4-byte aligned.
QImage rgb(result.size().width, result.size().height, QImage::Format_RGB888);
memcpy(rgb.scanLine(0), (unsigned char*)result.data,
       rgb.width() * rgb.height() * result.channels());

((CameraStreamer*)obj)->emitNewImage(rgb);
gst_buffer_unmap (buffer, &map);
gst_sample_unref (sample);
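A side note (my suggestion, not part of the original answer): since the pipeline already contains videoconvert, the manual conversion can usually be avoided altogether by pinning the appsink's caps to RGB, so that videoconvert negotiates RGB output instead of I420. A minimal sketch, to be placed in initStreamer() before linking the elements:

// Sketch: force the appsink to negotiate RGB so the mapped buffer holds
// width * height * 3 bytes and QImage::Format_RGB888 works directly.
GstCaps *rgbCaps = gst_caps_new_simple ("video/x-raw",
                                        "format", G_TYPE_STRING, "RGB",
                                        NULL);
gst_app_sink_set_caps (GST_APP_SINK (sink), rgbCaps);
gst_caps_unref (rgbCaps);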
I will post more information on my application's git repository, but I thought this might help others.
Here is the link: camera streamer example