How to pass frames with Gstreamermm
I need to decode some H.264 frames into a raw format (YUV420).
I receive packets containing the frames over a custom protocol.
How can I pass the received H.264 frames to the GStreamermm API for decoding?
So far I have read the tutorials (unfortunately they cover the C version of the GStreamer API), but I cannot find actual GStreamermm API documentation.
Please point me to any documentation or examples showing how to do this.
I was able to push data through a pipeline and retrieve the decoded raw video frames using the C++ bindings. Here is a stripped-down example:
struct WebPipeline {
    Glib::RefPtr<Gst::AppSrc> appsrc;
    Glib::RefPtr<Gst::AppSink> appdst;
    Glib::RefPtr<Gst::Element> h264parser;
    Glib::RefPtr<Gst::Element> avdec_h264;
    Glib::RefPtr<Gst::Element> jpegenc;
    Glib::RefPtr<Gst::Pipeline> pipe;
    bool accepts_data {false};
};
WebPipePtr ExampleClass::CreatePipeline() {
    auto web_pipe = std::make_shared<WebPipeline>();

    web_pipe->appsrc = Gst::AppSrc::create("web_appsrc");
    if (!web_pipe->appsrc) {
        throw std::runtime_error("Can't create AppSrc");
    }

    web_pipe->appdst = Gst::AppSink::create("web_appdst");
    if (!web_pipe->appdst) {
        throw std::runtime_error("Can't create AppSink");
    }

    web_pipe->h264parser = Gst::ElementFactory::create_element("h264parse", "h264_parser");
    if (!web_pipe->h264parser) {
        throw std::runtime_error("Can't create h264parse");
    }

    web_pipe->avdec_h264 = Gst::ElementFactory::create_element("avdec_h264", "avdec264");
    if (!web_pipe->avdec_h264) {
        throw std::runtime_error("Can't create avdec_h264");
    }

    web_pipe->jpegenc = Gst::ElementFactory::create_element("jpegenc");
    if (!web_pipe->jpegenc) {
        throw std::runtime_error("Can't create jpegenc");
    }

    web_pipe->pipe = Gst::Pipeline::create("websocket_pipe");
    if (!web_pipe->pipe) {
        throw std::runtime_error("Can't create pipeline");
    }

    // Deliver decoded samples through the new-sample signal instead of polling.
    web_pipe->appdst->property_emit_signals() = true;
    web_pipe->appdst->set_sync(false);
    web_pipe->appdst->signal_new_sample().connect(sigc::bind(sigc::mem_fun(this, &ExampleClass::PullFromPipe), web_pipe->appdst));

    // Let appsrc signal when to start and stop pushing (back-pressure).
    web_pipe->appsrc->property_emit_signals() = true;
    web_pipe->appsrc->signal_need_data().connect(sigc::bind(sigc::mem_fun(this, &ExampleClass::EnableAcceptance), web_pipe));
    web_pipe->appsrc->signal_enough_data().connect(sigc::bind(sigc::mem_fun(this, &ExampleClass::DisableAcceptance), web_pipe));

    web_pipe->pipe->add(web_pipe->appsrc)->add(web_pipe->h264parser)->add(web_pipe->avdec_h264)->add(web_pipe->jpegenc)->add(web_pipe->appdst);
    web_pipe->appsrc->link(web_pipe->h264parser)->link(web_pipe->avdec_h264)->link(web_pipe->jpegenc)->link(web_pipe->appdst);

    web_pipe->pipe->set_state(Gst::STATE_PLAYING);
    return web_pipe;
}
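One thing this snippet leaves out is the input caps. Depending on how the custom protocol frames the stream, appsrc may need caps describing the H.264 layout before h264parse can negotiate. A minimal sketch; the byte-stream format and access-unit alignment are assumptions about the protocol, so adjust them to match the actual stream:

// Inside CreatePipeline, before set_state(Gst::STATE_PLAYING).
// Assumed: Annex-B byte-stream, one access unit per pushed buffer.
auto caps = Gst::Caps::create_from_string(
    "video/x-h264, stream-format=(string)byte-stream, alignment=(string)au");
web_pipe->appsrc->property_caps() = caps;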
void ExampleClass::EnableAcceptance(guint, WebPipePtr pipe) {
    if (!pipe->accepts_data) {
        BOOST_LOG_SEV(GetLogger(), log::info) << "Begin push frames";
        pipe->accepts_data = true;
    }
}

void ExampleClass::DisableAcceptance(WebPipePtr pipe) {
    if (pipe->accepts_data) {
        BOOST_LOG_SEV(GetLogger(), log::info) << "Begin drop frames";
        pipe->accepts_data = false;
    }
}
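These two callbacks give simple back-pressure: the receive path consults accepts_data before handing a frame over. A hypothetical call site in the protocol handler might look like this (OnPacket and web_pipe_ are illustrative names, not part of the original code):

// Hypothetical receive handler for the custom protocol.
void ExampleClass::OnPacket(std::vector<uint8_t>&& frame) {
    if (PushToPipe(web_pipe_, std::move(frame)) == Gst::FLOW_CUSTOM_ERROR) {
        // appsrc reported enough-data; the frame was deliberately dropped.
    }
}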
Gst::FlowReturn ExampleClass::PushToPipe(WebPipePtr pipe, std::vector<uint8_t>&& frame) {
    if (!pipe->accepts_data) {
        return Gst::FLOW_CUSTOM_ERROR;
    }

    // Wrap the frame's memory without copying; 'destroy' releases frame_ref
    // once GStreamer is done with the buffer.
    auto buffer = Glib::wrap(gst_buffer_new_wrapped_full(
        static_cast<GstMemoryFlags>(GST_MEMORY_FLAG_READONLY | GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS),
        frame.data(),
        frame.size(),
        0,
        frame.size(),
        reinterpret_cast<gpointer>(frame_ref), // inner implementation of some sort of wrapper
        destroy));                             // lambda destructor

    buffer->set_pts(time); // 'time' is the frame's presentation timestamp
    return pipe->appsrc->push_buffer(buffer);
}
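If the zero-copy wrapping is more machinery than needed, the push can stay entirely in gstreamermm by copying each frame into a fresh buffer. A sketch, assuming one copy per frame is acceptable; the Buffer::map/Gst::MapInfo calls follow the gstreamermm wrapping of gst_buffer_map:

// Copying alternative: no custom destructor or lifetime tracking needed.
Gst::FlowReturn ExampleClass::PushCopyToPipe(WebPipePtr pipe, const std::vector<uint8_t>& frame) {
    auto buffer = Gst::Buffer::create(frame.size());
    Gst::MapInfo info;
    if (buffer->map(info, Gst::MAP_WRITE)) {
        std::memcpy(info.get_data(), frame.data(), frame.size()); // needs <cstring>
        buffer->unmap(info);
    }
    // Set the PTS here, as in PushToPipe, if timestamps matter downstream.
    return pipe->appsrc->push_buffer(buffer);
}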
Gst::FlowReturn ExampleClass::PullFromPipe(const Glib::RefPtr<Gst::AppSink>& appsink) {
    auto sample = appsink->pull_sample();
    if (!sample) {
        return Gst::FLOW_ERROR;
    }
    if (appsink->property_eos()) {
        return Gst::FLOW_EOS;
    }

    Gst::ClockTime timestamp = 0;
    {
        auto buffer = sample->get_buffer();
        if (!buffer) {
            throw std::runtime_error("Can't get buffer from sample");
        }
        timestamp = buffer->get_pts();
    }

    // process sample...
    return Gst::FLOW_OK;
}
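For completeness, the "process sample" step usually means mapping the buffer read-only and copying the bytes out before returning. A sketch of that body, under the same Gst::MapInfo assumption as above:

// Replace the "process sample" comment with something like this:
auto buffer = sample->get_buffer();
Gst::MapInfo info;
if (buffer->map(info, Gst::MAP_READ)) {
    // Copy out: the mapping is only valid until unmap()/sample release.
    std::vector<uint8_t> jpeg(info.get_data(), info.get_data() + info.get_size());
    buffer->unmap(info);
}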