How to receive a byte stream by using GStreamer with the Python subprocess module or the gst-launch-1.0 command?

I want to receive a byte stream by using GStreamer with the Python subprocess module. Right now I can successfully pull the byte stream with FFmpeg, as shown below:

import cv2
import numpy as np
import subprocess as sp


height = 714
width = 420
rtsp_url = 'rtsp://127.0.0.1:8554/video'

# command
command = ['ffmpeg',
            '-i', rtsp_url,
            '-f', 'rawvideo',
            '-s', '{}x{}'.format(width, height),
            '-pix_fmt', 'bgr24',
            '-fflags', 'nobuffer',
            '-']

p = sp.Popen(command, stdout=sp.PIPE, bufsize=10**8)

while True:
    raw_image = p.stdout.read(width*height*3)
    image = np.frombuffer(raw_image, dtype='uint8')  # np.fromstring is deprecated
    image = image.reshape((height,width,3)).copy()
    cv2.imshow('image', image)
    key = cv2.waitKey(20)

I want to use a GStreamer command instead of FFmpeg. So far, I have managed to write the byte stream to a file with the gst-launch-1.0 command line:

gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/video latency=0 drop-on-latency=true ! rtph264depay ! video/x-h264, stream-format='byte-stream' ! filesink location=/home/name/stdout

But it cannot output the byte stream to a pipe, so the terminal does not show the byte stream, unlike the FFmpeg command. How do I change this command so that it outputs the byte stream to a pipe that I can read from? Thanks for taking the time to answer!

Here is the code of the RTSP streaming side.

import cv2
import time
import subprocess as sp
import numpy as np


rtsp_url = 'rtsp://127.0.0.1:8554/video'
video_path = r'test.mp4'
cap = cv2.VideoCapture(video_path)

# Get video information
fps = int(cap.get(cv2.CAP_PROP_FPS))
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
print('fps={}'.format(fps))

# command
command = ['ffmpeg',
            '-re',
            '-y',
            '-stream_loop', '-1',
            '-f', 'rawvideo',
            '-vcodec', 'rawvideo',
            '-pix_fmt', 'bgr24',
            '-s', "{}x{}".format(width, height),
            '-r', str(fps),
            '-i', '-',
            '-c:v', 'libx264',
            '-pix_fmt', 'yuv420p',
            '-preset', 'ultrafast',
            # '-flags2', 'local_header',
            '-bsf:v', 'dump_extra=freq=k',  # Pass the filter spec as a plain string (no nested quotes in an argument list)
            '-keyint_min', '60',
            '-g', '60',
            '-sc_threshold', '0', 
            '-f', 'rtsp',
            '-rtsp_transport', 'tcp',
            '-muxdelay', '0.1', 
            rtsp_url]

p = sp.Popen(command, stdin=sp.PIPE)

cnt = 0
t_start = time.time()
while (cap.isOpened()):
    t_cur = time.time()-t_start

    ret, frame = cap.read()
    if not ret:
        cnt += 1
        print("count: {}".format(cnt))
        cap = cv2.VideoCapture(video_path)
        continue

    p.stdin.write(frame.tobytes())

    cv2.imshow('real_time', frame)

    key = cv2.waitKey(20)
    if key == 27:
        p.terminate()
        break

I have managed to create a working example for Linux.

I can't simulate an RTSP camera, so I used an MP4 file as input.

Creating the MP4 input file with the FFmpeg CLI from Python (for testing):

sp.run(shlex.split(f'ffmpeg -y -f lavfi -i testsrc=size={width}x{height}:rate=25:duration=100 -vcodec libx264 -pix_fmt yuv420p {input_file_name}'))
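
For reference, a similar synthetic test file could also be created with GStreamer itself. The sketch below is an assumption on my part (it is not part of the original workflow) and relies on the standard videotestsrc, x264enc, h264parse and mp4mux elements being available (x264enc ships with gst-plugins-ugly):

import shlex
import subprocess as sp

width = 714
height = 420

# 100 seconds at 25 fps -> 2500 buffers, matching the FFmpeg testsrc command above.
sp.run(shlex.split(
    f'gst-launch-1.0 videotestsrc num-buffers=2500 '
    f'! video/x-raw,width={width},height={height},framerate=25/1 '
    f'! videoconvert ! x264enc ! h264parse ! mp4mux ! filesink location=input.mp4'))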

The GStreamer command is:

p = sp.Popen(shlex.split(f'{gstreamer_exe} --quiet filesrc location={input_file_name} ! qtdemux ! video/x-h264 ! avdec_h264 ! videoconvert ! capsfilter caps="video/x-raw, format=BGR" ! filesink location={stdout_file_name}'), stdout=sp.PIPE)

  • --quiet is used because GStreamer prints its messages to stdout.
  • filesrc location=... reads the MP4 input - replace it with an RTSP source pipeline for your use case.
  • videoconvert ! capsfilter caps="video/x-raw, format=BGR" converts the video format to raw BGR.
  • filesink location=/dev/stdout redirects the output to stdout (on Linux); a small fdsink variant is sketched right after this list.
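
Note that filesink location=/dev/stdout is Linux-specific, while fdsink writes to a file descriptor (its fd property defaults to 1, i.e. stdout) and is what the RTSP example in the update below uses. A minimal sketch of the same decoding pipeline with fdsink, assuming input.mp4 has already been created by the command above:

import shlex
import subprocess as sp

width = 714   # Must match the size of the test video
height = 420

# Same decoding pipeline as above, but the raw BGR bytes are written to
# file descriptor 1 (stdout) with fdsink instead of filesink:
p = sp.Popen(shlex.split('gst-launch-1.0 --quiet filesrc location=input.mp4 ! qtdemux ! video/x-h264 ! avdec_h264 ! videoconvert ! capsfilter caps="video/x-raw, format=BGR" ! fdsink'), stdout=sp.PIPE)

raw_frame = p.stdout.read(width * height * 3)  # Read a single BGR frame
print(f'Read {len(raw_frame)} bytes (expected {width * height * 3})')

p.stdout.close()  # Closing the pipe makes gst-launch-1.0 stop with a broken-pipe error
p.wait()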

Code sample:

import cv2
import numpy as np
import subprocess as sp
import shlex
from sys import platform

width = 714
height = 420

input_file_name = 'input.mp4'  # For testing, use MP4 input file instead of RTSP input.

# Build MP4 synthetic input video file for testing:
sp.run(shlex.split(f'ffmpeg -y -f lavfi -i testsrc=size={width}x{height}:rate=25:duration=100 -vcodec libx264 -pix_fmt yuv420p {input_file_name}'))

if platform == "win32":
    # stdout_file_name = "con:"
    # gstreamer_exe = 'c:/gstreamer/1.0/msvc_x86_64/bin/gst-launch-1.0.exe'
    raise Exception('win32 system is not supported')
else:
    stdout_file_name = "/dev/stdout"
    gstreamer_exe = 'gst-launch-1.0'

# Execute the GStreamer pipeline as a sub-process, redirecting its stdout to a pipe:
p = sp.Popen(shlex.split(f'{gstreamer_exe} --quiet filesrc location={input_file_name} ! qtdemux ! video/x-h264 ! avdec_h264 ! videoconvert ! capsfilter caps="video/x-raw, format=BGR" ! filesink location={stdout_file_name}'), stdout=sp.PIPE)

while True:
    raw_image = p.stdout.read(width * height * 3)

    if len(raw_image) < width*height*3:
        break

    image = np.frombuffer(raw_image, dtype='uint8').reshape((height, width, 3))
    cv2.imshow('image', image)
    key = cv2.waitKey(1)

p.stdout.close()
p.wait()
cv2.destroyAllWindows()

Update:

As a follow-up, I managed to create an RTSP capturing example:

import cv2
import numpy as np
import subprocess as sp
import shlex

width = 240
height = 160

rtsp_url = 'rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mp4'  # For testing, use public RTSP input.

gstreamer_exe = 'gst-launch-1.0'  # '/usr/bin/gst-launch-1.0'

# Execute the GStreamer pipeline as a sub-process, redirecting its stdout to a pipe:
p = sp.Popen(shlex.split(f'{gstreamer_exe} --quiet rtspsrc location={rtsp_url} ! queue2 ! rtph264depay ! avdec_h264 ! videoconvert ! capsfilter caps="video/x-raw, format=BGR" ! fdsink'), stdout=sp.PIPE)

while True:
    raw_image = p.stdout.read(width * height * 3)

    if len(raw_image) < width*height*3:
        break

    image = np.frombuffer(raw_image, np.uint8).reshape((height, width, 3))
    cv2.imshow('image', image)
    key = cv2.waitKey(1)

p.stdout.close()
p.wait()
cv2.destroyAllWindows()
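
The capture example above hardcodes width and height. When the frame size of the RTSP stream is not known in advance, one possible approach (a sketch, not part of the answer above, assuming OpenCV can open the RTSP URL directly) is to probe the stream with cv2.VideoCapture first, the same way the streaming script reads the size of the MP4 file, and only then start the GStreamer read loop:

import cv2

rtsp_url = 'rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mp4'

# Open the stream once with OpenCV just to read its properties,
# then release it so the GStreamer pipeline can take over.
cap = cv2.VideoCapture(rtsp_url)
if not cap.isOpened():
    raise RuntimeError(f'Cannot open RTSP stream: {rtsp_url}')

width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
cap.release()

print(f'Detected frame size: {width}x{height}')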