Stream image from Android with FFMPEG

I'm currently receiving images from an external source as a byte array, and I want to send them as raw video via ffmpeg to a stream URL, where I have an RTSP server that receives RTSP streams (a similar unanswered question). However, I haven't used FFMPEG from Java, so I couldn't find an example of how to do it. I have a callback that copies the image bytes into a byte array, as follows:

public class MainActivity extends Activity {
    final String rtmp_url = "rtmp://192.168.0.12:1935/live/test";
    private int PREVIEW_WIDTH = 384;
    private int PREVIEW_HEIGHT = 292;
    private String TAG = "MainActivity";
    String ffmpeg = Loader.load(org.bytedeco.ffmpeg.ffmpeg.class);
    final String command[] = {ffmpeg,
            "-y",  //Add "-re" for simulated real-time streaming.
            "-f", "rawvideo",
            "-vcodec", "rawvideo",
            "-pix_fmt", "bgr24",
            "-s", (Integer.toString(PREVIEW_WIDTH) + "x" + Integer.toString(PREVIEW_HEIGHT)),
            "-r", "10",
            "-i", "pipe:",
            "-c:v", "libx264",
            "-pix_fmt", "yuv420p",
            "-preset", "ultrafast",
            "-f", "flv",
            rtmp_url};

    private UVCCamera mUVCCamera;

public void handleStartPreview(Object surface) throws InterruptedException, IOException {
    Log.e(TAG, "handleStartPreview:mUVCCamera" + mUVCCamera + " mIsPreviewing:");
    if ((mUVCCamera == null)) return;
    Log.e(TAG, "handleStartPreview2 ");
    try {
        mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 26, 0, UVCCamera.DEFAULT_BANDWIDTH, 0);
        Log.e(TAG, "handleStartPreview3 mWidth: " + mWidth + "mHeight:" + mHeight);
    } catch (IllegalArgumentException e) {
        try {
            // fallback to YUV mode
            mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 26, UVCCamera.DEFAULT_PREVIEW_MODE, UVCCamera.DEFAULT_BANDWIDTH, 0);
            Log.e(TAG, "handleStartPreview4");
        } catch (IllegalArgumentException e1) {
            callOnError(e1);
            return;
        }
    }
    Log.e(TAG, "handleStartPreview: startPreview1");
    int result = mUVCCamera.startPreview();
    mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_RGBX);
    mUVCCamera.startCapture();
    Toast.makeText(MainActivity.this,"Camera Started",Toast.LENGTH_SHORT).show();
    ProcessBuilder pb = new ProcessBuilder(command);
    pb.redirectErrorStream(true);
    Process process = pb.start();
    BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    OutputStream writer = process.getOutputStream();
    byte img[] = new byte[192*108*3];
    for (int i = 0; i < 10; i++)
    {
        for (int y = 0; y < 108; y++)
        {
            for (int x = 0; x < 192; x++)
            {
                byte r = (byte)((x * y + i) % 255);
                byte g = (byte)((x * y + i*10) % 255);
                byte b = (byte)((x * y + i*20) % 255);
                img[(y*192 + x)*3] = b;
                img[(y*192 + x)*3+1] = g;
                img[(y*192 + x)*3+2] = r;
            }
        }

        writer.write(img);
    }

    writer.close();
    String line;
    while ((line = reader.readLine()) != null)
    {
        System.out.println(line);
    }

    process.waitFor();
}
public static void buildRawFrame(Mat img, int i)
{
    int p = img.cols() / 60;
    img.setTo(new Scalar(60, 60, 60));
    String text = Integer.toString(i+1);
    int font = Imgproc.FONT_HERSHEY_SIMPLEX;
    Point pos = new Point(img.cols()/2-p*10*(text.length()), img.rows()/2+p*10);
    Imgproc.putText(img, text, pos, font, p, new Scalar(255, 30, 30), p*2);  //Blue number
}

Also: Android Camera Capture using FFmpeg

It captures frame by frame from the native Android camera with ffmpeg, but instead of pushing over RTMP it generates a video file as output, and it doesn't explain how the images are passed to ffmpeg.

frameData is my byte array, and I'd like to know how to write the necessary ffmpeg commands using ProcessBuilder to send images over RTSP to a given URL with ffmpeg.
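
For illustration, this is roughly the kind of command array I have in mind for an RTSP output; the URL, the "-f rtsp" / "-rtsp_transport" options and the frame layout are only assumptions, not something I have verified:

// Assumed sketch: raw BGR frames in over stdin, H.264 out over RTSP.
// The RTSP URL below is an example, not a value from my setup.
final String[] rtspCommand = {ffmpeg,
        "-f", "rawvideo",
        "-vcodec", "rawvideo",
        "-pix_fmt", "bgr24",
        "-s", PREVIEW_WIDTH + "x" + PREVIEW_HEIGHT,
        "-r", "10",
        "-i", "pipe:",
        "-c:v", "libx264",
        "-pix_fmt", "yuv420p",
        "-preset", "ultrafast",
        "-f", "rtsp",
        "-rtsp_transport", "tcp",
        "rtsp://192.168.0.12:8554/test"};

Process process = new ProcessBuilder(rtspCommand).redirectErrorStream(true).start();
OutputStream stdin = process.getOutputStream();
stdin.write(frameData);  // frameData: one raw BGR frame (PREVIEW_WIDTH * PREVIEW_HEIGHT * 3 bytes)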

An example of what I'm trying to do: in Python 3 I can do it easily with:

import cv2
import numpy as np
import socket
import sys
import pickle
import struct
import subprocess

fps = 25
width = 224
height = 224
rtmp_url = 'rtmp://192.168.0.13:1935/live/test'

command = ['ffmpeg',
           '-y',
           '-f', 'rawvideo',
           '-vcodec', 'rawvideo',
           '-pix_fmt', 'bgr24',
           '-s', "{}x{}".format(width, height),
           '-r', str(fps),
           '-i', '-',
           '-c:v', 'libx264',
           '-pix_fmt', 'yuv420p',
           '-preset', 'ultrafast',
           '-f', 'flv',
           rtmp_url]

p = subprocess.Popen(command, stdin=subprocess.PIPE)

while(True):
    frame = np.random.randint([255], size=(224, 224, 3))
    frame = frame.astype(np.uint8)
    p.stdin.write(frame.tobytes())

I want to do the same thing on Android.

Update: although I can reproduce @Rotem's answer on NetBeans, on Android I get a NullPointerException when trying to execute pb.start().

    Process: com.infiRay.XthermMini, PID: 32089
    java.lang.NullPointerException
        at java.lang.ProcessBuilder.start(ProcessBuilder.java:1012)
        at com.infiRay.XthermMini.MainActivity.handleStartPreview(MainActivity.java:512)
        at com.infiRay.XthermMini.MainActivity.startPreview(MainActivity.java:563)
        at com.infiRay.XthermMini.MainActivity.access00(MainActivity.java:49)
        at com.infiRay.XthermMini.MainActivity.onConnect(MainActivity.java:316)
        at com.serenegiant.usb.USBMonitor.run(USBMonitor.java:620)
        at android.os.Handler.handleCallback(Handler.java:938)
        at android.os.Handler.dispatchMessage(Handler.java:99)
        at android.os.Looper.loopOnce(Looper.java:226)
        at android.os.Looper.loop(Looper.java:313)
        at android.os.HandlerThread.run(HandlerThread.java:67)
2022-06-02 11:47:20.300 32089-1049/com.infiRay.XthermMini E/libUVCCamera: [1049*UVCPreviewIR.cpp:505:uvc_preview_frame_callback]:receive err data
2022-06-02 11:47:20.304 32089-1049/com.infiRay.XthermMini E/libUVCCamera: [1049*UVCPreviewIR.cpp:505:uvc_preview_frame_callback]:receive err data
2022-06-02 11:47:20.304 32089-1049/com.infiRay.XthermMini E/libUVCCamera: [1049*UVCPreviewIR.cpp:505:uvc_preview_frame_callback]:receive err data
2022-06-02 11:47:20.308 32089-1049/com.infiRay.XthermMini E/libUVCCamera: [1049*UVCPreviewIR.cpp:505:uvc_preview_frame_callback]:receive err data
2022-06-02 11:47:20.312 32089-32089/com.infiRay.XthermMini E/MainActivity: onPause:
2022-06-02 11:47:20.314 32089-32581/com.infiRay.XthermMini I/Process: Sending signal. PID: 32089 SIG: 9
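
ProcessBuilder.start() is documented to throw a NullPointerException if an element of the command list is null, so one guess (only a guess, the trace does not prove it) is that the ffmpeg path obtained from Loader.load() is not usable on the device. A minimal check before building the command:

// Hedged sketch: verify the ffmpeg executable path before calling pb.start().
// Whether Loader.load() misbehaves on this device is an assumption, not a confirmed cause.
String ffmpegPath = Loader.load(org.bytedeco.ffmpeg.ffmpeg.class);
Log.e(TAG, "ffmpeg path: " + ffmpegPath);
if (ffmpegPath == null || ffmpegPath.isEmpty()) {
    Log.e(TAG, "No usable ffmpeg executable; skipping ProcessBuilder");
    return;
}
final String[] command = {ffmpegPath, "-y", "-f", "rawvideo", /* ...same arguments as above... */ rtmp_url};
ProcessBuilder pb = new ProcessBuilder(command);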

Here is a Java implementation that resembles the Python code:

The example writes raw video frames (byte arrays) to the stdin pipe of an FFmpeg sub-process:

 _____________             ___________                  ________ 
| JAVA byte   |           |           |                |        |
| Array       |   stdin   | FFmpeg    |                | Output |
| BGR (format)| --------> | process   | -------------> | stream |
|_____________| raw frame |___________| encoded video  |________|

Main stages:

  • Initialize the FFmpeg command arguments:

     final String command[] = {"ffmpeg", "-f", "rawvideo", ...}
    
  • Create a ProcessBuilder for executing FFmpeg as a sub-process:

     ProcessBuilder pb = new ProcessBuilder(command);
    
  • Redirect stderr (required for reading the FFmpeg messages); without it, the sub-process halts:

     pb.redirectErrorStream(true);
    
  • Start the FFmpeg sub-process, and create a BufferedReader:

     Process process = pb.start();
     BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    
  • Create an OutputStream for writing to the stdin pipe of the FFmpeg sub-process:

     OutputStream writer = process.getOutputStream();
    
  • Write raw video frames to the stdin pipe of the FFmpeg sub-process in a loop:

     byte img[] = new byte[width*height*3];
    
     for (int i = 0; i < n_frames; i++)
     {
         //Fill img with pixel data
         ...
         writer.write(img);
     }
    
  • Close stdin, read and print the stderr content, and wait for the sub-process to finish:

     writer.close();
    
     String line;
     while ((line = reader.readLine()) != null)
     {
         System.out.println(line);
     }        
    
     process.waitFor();
    

Code sample:
The following code sample writes 10 raw video frames of size 192x108 to FFmpeg.
Instead of streaming to RTMP, we write the result to a test.flv file (for testing).
The sample uses hard-coded strings and numbers (for simplicity).

Note:
The code sample assumes the FFmpeg executable is in the execution path.

package myproject;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;

public class FfmpegVideoWriter {
    public static void main(String[] args) throws IOException, InterruptedException {
        final String rtmp_url = "test.flv"; //Set output file (instead of output URL) for testing.
        
        final String command[] = {"ffmpeg",
                                  "-y",  //Add "-re" for simulated readtime streaming.
                                  "-f", "rawvideo",
                                  "-vcodec", "rawvideo",
                                  "-pix_fmt", "bgr24",
                                  "-s", "192x108",
                                  "-r", "10",
                                  "-i", "pipe:",
                                  "-c:v", "libx264",
                                  "-pix_fmt", "yuv420p",
                                  "-preset", "ultrafast",
                                  "-f", "flv",
                                  rtmp_url};
        
        //
        ProcessBuilder pb = new ProcessBuilder(command);    //Create ProcessBuilder
        pb.redirectErrorStream(true); //Redirect stderr
        Process process = pb.start();               
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
        
        //Create OutputStream for writing to stdin pipe of FFmpeg sub-process.
        OutputStream writer = process.getOutputStream();
        
        byte img[] = new byte[192*108*3];   //Dummy image 
        
        //Write 10 video frames to stdin pipe of FFmpeg sub-process
        for (int i = 0; i < 10; i++)
        {
            //Fill image with some arbitrary pixel values
            for (int y = 0; y < 108; y++)
            {
                for (int x = 0; x < 192; x++)
                {
                    //Arbitrary RGB values:
                    byte r = (byte)((x * y + i) % 255); //Red component
                    byte g = (byte)((x * y + i*10) % 255); //Green component
                    byte b = (byte)((x * y + i*20) % 255); //Blue component
                    img[(y*192 + x)*3] = b; 
                    img[(y*192 + x)*3+1] = g;
                    img[(y*192 + x)*3+2] = r;
                }
            }
            
            writer.write(img);  //Write img to FFmpeg
        }
        
        writer.close();  //Close stdin pipe.

        //Read and print stderr content
        //Note: there may be cases when FFmpeg keeps printing messages, so it may not be the best solution to empty the buffer only at the end.
        //We may consider adding an argument `-loglevel error` for reducing verbosity.
        String line;
        while ((line = reader.readLine()) != null)
        {
            System.out.println(line);
        }        
       
        process.waitFor();
    }
}

The code was tested on my PC (with Windows 10), but I am not sure if it's going to work on Android...
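
As the note in the code says, emptying the stderr buffer only after all frames were written may be a problem when FFmpeg keeps printing messages; a minimal variation (my own sketch, not part of the tested sample) is to drain the merged output on a background thread while frames are being written:

//Sketch: read FFmpeg's merged stdout/stderr on a separate thread so the pipe cannot fill up.
Process process = pb.start();
Thread logDrainer = new Thread(() -> {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);  //Use Log.d(TAG, line) on Android.
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
});
logDrainer.start();

OutputStream writer = process.getOutputStream();
//... write the frames to writer exactly as in the sample ...
writer.close();    //Close stdin pipe.
logDrainer.join(); //Wait for the log reader to finish.
process.waitFor();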

The above sample is simple and generic; in your case, you may use the rgba pixel format and write the FrameData inside the onFrame method.
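
A rough sketch of how that could look with the UVCCamera callback from the question (the IFrameCallback signature and the rgba/RGBX mapping are assumptions on my side):

//Sketch: push each preview frame into FFmpeg's stdin.
//Assumes the FFmpeg command was started with "-pix_fmt rgba" and "-s" matching the preview size,
//and that writer is the process.getOutputStream() created beforehand.
private final IFrameCallback mIFrameCallback = new IFrameCallback() {
    @Override
    public void onFrame(ByteBuffer frame) {
        byte[] frameData = new byte[frame.remaining()];
        frame.get(frameData);  //Copy the RGBA pixels out of the direct ByteBuffer.
        try {
            writer.write(frameData);
        } catch (IOException e) {
            Log.e(TAG, "Failed to write frame to FFmpeg stdin", e);
        }
    }
};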

Sample video frame ("arbitrary pixel values"):


Update:

The following code sample uses OpenCV's Java bindings (org.opencv) for writing Mat data to FFmpeg:

package myproject;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;

public class FfmpegVideoWriter {
    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }
    
    //Build synthetic "raw BGR" image for testing
    public static void buildRawFrame(Mat img, int i)
    {
        int p = img.cols() / 60;    //Used as font size factor.
        img.setTo(new Scalar(60, 60, 60));  //Fill image with dark gray color
        String text = Integer.toString(i+1);
        int font = Imgproc.FONT_HERSHEY_SIMPLEX;
        Point pos = new Point(img.cols()/2-p*10*(text.length()), img.rows()/2+p*10);
        Imgproc.putText(img, text, pos, font, p, new Scalar(255, 30, 30), p*2);  //Blue number
    }
    
    public static void main(String[] args) throws IOException, InterruptedException {
        final int cols = 192;
        final int rows = 108;
        
        final String rtmp_url = "test.flv"; //Set output file (instead of output URL) for testing.
        
        final String command[] = {"ffmpeg",
                                  "-y",  //Add "-re" for simulated readtime streaming.
                                  "-f", "rawvideo",
                                  "-vcodec", "rawvideo",
                                  "-pix_fmt", "bgr24",
                                  "-s", (Integer.toString(cols) + "x" + Integer.toString(rows)),
                                  "-r", "10",
                                  "-i", "pipe:",
                                  "-c:v", "libx264",
                                  "-pix_fmt", "yuv420p",
                                  "-preset", "ultrafast",
                                  "-f", "flv",
                                  rtmp_url};
        
        //
        ProcessBuilder pb = new ProcessBuilder(command);    //Create ProcessBuilder
        pb.redirectErrorStream(true); //Redirect stderr
        Process process = pb.start();               
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
        
        //Create OutputStream for writing to stdin pipe of FFmpeg sub-process.
        OutputStream writer = process.getOutputStream();
        
        //Dummy image (BGR pixel format).
        Mat img = new Mat(rows, cols, CvType.CV_8UC3, Scalar.all(0));
        
        byte buffer[] = new byte[cols*rows*3]; //Byte array for storing img data    
        
        //Write 10 video frames to stdin pipe of FFmpeg sub-process
        for (int i = 0; i < 10; i++)
        {
            buildRawFrame(img, i); //Build image with blue frame counter.
                       
            img.get(0, 0, buffer); //Copy img data to buffer (not sure if this is the best solution).  
            
            writer.write(buffer); //Write buffer (raw video frame as byte array) to FFmpeg
        }
        
        writer.close(); //Close stdin pipe.

        //Read and print stderr content
        String line;
        while ((line = reader.readLine()) != null)
        {
            System.out.println(line);
        }        
       
        process.waitFor();
    }
}

Sample output frame: