MediaCodec.dequeueOutputBuffer taking very long when encoding h264 on Android
I am trying to encode h264 video on Android with MediaCodec for real-time video streaming, but dequeueOutputBuffer keeps taking very long (actually it is sometimes very fast and sometimes very slow, see the log output below). I have seen it take as much as 200 ms for an output buffer to become ready. Is there something wrong with my code, or do you think this is an issue with the OMX.Nvidia.h264.encoder?
Maybe I need to downsample the image from 1280x720 to something smaller? Or maybe I need to dequeue and queue more input buffers while waiting for the output buffer? (There are 6 input and 6 output buffers available.) I am using Android API 19, so I cannot use the asynchronous MediaCodec processing mode. I am actually streaming images from a Google Project Tango tablet, so my other suspicion is that the Tango's background operations might take too long and slow the encoder down. Any ideas what might be slowing this down?
01-20 23:36:30.728 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.400666ms.
01-20 23:36:30.855 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 94.290667ms.
01-20 23:36:30.880 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.57ms.
01-20 23:36:30.929 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 4.878417ms.
01-20 23:36:31.042 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 77.495417ms.
01-20 23:36:31.064 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.3225ms.
01-20 23:36:31.182 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 74.777583ms.
01-20 23:36:31.195 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.23ms.
01-20 23:36:31.246 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 17.243583ms.
01-20 23:36:31.350 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 80.14725ms.
01-20 23:36:31.373 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 2.493834ms.
01-20 23:36:31.421 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.273ms.
01-20 23:36:31.546 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 93.543667ms.
01-20 23:36:31.576 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 5.309334ms.
01-20 23:36:31.619 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.402583ms.
01-20 23:36:31.686 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 22.5485ms.
01-20 23:36:31.809 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 91.392083ms.
My relevant code is as follows:
public class StreamingThread extends Thread {
...
// encoding
private MediaCodec mVideoEncoder = null;
private ByteBuffer[] mEncoderInputBuffers = null;
private ByteBuffer[] mEncoderOutputBuffers = null;
private NV21Convertor mNV21Converter = null;
public static native VideoFrame getNewFrame();
public StreamingThread()
{
this.setPriority(MAX_PRIORITY);
}
@Override
public void run()
{
Looper.prepare();
init();
Looper.loop();
}
private void init()
{
mHandler = new Handler() {
public void handleMessage(Message msg) {
// process incoming messages here
switch(msg.what)
{
case HAVE_NEW_FRAME: // new frame has arrived (signaled from main thread)
processBufferedFrames();
break;
case CLOSE_THREAD:
close();
break;
default:
Log.e(LOGTAG, "received unknown message!");
}
}
};
try {
...
// set up video encoding
final String mime = "video/avc"; // H.264/AVC
listAvailableEncoders(mime); // (this creates some debug output only)
String codec = "OMX.Nvidia.h264.encoder"; // instead, hard-code the codec we want to use for now
mVideoEncoder = MediaCodec.createByCodecName(codec);
if(mVideoEncoder == null)
Log.e(LOGTAG, "Media codec " + codec + " is not available!");
// TODO: change, based on what we're streaming...
int FRAME_WIDTH = 1280;
int FRAME_HEIGHT = 720;
// https://github.com/fyhertz/libstreaming/blob/ac44416d88ed3112869ef0f7eab151a184bbb78d/src/net/majorkernelpanic/streaming/hw/EncoderDebugger.java
mNV21Converter = new NV21Convertor();
mNV21Converter.setSize(FRAME_WIDTH, FRAME_HEIGHT);
mNV21Converter.setEncoderColorFormat(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
mNV21Converter.setColorPanesReversed(true);
mNV21Converter.setYPadding(0);
MediaFormat format = MediaFormat.createVideoFormat(mime, FRAME_WIDTH, FRAME_HEIGHT);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
// TODO: optimize bit rate
format.setInteger(MediaFormat.KEY_BIT_RATE, 250000); // 250 kbit/s ≈ 0.03 megabytes/s
mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mVideoEncoder.start();
mEncoderInputBuffers = mVideoEncoder.getInputBuffers();
mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
Log.d(LOGTAG, "Number of input buffers " + mEncoderInputBuffers.length);
Log.d(LOGTAG, "Number of output buffers " + mEncoderOutputBuffers.length);
initialized = true;
} catch (Exception e) {
e.printStackTrace();
}
}
private void close()
{
Looper.myLooper().quit();
mVideoEncoder.stop();
mVideoEncoder.release();
mVideoEncoder = null;
}
private void processBufferedFrames()
{
if (!initialized)
return;
VideoFrame frame = getNewFrame();
try {
sendTCPFrame(frame);
} catch (Exception e) {
e.printStackTrace();
}
}
private void sendTCPFrame(VideoFrame frame)
{
long start = System.nanoTime();
long start2 = System.nanoTime();
int inputBufferIndex = -1;
while((inputBufferIndex = mVideoEncoder.dequeueInputBuffer(-1)) < 0 ) { // -1: wait indefinitely for the buffer
switch(inputBufferIndex) {
default:
Log.e(LOGTAG, "dequeueInputBuffer returned unknown value: " + inputBufferIndex);
}
}
// fill in input (raw) data:
mEncoderInputBuffers[inputBufferIndex].clear();
long stop2 = System.nanoTime();
Log.d(LOGTAG, "dequeueInputBuffer took " + (stop2 - start2) / 1e6 + "ms.");
start2 = System.nanoTime();
byte[] pixels = mNV21Converter.convert(frame.pixels);
stop2 = System.nanoTime();
Log.d(LOGTAG, "mNV21Converter.convert took " + (stop2-start2)/1e6 + "ms.");
start2 = System.nanoTime();
mEncoderInputBuffers[inputBufferIndex].put(pixels);
stop2 = System.nanoTime();
Log.d(LOGTAG, "mEncoderInputBuffers[inputBufferIndex].put(pixels) took " + (stop2 - start2) / 1e6 + "ms.");
start2 = System.nanoTime();
//mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, pixels.length, 0, 0);
//mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, pixels.length, System.nanoTime() / 1000, 0);
mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, pixels.length, System.nanoTime(), 0);
stop2 = System.nanoTime();
Log.d(LOGTAG, "queueInputBuffer took " + (stop2 - start2) / 1e6 + "ms.");
start2 = System.nanoTime();
// wait for encoded data to become available:
int outputBufferIndex = -1;
MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();
long timeoutUs = -1;//10000; // microseconds
while((outputBufferIndex = mVideoEncoder.dequeueOutputBuffer(bufInfo, timeoutUs)) < 0 ) { // -1: wait indefinitely for the buffer
Log.i(LOGTAG, "dequeueOutputBuffer returned value: " + outputBufferIndex);
switch(outputBufferIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
// output buffers have changed, move reference
mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
// Subsequent data will conform to new format.
//MediaFormat format = codec.getOutputFormat();
Log.e(LOGTAG, "dequeueOutputBuffer returned INFO_OUTPUT_FORMAT_CHANGED ?!");
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.w(LOGTAG, "dequeueOutputBuffer return INFO_TRY_AGAIN_LATER");
break;
default:
Log.e(LOGTAG, "dequeueOutputBuffer returned unknown value: " + outputBufferIndex);
}
}
stop2 = System.nanoTime();
Log.d(LOGTAG, "dequeueOutputBuffer took " + (stop2 - start2) / 1e6 + "ms.");
// output (encoded) data available!
Log.d(LOGTAG, "encoded buffer info: size = " + bufInfo.size + ", offset = " + bufInfo.offset + ", presentationTimeUs = " + bufInfo.presentationTimeUs + ", flags = " + bufInfo.flags);
ByteBuffer encodedData = mEncoderOutputBuffers[outputBufferIndex];
final int sizeOfImageData = bufInfo.size;
long stop = System.nanoTime();
Log.d(LOGTAG, "Encoding image took " + (stop-start)/1e6 + "ms.");
start = System.nanoTime();
// assemble header:
...
encodedData.rewind();
// copy (!) raw image data to "direct" (array-backed) buffer:
ByteBuffer imageBuffer = ByteBuffer.allocateDirect(encodedData.remaining());
imageBuffer.put(encodedData); // TODO: can this copy be avoided?
stop = System.nanoTime();
Log.d(LOGTAG, "Preparing content for streaming took " + (stop - start) / 1e6 + "ms.");
// do streaming via TCP
...
mVideoEncoder.releaseOutputBuffer(outputBufferIndex, false);
}
// see http://developer.android.com/reference/android/media/MediaCodecInfo.html
private void listAvailableEncoders(String mimeType)
{
Log.d(LOGTAG, "Available encoders for mime type " + mimeType + ":");
for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
MediaCodecInfo codec = MediaCodecList.getCodecInfoAt(i);
if (!codec.isEncoder())
continue;
String[] types = codec.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
//if (types[j].equalsIgnoreCase(mimeType)) {
String msg = "- name: " + codec.getName() + ", supported color formats for " + mimeType + ":";
MediaCodecInfo.CodecCapabilities cap = codec.getCapabilitiesForType(mimeType);
for(int k = 0; k < cap.colorFormats.length; ++k) msg = msg + " " + cap.colorFormats[k];
Log.d(LOGTAG, msg);
// break;
//}
}
}
}
Yes, there is an issue with your code - you are synchronously waiting for the current frame to come out of the encoder before moving on to the next one. Most hardware codecs have a bit more latency than you might expect, and to get the throughput the encoder is actually capable of, you need to use it asynchronously.
That is, after submitting one input buffer for encoding, do not wait for the corresponding encoded output buffer; only check whether any output happens to be available. You should then go ahead and feed the next input buffer, and again check for any available output. Only once you do not get an input buffer immediately should you start waiting for output. That way there is always more than one input buffer queued for the encoder to work on, keeping it busy so that it can actually reach the frame rate it is capable of.
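As a rough illustration, here is a minimal sketch of that feed/drain pattern using the synchronous API 19 calls. It reuses the field names from the question (mVideoEncoder, mEncoderInputBuffers, mEncoderOutputBuffers); sendFrameOverTCP() is a hypothetical placeholder for the network code.
private void encodeAndDrain(byte[] pixels, long ptsUs) {
    // Hand the frame to the encoder, but do not block indefinitely.
    int inIndex = mVideoEncoder.dequeueInputBuffer(10000); // 10 ms timeout
    if (inIndex >= 0) {
        ByteBuffer inBuf = mEncoderInputBuffers[inIndex];
        inBuf.clear();
        inBuf.put(pixels);
        mVideoEncoder.queueInputBuffer(inIndex, 0, pixels.length, ptsUs, 0);
    }
    // Drain whatever output is ready right now, without waiting for the
    // frame that was just queued - keep the encoder pipeline filled instead.
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outIndex;
    while ((outIndex = mVideoEncoder.dequeueOutputBuffer(info, 0)) != MediaCodec.INFO_TRY_AGAIN_LATER) {
        if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
        } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = mVideoEncoder.getOutputFormat(); // carries codec config (SPS/PPS)
        } else if (outIndex >= 0) {
            ByteBuffer encoded = mEncoderOutputBuffers[outIndex];
            encoded.position(info.offset);
            encoded.limit(info.offset + info.size);
            sendFrameOverTCP(encoded, info); // hypothetical network helper
            mVideoEncoder.releaseOutputBuffer(outIndex, false);
        }
    }
}
With this structure a slow frame no longer stalls the whole loop; the encoder keeps receiving input while earlier frames are still being compressed.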
(If you can target Android 5.0, you could take a look at MediaCodec.setCallback, which makes working asynchronously easier.)
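For reference, a minimal sketch of that callback-based approach (API 21+); setCallback must be installed before configure()/start(), and nextRawFrame() and streamEncodedData() are hypothetical placeholders for frame capture and the TCP code.
mVideoEncoder.setCallback(new MediaCodec.Callback() {
    @Override
    public void onInputBufferAvailable(MediaCodec codec, int index) {
        ByteBuffer inBuf = codec.getInputBuffer(index); // API 21 replacement for getInputBuffers()
        byte[] pixels = nextRawFrame(); // hypothetical: fetch the next converted frame
        inBuf.clear();
        inBuf.put(pixels);
        codec.queueInputBuffer(index, 0, pixels.length, System.nanoTime() / 1000, 0);
    }
    @Override
    public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
        ByteBuffer outBuf = codec.getOutputBuffer(index);
        streamEncodedData(outBuf, info); // hypothetical network helper
        codec.releaseOutputBuffer(index, false);
    }
    @Override
    public void onOutputFormatChanged(MediaCodec codec, MediaFormat newFormat) { }
    @Override
    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        Log.e(LOGTAG, "encoder error", e);
    }
});
mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mVideoEncoder.start();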
There are even some codecs (mostly decoders though, if I remember correctly) that will not output the first buffer at all until you have passed them more than one input buffer.