MediaCodec 从 RTSP 解码 AAC 音频块并播放
MediaCodec decode AAC audio chunks from RTSP and play
我正在从 FFServer 实例接收包含由 libvo_aacenc
(44100hz 128kbps 2ch) 编码的 aac 音频块的 rtp 数据包。我正在尝试在 Android 中使用 MediaCodec 对它们进行解码,并在解码块后立即播放。
Client.java
Player player = new Player();
//RTSP listener
@Override
public void onRTSPPacketReceived(RTPpacket packet) {
byte [] aac_chunk = packet.getpayload();
player.playAAC(aac_chunk);
}
Player.java
private MediaCodec decoder;
private AudioTrack audioTrack;
private MediaExtractor extractor;
public Player(){
extractor = new MediaExtractor();
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
44100, AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT,
44100,
AudioTrack.MODE_STREAM);
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_BIT_RATE, 128 * 1024);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectHE);
try{
decoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
decoder.configure(format, null, null, 0);
} catch (IOException e) {
e.printStackTrace();
}
decoder.start();
audioTrack.play();
}
//Decode and play one aac_chunk
public void playAAC(byte [] data){
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
int inIndex = decoder.dequeueInputBuffer(-1);
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
buffer.put(data, 0, data.length);
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inIndex, 0, sampleSize, presentationTimeUs, 0);
}
}
int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT);
while(outIndex >= 0){
ByteBuffer outBuffer = outputBuffers[outIndex];
byte[] decoded_chunk = new byte[info.size];
outBuffer.get(decoded_chunk); // Read the buffer all at once
outBuffer.clear();
//!! Decoded decoded_chunk.length = 0 !!
System.out.println("DECODED CHUNK SIZE: "+decoded_chunk.length);
//Instant play of the decoded chunk
audioTrack.write(decoded_chunk, info.offset, info.offset + info.size);
decoder.releaseOutputBuffer(outIndex, false);
break;
}
decoder.flush();
}
启动时,MediaCodec 正确启动。
MediaCodec: (0xa5040280) start
MediaCodec: (0xa5040280) input buffers allocated
MediaCodec: (0xa5040280) numBuffers (4)
MediaCodec: (0xa5040280) output buffers allocated
MediaCodec: (0xa5040280) numBuffers (4)
问题
我实际上没有听到任何声音。 MediaCodec 正在工作,但看起来它没有将任何内容解码到他的输出缓冲区中,因为 decoded_chunk.length = 0
和 outBuffer.limit() = 0
.
问题
我应该异步填充 MediaCodec 输入缓冲区吗?不幸的是,我在找到的示例中没有找到任何有关此问题的信息:即时解码和播放。
我遵循了这些示例:
我已经在异步模式下使用 MediaCodec 和 MediaCodec.Callback 解决了这个问题,如官方文档 here 中所述 仅适用于 Android minSdkVersion 21。
基本上,我为收到的每个 RTP 音频块都使用了一个队列,然后每次 MediaCodec 缓冲状态更改时我都会收到通知。处理解码器流程实际上更容易。
decoder.setCallback(new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(@NonNull MediaCodec mediaCodec, int i) {
//One InputBuffer is available to decode
while (true) {
if(queue.size() > 0) {
byte[] data = queue.removeFirst();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
ByteBuffer buffer = mediaCodec.getInputBuffer(i);
buffer.put(data, 0, data.length);
mediaCodec.queueInputBuffer(i, 0, data.length, 0, 0);
break;
}
}
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int i, @NonNull MediaCodec.BufferInfo info) {
//DECODING PACKET ENDED
ByteBuffer outBuffer = mediaCodec.getOutputBuffer(i);
byte[] chunk = new byte[info.size];
outBuffer.get(chunk); // Read the buffer all at once
outBuffer.clear();
audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
mediaCodec.releaseOutputBuffer(i, false);
}
@Override
public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec, @NonNull MediaFormat mediaFormat) {}
});
我正在从 FFServer 实例接收包含由 libvo_aacenc
(44100hz 128kbps 2ch) 编码的 aac 音频块的 rtp 数据包。我正在尝试在 Android 中使用 MediaCodec 对它们进行解码,并在解码块后立即播放。
Client.java
Player player = new Player();
//RTSP listener
@Override
public void onRTSPPacketReceived(RTPpacket packet) {
byte [] aac_chunk = packet.getpayload();
player.playAAC(aac_chunk);
}
Player.java
private MediaCodec decoder;
private AudioTrack audioTrack;
private MediaExtractor extractor;
public Player(){
extractor = new MediaExtractor();
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
44100, AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT,
44100,
AudioTrack.MODE_STREAM);
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_BIT_RATE, 128 * 1024);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectHE);
try{
decoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
decoder.configure(format, null, null, 0);
} catch (IOException e) {
e.printStackTrace();
}
decoder.start();
audioTrack.play();
}
//Decode and play one aac_chunk
public void playAAC(byte [] data){
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
int inIndex = decoder.dequeueInputBuffer(-1);
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
buffer.put(data, 0, data.length);
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inIndex, 0, sampleSize, presentationTimeUs, 0);
}
}
int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT);
while(outIndex >= 0){
ByteBuffer outBuffer = outputBuffers[outIndex];
byte[] decoded_chunk = new byte[info.size];
outBuffer.get(decoded_chunk); // Read the buffer all at once
outBuffer.clear();
//!! Decoded decoded_chunk.length = 0 !!
System.out.println("DECODED CHUNK SIZE: "+decoded_chunk.length);
//Instant play of the decoded chunk
audioTrack.write(decoded_chunk, info.offset, info.offset + info.size);
decoder.releaseOutputBuffer(outIndex, false);
break;
}
decoder.flush();
}
启动时,MediaCodec 正确启动。
MediaCodec: (0xa5040280) start
MediaCodec: (0xa5040280) input buffers allocated
MediaCodec: (0xa5040280) numBuffers (4)
MediaCodec: (0xa5040280) output buffers allocated
MediaCodec: (0xa5040280) numBuffers (4)
问题
我实际上没有听到任何声音。 MediaCodec 正在工作,但看起来它没有将任何内容解码到他的输出缓冲区中,因为 decoded_chunk.length = 0
和 outBuffer.limit() = 0
.
问题
我应该异步填充 MediaCodec 输入缓冲区吗?不幸的是,我在找到的示例中没有找到任何有关此问题的信息:即时解码和播放。
我遵循了这些示例:
我已经在异步模式下使用 MediaCodec 和 MediaCodec.Callback 解决了这个问题,如官方文档 here 中所述 仅适用于 Android minSdkVersion 21。
基本上,我为收到的每个 RTP 音频块都使用了一个队列,然后每次 MediaCodec 缓冲状态更改时我都会收到通知。处理解码器流程实际上更容易。
decoder.setCallback(new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(@NonNull MediaCodec mediaCodec, int i) {
//One InputBuffer is available to decode
while (true) {
if(queue.size() > 0) {
byte[] data = queue.removeFirst();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
ByteBuffer buffer = mediaCodec.getInputBuffer(i);
buffer.put(data, 0, data.length);
mediaCodec.queueInputBuffer(i, 0, data.length, 0, 0);
break;
}
}
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int i, @NonNull MediaCodec.BufferInfo info) {
//DECODING PACKET ENDED
ByteBuffer outBuffer = mediaCodec.getOutputBuffer(i);
byte[] chunk = new byte[info.size];
outBuffer.get(chunk); // Read the buffer all at once
outBuffer.clear();
audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
mediaCodec.releaseOutputBuffer(i, false);
}
@Override
public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec, @NonNull MediaFormat mediaFormat) {}
});