如何使用 ByteBuffer(无颜色校正)获取 RGB 位图?
How get an RGB Bitmap by using ByteBuffer (no color correction)?
我的 SD 卡中有一个 mp4 视频
MediaFormat = {
repeat-previous-frame-after=66666,
mime=video/avc,
frame-rate=15,
color-format=2130708361,
height=720,
width=1280,
bitrate=1000000,
i-frame-interval=1
}
如果我在 MediaCodec.Decoder 配置中设置 Surface,则可以正确创建图像。
如果我不这样做并使用 ByteBuffer 创建位图,我会得到不正确的彩色图像。
我尝试了 YuvImage、将 YUV420 转换为 RGB 的方法和 ScriptIntrinsicYuvToRGB,但无法获得正确的位图;
我需要在不向 MediaCodec.Decoder.configure 传入 Surface 的情况下,从解码输出创建位图!
// Decodes an MP4 file with MediaCodec (configured WITHOUT an output Surface)
// and renders each decoded frame onto `surface` by interpreting the raw codec
// output buffer as NV21 and converting it to a JPEG-backed Bitmap.
// NOTE(review): relies on outer-class members SAMPLE (file path), TAG,
// srcRect and dstRect — confirm they are in scope and initialized.
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private boolean needStop = false;
// Timeout (microseconds) for dequeueInputBuffer/dequeueOutputBuffer.
final int TIMEOUT_USEC = 10000;
PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
try {
extractor.setDataSource(SAMPLE);
} catch (IOException e) {
e.printStackTrace();
}
// Select the first video track and create a matching decoder.
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
try {
decoder = MediaCodec.createDecoderByType(mime);
} catch (IOException e) {
e.printStackTrace();
}
// Configured without a Surface, so decoded frames come back in the
// codec's output ByteBuffers instead of being rendered directly.
decoder.configure(format, /*surface*/ null, null, 0);
break;
}
}
if (decoder == null) {
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long startMs = System.currentTimeMillis();
boolean isEOS = false;
// Main decode loop: feed compressed samples in, drain decoded frames out.
while (!Thread.interrupted() && !needStop) {
if (!isEOS) {
int inIndex = -1;
try {
inIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
} catch (IllegalStateException e) {
e.printStackTrace();
}
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// No more samples: signal end-of-stream to the codec.
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
}
} else {
try {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
}
}
int outIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
if (!needStop) {
outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
// Copy the decoded frame out of the codec's output buffer.
ByteBuffer buffer = outputBuffers[outIndex];
buffer.position(info.offset);
buffer.limit(info.offset + info.size);
byte[] ba = new byte[buffer.remaining()];
buffer.get(ba);
// Several conversion algorithms were tried here to obtain a Bitmap.
// NOTE(review): the raw output is fed to YuvImage as NV21, but decoders
// typically emit planar/semi-planar YUV420 with row/pixel strides, not
// NV21 — that mismatch is the likely cause of the wrong colors. Confirm
// the decoder's actual KEY_COLOR_FORMAT from the output MediaFormat.
YuvImage yuvimage = new YuvImage(ba, ImageFormat.NV21, 1280, 720, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, 1280, 720), 80, baos);
byte[] jdata = baos.toByteArray();
final Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
if (bmp != null) {
// Hard-coded 1280x720 source rect; should come from the MediaFormat.
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = 720;
srcRect.right = 1280;
Canvas canvas = surface.lockCanvas(dstRect);
try {
if (canvas != null) {
canvas.drawBitmap(bmp, srcRect, dstRect, null);
}
} finally {
if (canvas != null) {
surface.unlockCanvasAndPost(canvas);
}
}
} else {
Log.e(TAG, "bmp = BAD");
}
// Crude pacing: wait until wall-clock time catches up with the frame's
// presentation timestamp so playback runs at roughly real-time speed.
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() -
startMs && !needStop) {
try {
sleep(10);
} catch (InterruptedException e) {
PlayerThread.this.interrupt();
e.printStackTrace();
break;
}
}
decoder.releaseOutputBuffer(outIndex, false);
break;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
如何将解码器输出的 ByteBuffer 正确转换为 Bitmap?
我找到了解决问题的方法:现在我不再直接使用输出 ByteBuffer,而是使用解码器提供的 Image(getOutputImage),它解决了颜色不正确的问题。
现在需要重构代码并在动态模式下实现视频大小、表面大小等变量
更正代码
// Decodes an MP4 with MediaCodec (no output Surface) and renders frames by
// converting the decoder's output Image (YUV_420_888) to NV21, then to a
// JPEG-backed Bitmap drawn on `surface`. NOTE(review): relies on outer-class
// members SAMPLE, srcRect and dstRect — confirm they are in scope.
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private boolean needStop = false;
// Timeout (microseconds) for dequeueInputBuffer/dequeueOutputBuffer.
final int TIMEOUT_USEC = 10000;
PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
try {
extractor.setDataSource(SAMPLE); //path MP4 file
} catch (IOException e) {
e.printStackTrace();
}
// Select the first video track and create a matching decoder.
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
try {
decoder = MediaCodec.createDecoderByType(mime);
} catch (IOException e) {
e.printStackTrace();
}
// Configured without a Surface so getOutputImage() can be used below.
decoder.configure(format, /*surface*/ null, null, 0);
break;
}
}
if (decoder == null) {
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long startMs = System.currentTimeMillis();
boolean isEOS = false;
// Main decode loop: feed compressed samples in, drain decoded frames out.
while (!Thread.interrupted() && !needStop) {
if (!isEOS) {
int inIndex = -1;
try {
inIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
} catch (IllegalStateException e) {
e.printStackTrace();
}
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// No more samples: signal end-of-stream to the codec.
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
}
} else {
try {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
}
}
int outIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
if (!needStop) {
outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
// getOutputImage exposes the decoded frame with its plane layout,
// which avoids guessing the codec's raw buffer color format.
Image image = decoder.getOutputImage(outIndex);
Image.Plane[] plants = image.getPlanes();
Bitmap bmp = null;
if (plants != null && plants.length > 0) {
// Hard-coded 1280x720; should be read from the output MediaFormat.
YuvImage yuvimage = new YuvImage(YUV_420_888toNV21(image), ImageFormat.NV21, 1280, 720, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, 1280, 720), 80, baos);
byte[] jdata = baos.toByteArray();
bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
}
if (bmp != null) {
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = 720;
srcRect.right = 1280;
Canvas canvas = surface.lockCanvas(dstRect);
try {
if (canvas != null) {
canvas.drawBitmap(bmp, srcRect, dstRect /*0,0, surfaceChanged
dstRect.right = width;
dstRect.bottom = height;*/,
null);
}
} finally {
if (canvas != null) {
surface.unlockCanvasAndPost(canvas);
}
}
}
// Crude pacing: wait until wall-clock time catches up with the frame's
// presentation timestamp so playback runs at roughly real-time speed.
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() -
startMs && !needStop) {
try {
sleep(10);
} catch (InterruptedException e) {
PlayerThread.this.interrupt();
e.printStackTrace();
break;
}
}
// NOTE(review): the Image obtained above is never explicitly closed;
// releasing the output buffer invalidates it, but an image.close()
// before this call would be cleaner — verify against MediaCodec docs.
decoder.releaseOutputBuffer(outIndex, false);
break;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
// Converts a YUV_420_888 Image (e.g. from MediaCodec.getOutputImage) into a
// tightly-packed NV21 byte array: the full-resolution Y plane followed by
// interleaved V/U samples at quarter resolution — the layout YuvImage expects.
//
// Unlike a naive concatenation of the three plane buffers, this honours each
// plane's row stride and pixel stride, so it stays correct when the decoder
// pads rows (rowStride > width) or returns semi-planar chroma
// (pixelStride == 2), which is the usual cause of "wrong colors".
//
// @param image decoded frame in ImageFormat.YUV_420_888; not closed here.
// @return NV21 data of size width * height * 3 / 2.
private static byte[] YUV_420_888toNV21(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    int ySize = width * height;
    byte[] nv21 = new byte[ySize + ySize / 2];

    // --- Y plane: copy row by row, skipping any row-stride padding. ---
    Image.Plane yPlane = image.getPlanes()[0];
    ByteBuffer yBuffer = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    if (yRowStride == width) {
        // No padding: a single bulk copy suffices.
        yBuffer.get(nv21, 0, ySize);
        pos = ySize;
    } else {
        for (int row = 0; row < height; row++) {
            yBuffer.position(row * yRowStride);
            yBuffer.get(nv21, pos, width);
            pos += width;
        }
    }

    // --- Chroma planes: NV21 interleaves V first, then U. ---
    Image.Plane uPlane = image.getPlanes()[1];
    Image.Plane vPlane = image.getPlanes()[2];
    ByteBuffer uBuffer = uPlane.getBuffer();
    ByteBuffer vBuffer = vPlane.getBuffer();
    int uRowStride = uPlane.getRowStride();
    int uPixelStride = uPlane.getPixelStride();
    int vRowStride = vPlane.getRowStride();
    int vPixelStride = vPlane.getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            nv21[pos++] = vBuffer.get(row * vRowStride + col * vPixelStride);
            nv21[pos++] = uBuffer.get(row * uRowStride + col * uPixelStride);
        }
    }
    return nv21;
}
我的 SD 卡中有一个 mp4 视频
MediaFormat = {
repeat-previous-frame-after=66666,
mime=video/avc,
frame-rate=15,
color-format=2130708361,
height=720,
width=1280,
bitrate=1000000,
i-frame-interval=1
}
如果我在 MediaCodec.Decoder 配置中设置 Surface,则可以正确创建图像。
如果我不这样做并使用 ByteBuffer 创建位图,我会得到不正确的彩色图像。
我尝试了 YuvImage、将 YUV420 转换为 RGB 的方法和 ScriptIntrinsicYuvToRGB,但无法获得正确的位图;
我需要在不向 MediaCodec.Decoder.configure 传入 Surface 的情况下,从解码输出创建位图!
private class PlayerThread extends Thread {
// Fields: extractor pulls compressed samples from the MP4; decoder turns them
// into raw frames; `surface` is the render target; needStop is a cooperative
// stop flag. NOTE(review): relies on outer-class members SAMPLE (file path),
// TAG, srcRect and dstRect — confirm they are in scope and initialized.
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private boolean needStop = false;
// Timeout (microseconds) for dequeueInputBuffer/dequeueOutputBuffer.
final int TIMEOUT_USEC = 10000;
PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
try {
extractor.setDataSource(SAMPLE);
} catch (IOException e) {
e.printStackTrace();
}
// Select the first video track and create a matching decoder.
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
try {
decoder = MediaCodec.createDecoderByType(mime);
} catch (IOException e) {
e.printStackTrace();
}
// Configured without a Surface, so decoded frames come back in the
// codec's output ByteBuffers instead of being rendered directly.
decoder.configure(format, /*surface*/ null, null, 0);
break;
}
}
if (decoder == null) {
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long startMs = System.currentTimeMillis();
boolean isEOS = false;
// Main decode loop: feed compressed samples in, drain decoded frames out.
while (!Thread.interrupted() && !needStop) {
if (!isEOS) {
int inIndex = -1;
try {
inIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
} catch (IllegalStateException e) {
e.printStackTrace();
}
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// No more samples: signal end-of-stream to the codec.
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
}
} else {
try {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
}
}
int outIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
if (!needStop) {
outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
// Copy the decoded frame out of the codec's output buffer.
ByteBuffer buffer = outputBuffers[outIndex];
buffer.position(info.offset);
buffer.limit(info.offset + info.size);
byte[] ba = new byte[buffer.remaining()];
buffer.get(ba);
// Several conversion algorithms were tried here to obtain a Bitmap.
// NOTE(review): the raw output is fed to YuvImage as NV21, but decoders
// typically emit planar/semi-planar YUV420 with row/pixel strides, not
// NV21 — that mismatch is the likely cause of the wrong colors. Confirm
// the decoder's actual KEY_COLOR_FORMAT from the output MediaFormat.
YuvImage yuvimage = new YuvImage(ba, ImageFormat.NV21, 1280, 720, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, 1280, 720), 80, baos);
byte[] jdata = baos.toByteArray();
final Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
if (bmp != null) {
// Hard-coded 1280x720 source rect; should come from the MediaFormat.
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = 720;
srcRect.right = 1280;
Canvas canvas = surface.lockCanvas(dstRect);
try {
if (canvas != null) {
canvas.drawBitmap(bmp, srcRect, dstRect, null);
}
} finally {
if (canvas != null) {
surface.unlockCanvasAndPost(canvas);
}
}
} else {
Log.e(TAG, "bmp = BAD");
}
// Crude pacing: wait until wall-clock time catches up with the frame's
// presentation timestamp so playback runs at roughly real-time speed.
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() -
startMs && !needStop) {
try {
sleep(10);
} catch (InterruptedException e) {
PlayerThread.this.interrupt();
e.printStackTrace();
break;
}
}
decoder.releaseOutputBuffer(outIndex, false);
break;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
如何将解码器输出的 ByteBuffer 正确转换为 Bitmap?
我找到了解决问题的方法:现在我不再直接使用输出 ByteBuffer,而是使用解码器提供的 Image(getOutputImage),它解决了颜色不正确的问题。
现在需要重构代码并在动态模式下实现视频大小、表面大小等变量
更正代码
// Decodes an MP4 with MediaCodec (no output Surface) and renders frames by
// converting the decoder's output Image (YUV_420_888) to NV21, then to a
// JPEG-backed Bitmap drawn on `surface`. NOTE(review): relies on outer-class
// members SAMPLE, srcRect and dstRect — confirm they are in scope.
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private boolean needStop = false;
// Timeout (microseconds) for dequeueInputBuffer/dequeueOutputBuffer.
final int TIMEOUT_USEC = 10000;
PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
try {
extractor.setDataSource(SAMPLE); //path MP4 file
} catch (IOException e) {
e.printStackTrace();
}
// Select the first video track and create a matching decoder.
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
try {
decoder = MediaCodec.createDecoderByType(mime);
} catch (IOException e) {
e.printStackTrace();
}
// Configured without a Surface so getOutputImage() can be used below.
decoder.configure(format, /*surface*/ null, null, 0);
break;
}
}
if (decoder == null) {
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long startMs = System.currentTimeMillis();
boolean isEOS = false;
// Main decode loop: feed compressed samples in, drain decoded frames out.
while (!Thread.interrupted() && !needStop) {
if (!isEOS) {
int inIndex = -1;
try {
inIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
} catch (IllegalStateException e) {
e.printStackTrace();
}
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// No more samples: signal end-of-stream to the codec.
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
}
} else {
try {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
}
}
int outIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
if (!needStop) {
outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
// getOutputImage exposes the decoded frame with its plane layout,
// which avoids guessing the codec's raw buffer color format.
Image image = decoder.getOutputImage(outIndex);
Image.Plane[] plants = image.getPlanes();
Bitmap bmp = null;
if (plants != null && plants.length > 0) {
// Hard-coded 1280x720; should be read from the output MediaFormat.
YuvImage yuvimage = new YuvImage(YUV_420_888toNV21(image), ImageFormat.NV21, 1280, 720, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, 1280, 720), 80, baos);
byte[] jdata = baos.toByteArray();
bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
}
if (bmp != null) {
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = 720;
srcRect.right = 1280;
Canvas canvas = surface.lockCanvas(dstRect);
try {
if (canvas != null) {
canvas.drawBitmap(bmp, srcRect, dstRect /*0,0, surfaceChanged
dstRect.right = width;
dstRect.bottom = height;*/,
null);
}
} finally {
if (canvas != null) {
surface.unlockCanvasAndPost(canvas);
}
}
}
// Crude pacing: wait until wall-clock time catches up with the frame's
// presentation timestamp so playback runs at roughly real-time speed.
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() -
startMs && !needStop) {
try {
sleep(10);
} catch (InterruptedException e) {
PlayerThread.this.interrupt();
e.printStackTrace();
break;
}
}
// NOTE(review): the Image obtained above is never explicitly closed;
// releasing the output buffer invalidates it, but an image.close()
// before this call would be cleaner — verify against MediaCodec docs.
decoder.releaseOutputBuffer(outIndex, false);
break;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
// Converts a YUV_420_888 Image (e.g. from MediaCodec.getOutputImage) into a
// tightly-packed NV21 byte array: the full-resolution Y plane followed by
// interleaved V/U samples at quarter resolution — the layout YuvImage expects.
//
// Unlike a naive concatenation of the three plane buffers, this honours each
// plane's row stride and pixel stride, so it stays correct when the decoder
// pads rows (rowStride > width) or returns semi-planar chroma
// (pixelStride == 2), which is the usual cause of "wrong colors".
//
// @param image decoded frame in ImageFormat.YUV_420_888; not closed here.
// @return NV21 data of size width * height * 3 / 2.
private static byte[] YUV_420_888toNV21(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    int ySize = width * height;
    byte[] nv21 = new byte[ySize + ySize / 2];

    // --- Y plane: copy row by row, skipping any row-stride padding. ---
    Image.Plane yPlane = image.getPlanes()[0];
    ByteBuffer yBuffer = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    if (yRowStride == width) {
        // No padding: a single bulk copy suffices.
        yBuffer.get(nv21, 0, ySize);
        pos = ySize;
    } else {
        for (int row = 0; row < height; row++) {
            yBuffer.position(row * yRowStride);
            yBuffer.get(nv21, pos, width);
            pos += width;
        }
    }

    // --- Chroma planes: NV21 interleaves V first, then U. ---
    Image.Plane uPlane = image.getPlanes()[1];
    Image.Plane vPlane = image.getPlanes()[2];
    ByteBuffer uBuffer = uPlane.getBuffer();
    ByteBuffer vBuffer = vPlane.getBuffer();
    int uRowStride = uPlane.getRowStride();
    int uPixelStride = uPlane.getPixelStride();
    int vRowStride = vPlane.getRowStride();
    int vPixelStride = vPlane.getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            nv21[pos++] = vBuffer.get(row * vRowStride + col * vPixelStride);
            nv21[pos++] = uBuffer.get(row * uRowStride + col * uPixelStride);
        }
    }
    return nv21;
}