Android Camera2:ImageReader 图像没有步幅值
Android Camera 2: ImageReader's Images Have no Stride Values
我一直在尝试将图像从相机路由到 ImageReader,以便我可以使用 Camera2 API 直接操作图像。当我将捕获会话流传输到 SurfaceView 时,流工作得很好。然后,当我将捕获会话流设置为我的 ImageReader 时,我注意到图像不知何故无效。
在我的 ImageReader 的 OnImageAvailable 回调函数中,我拉出下一个可用图像并尝试读取它。这就是我遇到的问题。图像不为空并且平面(plane)在那里,但平面的缓冲区起初为空。当我尝试获取缓冲区时,它们突然不为空,但尝试从中读取会使应用程序崩溃而没有堆栈跟踪。此外,平面中的像素步幅和行步幅设置为 0。不过,图像的宽度和高度设置正确。
因此,我认为我没有正确设置我的 ImageReader。那么问题是我做错了什么?
代码:
public class CompatibleCamera {
    private static final int CAMERA2_API_LEVEL = 23;
    public static final int FORMAT_RAW = ImageFormat.RAW_SENSOR;
    public static final int FORMAT_JPEG = ImageFormat.JPEG;
    // Maximum number of images the ImageReader may buffer at once. If a frame is
    // acquired but never close()d, this queue fills and the camera stops delivering.
    private static final int MAX_IMAGES = 2;

    // Interface for the user to use. User supplies the function to manipulate the image
    public interface ImageTransform
    {
        void doTransform(Image image);
    }

    //***********Camera 2 API Members***********
    // The camera2 API CameraManager. Used to access the camera device
    private CameraManager mCamera2Manager;
    // The information used by the device to reference the camera. Not a camera object itself
    private CameraDevice mCamera2Device;
    private String mCamera2DeviceID = "";
    // The class that allows us to get the camera's image
    private ImageReader mImageReader;
    // This listener is where we have the programmer deal with the image. Just edit the interface
    private ImageReader.OnImageAvailableListener mListener;
    // This is the thread for the handler. It keeps it off the UI thread so we don't block the GUI
    private HandlerThread mCameraCaptureHandlerThread;
    // This runs in the background and handles the camera feed (repeating capture requests)
    private Handler mCameraCaptureHandler;
    private HandlerThread mImageAvailableHandlerThread;
    // This runs in the background and invokes the OnImageAvailableListener
    private Handler mImageAvailableHandler;
    // This object is the camera feed, essentially. We store it so we can properly close it later
    private CameraCaptureSession cameraCaptureSession;
    // DEBUG
    private boolean TEST_SURFACE_VIEW = false;
    private Surface dbSurface;
    // Mutex lock. Held while the ImageReader is pulling and processing an image
    private Semaphore imageReaderLock = new Semaphore(1);

    //***********Common Members***********
    // The context of the activity holding this object
    private Context mContext;
    // Our ImageTransform implementation to alter the image as it comes in
    private ImageTransform mTransform;
    private int iImageFormat = FORMAT_RAW;

    //==========Methods==========

    /**
     * @param context     activity context used to look up the camera service
     * @param transform   user callback invoked once per frame delivered by the ImageReader
     * @param imageFormat FORMAT_RAW or FORMAT_JPEG
     */
    public CompatibleCamera(Context context, ImageTransform transform, int imageFormat)
    {
        mContext = context;
        mTransform = transform;
        // BUG FIX: the imageFormat argument was previously ignored, so the reader
        // always used FORMAT_RAW no matter what the caller requested.
        iImageFormat = imageFormat;
        mListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader imageReader) {
                try {
                    imageReaderLock.acquire();
                } catch (InterruptedException ex) {
                    // Restore the interrupt status instead of swallowing it.
                    Thread.currentThread().interrupt();
                    return;
                }
                try {
                    // acquireNextImage() may legitimately return null if the queue
                    // was already drained by a previous callback.
                    Image image = imageReader.acquireNextImage();
                    if (image == null) {
                        return;
                    }
                    try {
                        mTransform.doTransform(image);
                    } finally {
                        // BUG FIX: close() must run even if the transform throws;
                        // otherwise MAX_IMAGES buffers leak and the camera stalls.
                        image.close();
                    }
                } finally {
                    // BUG FIX: always release, or the next callback deadlocks on acquire().
                    imageReaderLock.release();
                }
            }
        };
    }

    /**
     * Obtains the CameraManager and selects the first back-facing camera.
     *
     * @return true when a back camera was found and camera2OpenCamera() succeeded
     */
    private boolean camera2GetManager()
    {
        //----First, get the CameraManager and a Camera Device----
        mCamera2Manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        if (mCamera2Manager == null) {
            System.out.println(" DEBUG: Manager is null");
            return false;
        }
        System.out.println(" DEBUG: Camera Manager obtained");
        try {
            for (String cameraID : mCamera2Manager.getCameraIdList()) {
                CameraCharacteristics cameraCharacteristics =
                        mCamera2Manager.getCameraCharacteristics(cameraID);
                // BUG FIX: LENS_FACING can be null for some (e.g. external) cameras;
                // the old direct unboxing comparison would throw a NullPointerException.
                Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                    mCamera2DeviceID = cameraID;
                    break;
                }
            }
            if (mCamera2DeviceID.isEmpty()) {
                System.out.println("No back camera, exiting");
                return false;
            }
            System.out.println(" DEBUG: Camera Device obtained");
            // Open the Camera Device
        } catch (Exception ex) {
            ex.printStackTrace();
            return false;
        }
        return camera2OpenCamera();
    }

    /**
     * Creates the ImageReader at the largest size the sensor offers for the
     * configured format, then starts a repeating capture session into it.
     *
     * NOTE(review): RAW_SENSOR frames are an opaque Bayer-mosaic layout; their
     * plane stride values cannot be interpreted like a YUV frame — the layout
     * must be queried from the CameraDevice that produced the image.
     *
     * @return true if session creation was initiated (configuration completes asynchronously)
     */
    private boolean camera2SetupImageReader()
    {
        // Get the largest image size available
        CameraCharacteristics cameraCharacteristics;
        try {
            cameraCharacteristics = mCamera2Manager.getCameraCharacteristics(mCamera2DeviceID);
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        StreamConfigurationMap map = cameraCharacteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // BUG FIX: getOutputSizes() returns null when the device does not support
        // the requested format; the old code would crash inside Arrays.asList.
        Size[] outputSizes = (map == null) ? null : map.getOutputSizes(iImageFormat);
        if (outputSizes == null || outputSizes.length == 0) {
            System.out.println("Image format " + iImageFormat + " not supported by this camera");
            return false;
        }
        Size largestSize = Collections.max(Arrays.asList(outputSizes), new CompareSizesByArea());

        // Set up the background handlers
        mCameraCaptureHandlerThread = new HandlerThread("cameraCaptureHandlerThread");
        mCameraCaptureHandlerThread.start();
        mCameraCaptureHandler = new Handler(mCameraCaptureHandlerThread.getLooper());
        mImageAvailableHandlerThread = new HandlerThread("imageReaderHandlerThread");
        mImageAvailableHandlerThread.start();
        mImageAvailableHandler = new Handler(mImageAvailableHandlerThread.getLooper());

        mImageReader = ImageReader.newInstance(largestSize.getWidth(),
                                               largestSize.getHeight(),
                                               iImageFormat,
                                               MAX_IMAGES);
        mImageReader.setOnImageAvailableListener(mListener, mImageAvailableHandler);

        // This callback is used to asynchronously set up the capture session on our end
        final CameraCaptureSession.StateCallback captureStateCallback =
                new CameraCaptureSession.StateCallback() {
            // When configured, set the target surface and start the repeating request
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                try
                {
                    // NOTE(review): TEMPLATE_RECORD is tuned for video; many devices
                    // will not stream RAW_SENSOR with it. TEMPLATE_STILL_CAPTURE is
                    // the usual choice for RAW output — confirm on target hardware.
                    CaptureRequest.Builder requestBuilder =
                            session.getDevice().createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
                    requestBuilder.addTarget(TEST_SURFACE_VIEW ? dbSurface
                                                               : mImageReader.getSurface());
                    // Callback set to null - image data will be produced but we will
                    // not receive per-capture metadata
                    session.setRepeatingRequest(requestBuilder.build(), null, mCameraCaptureHandler);
                    cameraCaptureSession = session;
                }
                catch (Exception ex)
                {
                    ex.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                System.out.println("Failed to configure the capture session :(");
            }
        };

        ArrayList<Surface> surfaces = new ArrayList<>();
        surfaces.add(TEST_SURFACE_VIEW ? dbSurface : mImageReader.getSurface());
        try
        {
            mCamera2Device.createCaptureSession(surfaces, captureStateCallback, mCameraCaptureHandler);
        }
        catch (Exception ex)
        {
            ex.printStackTrace();
        }
        return true;
    }
}
RAW_SENSOR 是一种特殊的格式。
General raw camera sensor image format, usually representing a single-channel Bayer-mosaic image. Each pixel color sample is stored with 16 bits of precision.
The layout of the color mosaic, the maximum and minimum encoding values of the raw pixel data, the color space of the image, and all other needed information to interpret a raw sensor image must be queried from the android.hardware.camera2.CameraDevice which produced the image.
你不应该尝试直接使用它的步幅信息,就好像它是一个 YUV 帧一样。
我一直在尝试将图像从相机路由到 ImageReader,以便我可以使用 Camera2 API 直接操作图像。当我将捕获会话流传输到 SurfaceView 时,流工作得很好。然后,当我将捕获会话流设置为我的 ImageReader 时,我注意到图像不知何故无效。
在我的 ImageReader 的 OnImageAvailable 回调函数中,我拉出下一个可用图像并尝试读取它。这就是我遇到的问题。图像不为空并且平面(plane)在那里,但平面的缓冲区起初为空。当我尝试获取缓冲区时,它们突然不为空,但尝试从中读取会使应用程序崩溃而没有堆栈跟踪。此外,平面中的像素步幅和行步幅设置为 0。不过,图像的宽度和高度设置正确。
因此,我认为我没有正确设置我的 ImageReader。那么问题是我做错了什么?
代码:
public class CompatibleCamera {
    private static final int CAMERA2_API_LEVEL = 23;
    public static final int FORMAT_RAW = ImageFormat.RAW_SENSOR;
    public static final int FORMAT_JPEG = ImageFormat.JPEG;
    // Maximum number of images the ImageReader may buffer at once. If a frame is
    // acquired but never close()d, this queue fills and the camera stops delivering.
    private static final int MAX_IMAGES = 2;

    // Interface for the user to use. User supplies the function to manipulate the image
    public interface ImageTransform
    {
        void doTransform(Image image);
    }

    //***********Camera 2 API Members***********
    // The camera2 API CameraManager. Used to access the camera device
    private CameraManager mCamera2Manager;
    // The information used by the device to reference the camera. Not a camera object itself
    private CameraDevice mCamera2Device;
    private String mCamera2DeviceID = "";
    // The class that allows us to get the camera's image
    private ImageReader mImageReader;
    // This listener is where we have the programmer deal with the image. Just edit the interface
    private ImageReader.OnImageAvailableListener mListener;
    // This is the thread for the handler. It keeps it off the UI thread so we don't block the GUI
    private HandlerThread mCameraCaptureHandlerThread;
    // This runs in the background and handles the camera feed (repeating capture requests)
    private Handler mCameraCaptureHandler;
    private HandlerThread mImageAvailableHandlerThread;
    // This runs in the background and invokes the OnImageAvailableListener
    private Handler mImageAvailableHandler;
    // This object is the camera feed, essentially. We store it so we can properly close it later
    private CameraCaptureSession cameraCaptureSession;
    // DEBUG
    private boolean TEST_SURFACE_VIEW = false;
    private Surface dbSurface;
    // Mutex lock. Held while the ImageReader is pulling and processing an image
    private Semaphore imageReaderLock = new Semaphore(1);

    //***********Common Members***********
    // The context of the activity holding this object
    private Context mContext;
    // Our ImageTransform implementation to alter the image as it comes in
    private ImageTransform mTransform;
    private int iImageFormat = FORMAT_RAW;

    //==========Methods==========

    /**
     * @param context     activity context used to look up the camera service
     * @param transform   user callback invoked once per frame delivered by the ImageReader
     * @param imageFormat FORMAT_RAW or FORMAT_JPEG
     */
    public CompatibleCamera(Context context, ImageTransform transform, int imageFormat)
    {
        mContext = context;
        mTransform = transform;
        // BUG FIX: the imageFormat argument was previously ignored, so the reader
        // always used FORMAT_RAW no matter what the caller requested.
        iImageFormat = imageFormat;
        mListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader imageReader) {
                try {
                    imageReaderLock.acquire();
                } catch (InterruptedException ex) {
                    // Restore the interrupt status instead of swallowing it.
                    Thread.currentThread().interrupt();
                    return;
                }
                try {
                    // acquireNextImage() may legitimately return null if the queue
                    // was already drained by a previous callback.
                    Image image = imageReader.acquireNextImage();
                    if (image == null) {
                        return;
                    }
                    try {
                        mTransform.doTransform(image);
                    } finally {
                        // BUG FIX: close() must run even if the transform throws;
                        // otherwise MAX_IMAGES buffers leak and the camera stalls.
                        image.close();
                    }
                } finally {
                    // BUG FIX: always release, or the next callback deadlocks on acquire().
                    imageReaderLock.release();
                }
            }
        };
    }

    /**
     * Obtains the CameraManager and selects the first back-facing camera.
     *
     * @return true when a back camera was found and camera2OpenCamera() succeeded
     */
    private boolean camera2GetManager()
    {
        //----First, get the CameraManager and a Camera Device----
        mCamera2Manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        if (mCamera2Manager == null) {
            System.out.println(" DEBUG: Manager is null");
            return false;
        }
        System.out.println(" DEBUG: Camera Manager obtained");
        try {
            for (String cameraID : mCamera2Manager.getCameraIdList()) {
                CameraCharacteristics cameraCharacteristics =
                        mCamera2Manager.getCameraCharacteristics(cameraID);
                // BUG FIX: LENS_FACING can be null for some (e.g. external) cameras;
                // the old direct unboxing comparison would throw a NullPointerException.
                Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                    mCamera2DeviceID = cameraID;
                    break;
                }
            }
            if (mCamera2DeviceID.isEmpty()) {
                System.out.println("No back camera, exiting");
                return false;
            }
            System.out.println(" DEBUG: Camera Device obtained");
            // Open the Camera Device
        } catch (Exception ex) {
            ex.printStackTrace();
            return false;
        }
        return camera2OpenCamera();
    }

    /**
     * Creates the ImageReader at the largest size the sensor offers for the
     * configured format, then starts a repeating capture session into it.
     *
     * NOTE(review): RAW_SENSOR frames are an opaque Bayer-mosaic layout; their
     * plane stride values cannot be interpreted like a YUV frame — the layout
     * must be queried from the CameraDevice that produced the image.
     *
     * @return true if session creation was initiated (configuration completes asynchronously)
     */
    private boolean camera2SetupImageReader()
    {
        // Get the largest image size available
        CameraCharacteristics cameraCharacteristics;
        try {
            cameraCharacteristics = mCamera2Manager.getCameraCharacteristics(mCamera2DeviceID);
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        StreamConfigurationMap map = cameraCharacteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // BUG FIX: getOutputSizes() returns null when the device does not support
        // the requested format; the old code would crash inside Arrays.asList.
        Size[] outputSizes = (map == null) ? null : map.getOutputSizes(iImageFormat);
        if (outputSizes == null || outputSizes.length == 0) {
            System.out.println("Image format " + iImageFormat + " not supported by this camera");
            return false;
        }
        Size largestSize = Collections.max(Arrays.asList(outputSizes), new CompareSizesByArea());

        // Set up the background handlers
        mCameraCaptureHandlerThread = new HandlerThread("cameraCaptureHandlerThread");
        mCameraCaptureHandlerThread.start();
        mCameraCaptureHandler = new Handler(mCameraCaptureHandlerThread.getLooper());
        mImageAvailableHandlerThread = new HandlerThread("imageReaderHandlerThread");
        mImageAvailableHandlerThread.start();
        mImageAvailableHandler = new Handler(mImageAvailableHandlerThread.getLooper());

        mImageReader = ImageReader.newInstance(largestSize.getWidth(),
                                               largestSize.getHeight(),
                                               iImageFormat,
                                               MAX_IMAGES);
        mImageReader.setOnImageAvailableListener(mListener, mImageAvailableHandler);

        // This callback is used to asynchronously set up the capture session on our end
        final CameraCaptureSession.StateCallback captureStateCallback =
                new CameraCaptureSession.StateCallback() {
            // When configured, set the target surface and start the repeating request
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                try
                {
                    // NOTE(review): TEMPLATE_RECORD is tuned for video; many devices
                    // will not stream RAW_SENSOR with it. TEMPLATE_STILL_CAPTURE is
                    // the usual choice for RAW output — confirm on target hardware.
                    CaptureRequest.Builder requestBuilder =
                            session.getDevice().createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
                    requestBuilder.addTarget(TEST_SURFACE_VIEW ? dbSurface
                                                               : mImageReader.getSurface());
                    // Callback set to null - image data will be produced but we will
                    // not receive per-capture metadata
                    session.setRepeatingRequest(requestBuilder.build(), null, mCameraCaptureHandler);
                    cameraCaptureSession = session;
                }
                catch (Exception ex)
                {
                    ex.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                System.out.println("Failed to configure the capture session :(");
            }
        };

        ArrayList<Surface> surfaces = new ArrayList<>();
        surfaces.add(TEST_SURFACE_VIEW ? dbSurface : mImageReader.getSurface());
        try
        {
            mCamera2Device.createCaptureSession(surfaces, captureStateCallback, mCameraCaptureHandler);
        }
        catch (Exception ex)
        {
            ex.printStackTrace();
        }
        return true;
    }
}
RAW_SENSOR 是一种特殊的格式。
General raw camera sensor image format, usually representing a single-channel Bayer-mosaic image. Each pixel color sample is stored with 16 bits of precision.
The layout of the color mosaic, the maximum and minimum encoding values of the raw pixel data, the color space of the image, and all other needed information to interpret a raw sensor image must be queried from the android.hardware.camera2.CameraDevice which produced the image.
你不应该尝试直接使用它的步幅信息,就好像它是一个 YUV 帧一样。