Poor camera2 video recording performance
I'm trying to record video through the MediaRecorder and camera2 APIs, but I'm having a hard time getting a high-quality recording out of them.
I'm also seeing a few different errors in my logcat when the texture view starts up, such as "App passed a NULL surface".
Other than that it seems to work, but the captured video is zoomed in and the frame rate is very low, around 2 fps.
Here is the code I'm using:
public class VideoTest extends AppCompatActivity {
private TextureView mTextureView;
private CameraDevice mCameraDevice;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private Size mPreviewSize;
private Handler backgroundHandler;
private HandlerThread thread;
private MediaRecorder mMediaRecorder;
private String mVideoPath;
private boolean mIsRecordingVideo;
private static final String TAG = "VideoTest";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_test);
mTextureView = (TextureView) findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
mMediaRecorder = new MediaRecorder();
}
@Override
public void onResume() {
super.onResume();
registerReceiver(buttonReceiver, new IntentFilter("ACTION_PRESSED"));
}
@Override
public void onPause() {
Log.d(TAG, "onPause");
super.onPause();
closeCamera();
stopBackgroundThread();
}
private BroadcastReceiver buttonReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Log.d(TAG, "Got Button Press!");
try {
if (mIsRecordingVideo) {
i2cRequest(IndicatorControlReceiver.INDICATOR_OFF);
stopRecordingVideo();
} else {
i2cRequest(IndicatorControlReceiver.INDICATOR_ON);
startRecordingVideo();
}
} catch (Exception ex) {
Log.d(TAG, "ERROR BLAH CAMERA SUX");
}
}
};
private void openCamera() {
CameraManager camManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "Opening Camera");
try {
String camId = camManager.getCameraIdList()[0];
CameraCharacteristics cameraChars = camManager.getCameraCharacteristics(camId);
StreamConfigurationMap map = cameraChars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[14];
camManager.openCamera(camId, cameraStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e(TAG, "onSurfaceTextureAvailable, width="+width+",height="+height);
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
//Log.e(TAG, "onSurfaceTextureSizeChanged");
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
//Log.e(TAG, "onSurfaceTextureUpdated");
}
};
private CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
startPreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.d(TAG, "onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "onError code: " + error);
}
};
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Error Starting Preview. ABORTED!");
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if(null == texture) {
Log.e(TAG, "Cannot create texture. ABORTED!");
return;
}
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Collections.singletonList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
i2cRequest(I2CRequestReceiver.VIDEO_READY);
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "onConfigureFailed");
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if(null == mCameraDevice) {
Log.e(TAG, "Camera Device is Null! ABORT!");
return;
}
/* mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,CameraMetadata.CONTROL_AF_MODE_OFF);*/
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30000,30000));
thread = new HandlerThread("CameraPreview");
thread.start();
backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
thread.quitSafely();
try {
thread.join();
thread = null;
backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (null != mPreviewSession) {
mPreviewSession.close();
}
}
private void closeCamera(){
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
/*
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
*/
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
profile.audioBitRate = 128000;
profile.audioCodec = MediaRecorder.AudioEncoder.AAC;
profile.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
profile.videoCodec = MediaRecorder.VideoEncoder.H264;
profile.videoBitRate = 2048000;
profile.videoFrameRate = 30;
mMediaRecorder.setProfile(profile);
/* mMediaRecorder.setVideoEncodingBitRate(2048000);
mMediaRecorder.setAudioEncodingBitRate(128000);
mMediaRecorder.setVideoSize(1280, 720);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);*/
if (mVideoPath == null || mVideoPath.isEmpty()) {
mVideoPath = getVideoFilePath();
}
mMediaRecorder.setOutputFile(mVideoPath);
mMediaRecorder.prepare();
}
@SuppressLint("SdCardPath")
private String getVideoFilePath() {
return "/sdcard/LIVE/video/" + System.currentTimeMillis() + ".mp4";
}
Surface recorderSurface;
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Cannot bind camera, textureView, or previewSize");
return;
}
try {
closePreviewSession();
setupMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture!= null;
texture.setDefaultBufferSize(1280, 720);
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
new Thread(new Runnable() {
@Override
public void run() {
i2cRequest(I2CRequestReceiver.VIDEO_RECORDING);
mIsRecordingVideo = true;
mMediaRecorder.start();
}
}).start();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "Capture failed!");
runOnUiThread(new Runnable() {
@Override
public void run() {
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
mIsRecordingVideo = false;
}
});
}
},backgroundHandler);
} catch (IOException | CameraAccessException e) {
e.printStackTrace();
}
}
private void stopRecordingVideo() {
try {
mPreviewSession.abortCaptures();
} catch (CameraAccessException e) {
e.printStackTrace();
}
mIsRecordingVideo = false;
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.d(TAG, "Video saved: " + mVideoPath);
}
private void i2cRequest(String request) {
Intent sendI2cRequest = new Intent();
sendI2cRequest.setAction(I2CRequestReceiver.NOWSPEAK_REQUEST_ACTION);
switch (request) {
case I2CRequestReceiver.VIDEO_READY:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.VIDEO_READY);
Log.d(TAG, "VIDEO READY!!");
break;
case I2CRequestReceiver.E_CAMERA_ERROR:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.E_CAMERA_ERROR);
Log.d(TAG, "VIDEO ERROR!!");
break;
case IndicatorControlReceiver.INDICATOR_ON:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_ON);
break;
case IndicatorControlReceiver.INDICATOR_OFF:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_OFF);
break;
}
LocalBroadcastManager.getInstance(this).sendBroadcast(sendI2cRequest);
}
}
CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE takes frame rates in units of FPS, not in units of 1/1000 FPS.
So try
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30, 30));
or, better yet, pick a range from the list provided by CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.
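For example, here is a minimal sketch of how you might query the supported ranges and pick one that tops out at 30 fps. The helper name chooseFpsRange and the fixed fallback value are assumptions for illustration, not part of the original code; it expects the CameraCharacteristics object already fetched in openCamera().

private Range<Integer> chooseFpsRange(CameraCharacteristics cameraChars) {
    // Ranges advertised by the device, e.g. [15, 15], [24, 24], [15, 30], [30, 30]
    Range<Integer>[] ranges =
            cameraChars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    Range<Integer> best = new Range<>(30, 30); // assumed fallback if the query returns null
    if (ranges != null) {
        for (Range<Integer> range : ranges) {
            // Prefer a fixed 30 fps range; otherwise keep any range that reaches 30 fps.
            if (range.getLower() == 30 && range.getUpper() == 30) {
                return range;
            }
            if (range.getUpper() == 30) {
                best = range;
            }
        }
    }
    return best;
}

// Then, in updatePreview(), instead of the hard-coded Range<>(30000, 30000):
// mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, chooseFpsRange(cameraChars));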
After talking with the board manufacturer, it seems there are some issues with their implementation that they are working to resolve.
I'd like to thank @CommonsWare and @EddyTalvala for lending their expertise in helping me track down the problem here.
-Rob