必须在调用 startCapture 之前初始化 CameraCapturer
CameraCapturer must be initialized before calling startCapture
在 Android 中实施 WebRTC 时遇到此问题:
Caused by: java.lang.RuntimeException: CameraCapturer must be initialized before calling startCapture.
build.gradle(:app)
dependencies {
......
implementation 'org.webrtc:google-webrtc:1.0.+'
......
}
// 导致问题的块:
// Question's code: creates the VideoSource, SurfaceTextureHelper and capturer,
// but never wires them together, so startCapture() throws
// "CameraCapturer must be initialized before calling startCapture."
private void getVideoSource() {
// isScreenCast = false
videoSource = peerConnectionFactory.createVideoSource(false);
surfaceTextureHelper = SurfaceTextureHelper.create(Thread.currentThread().getName(), rootEglBase.getEglBaseContext());
VideoCapturer videoCapturer = createCameraCapturer(new Camera1Enumerator(false));
localVideoTrack = peerConnectionFactory.createVideoTrack("200", videoSource);
localVideoTrack.addSink(local_renderer);
// Missing step: videoCapturer.initialize(surfaceTextureHelper, <context>,
// videoSource.getCapturerObserver()) must run before startCapture(), as the
// corrected version below demonstrates.
if(videoCapturer != null)
videoCapturer.startCapture(1000,1000,30); // <- Here is the Exception
}
CameraCapturer 已弃用。 Camera1Capturer 现在可用。
You need to initialise the VideoCapturer before you use it:
/**
 * Sets up the local media pipeline: creates (and initialises!) the video
 * capturer, the local video track, the audio source/track, and starts
 * capturing. The key fix for the RuntimeException is calling
 * videoCapturer.initialize(...) BEFORE videoCapturer.startCapture(...).
 *
 * Note: the original answer declared "VideoSource videoSource;" assigned only
 * inside the null-check but used it unconditionally — a definite-assignment
 * compile error ("variable videoSource might not have been initialized").
 * Creating the video track inside the same guard fixes that, and also avoids
 * an NPE on localVideoTrack.addSink(...) when no capturer is available.
 */
private void getVideoSource() {
    VideoCapturer videoCapturer = createVideoCapturer();
    if (videoCapturer != null) {
        // The helper provides the capture thread + EGL context the capturer renders into.
        SurfaceTextureHelper surfaceTextureHelper =
                SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
        // Create a VideoSource instance matching the capturer kind (camera vs screencast).
        VideoSource videoSource = factory.createVideoSource(videoCapturer.isScreencast());
        // REQUIRED before startCapture(): binds the capturer to its render
        // thread and the source's observer.
        videoCapturer.initialize(surfaceTextureHelper, this, videoSource.getCapturerObserver());
        localVideoTrack = factory.createVideoTrack("100", videoSource);
    }
    // Create MediaConstraints — useful for specifying video and audio constraints.
    audioConstraints = new MediaConstraints();
    videoConstraints = new MediaConstraints();
    // Create an AudioSource instance and the local audio track.
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack("101", audioSource);
    if (videoCapturer != null) {
        // Safe now: the capturer was initialised above.
        videoCapturer.startCapture(1024, 720, 30);
    }
    binding.localGlSurfaceView.setVisibility(View.VISIBLE);
    // And finally, with our renderer ready, attach it to the VideoTrack
    // (only if a capturer — and therefore a track — actually exists).
    if (localVideoTrack != null) {
        localVideoTrack.addSink(binding.localGlSurfaceView);
    }
}
在 Android 中实施 WebRTC 时遇到此问题:
Caused by: java.lang.RuntimeException: CameraCapturer must be initialized before calling startCapture.
build.gradle(:app)
dependencies {
......
implementation 'org.webrtc:google-webrtc:1.0.+'
......
}
// 导致问题的块:
// Question's code: creates the VideoSource, SurfaceTextureHelper and capturer,
// but never wires them together, so startCapture() throws
// "CameraCapturer must be initialized before calling startCapture."
private void getVideoSource() {
// isScreenCast = false
videoSource = peerConnectionFactory.createVideoSource(false);
surfaceTextureHelper = SurfaceTextureHelper.create(Thread.currentThread().getName(), rootEglBase.getEglBaseContext());
VideoCapturer videoCapturer = createCameraCapturer(new Camera1Enumerator(false));
localVideoTrack = peerConnectionFactory.createVideoTrack("200", videoSource);
localVideoTrack.addSink(local_renderer);
// Missing step: videoCapturer.initialize(surfaceTextureHelper, <context>,
// videoSource.getCapturerObserver()) must run before startCapture(), as the
// corrected version below demonstrates.
if(videoCapturer != null)
videoCapturer.startCapture(1000,1000,30); // <- Here is the Exception
}
CameraCapturer 已弃用。 Camera1Capturer 现在可用。
You need to initialise the VideoCapturer before you use it:
/**
 * Sets up the local media pipeline: creates (and initialises!) the video
 * capturer, the local video track, the audio source/track, and starts
 * capturing. The key fix for the RuntimeException is calling
 * videoCapturer.initialize(...) BEFORE videoCapturer.startCapture(...).
 *
 * Note: the original answer declared "VideoSource videoSource;" assigned only
 * inside the null-check but used it unconditionally — a definite-assignment
 * compile error ("variable videoSource might not have been initialized").
 * Creating the video track inside the same guard fixes that, and also avoids
 * an NPE on localVideoTrack.addSink(...) when no capturer is available.
 */
private void getVideoSource() {
    VideoCapturer videoCapturer = createVideoCapturer();
    if (videoCapturer != null) {
        // The helper provides the capture thread + EGL context the capturer renders into.
        SurfaceTextureHelper surfaceTextureHelper =
                SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
        // Create a VideoSource instance matching the capturer kind (camera vs screencast).
        VideoSource videoSource = factory.createVideoSource(videoCapturer.isScreencast());
        // REQUIRED before startCapture(): binds the capturer to its render
        // thread and the source's observer.
        videoCapturer.initialize(surfaceTextureHelper, this, videoSource.getCapturerObserver());
        localVideoTrack = factory.createVideoTrack("100", videoSource);
    }
    // Create MediaConstraints — useful for specifying video and audio constraints.
    audioConstraints = new MediaConstraints();
    videoConstraints = new MediaConstraints();
    // Create an AudioSource instance and the local audio track.
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack("101", audioSource);
    if (videoCapturer != null) {
        // Safe now: the capturer was initialised above.
        videoCapturer.startCapture(1024, 720, 30);
    }
    binding.localGlSurfaceView.setVisibility(View.VISIBLE);
    // And finally, with our renderer ready, attach it to the VideoTrack
    // (only if a capturer — and therefore a track — actually exists).
    if (localVideoTrack != null) {
        localVideoTrack.addSink(binding.localGlSurfaceView);
    }
}