Libstreaming: Can we zoom the camera in/out on a SurfaceView, and add a touch-to-focus circle and flash in the camera preview?
I am developing an Android app with the libstreaming streaming library. The app publishes an upstream feed (phone to Wowza). I have created a surfaceview that shows the camera preview. It works fine, but I want to add three features: zoom in/out, autofocus, and flash.
I don't know whether this is possible with libstreaming.
The SurfaceView I am using belongs to the package net.majorkernelpanic.streaming.gl.SurfaceView.
Below is my Activity code:
public class LiveStreamingActivity extends Activity implements RtspClient.Callback, Session.Callback, SurfaceHolder.Callback {

    private static SurfaceView mSurfaceView;
    private SurfaceHolder mHolder;
    private Session mSession; // RTSP session
    private static RtspClient mClient;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.activity_main);
        if (!LibsChecker.checkVitamioLibs(this))
            return;
        mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
        mHolder = mSurfaceView.getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @SuppressWarnings("deprecation")
    private void initRtspClient() {
        // Configures the SessionBuilder
        mSession = SessionBuilder
                .getInstance()
                .setContext(getApplicationContext())
                .setAudioEncoder(SessionBuilder.AUDIO_AAC)
                .setAudioQuality(new AudioQuality(8000, 16000))
                .setVideoEncoder(SessionBuilder.VIDEO_H264)
                //.setVideoQuality(new VideoQuality(352, 288, 30, 300000))
                .setCamera(CameraInfo.CAMERA_FACING_BACK)
                .setSurfaceView(mSurfaceView).setPreviewOrientation(0)
                .setCallback(this).build();

        mClient = new RtspClient();
        mClient.setSession(mSession);
        mClient.setCallback(this);
        mClient.setTransportMode(RtspClient.TRANSPORT_TCP);
        mSurfaceView.setAspectRatioMode(SurfaceView.ASPECT_RATIO_PREVIEW);

        String ip, port, path;
        // "\d" must be escaped as "\\d" inside a Java string literal
        Pattern uri = Pattern.compile("rtsp://(.+):(\\d+)/(.+)");
        Matcher m = uri.matcher("rtsp://219.65.90.226:1935/app2/myStream");
        m.find();
        ip = m.group(1);
        port = m.group(2);
        path = m.group(3);

        mClient.setCredentials(AppConfig.PUBLISHER_USERNAME,
                AppConfig.PUBLISHER_PASSWORD);
        mClient.setServerAddress(ip, Integer.parseInt(port));
        mClient.setStreamPath("/" + path);
    }

    @Override
    protected void onResume() {
        System.out.println("on Resume activity 2");
        super.onResume();
        try {
            if (null != mSurfaceView) {
                /* BroadcastReceiver: check network connectivity */
                // receiver, vmPlayer and audioStream are declared elsewhere in the class (not shown)
                IntentFilter intentFilter = new IntentFilter();
                intentFilter.addAction("android.net.conn.CONNECTIVITY_CHANGE");
                registerReceiver(receiver, intentFilter);
                /* Start audio streaming background thread: AsyncTask */
                vmPlayer = null;
                vmPlayer = new MediaPlayer(this);
                audioStream = new AudioStreamTask(this);
                audioStream.execute("push", "push", "push");
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        try {
            /* release the surface view */
            if (null != mSurfaceView) {
                mClient.release();
                mSession.release();
                mSurfaceView.getHolder().removeCallback(this);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    @Override
    public void onDestroy() {
        try {
            super.onDestroy();
            if (mClient != null) {
                mClient.release();
            }
            if (mSession != null) {
                mSession.release();
            }
            mSurfaceView.getHolder().removeCallback(this);
        } catch (Exception e) {
            System.out.println("Error while destroying activity " + e);
        }
    }

    private void toggleStreaming() {
        if (!mClient.isStreaming()) {
            // Start camera preview
            mSession.startPreview();
            // mFrontSession.startPreview();
            // Start video stream
            mClient.startStream();
            //startRtmpStream();
        } else {
            // Already streaming: stop camera preview
            mSession.stopPreview();
            // mFrontSession.stopPreview();
            // and stop streaming
            mClient.stopStream();
        }
    }

    // SurfaceHolder.Callback methods (surfaceCreated / surfaceChanged / surfaceDestroyed) omitted here
}
activity_main.xml
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/surface_layout"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="@android:color/black"
    android:orientation="vertical" >

    <LinearLayout
        android:id="@+id/surface_view_layout"
        android:layout_width="fill_parent"
        android:layout_height="fill_parent"
        android:layout_weight="1"
        android:orientation="vertical" >

        <net.majorkernelpanic.streaming.gl.SurfaceView
            android:id="@+id/surface_view"
            android:layout_width="fill_parent"
            android:layout_height="fill_parent"
            android:layout_gravity="center" />

    </LinearLayout>
</FrameLayout>
I need a complete description of how to add all three of these camera features.
I got it working! :)
Go to VideoStream.java and change protected Camera mCamera to public static Camera mCamera.
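That is, the declaration in libstreaming's VideoStream.java ends up looking roughly like this (a quick hack to expose the camera object; adding a getter on VideoStream would be the cleaner variant):

    // In libstreaming's VideoStream.java
    // protected Camera mCamera;   // original declaration
    public static Camera mCamera;  // widened so your Activity can reach the camera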
Then go to your MainActivity (LiveStreamingActivity in your case) and paste:
private float mDist;

@Override
public boolean onTouchEvent(MotionEvent event) {
    // Get the pointer ID
    Camera.Parameters params = VideoStream.mCamera.getParameters();
    int action = event.getAction();

    if (event.getPointerCount() > 1) {
        // Handle multi-touch events
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
            mDist = getFingerSpacing(event);
        } else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
            VideoStream.mCamera.cancelAutoFocus();
            handleZoom(event, params);
        }
    } else {
        // Handle single-touch events
        if (action == MotionEvent.ACTION_UP) {
            handleFocus(event, params);
        }
    }
    return true;
}

private void handleZoom(MotionEvent event, Camera.Parameters params) {
    int maxZoom = params.getMaxZoom();
    int zoom = params.getZoom();
    float newDist = getFingerSpacing(event);
    if (newDist > mDist) {
        // Zoom in
        if (zoom < maxZoom)
            zoom++;
    } else if (newDist < mDist) {
        // Zoom out
        if (zoom > 0)
            zoom--;
    }
    mDist = newDist;
    params.setZoom(zoom);
    VideoStream.mCamera.setParameters(params);
}

public void handleFocus(MotionEvent event, Camera.Parameters params) {
    int pointerId = event.getPointerId(0);
    int pointerIndex = event.findPointerIndex(pointerId);
    // Get the pointer's current position
    float x = event.getX(pointerIndex);
    float y = event.getY(pointerIndex);

    List<String> supportedFocusModes = params.getSupportedFocusModes();
    if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
        VideoStream.mCamera.autoFocus(new Camera.AutoFocusCallback() {
            @Override
            public void onAutoFocus(boolean b, Camera camera) {
                // Currently set to auto-focus on single touch
            }
        });
    }
}

/**
 * Determine the space between the first two fingers
 */
private float getFingerSpacing(MotionEvent event) {
    // ...
    float x = event.getX(0) - event.getX(1);
    float y = event.getY(0) - event.getY(1);
    return FloatMath.sqrt(x * x + y * y);
}
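The question also asked about flash. The same static mCamera can drive the torch. Below is a minimal, untested sketch along the same lines (toggleFlash and mTorchOn are names I made up here; wire the method to whatever button or gesture you like):

private boolean mTorchOn = false;

private void toggleFlash() {
    Camera.Parameters params = VideoStream.mCamera.getParameters();
    List<String> flashModes = params.getSupportedFlashModes();
    // Some devices (and most front cameras) report no flash at all
    if (flashModes == null || !flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
        return;
    }
    mTorchOn = !mTorchOn;
    params.setFlashMode(mTorchOn ? Camera.Parameters.FLASH_MODE_TORCH
                                 : Camera.Parameters.FLASH_MODE_OFF);
    VideoStream.mCamera.setParameters(params);
}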
Based on here.
Let me know if it helps!
Thanks @José Cunha Fonte, your code is great!
For me (on the Marshmallow SDK) return FloatMath.sqrt(x * x + y * y); is deprecated and gone, so I just changed it to return (float) Math.sqrt(x * x + y * y);
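The whole helper then becomes:

private float getFingerSpacing(MotionEvent event) {
    float x = event.getX(0) - event.getX(1);
    float y = event.getY(0) - event.getY(1);
    // Math.sqrt works on double; the cast restores the float return type
    return (float) Math.sqrt(x * x + y * y);
}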
Hope it helps someone :)