如何使用 RajawaliVR 或 Rajawali 播放 360 视频
How to use RajawaliVR or Rajawali to play a 360 Video
我很难弄清楚如何使用 Rajawali 播放 360 度全景视频。为了实现这一点,我尝试了所有我能在互联网上找到的解决方案,但都失败了。
首先,我使用了 RajawaliCardboard,并让 MainActivity 从 CardboardActivity
扩展。同时,在MyRenderer
class中,我让这个class从RajawaliCardboardRenderer
class延伸出来。在 MyRenderer
class 中,我覆盖了 initScene()
函数:
protected void initScene() {
    // Build a streaming video texture from a file on external storage and map it
    // onto an inverted sphere so the camera, placed at the center, sees the video.
    StreamingTexture mTexture = null;
    if (externalMemoryAvailable())
    {
        mVideoPath = Environment.getExternalStorageDirectory().getAbsolutePath()+"/testVideo.mp4";
        try {
            mPlayer = new MediaPlayer();
            mPlayer.setDataSource(mVideoPath);
            // Register the listener BEFORE preparing so onPrepared cannot be missed,
            // and use prepareAsync() so the GL thread is never blocked. Playback
            // must be started explicitly once the player reaches the Prepared state;
            // the original code never called start(), hence the black screen.
            mPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
                @Override
                public void onPrepared(MediaPlayer mp) {
                    mp.start();
                }
            });
            mPlayer.prepareAsync();
        } catch (IllegalArgumentException | SecurityException | IllegalStateException | IOException e) {
            e.printStackTrace();
        }
        mTexture = new StreamingTexture("video", mPlayer);
    }
    Sphere sphere = createPhotoSphereWithTexture(mTexture);
    getCurrentScene().addChild(sphere);
    getCurrentCamera().setPosition(Vector3.ZERO);
    getCurrentCamera().setFieldOfView(75);
}
private Sphere createPhotoSphereWithTexture(ATexture texture) {
    // Wraps the given texture in a material and applies it to a large sphere.
    // The negative X scale turns the sphere inside out so the texture is
    // visible from a camera positioned at its center.
    final Material videoMaterial = new Material();
    videoMaterial.setColor(0);
    try {
        videoMaterial.addTexture(texture);
    } catch (ATexture.TextureException e) {
        throw new RuntimeException(e);
    }
    final Sphere photoSphere = new Sphere(50, 64, 32);
    photoSphere.setScaleX(-1);
    photoSphere.setMaterial(videoMaterial);
    return photoSphere;
}
程序可以运行没有任何错误,但是黑屏没有图像。
我想问一下我应该如何改进我的程序,以及为什么我应该使用 Rajawali 来播放视频。谁能帮帮我?
我认为你的主要错误是在媒体播放器中调用 MediaPlayer.prepare()
而不是 MediaPlayer.prepareAsync()
您必须考虑 MediaPlayer 在播放视频时经历的不同状态。这里有一个 link 到 state diagram。您应该只在视频播放器准备好所有内容后才调用 MediaPlayer.start()
,以便视频开始播放。
我正在与 Rajawali 一起做同样的事情(360 度视频的视频播放器),到目前为止,我已经实现了在普通陀螺仪和触摸模式下重现它们,但我发现很多问题使其与 Google Cardboard 集成,所以我现在正在尝试制作自己的 "sideBySide" 渲染器。
如果我的评论还不够,这里有一个代码示例,我目前正在使用它来将视频重现为 Sphere 上的流式纹理。它是 class 上覆盖方法 initScene()
的一部分,它扩展了 RajawaliRenderer
//create a 100 segment sphere
earthSphere = new Sphere(1, 100, 100);
//try to set the mediaPLayer data source
mMediaPlayer = new MediaPlayer();
try{
    mMediaPlayer.setDataSource(context, Uri.parse("android.resource://" + context.getPackageName() + "/" + R.raw.pyrex));
}catch(IOException ex){
    Log.e("ERROR","couldn attach data source to the media player");
}
mMediaPlayer.setLooping(true); //enable video looping
video = new StreamingTexture("pyrex",mMediaPlayer); //create video texture
// Register the listener BEFORE calling prepareAsync(): if the player finished
// preparing before the listener was attached, onPrepared would never fire and
// playback would never start.
mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
    @Override
    public void onPrepared(MediaPlayer mp) {
        mp.start(); //start the player only when it is prepared
    }
});
mMediaPlayer.prepareAsync(); //prepare the player (asynchronous)
//add texture to a new material; zero color influence shows only the video
Material material = new Material ();
material.setColorInfluence(0f);
try{
    material.addTexture(video);
}catch(ATexture.TextureException ex){
    Log.e("ERROR","texture error when adding video to material");
}
//set the material to the sphere
earthSphere.setMaterial(material);
earthSphere.setPosition(0, 0, 0);
//add the sphere to the rendering scene
getCurrentScene().addChild(earthSphere);
我成功地与 Rajawali 一起播放视频。
/**
 * Renders a 360-degree video on the inside of a sphere for a Cardboard view.
 * The player is created already prepared via MediaPlayer.create(), so start()
 * may be called directly at the end of initScene().
 */
public class VideoRenderer extends RajawaliCardboardRenderer {
    Context mContext;
    private MediaPlayer mMediaPlayer;
    private StreamingTexture mVideoTexture;

    public VideoRenderer(Context context) {
        super(context);
        mContext = context;
    }

    @Override
    protected void initScene() {
        mMediaPlayer = MediaPlayer.create(getContext(), R.raw.video);
        mMediaPlayer.setLooping(true);
        mVideoTexture = new StreamingTexture("sintelTrailer", mMediaPlayer);
        Material material = new Material();
        // Zero color influence so only the video texture is visible.
        material.setColorInfluence(0);
        try {
            material.addTexture(mVideoTexture);
        } catch (ATexture.TextureException e) {
            e.printStackTrace();
        }
        // Negative X scale flips the sphere inside out so the texture faces
        // the camera placed at the sphere's center.
        Sphere sphere = new Sphere(50, 64, 32);
        sphere.setScaleX(-1);
        sphere.setMaterial(material);
        getCurrentScene().addChild(sphere);
        getCurrentCamera().setPosition(Vector3.ZERO);
        getCurrentCamera().setFieldOfView(75);
        mMediaPlayer.start();
    }

    @Override
    protected void onRender(long ellapsedRealtime, double deltaTime) {
        super.onRender(ellapsedRealtime, deltaTime);
        // Pull the latest decoded frame into the GL texture each render pass;
        // guard against a render callback arriving before initScene() ran.
        if (mVideoTexture != null)
            mVideoTexture.update();
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mMediaPlayer != null)
            mMediaPlayer.pause();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (mMediaPlayer != null)
            mMediaPlayer.start();
    }

    @Override
    public void onRenderSurfaceDestroyed(SurfaceTexture surfaceTexture) {
        super.onRenderSurfaceDestroyed(surfaceTexture);
        // MediaPlayer.create() returns null on failure, so guard before
        // stopping/releasing to avoid an NPE during teardown.
        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
    }

    /**
     * Switches playback to another video file without rebuilding the scene.
     *
     * @param nextVideoPath absolute path of the next video to play
     */
    public void nextVideo(String nextVideoPath){
        if (mMediaPlayer == null)
            return;
        try{
            mMediaPlayer.stop();
            mMediaPlayer.reset();
            mMediaPlayer.setDataSource(nextVideoPath);
            mMediaPlayer.prepare();
            mMediaPlayer.start();
        }catch (Exception e){
            e.printStackTrace();
        }
    }
}
既然你想播放 360 度视频,你需要一些方向跟踪器。
这是一个 Cardboard activity 渲染器的示例:
/**
 * Cardboard stereo renderer that projects a streaming video onto a distant
 * plane and drives the Rajawali camera from the Cardboard head/eye transforms.
 * Matrices and quaternions are preallocated as fields to reduce GC pressure
 * in the per-frame callbacks.
 */
public class CardboardRendererExample extends Renderer implements CardboardView.StereoRenderer {
    public static final int FIELD_OF_VIEW = 90;
    public static final float PLANE_WIDTH = 64.0f;
    public static final float PLANE_HEIGHT = 36.0f;
    public static final float PLANE_DISTANCE = -64.0f;

    private final MediaPlayer mMediaPlayer;
    protected StreamingTexture mStreamingTexture;
    protected Quaternion mOrientation = Quaternion.getIdentity();
    protected Quaternion mEyeOrientation = Quaternion.getIdentity();
    protected float[] mHeadView = new float[16];
    private Matrix4 mEyeMatrix = new Matrix4();
    private Vector3 mEyePosition = new Vector3();
    private Matrix4 mHeadViewMatrix4 = new Matrix4();

    public CardboardRendererExample(Context context, MediaPlayer mediaPlayer) {
        super(context);
        mMediaPlayer = mediaPlayer;
    }

    @Override
    protected void initScene() {
        getCurrentCamera().setPosition(Vector3.ZERO);
        getCurrentCamera().setFieldOfView(FIELD_OF_VIEW);
        mStreamingTexture = new StreamingTexture("give_it_some_name", mMediaPlayer);
        mStreamingTexture.shouldRecycle(true);
        setSceneCachingEnabled(true);
        final Plane projectionScreen = new Plane(PLANE_WIDTH, PLANE_HEIGHT, 64, 64);
        final Material material = new Material();
        material.setColor(0);
        // Zero color influence so only the video texture is visible.
        material.setColorInfluence(0f);
        try {
            material.addTexture(mStreamingTexture);
        } catch (ATexture.TextureException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
        projectionScreen.setDoubleSided(true);
        projectionScreen.setMaterial(material);
        projectionScreen.setTransparent(true);
        projectionScreen.setPosition(0, 0, PLANE_DISTANCE);
        // Add the screen exactly once (the original accidentally added it twice).
        getCurrentScene().addChild(projectionScreen);
    }

    @Override
    public void onNewFrame(HeadTransform headTransform) {
        // Cache the inverted head matrix as an orientation for this frame.
        headTransform.getHeadView(mHeadView, 0);
        mHeadViewMatrix4.setAll(mHeadView).inverse();
        mOrientation.fromMatrix(mHeadViewMatrix4);
    }

    @Override
    public void onDrawEye(Eye eye) {
        // Apply the per-eye frustum and pose, then render the scene once.
        getCurrentCamera().updatePerspective(
                eye.getFov().getLeft(),
                eye.getFov().getRight(),
                eye.getFov().getBottom(),
                eye.getFov().getTop());
        mEyeMatrix.setAll(eye.getEyeView());
        mEyeOrientation.fromMatrix(mEyeMatrix);
        getCurrentCamera().setOrientation(mEyeOrientation);
        mEyePosition = mEyeMatrix.getTranslation(mEyePosition).inverse();
        getCurrentCamera().setPosition(mEyePosition);
        super.onRenderFrame(null);
    }

    @Override
    public void onFinishFrame(Viewport viewport) {
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        super.onRenderSurfaceSizeChanged(null, width, height);
    }

    @Override
    public void onSurfaceCreated(EGLConfig eglConfig) {
        super.onRenderSurfaceCreated(eglConfig, null, -1, -1);
    }

    @Override
    public void onRenderSurfaceCreated(EGLConfig config, GL10 gl, int width, int height) {
        super.onRenderSurfaceCreated(config, gl, width, height);
    }

    @Override
    public void onRendererShutdown() {
    }

    @Override
    protected void onRender(long elapsedRealTime, double deltaTime) {
        super.onRender(elapsedRealTime, deltaTime);
        // Pull the latest decoded video frame into the GL texture.
        if (mStreamingTexture != null) {
            mStreamingTexture.update();
        }
    }

    @Override
    public void onOffsetsChanged(float xOffset, float yOffset, float xOffsetStep, float yOffsetStep, int xPixelOffset, int yPixelOffset) {
    }

    @Override
    public void onTouchEvent(MotionEvent event) {
    }
}
或者,您可以基于(例如)com.google.vrtoolkit.cardboard.sensors.HeadTracker 实现您自己的方向跟踪器。
当然,你可以去掉所有这些成员字段,但把它们预先分配好可以减轻 GC 的负担。
我很难弄清楚如何使用 Rajawali 播放 360 度全景视频。为了实现这一点,我尝试了所有我能在互联网上找到的解决方案,但都失败了。
首先,我使用了 RajawaliCardboard,并让 MainActivity 从 CardboardActivity
扩展。同时,在MyRenderer
class中,我让这个class从RajawaliCardboardRenderer
class延伸出来。在 MyRenderer
class 中,我覆盖了 initScene()
函数:
protected void initScene() {
    // Build a streaming video texture from a file on external storage and map it
    // onto an inverted sphere so the camera, placed at the center, sees the video.
    StreamingTexture mTexture = null;
    if (externalMemoryAvailable())
    {
        mVideoPath = Environment.getExternalStorageDirectory().getAbsolutePath()+"/testVideo.mp4";
        try {
            mPlayer = new MediaPlayer();
            mPlayer.setDataSource(mVideoPath);
            // Register the listener BEFORE preparing so onPrepared cannot be missed,
            // and use prepareAsync() so the GL thread is never blocked. Playback
            // must be started explicitly once the player reaches the Prepared state;
            // the original code never called start(), hence the black screen.
            mPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
                @Override
                public void onPrepared(MediaPlayer mp) {
                    mp.start();
                }
            });
            mPlayer.prepareAsync();
        } catch (IllegalArgumentException | SecurityException | IllegalStateException | IOException e) {
            e.printStackTrace();
        }
        mTexture = new StreamingTexture("video", mPlayer);
    }
    Sphere sphere = createPhotoSphereWithTexture(mTexture);
    getCurrentScene().addChild(sphere);
    getCurrentCamera().setPosition(Vector3.ZERO);
    getCurrentCamera().setFieldOfView(75);
}
private Sphere createPhotoSphereWithTexture(ATexture texture) {
    // Wraps the given texture in a material and applies it to a large sphere.
    // The negative X scale turns the sphere inside out so the texture is
    // visible from a camera positioned at its center.
    final Material videoMaterial = new Material();
    videoMaterial.setColor(0);
    try {
        videoMaterial.addTexture(texture);
    } catch (ATexture.TextureException e) {
        throw new RuntimeException(e);
    }
    final Sphere photoSphere = new Sphere(50, 64, 32);
    photoSphere.setScaleX(-1);
    photoSphere.setMaterial(videoMaterial);
    return photoSphere;
}
程序可以运行没有任何错误,但是黑屏没有图像。
我想问一下我应该如何改进我的程序,以及为什么我应该使用 Rajawali 来播放视频。谁能帮帮我?
我认为你的主要错误是在媒体播放器中调用 MediaPlayer.prepare()
而不是 MediaPlayer.prepareAsync()
您必须考虑 MediaPlayer 在播放视频时经历的不同状态。这里有一个 link 到 state diagram。您应该只在视频播放器准备好所有内容后才调用 MediaPlayer.start()
,以便视频开始播放。
我正在与 Rajawali 一起做同样的事情(360 度视频的视频播放器),到目前为止,我已经实现了在普通陀螺仪和触摸模式下重现它们,但我发现很多问题使其与 Google Cardboard 集成,所以我现在正在尝试制作自己的 "sideBySide" 渲染器。
如果我的评论还不够,这里有一个代码示例,我目前正在使用它来将视频重现为 Sphere 上的流式纹理。它是 class 上覆盖方法 initScene()
的一部分,它扩展了 RajawaliRenderer
//create a 100 segment sphere
earthSphere = new Sphere(1, 100, 100);
//try to set the mediaPLayer data source
mMediaPlayer = new MediaPlayer();
try{
    mMediaPlayer.setDataSource(context, Uri.parse("android.resource://" + context.getPackageName() + "/" + R.raw.pyrex));
}catch(IOException ex){
    Log.e("ERROR","couldn attach data source to the media player");
}
mMediaPlayer.setLooping(true); //enable video looping
video = new StreamingTexture("pyrex",mMediaPlayer); //create video texture
// Register the listener BEFORE calling prepareAsync(): if the player finished
// preparing before the listener was attached, onPrepared would never fire and
// playback would never start.
mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
    @Override
    public void onPrepared(MediaPlayer mp) {
        mp.start(); //start the player only when it is prepared
    }
});
mMediaPlayer.prepareAsync(); //prepare the player (asynchronous)
//add texture to a new material; zero color influence shows only the video
Material material = new Material ();
material.setColorInfluence(0f);
try{
    material.addTexture(video);
}catch(ATexture.TextureException ex){
    Log.e("ERROR","texture error when adding video to material");
}
//set the material to the sphere
earthSphere.setMaterial(material);
earthSphere.setPosition(0, 0, 0);
//add the sphere to the rendering scene
getCurrentScene().addChild(earthSphere);
我成功地与 Rajawali 一起播放视频。
/**
 * Renders a 360-degree video on the inside of a sphere for a Cardboard view.
 * The player is created already prepared via MediaPlayer.create(), so start()
 * may be called directly at the end of initScene().
 */
public class VideoRenderer extends RajawaliCardboardRenderer {
    Context mContext;
    private MediaPlayer mMediaPlayer;
    private StreamingTexture mVideoTexture;

    public VideoRenderer(Context context) {
        super(context);
        mContext = context;
    }

    @Override
    protected void initScene() {
        mMediaPlayer = MediaPlayer.create(getContext(), R.raw.video);
        mMediaPlayer.setLooping(true);
        mVideoTexture = new StreamingTexture("sintelTrailer", mMediaPlayer);
        Material material = new Material();
        // Zero color influence so only the video texture is visible.
        material.setColorInfluence(0);
        try {
            material.addTexture(mVideoTexture);
        } catch (ATexture.TextureException e) {
            e.printStackTrace();
        }
        // Negative X scale flips the sphere inside out so the texture faces
        // the camera placed at the sphere's center.
        Sphere sphere = new Sphere(50, 64, 32);
        sphere.setScaleX(-1);
        sphere.setMaterial(material);
        getCurrentScene().addChild(sphere);
        getCurrentCamera().setPosition(Vector3.ZERO);
        getCurrentCamera().setFieldOfView(75);
        mMediaPlayer.start();
    }

    @Override
    protected void onRender(long ellapsedRealtime, double deltaTime) {
        super.onRender(ellapsedRealtime, deltaTime);
        // Pull the latest decoded frame into the GL texture each render pass;
        // guard against a render callback arriving before initScene() ran.
        if (mVideoTexture != null)
            mVideoTexture.update();
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mMediaPlayer != null)
            mMediaPlayer.pause();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (mMediaPlayer != null)
            mMediaPlayer.start();
    }

    @Override
    public void onRenderSurfaceDestroyed(SurfaceTexture surfaceTexture) {
        super.onRenderSurfaceDestroyed(surfaceTexture);
        // MediaPlayer.create() returns null on failure, so guard before
        // stopping/releasing to avoid an NPE during teardown.
        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
    }

    /**
     * Switches playback to another video file without rebuilding the scene.
     *
     * @param nextVideoPath absolute path of the next video to play
     */
    public void nextVideo(String nextVideoPath){
        if (mMediaPlayer == null)
            return;
        try{
            mMediaPlayer.stop();
            mMediaPlayer.reset();
            mMediaPlayer.setDataSource(nextVideoPath);
            mMediaPlayer.prepare();
            mMediaPlayer.start();
        }catch (Exception e){
            e.printStackTrace();
        }
    }
}
既然你想播放 360 度视频,你需要某种方向跟踪器。这是一个 Cardboard activity 渲染器的示例:
的示例public class CardboardRendererExample extends Renderer implements CardboardView.StereoRenderer {
public static final int FIELD_OF_VIEW = 90;
public static final float PLANE_WIDTH = 64.0f;
public static final float PLANE_HEIGHT = 36.0f;
public static final float PLANE_DISTANCE = -64.0f;
private final MediaPlayer mMediaPlayer;
protected StreamingTexture mStreamingTexture;
protected Quaternion mOrientation = Quaternion.getIdentity();
protected Quaternion mEyeOrientation = Quaternion.getIdentity();
protected float[] mHeadView = new float[16];
private Matrix4 mEyeMatrix = new Matrix4();
private Vector3 mEyePosition = new Vector3();
private Matrix4 mHeadViewMatrix4 = new Matrix4();
public CardboardRendererExample(Context context, MediaPlayer mediaPlayer) {
super(context);
mMediaPlayer = mediaPlayer;
}
@Override
protected void initScene() {
getCurrentCamera().setPosition(Vector3.ZERO);
getCurrentCamera().setFieldOfView(FIELD_OF_VIEW);
mStreamingTexture = new StreamingTexture("give_it_some_name", mMediaPlayer);
mStreamingTexture.shouldRecycle(true);
setSceneCachingEnabled(true);
final Plane projectionScreen = new Plane(PLANE_WIDTH, PLANE_HEIGHT, 64, 64);
final Material material = new Material();
material.setColor(0);
material.setColorInfluence(0f);
try {
material.addTexture(mStreamingTexture);
} catch (ATexture.TextureException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
projectionScreen.setDoubleSided(true);
projectionScreen.setMaterial(material);
projectionScreen.setTransparent(true);
projectionScreen.setPosition(0, 0, PLANE_DISTANCE);
getCurrentScene().addChild(projectionScreen);
getCurrentScene().addChild(projectionScreen);
}
@Override
public void onNewFrame(HeadTransform headTransform) {
headTransform.getHeadView(mHeadView, 0);
mHeadViewMatrix4.setAll(mHeadView).inverse();
mOrientation.fromMatrix(mHeadViewMatrix4);
}
@Override
public void onDrawEye(Eye eye) {
getCurrentCamera().updatePerspective(
eye.getFov().getLeft(),
eye.getFov().getRight(),
eye.getFov().getBottom(),
eye.getFov().getTop());
mEyeMatrix.setAll(eye.getEyeView());
mEyeOrientation.fromMatrix(mEyeMatrix);
getCurrentCamera().setOrientation(mEyeOrientation);
mEyePosition = mEyeMatrix.getTranslation(mEyePosition).inverse();
getCurrentCamera().setPosition(mEyePosition);
super.onRenderFrame(null);
}
@Override
public void onFinishFrame(Viewport viewport) {
}
@Override
public void onSurfaceChanged(int width, int height) {
super.onRenderSurfaceSizeChanged(null, width, height);
}
@Override
public void onSurfaceCreated(EGLConfig eglConfig) {
super.onRenderSurfaceCreated(eglConfig, null, -1, -1);
}
@Override
public void onRenderSurfaceCreated(EGLConfig config, GL10 gl, int width, int height) {
super.onRenderSurfaceCreated(config, gl, width, height);
}
@Override
public void onRendererShutdown() {
}
@Override
protected void onRender(long elapsedRealTime, double deltaTime) {
super.onRender(elapsedRealTime, deltaTime);
if (mStreamingTexture != null) {
mStreamingTexture.update();
}
}
@Override
public void onOffsetsChanged(float xOffset, float yOffset, float xOffsetStep, float yOffsetStep, int xPixelOffset, int yPixelOffset) {
}
@Override
public void onTouchEvent(MotionEvent event) {
}
}
或者,您可以基于(例如)com.google.vrtoolkit.cardboard.sensors.HeadTracker 实现您自己的方向跟踪器。
当然,你可以去掉所有这些成员字段,但把它们预先分配好可以减轻 GC 的负担。