libgdx: How do I get the result of an orthographic camera through another orthographic camera?
I want to put a TV screen on my stage. I have a main scene containing the TV screen and some other things (some sprites), and a separate scene corresponding to what I want rendered inside the TV.
So I want to render the TV scene through an OrthographicCamera, then put the result into something like a sprite that can in turn be rendered through the main orthographic camera. How do I do that?
I know it is possible to use multiple cameras with multiple viewports, but those render directly to the pixels of my computer screen.
Any advice would be appreciated.
This can be done by rendering the scene to a FrameBuffer from the screen camera's point of view, fetching a TextureRegion from that FrameBuffer, and drawing the region to the screen when the scene is rendered again.
In the animated example, the cyan rectangle shows what the screen camera sees; that view is rendered into the gray box, and the screen camera and the main camera can be moved independently.
A TextureRegion is created from the FrameBuffer like this:
screenFrameBuffer = new FrameBuffer(Pixmap.Format.RGBA8888, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), true);
screenTexture = new TextureRegion(screenFrameBuffer.getColorBufferTexture());
screenTexture.flip(false, true);
The vertical flip is needed because the FrameBuffer's color texture uses OpenGL's bottom-left origin, so it would otherwise appear upside down.
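The example sizes the FrameBuffer to match the whole window, which keeps the math simple. If the TV picture is small, a smaller off-screen target should also work; a minimal sketch, assuming a hypothetical 256x256 buffer (the names tvBuffer and tvTexture are placeholders, and the viewport used while drawing into it must then match the buffer size rather than the window size):

// Hypothetical: a fixed-size buffer for the TV picture
FrameBuffer tvBuffer = new FrameBuffer(Pixmap.Format.RGBA8888, 256, 256, true);
TextureRegion tvTexture = new TextureRegion(tvBuffer.getColorBufferTexture());
tvTexture.flip(false, true); // FrameBuffer textures use OpenGL's bottom-left origin

tvBuffer.begin();
Gdx.gl.glViewport(0, 0, tvBuffer.getWidth(), tvBuffer.getHeight()); // match the buffer, not the window
// ... draw the TV scene here ...
tvBuffer.end();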
Calling screenFrameBuffer.begin() makes all subsequent render calls affect only screenFrameBuffer; after screenFrameBuffer.end() is called, the following draw calls affect the actual screen/window again.
So in the render method, you first draw the scene to the FrameBuffer:
screenFrameBuffer.begin();
renderScene(screenCamera, Color.DARK_GRAY);
screenFrameBuffer.end();
and then draw the scene again, followed by the "screen" drawn as a sprite:
renderScene(sceneCamera, Color.BLACK);
batch.setProjectionMatrix(sceneCamera.combined);
batch.begin();
batch.draw(screenTexture, -sceneCamera.viewportWidth / 2.0f, -sceneCamera.viewportHeight / 2.0f, 0, 0, screenCamera.viewportWidth, screenCamera.viewportHeight, 1.0f, 1.0f, 0.0f);
batch.end();
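In this demo the texture is drawn centred on the scene origin using the long draw overload (position, origin, size, scale, rotation). For the TV use case you would more likely draw it at the TV's rectangle; a minimal sketch, assuming hypothetical tvX, tvY, tvWidth and tvHeight in scene coordinates:

// Hypothetical: draw the captured picture where the TV sits in the main scene
batch.setProjectionMatrix(sceneCamera.combined);
batch.begin();
batch.draw(screenTexture, tvX, tvY, tvWidth, tvHeight);
batch.end();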
The full source code for the example is:
package com.bornander.sandbox;

import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.graphics.*;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.graphics.glutils.FrameBuffer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.math.RandomXS128;
import com.badlogic.gdx.math.Vector2;

public class MyGdxGame implements ApplicationListener {

    public static class Ball {
        public Vector2 position = new Vector2();
        public Vector2 velocity = new Vector2();
        public float size = 1.0f;
        public Color color = new Color();

        public void render(ShapeRenderer shapeRenderer) {
            shapeRenderer.setColor(color);
            shapeRenderer.circle(position.x, position.y, size, 16);
        }

        public void update() {
            position.x += velocity.x * Gdx.graphics.getDeltaTime();
            position.y += velocity.y * Gdx.graphics.getDeltaTime();
        }
    }

    static RandomXS128 rnd = new RandomXS128();

    OrthographicCamera sceneCamera;
    OrthographicCamera screenCamera;
    ShapeRenderer shapeRenderer;
    SpriteBatch batch;
    FrameBuffer screenFrameBuffer;
    TextureRegion screenTexture;
    Ball[] balls;

    private static float rnd(float min, float max) {
        return min + rnd.nextFloat() * (max - min);
    }

    @Override
    public void create() {
        float aspectRatio = (float) Gdx.graphics.getWidth() / (float) Gdx.graphics.getHeight();
        sceneCamera = new OrthographicCamera(100.0f, 100.0f / aspectRatio);
        screenCamera = new OrthographicCamera(32.0f, 32.0f / aspectRatio);

        batch = new SpriteBatch();
        screenFrameBuffer = new FrameBuffer(Pixmap.Format.RGBA8888, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), true);
        screenTexture = new TextureRegion(screenFrameBuffer.getColorBufferTexture());
        screenTexture.flip(false, true);

        shapeRenderer = new ShapeRenderer();

        balls = new Ball[128];
        for (int i = 0; i < balls.length; ++i) {
            balls[i] = new Ball();
            balls[i].position.set(0, 0);
            balls[i].velocity.set(rnd(-4, 4), rnd(-4, 4));
            balls[i].size = rnd(1, 1);
            balls[i].color.set(rnd(0.5f, 1.0f), rnd(0.5f, 1.0f), rnd(0.5f, 1.0f), 1.0f);
        }
    }

    private void renderScene(Camera camera, Color background) {
        camera.update();
        Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        Gdx.gl.glClearColor(background.r, background.g, background.b, background.a);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);

        shapeRenderer.setProjectionMatrix(camera.combined);
        shapeRenderer.begin(ShapeRenderer.ShapeType.Filled);
        for (int i = 0; i < balls.length; ++i) {
            balls[i].render(shapeRenderer);
        }
        shapeRenderer.end();
    }

    @Override
    public void render() {
        float cs = 8.0f;
        for (int i = 0; i < balls.length; ++i)
            balls[i].update();

        if (Gdx.input.isKeyPressed(Input.Keys.LEFT))
            sceneCamera.position.x -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.RIGHT))
            sceneCamera.position.x += cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.UP))
            sceneCamera.position.y -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.DOWN))
            sceneCamera.position.y += cs * Gdx.graphics.getDeltaTime();

        if (Gdx.input.isKeyPressed(Input.Keys.A))
            screenCamera.position.x -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.D))
            screenCamera.position.x += cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.W))
            screenCamera.position.y -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.S))
            screenCamera.position.y += cs * Gdx.graphics.getDeltaTime();

        // Render to the framebuffer, clearing its background to DARK_GRAY
        screenFrameBuffer.begin();
        renderScene(screenCamera, Color.DARK_GRAY);
        screenFrameBuffer.end();

        // Render to the window/screen, clearing its background to BLACK
        renderScene(sceneCamera, Color.BLACK);

        // Draw the framebuffer's texture as a sprite using a normal SpriteBatch
        batch.setProjectionMatrix(sceneCamera.combined);
        batch.begin();
        batch.draw(screenTexture,
                -sceneCamera.viewportWidth / 2.0f, -sceneCamera.viewportHeight / 2.0f,
                0, 0,
                screenCamera.viewportWidth, screenCamera.viewportHeight,
                1.0f, 1.0f, 0.0f);
        batch.end();

        // This just draws the outline of what the screen camera looks at
        shapeRenderer.setProjectionMatrix(sceneCamera.combined);
        shapeRenderer.begin(ShapeRenderer.ShapeType.Line);
        shapeRenderer.setColor(Color.CYAN);
        shapeRenderer.rect(
                screenCamera.position.x - screenCamera.viewportWidth / 2.0f,
                screenCamera.position.y - screenCamera.viewportHeight / 2.0f,
                screenCamera.viewportWidth,
                screenCamera.viewportHeight
        );
        shapeRenderer.end();
    }

    @Override
    public void dispose() {
        // Release the GPU resources this class owns
        batch.dispose();
        shapeRenderer.dispose();
        screenFrameBuffer.dispose();
    }

    @Override
    public void resize(int width, int height) {
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }
}
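Since the question mentions a stage: if the main scene is a scene2d Stage, the same TextureRegion can be wrapped in an Image actor instead of being drawn manually with a SpriteBatch. A minimal sketch under that assumption (the names stage and tvImage, and the bounds, are placeholders):

// Hypothetical scene2d integration
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Image;

Stage stage = new Stage();
Image tvImage = new Image(screenTexture); // wraps the FrameBuffer's texture
tvImage.setBounds(100, 100, 320, 240);    // where the TV appears, in stage coordinates
stage.addActor(tvImage);

// Each frame: render the TV scene into the FrameBuffer first, then draw the stage
screenFrameBuffer.begin();
renderScene(screenCamera, Color.DARK_GRAY);
screenFrameBuffer.end();
stage.act(Gdx.graphics.getDeltaTime());
stage.draw();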