How to get a watermark and a filter together on a video using GPUVideo-android?

I am developing a video editing Android application and want to apply a filter and a watermark to a video at the same time. For this I have been using the GPUVideo-android library: https://github.com/MasayukiSuda/GPUVideo-android

The problem is that with this library I cannot get the watermark and the filter applied together.

Below is the FRAGMENT_SHADER of one of my filters:

private static final String FRAGMENT_SHADER =
        "precision mediump float;" +
        "varying vec2 vTextureCoord;" +
        "uniform lowp sampler2D sTexture;" +
        "const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" +
        "void main() {" +
        "   vec4 FragColor = texture2D(sTexture, vTextureCoord);\n" +
        "   gl_FragColor.r = dot(FragColor.rgb, vec3(.393, .769, .189));\n" +
        "   gl_FragColor.g = dot(FragColor.rgb, vec3(.349, .686, .168));\n" +
        "   gl_FragColor.b = dot(FragColor.rgb, vec3(.272, .534, .131));\n" +
        "}";

and below is the one for the watermark:

protected static final String DEFAULT_FRAGMENT_SHADER =
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform lowp sampler2D sTexture;\n" +
        "uniform lowp sampler2D oTexture;\n" +
        "void main() {\n" +
        "   lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
        "   lowp vec4 textureColor2 = texture2D(oTexture, vTextureCoord);\n" +
        "\n" +
        "   gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);\n" +
        "}\n";

I want to combine these two effects into one:

https://github.com/MasayukiSuda/GPUVideo-android/blob/master/gpuv/src/main/java/com/daasuu/gpuv/egl/filter/GlWatermarkFilter.java

https://github.com/MasayukiSuda/GPUVideo-android/blob/master/gpuv/src/main/java/com/daasuu/gpuv/egl/filter/GlMonochromeFilter.java
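To make the goal concrete: the single-pass result I am after would look roughly like the shader below, i.e. apply the sepia weights first and then mix the watermark texture (the same oTexture as in the watermark shader above) on top. This is only a sketch of what I mean, not code that I have working:

private static final String COMBINED_FRAGMENT_SHADER =
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform lowp sampler2D sTexture;\n" +   // video frame
        "uniform lowp sampler2D oTexture;\n" +   // watermark bitmap
        "void main() {\n" +
        "   lowp vec4 color = texture2D(sTexture, vTextureCoord);\n" +
        "   lowp vec4 sepia = vec4(0.0);\n" +
        "   sepia.r = dot(color.rgb, vec3(.393, .769, .189));\n" +
        "   sepia.g = dot(color.rgb, vec3(.349, .686, .168));\n" +
        "   sepia.b = dot(color.rgb, vec3(.272, .534, .131));\n" +
        "   sepia.a = color.a;\n" +
        "   lowp vec4 overlay = texture2D(oTexture, vTextureCoord);\n" +
        "   gl_FragColor = mix(sepia, overlay, overlay.a);\n" +
        "}\n";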

Please help me solve this. I tried creating a custom filter class that extends the GlFilter class; below is the code for my SepiaFilter and OverlayFilter.

public class SepiaFilter extends OverlayFilter {

    private Bitmap bitmap;
    private Position position = Position.LEFT_TOP;

    private static final String FRAGMENT_SHADER =
            "precision mediump float;" +
            "varying vec2 vTextureCoord;" +
            "uniform lowp sampler2D sTexture;" +
            "const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" +
            "void main() {" +
            "   vec4 FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "   gl_FragColor.r = dot(FragColor.rgb, vec3(.393, .769, .189));\n" +
            "   gl_FragColor.g = dot(FragColor.rgb, vec3(.349, .686, .168));\n" +
            "   gl_FragColor.b = dot(FragColor.rgb, vec3(.272, .534, .131));\n" +
            "}";

    public SepiaFilter(Bitmap bitmap)
    {
        super(FRAGMENT_SHADER);
        this.bitmap = bitmap;
    }


    public SepiaFilter(Bitmap bitmap, Position position)
    {
        super(FRAGMENT_SHADER);
        this.bitmap = bitmap;
        this.position = position;
    }

    @Override
    protected void drawCanvas(Canvas canvas) {
        if (bitmap != null && !bitmap.isRecycled()) {
            switch (position) {
                case LEFT_TOP:
                    canvas.drawBitmap(bitmap, 0, 0, null);
                    break;
                case LEFT_BOTTOM:
                    canvas.drawBitmap(bitmap, 0, canvas.getHeight() - bitmap.getHeight(), null);
                    break;
                case RIGHT_TOP:
                    canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), 0, null);
                    break;
                case RIGHT_BOTTOM:
                    canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), canvas.getHeight() - bitmap.getHeight(), null);
                    break;
            }
        }
    }

    public enum Position {
        LEFT_TOP,
        LEFT_BOTTOM,
        RIGHT_TOP,
        RIGHT_BOTTOM
    }
}


public abstract class OverlayFilter extends GlFilter {

    private int[] textures = new int[1];

    private Bitmap bitmap = null;

    protected Size inputResolution = new Size(1280, 720);

    public OverlayFilter(String FRAGMENT_SHADER)
    {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }


    public void setResolution(Size resolution) {
        this.inputResolution = resolution;
    }

    @Override
    public void setFrameSize(int width, int height) {
        super.setFrameSize(width, height);
        setResolution(new Size(width, height));
    }

    private void createBitmap() {
        releaseBitmap(bitmap);
        bitmap = Bitmap.createBitmap(inputResolution.getWidth(), inputResolution.getHeight(), Bitmap.Config.ARGB_8888);
    }

    @Override
    public void setup() {
        super.setup(); // let GlFilter compile and link the shader program first
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        createBitmap();
    }

    @Override
    public void onDraw() {
        if (bitmap == null) {
            createBitmap();
        }
        if (bitmap.getWidth() != inputResolution.getWidth() || bitmap.getHeight() != inputResolution.getHeight()) {
            createBitmap();
        }

        bitmap.eraseColor(Color.argb(0, 0, 0, 0));
        Canvas bitmapCanvas = new Canvas(bitmap);
        bitmapCanvas.scale(1, -1, bitmapCanvas.getWidth() / 2, bitmapCanvas.getHeight() / 2);
        drawCanvas(bitmapCanvas);

        int offsetDepthMapTextureUniform = getHandle("oTexture"); // location of the overlay sampler uniform

        GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

        if (bitmap != null && !bitmap.isRecycled()) {
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap, 0);
        }

        GLES20.glUniform1i(offsetDepthMapTextureUniform, 3);
    }

    protected abstract void drawCanvas(Canvas canvas);

    public static void releaseBitmap(Bitmap bitmap) {
        if (bitmap != null && !bitmap.isRecycled()) {
            bitmap.recycle();
            bitmap = null;
        }
    }
}


// code to apply filter

Bitmap watermark = BitmapFactory.decodeResource(getResources(), R.drawable.shashankimg);

gpuPlayerView.setGlFilter(new SepiaFilter(watermark)); // gpuPlayerView is the GPUPlayerView from my layout

Use the GlFilterGroup class; its usage is documented in the README.

You should not edit the shaders. (Your SepiaFilter never shows the watermark because the FRAGMENT_SHADER it passes to OverlayFilter does not declare or sample oTexture, so the overlay texture set up in onDraw() can never reach the output.) GlFilterGroup chains filters by rendering each one as its own pass:

https://github.com/MasayukiSuda/GPUVideo-android/blob/master/gpuv/src/main/java/com/daasuu/gpuv/egl/filter/GlFilterGroup.java
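A minimal sketch of that approach, assuming the varargs GlFilterGroup constructor and the GlMonochromeFilter/GlWatermarkFilter classes you linked (swap in whatever color filter and watermark position you need):

Bitmap watermark = BitmapFactory.decodeResource(getResources(), R.drawable.shashankimg);

// GlFilterGroup renders each filter as its own pass, so the color filter and the
// watermark overlay no longer have to share a single fragment shader.
gpuPlayerView.setGlFilter(new GlFilterGroup(
        new GlMonochromeFilter(),
        new GlWatermarkFilter(watermark, GlWatermarkFilter.Position.RIGHT_BOTTOM)
));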