2016-09-20 15 views
6

Próbuję narysować bitmapę jako nakładkę na każdą klatkę wideo. Znalazłem przykład pokazujący, jak dekodować i kodować wideo, i działa on poprawnie. Przykład zawiera klasę TextureRenderer z funkcją drawFrame, którą trzeba zmodyfikować, aby dodać bitmapę. Jestem początkujący w OpenGL, ale dowiedziałem się, że muszę utworzyć teksturę z bitmapy i ją powiązać. Spróbowałem tego w poniższym kodzie, ale rzuca on wyjątek. (Temat: nakładka bitmapy rysowana na wideo w OpenGL ES 2.0)

/* 
* Copyright (C) 2013 The Android Open Source Project 
* 
* Licensed under the Apache License, Version 2.0 (the "License"); 
* you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at 
* 
*  http://www.apache.org/licenses/LICENSE-2.0 
* 
* Unless required by applicable law or agreed to in writing, software 
* distributed under the License is distributed on an "AS IS" BASIS, 
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and 
* limitations under the License. 
*/ 
// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/TextureRender.java 
// blob: 4125dcfcfed6ed7fddba5b71d657dec0d433da6a 
// modified: removed unused method bodies 
// modified: use GL_LINEAR for GL_TEXTURE_MIN_FILTER to improve quality. 

package com.example.name.videoeditortest; 
/** 
* Code for rendering a texture onto a surface using OpenGL ES 2.0. 
*/ 

import android.graphics.Bitmap; 
import android.graphics.SurfaceTexture; 
import android.opengl.GLES11Ext; 
import android.opengl.GLES20; 
import android.opengl.GLUtils; 
import android.opengl.Matrix; 
import android.util.Log; 

import java.io.FileOutputStream; 
import java.io.IOException; 
import java.nio.ByteBuffer; 
import java.nio.ByteOrder; 
import java.nio.FloatBuffer; 

/** 
* Code for rendering a texture onto a surface using OpenGL ES 2.0. 
*/ 
class TextureRender { 
    private Bitmap bitmap; 
    private static final String TAG = "TextureRender"; 
    private static final int FLOAT_SIZE_BYTES = 4; 
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES; 
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; 
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; 
    private final float[] mTriangleVerticesData = { 
      // X, Y, Z, U, V 
      -1.0f, -1.0f, 0, 0.f, 0.f, 
      1.0f, -1.0f, 0, 1.f, 0.f, 
      -1.0f, 1.0f, 0, 0.f, 1.f, 
      1.0f, 1.0f, 0, 1.f, 1.f, 
    }; 
    private FloatBuffer mTriangleVertices; 
    private static final String VERTEX_SHADER = 
      "uniform mat4 uMVPMatrix;\n" + 
        "uniform mat4 uSTMatrix;\n" + 
        "attribute vec4 aPosition;\n" + 
        "attribute vec4 aTextureCoord;\n" + 
        "varying vec2 vTextureCoord;\n" + 
        "void main() {\n" + 
        " gl_Position = uMVPMatrix * aPosition;\n" + 
        " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + 
        "}\n"; 
    private static final String FRAGMENT_SHADER = 
      "#extension GL_OES_EGL_image_external : require\n" + 
        "precision mediump float;\n" +  // highp here doesn't seem to matter 
        "varying vec2 vTextureCoord;\n" + 
        "uniform samplerExternalOES sTexture;\n" + 
        "void main() {\n" + 
        " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + 
        "}\n"; 
    private float[] mMVPMatrix = new float[16]; 
    private float[] mSTMatrix = new float[16]; 
    private int mProgram; 
    private int mTextureID = -12345; 
    private int mTextureBitmapID = -12345; 
    private int muMVPMatrixHandle; 
    private int muSTMatrixHandle; 
    private int maPositionHandle; 
    private int maTextureHandle; 
    public TextureRender() { 
     mTriangleVertices = ByteBuffer.allocateDirect(
       mTriangleVerticesData.length * FLOAT_SIZE_BYTES) 
       .order(ByteOrder.nativeOrder()).asFloatBuffer(); 
     mTriangleVertices.put(mTriangleVerticesData).position(0); 
     Matrix.setIdentityM(mSTMatrix, 0); 
    } 
    public int getTextureId() { 
     return mTextureID; 
    } 
    public void drawFrame(SurfaceTexture st) { 
     checkGlError("onDrawFrame start"); 
     st.getTransformMatrix(mSTMatrix); 
     GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f); 
     GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); 
     GLES20.glUseProgram(mProgram); 
     checkGlError("glUseProgram"); 
     //Bing textrues 
     GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 
     GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID); 
     GLES20.glActiveTexture(GLES20.GL_TEXTURE_2D); 
     GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureBitmapID); 

     mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET); 
     GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 
       TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); 
     checkGlError("glVertexAttribPointer maPosition"); 
     GLES20.glEnableVertexAttribArray(maPositionHandle); 
     checkGlError("glEnableVertexAttribArray maPositionHandle"); 
     mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET); 
     GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, 
       TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); 
     checkGlError("glVertexAttribPointer maTextureHandle"); 
     GLES20.glEnableVertexAttribArray(maTextureHandle); 
     checkGlError("glEnableVertexAttribArray maTextureHandle"); 
     Matrix.setIdentityM(mMVPMatrix, 0); 
     GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0); 
     GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0); 
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); 
     checkGlError("glDrawArrays"); 
     GLES20.glFinish(); 
    } 
    /** 
    * Initializes GL state. Call this after the EGL surface has been created and made current. 
    */ 
    public void surfaceCreated() { 
     mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER); 
     if (mProgram == 0) { 
      throw new RuntimeException("failed creating program"); 
     } 
     maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition"); 
     checkGlError("glGetAttribLocation aPosition"); 
     if (maPositionHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for aPosition"); 
     } 
     maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord"); 
     checkGlError("glGetAttribLocation aTextureCoord"); 
     if (maTextureHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for aTextureCoord"); 
     } 
     muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix"); 
     checkGlError("glGetUniformLocation uMVPMatrix"); 
     if (muMVPMatrixHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for uMVPMatrix"); 
     } 
     muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix"); 
     checkGlError("glGetUniformLocation uSTMatrix"); 
     if (muSTMatrixHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for uSTMatrix"); 
     } 
     int[] textures = new int[1]; 
     GLES20.glGenTextures(1, textures, 0); 
     mTextureID = textures[0]; 
     GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID); 
     checkGlError("glBindTexture mTextureID"); 
     GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, 
       GLES20.GL_NEAREST); 
     GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, 
       GLES20.GL_LINEAR); 
     GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, 
       GLES20.GL_CLAMP_TO_EDGE); 
     GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, 
       GLES20.GL_CLAMP_TO_EDGE); 
     checkGlError("glTexParameter"); 

     mTextureBitmapID = loadBitmapTexture(); 
    } 


    private int loadBitmapTexture() 
    { 
     final int[] textureHandle = new int[1]; 

     GLES20.glGenTextures(1, textureHandle, 0); 

     if (textureHandle[0] != 0) 
     { 
      // Bind to the texture in OpenGL 
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]); 

      // Set filtering 
      GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); 
      GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); 

      // Load the bitmap into the bound texture. 
      GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); 
     } 

     if (textureHandle[0] == 0) 
     { 
      throw new RuntimeException("Error loading texture."); 
     } 

     return textureHandle[0]; 
    } 

    /** 
    * Replaces the fragment shader. 
    */ 
    public void changeFragmentShader(String fragmentShader) { 
     GLES20.glDeleteProgram(mProgram); 
     mProgram = createProgram(VERTEX_SHADER, fragmentShader); 
     if (mProgram == 0) { 
      throw new RuntimeException("failed creating program"); 
     } 
    } 
    private int loadShader(int shaderType, String source) { 
     int shader = GLES20.glCreateShader(shaderType); 
     checkGlError("glCreateShader type=" + shaderType); 
     GLES20.glShaderSource(shader, source); 
     GLES20.glCompileShader(shader); 
     int[] compiled = new int[1]; 
     GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); 
     if (compiled[0] == 0) { 
      Log.e(TAG, "Could not compile shader " + shaderType + ":"); 
      Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader)); 
      GLES20.glDeleteShader(shader); 
      shader = 0; 
     } 
     return shader; 
    } 
    private int createProgram(String vertexSource, String fragmentSource) { 
     int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); 
     if (vertexShader == 0) { 
      return 0; 
     } 
     int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); 
     if (pixelShader == 0) { 
      return 0; 
     } 
     int program = GLES20.glCreateProgram(); 
     checkGlError("glCreateProgram"); 
     if (program == 0) { 
      Log.e(TAG, "Could not create program"); 
     } 
     GLES20.glAttachShader(program, vertexShader); 
     checkGlError("glAttachShader"); 
     GLES20.glAttachShader(program, pixelShader); 
     checkGlError("glAttachShader"); 
     GLES20.glLinkProgram(program); 
     int[] linkStatus = new int[1]; 
     GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); 
     if (linkStatus[0] != GLES20.GL_TRUE) { 
      Log.e(TAG, "Could not link program: "); 
      Log.e(TAG, GLES20.glGetProgramInfoLog(program)); 
      GLES20.glDeleteProgram(program); 
      program = 0; 
     } 
     return program; 
    } 
    public void checkGlError(String op) { 
     int error; 
     while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { 
      Log.e(TAG, op + ": glError " + error); 
      throw new RuntimeException(op + ": glError " + error); 
     } 
    } 

    public void setBitmap(Bitmap bitmap){ 
     this.bitmap = bitmap; 

    } 
    /** 
    * Saves the current frame to disk as a PNG image. Frame starts from (0,0). 
    * <p> 
    * Useful for debugging. 
    */ 
    public static void saveFrame(String filename, int width, int height) { 
     // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA 
     // data (i.e. a byte of red, followed by a byte of green...). We need an int[] filled 
     // with native-order ARGB data to feed to Bitmap. 
     // 
     // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just 
     // copying data around for a 720p frame. It's better to do a bulk get() and then 
     // rearrange the data in memory. (For comparison, the PNG compress takes about 500ms 
     // for a trivial frame.) 
     // 
     // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer 
     // get() into a straight memcpy on most Android devices. Our ints will hold ABGR data. 
     // Swapping B and R gives us ARGB. We need about 30ms for the bulk get(), and another 
     // 270ms for the color swap. 
     // 
     // Making this even more interesting is the upside-down nature of GL, which means we 
     // may want to flip the image vertically here. 
     ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4); 
     buf.order(ByteOrder.LITTLE_ENDIAN); 
     GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); 
     buf.rewind(); 
     int pixelCount = width * height; 
     int[] colors = new int[pixelCount]; 
     buf.asIntBuffer().get(colors); 
     for (int i = 0; i < pixelCount; i++) { 
      int c = colors[i]; 
      colors[i] = (c & 0xff00ff00) | ((c & 0x00ff0000) >> 16) | ((c & 0x000000ff) << 16); 
     } 
     FileOutputStream fos = null; 
     try { 
      fos = new FileOutputStream(filename); 
      Bitmap bmp = Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888); 
      bmp.compress(Bitmap.CompressFormat.PNG, 90, fos); 
      bmp.recycle(); 
     } catch (IOException ioe) { 
      throw new RuntimeException("Failed to write file " + filename, ioe); 
     } finally { 
      try { 
       if (fos != null) fos.close(); 
      } catch (IOException ioe2) { 
       throw new RuntimeException("Failed to close file " + filename, ioe2); 
      } 
     } 
     Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'"); 
    } 
} 

Wyjątek rzucony:

E/ExtractDecodeEditEncodeMuxTest: error while releasing muxer 
            java.lang.IllegalStateException: Can't stop due to wrong state. 
            at android.media.MediaMuxer.stop(MediaMuxer.java:231) 
            at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.extractDecodeEditEncodeMux(ExtractDecodeEditEncodeMuxTest.java 434) 
            at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.access$000(ExtractDecodeEditEncodeMuxTest.java:58) 
            at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest$TestWrapper.run(ExtractDecodeEditEncodeMuxTest.java:171) 
            at java.lang.Thread.run(Thread.java:841) 

Jeśli zakomentuję GLES20.glActiveTexture(GLES20.GL_TEXTURE_2D); w drawFrame, wideo renderuje się poprawnie, ale bitmapa nie. Jeśli zakomentuję GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureBitmapID); w drawFrame, otrzymuję następujący wyjątek:

java.lang.RuntimeException: glVertexAttribPointer maPosition: glError 1280 
at com.example.name.videoeditortest.TextureRender.checkGlError(TextureRender.java:259) 
at com.example.name.videoeditortest.TextureRender.drawFrame(TextureRender.java:111) 
at com.example.name.videoeditortest.OutputSurface.drawImage(OutputSurface.java:252) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.doExtractDecodeEditEncodeMux(ExtractDecodeEditEncodeMuxTest.java:793) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.extractDecodeEditEncodeMux(ExtractDecodeEditEncodeMuxTest.java:341) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.access$000(ExtractDecodeEditEncodeMuxTest.java:58) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest$TestWrapper.run(ExtractDecodeEditEncodeMuxTest.java:171) 
at java.lang.Thread.run(Thread.java:841) 

Odpowiedz

2

Widzę dwie rzeczy, które wydają mi się błędne.

  1. Próbujesz powiązać wszystko naraz, licząc na to, że jedno wywołanie GLES20.glDrawArrays() narysuje całość.

  2. Masz tylko jeden moduł cieniujący, w którym powinieneś mieć dwa: jeden do renderowania tekstury wideo, a drugi do renderowania warstwy bitmap.

Trzeba wiedzieć, że klatkę można narysować wieloma wywołaniami glDrawArrays — każde wywołanie po prostu domalowuje niewielki fragment na wcześniej narysowanej zawartości.


Renderowanie klatki w Twoim przypadku powinno wyglądać mniej więcej tak:

Na starcie:

loadShaderForVideo()

loadShaderForBitmapLayer()

prepareYourArraysEtc()

...

W pętli renderowania:

glClear()

updateVideoTexture()

drawFrame() {

drawVideo(){ 

    bindYourActiveTextureToVideo() 

    setYourVertexAttribAndUniform() 

    GLES20.glDrawArrays() 

} 

drawBitmap() { 

    bindYourActiveTextureToBitmap() 

    setYourVertexAttribAndUniform() // This should be the same as above for video 
    // Considering you want to draw above your video, consider activating the blending for transparency : 

    GLES20.glEnable(GLES20.GL_BLEND); 
    GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); 

    GLES20.glDrawArrays() 

} 

}


Co do shaderów, spójrz na poniższe:

Wspólny vertex shader dla obu programów:

// Shared vertex shader: transforms the position by the view-projection and
// model matrices and passes the UV coordinate through to the fragment stage.
// Used by both the video program and the bitmap-overlay program.
public static final String vertexDefaultShaderCode = 
     "uniform mat4 uVPMatrix;" + 
       "uniform mat4 uModelMatrix;" + // uniform = input const 
       "attribute vec3 aPosition;" + // attribute = input property different for each vertex 
       "attribute vec2 aTexCoordinate;" + 
       "varying vec2 vTexCoordinate;" +// varying = output property different for each pixel 

       "void main() {" + 
       "vTexCoordinate = aTexCoordinate;" + 
       "gl_Position = uVPMatrix * uModelMatrix * vec4(aPosition,1.0);" + 
       "}"; 

Następnie podstawowy fragment shader (dla bitmapy — zwykłej tekstury 2D):

// Fragment shader for the bitmap overlay: samples a regular 2D texture
// (sampler2D) at the interpolated UV coordinate.
public static final String fragmentDefaultShaderCode = 
     "precision mediump float;" + 
       "uniform sampler2D uTexture;" + 
       "varying vec2 vTexCoordinate;" + 

       "void main() {" + 
       " gl_FragColor = texture2D(uTexture, vTexCoordinate);" + 
       "}"; 

Oraz inna wersja do renderowania wideo:

// Fragment shader for the video frame: samplerExternalOES is required for
// textures backed by a SurfaceTexture (GL_TEXTURE_EXTERNAL_OES), hence the
// GL_OES_EGL_image_external extension directive.
public static final String fragmentExternalShaderCode = 
     "#extension GL_OES_EGL_image_external : require\n" + 
       "precision mediump float;" + 
       "uniform samplerExternalOES sTexture;" + 
       "varying vec2 vTexCoordinate;" + 

       "void main() {" + 
       " gl_FragColor = texture2D(sTexture, vTexCoordinate);" + 
       "}"; 

Potrzebne są dwa programy: jeden z defaultVertexShader + defaultFragmentShader, a drugi z defaultVertexShader + fragmentExternalShaderCode.

Mam nadzieję, że tylko te modyfikacje rozwiążą Twój problem.

Pozdrowienia

+0

Dziękuję za odpowiedź, to naprawdę pomocne. Jak widzisz, oświadczyłem, że oczekuję od zwycięzcy nagrody, że dostarczy funkcjonujący kod. Dzięki jeszcze raz! –

+0

Tak, widziałem twoje warunki. Zobaczę to głębiej, jeśli mam zbyt dużo wolnego czasu, ale nie mogę zrobić więcej teraz. –

-1
+0

Witam. Podczas gdy link do pomocnego zasobu zewnętrznego jest zawsze mile widziany, proszę również * zacytować kluczowy fragment strony, do której prowadzi link *. Jest tak, że jeśli strona internetowa ulegnie awarii lub strona zostanie (ponownie) przeniesiona, wówczas ta odpowiedź nie stanie się bezużyteczna –