Android OpenGL in Practice - Rendering YUV Data with GLSurfaceView

1. Creating the OpenGL ES Environment

Declare OpenGL ES in the manifest

For your application to use the OpenGL ES 2.0 API, you must add the following declaration to its manifest:

<uses-feature android:glEsVersion="0x00020000" android:required="true" />
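The manifest declaration only filters installation; it is not a runtime check. If you also want to verify ES 2.0 support in code before constructing the view (and fall back to some other rendering path), a minimal sketch using ActivityManager could look like the following; the helper name supportsGlEs2 and the fallback idea are assumptions, not part of the original code:

import android.app.ActivityManager
import android.content.Context

fun supportsGlEs2(context: Context): Boolean {
    // reqGlEsVersion packs the major version into the upper 16 bits, so 0x20000 means ES 2.0
    val activityManager = context.getSystemService(Context.ACTIVITY_SERVICE) as ActivityManager
    return activityManager.deviceConfigurationInfo.reqGlEsVersion >= 0x20000
}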
Building the GLSurfaceView
class MyGLSurfaceView(context: Context, attributeSet: AttributeSet?) : GLSurfaceView(context, attributeSet) {
    companion object {
        private const val TAG = "MyGLSurfaceView"
    }

    constructor(context: Context) : this(context, null)

    private val renderer: MyGLRenderer

    init {

        // Create an OpenGL ES 2.0 context
        setEGLContextClientVersion(2)

        renderer = MyGLRenderer()

        // Set the Renderer for drawing on the GLSurfaceView
        setRenderer(renderer)

        // Render the view only when there is a change in the drawing data
        renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
    }
    
    /**
     * Set the display orientation
     * @param degrees display rotation in degrees (counter-clockwise); valid values are 0, 90, 180 and 270
     */
    fun setDisplayOrientation(degrees: Int) {
        renderer.setDisplayOrientation(degrees)
    }
    
    /**
     * Set the width and height of the YUV data to be rendered
     * @param width frame width
     * @param height frame height
     */
    fun setYuvDataSize(width: Int, height: Int) {
        Log.d(TAG, "setYuvDataSize $width * $height")
        renderer.setYuvDataSize(width, height)
    }

    /**
     * Feed a frame of YUV preview data
     * @param yuvData the YUV data
     * @param type YUV format: 0 -> I420, 1 -> NV12, 2 -> NV21
     */
    fun feedData(yuvData: ByteArray?, type: Int = 0) {
        if (yuvData == null) {
            return
        }
        renderer.feedData(yuvData, type)
        // Request a render pass for the new frame
        requestRender()
    }
}

Main responsibilities:
1. Specify the OpenGL ES context version
2. Set the Renderer used for drawing
3. Set the render mode to RENDERMODE_WHEN_DIRTY, so a redraw is only triggered after requestRender() is called
4. Pass in the width and height of the YUV data
5. Feed the YUV data to be rendered (I420, NV12 or NV21)
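The view can also be created programmatically instead of from a layout file. A minimal sketch (MainActivity and its AppCompat base class are assumptions, not part of the original article):

import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity

class MainActivity : AppCompatActivity() {
    private lateinit var glView: MyGLSurfaceView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Uses the secondary constructor above, which delegates to (context, null)
        glView = MyGLSurfaceView(this)
        setContentView(glView)
    }
}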

Building the Renderer
class MyGLRenderer : GLSurfaceView.Renderer {
    companion object {
        private const val TAG = "MyGLRenderer"
    }

    private lateinit var mProgram: MyGLProgram
    // GLSurfaceView width
    private var mScreenWidth: Int = 0
    // GLSurfaceView height
    private var mScreenHeight: Int = 0
    // Width of the YUV frame
    private var mVideoWidth: Int = 0
    // Height of the YUV frame
    private var mVideoHeight: Int = 0

    // vPMatrix is an abbreviation for "Model View Projection Matrix"
    private val vPMatrix = FloatArray(16)
    private val projectionMatrix = FloatArray(16)
    private val viewMatrix = FloatArray(16)

    // Y plane data
    private var y: ByteBuffer = ByteBuffer.allocate(0)
    // U plane data
    private var u: ByteBuffer = ByteBuffer.allocate(0)
    // V plane data
    private var v: ByteBuffer = ByteBuffer.allocate(0)
    // Interleaved UV plane data
    private var uv: ByteBuffer = ByteBuffer.allocate(0)

    // YUV format: 0 -> I420  1 -> NV12  2 -> NV21
    private var type: Int = 0
    // Whether the surface is ready for rendering
    private var hasVisibility = false

    //  Called once to set up the view's OpenGL ES environment.
    override fun onSurfaceCreated(unused: GL10, config: EGLConfig) {
        // Set the background frame color
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f)

        // Set up the OpenGL ES environment (compile shaders, link the program, create textures)
        mProgram = MyGLProgram()
    }

    //  Called if the geometry of the view changes, for example when the device's screen orientation changes.
    override fun onSurfaceChanged(unused: GL10, width: Int, height: Int) {
        GLES20.glViewport(0, 0, width, height)

        mScreenWidth = width
        mScreenHeight = height
        val ratio: Float = width.toFloat() / height.toFloat()

        // this projection matrix is applied to object coordinates
        // in the onDrawFrame() method
        Matrix.frustumM(projectionMatrix, 0, -ratio, ratio, -1f, 1f, 3f, 7f)

        // Set the camera position (View matrix)
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 1.0f, 0.0f, 0.0f)
        
        if (mVideoWidth > 0 && mVideoHeight > 0) {
            createBuffers(mVideoWidth, mVideoHeight)
        }
        hasVisibility = true
        Log.d(TAG, "onSurfaceChanged width:$width * height:$height")
    }

    //  Called for each redraw of the view.
    override fun onDrawFrame(unused: GL10) {
        synchronized(this) {
            if (y.capacity() > 0) {
                y.position(0)
                if (type == 0) {
                    u.position(0)
                    v.position(0)
                    mProgram.feedTextureWithImageData(y, u, v, mVideoWidth, mVideoHeight)
                } else {
                    uv.position(0)
                    mProgram.feedTextureWithImageData(y, uv, mVideoWidth, mVideoHeight)
                }
                // Redraw background color
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)

                // Calculate the projection and view transformation
                Matrix.multiplyMM(vPMatrix, 0, projectionMatrix, 0, viewMatrix, 0)

                try {
                    mProgram.drawTexture(vPMatrix, type)
                } catch (e: Exception) {
                    Log.w(TAG, "drawTexture failed", e)
                }
            }
        }
    }

    /**
     * Set the display orientation
     * @param degrees display rotation in degrees (counter-clockwise); valid values are 0, 90, 180 and 270
     */
    fun setDisplayOrientation(degrees: Int) {
        // Set the camera position (View matrix)
        if (degrees == 0) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 1.0f, 0.0f, 0.0f)
        } else if (degrees == 90) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0.0f, 1.0f, 0.0f)
        } else if (degrees == 180) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, -1.0f, 0.0f, 0.0f)
        } else if (degrees == 270) {
            Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0.0f, -1.0f, 0.0f)
        } else {
            Log.e(TAG, "degrees param must be one of 0, 90, 180, 270")
        }
    }
    
    /**
     * Set the width and height of the YUV data to be rendered
     * @param width frame width
     * @param height frame height
     */
    fun setYuvDataSize(width: Int, height: Int) {
        if (width > 0 && height > 0) {
            // Adjust the scale of the rendered quad to the new aspect ratio
            createBuffers(width, height)

            // (Re)allocate the plane buffers whenever the frame size changes
            if (width != mVideoWidth || height != mVideoHeight) {
                this.mVideoWidth = width
                this.mVideoHeight = height
                val yarraySize = width * height
                val uvarraySize = yarraySize / 4
                synchronized(this) {
                    y = ByteBuffer.allocate(yarraySize)
                    u = ByteBuffer.allocate(uvarraySize)
                    v = ByteBuffer.allocate(uvarraySize)
                    uv = ByteBuffer.allocate(uvarraySize * 2)
                }
            }
        }
    }

    /**
     * Adjust the scale of the rendered quad so the frame keeps its aspect ratio
     * @param width YUV frame width
     * @param height YUV frame height
     */
    private fun createBuffers(width: Int, height: Int) {
        if (mScreenWidth > 0 && mScreenHeight > 0) {
            val f1 = mScreenHeight.toFloat() / mScreenWidth.toFloat()
            val f2 = height.toFloat() / width.toFloat()
            if (f1 == f2) {
                mProgram.createBuffers(MyGLProgram.squareVertices)
            } else if (f1 < f2) {
                val widthScale = f1 / f2
                mProgram.createBuffers(
                    floatArrayOf(-widthScale, -1.0f, widthScale, -1.0f, -widthScale, 1.0f, widthScale, 1.0f)
                )
            } else {
                val heightScale = f2 / f1
                mProgram.createBuffers(
                    floatArrayOf(-1.0f, -heightScale, 1.0f, -heightScale, -1.0f, heightScale, 1.0f, heightScale)
                )
            }
        }
    }

    /**
     * Feed a frame of YUV data for rendering
     * @param yuvdata the YUV data
     * @param type YUV format: 0 -> I420, 1 -> NV12, 2 -> NV21
     */
    fun feedData(yuvdata: ByteArray, type: Int = 0) {
        synchronized(this) {
            if (hasVisibility) {
                this.type = type
                if (type == 0) {
                    y.clear()
                    u.clear()
                    v.clear()
                    y.put(yuvdata, 0, mVideoWidth * mVideoHeight)
                    u.put(yuvdata, mVideoWidth * mVideoHeight, mVideoWidth * mVideoHeight / 4)
                    v.put(yuvdata, mVideoWidth * mVideoHeight * 5 / 4, mVideoWidth * mVideoHeight / 4)
                } else {
                    y.clear()
                    uv.clear()
                    y.put(yuvdata, 0, mVideoWidth * mVideoHeight)
                    uv.put(yuvdata, mVideoWidth * mVideoHeight, mVideoWidth * mVideoHeight / 2)
                }
            }
        }
    }
}
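To make the branch logic in createBuffers() concrete, here is a quick worked example with assumed sizes: a 1080 x 1920 portrait surface showing a 1280 x 720 frame. Then f1 = 1920 / 1080 ≈ 1.78 and f2 = 720 / 1280 = 0.5625, so f1 > f2 and the else branch runs with heightScale = f2 / f1 ≈ 0.32. The quad's vertices are scaled to ±1.0 in x and ±0.32 in y before the projection and view matrices are applied, so the frame keeps its aspect ratio instead of being stretched to fill the surface.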
Rendering flow:

1. onSurfaceCreated(): set up the OpenGL ES environment - load and compile the shaders, link the program, generate the texture handles and perform other one-time initialization
2. onSurfaceChanged(): set the position and size of the render area and compute the transformation matrices
3. onDrawFrame(): called on every redraw:

  • GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT) clears the background color
  • the texture is drawn

2. The Program

Loading the shaders
/**
 * Load and compile a shader
 * @param type GLES20.GL_VERTEX_SHADER -> vertex shader
 *             GLES20.GL_FRAGMENT_SHADER -> fragment shader
 * @param shaderCode shader source code
 */
fun loadShader(type: Int, shaderCode: String): Int {

    // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
    // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
    return GLES20.glCreateShader(type).also { shader ->

        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode)
        GLES20.glCompileShader(shader)
    }
}
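loadShader() above does not verify that compilation succeeded, so a bad shader only shows up later when the program fails to link. A hedged variant with an explicit status check (the name loadShaderChecked is mine, not from the original source) could look like this:

fun loadShaderChecked(type: Int, shaderCode: String): Int {
    val shader = GLES20.glCreateShader(type)
    GLES20.glShaderSource(shader, shaderCode)
    GLES20.glCompileShader(shader)

    // Query the compile status and log the driver's info log on failure
    val status = IntArray(1)
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0)
    if (status[0] != GLES20.GL_TRUE) {
        Log.e("MyGLProgram", "Shader compile failed: ${GLES20.glGetShaderInfoLog(shader)}")
        GLES20.glDeleteShader(shader)
        return 0
    }
    return shader
}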
Vertex Shader Code
/**
 * Vertex shader program
 * The vertex shader runs once per vertex and determines the vertex's final position through the
 * coordinate-space transformations. It can pass data on to the fragment shader, such as texture
 * coordinates, vertex coordinates and transformation matrices.
 */
const val vertexShaderCode =
    "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "attribute vec2 texCoord;" +
            "varying vec2 tc;" +
            "void main() {" +
            "  gl_Position = uMVPMatrix * vPosition;" +
            "  tc = texCoord;" +
            "}"
  • uMVPMatrix: vertex transformation matrix
  • vPosition: vertex position
  • texCoord: texture coordinate of the vertex
  • tc: texture coordinate passed from the vertex shader to the fragment shader
Fragment Shader Code
/**
 * Fragment shader program
 * The fragment shader runs once per fragment (pixel) and determines its final display color from
 * the interpolated inputs.
 */
const val fragmentShaderCode =
    "precision mediump float;" +
            "uniform sampler2D samplerY;" +
            "uniform sampler2D samplerU;" +
            "uniform sampler2D samplerV;" +
            "uniform sampler2D samplerUV;" +
            "uniform int yuvType;" +
            "varying vec2 tc;" +
            "void main() {" +
            "  vec4 c = vec4((texture2D(samplerY, tc).r - 16./255.) * 1.164);" +
            "  vec4 U; vec4 V;" +
            "  if (yuvType == 0){" +
            // Each texture holds a single YUV plane, so the sampled r, g, b and a components all carry the same value
            "    U = vec4(texture2D(samplerU, tc).r - 128./255.);" +
            "    V = vec4(texture2D(samplerV, tc).r - 128./255.);" +
            "  } else if (yuvType == 1){" +
            // NV12 has two planes; when the UV plane is uploaded, the chosen texture format puts U in r/g/b and V in a
            "    U = vec4(texture2D(samplerUV, tc).r - 128./255.);" +
            "    V = vec4(texture2D(samplerUV, tc).a - 128./255.);" +
            "  } else {" +
            // NV21 has two planes; when the UV plane is uploaded, the chosen texture format puts U in a and V in r/g/b
            "    U = vec4(texture2D(samplerUV, tc).a - 128./255.);" +
            "    V = vec4(texture2D(samplerUV, tc).r - 128./255.);" +
            "  } " +
            "  c += V * vec4(1.596, -0.813, 0, 0);" +
            "  c += U * vec4(0, -0.392, 2.017, 0);" +
            "  c.a = 1.0;" +
            "  gl_FragColor = c;" +
            "}"
  • samplerY/U/V/UV: sampler2D uniforms used to sample the Y/U/V/UV planes of the YUV data
  • yuvType: YUV format -> 0 for I420, 1 for NV12, 2 for NV21
  • tc: texture coordinate passed in from the vertex shader
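The arithmetic the shader performs is the standard BT.601 limited-range YUV-to-RGB conversion. For reference, the same math as a CPU-side Kotlin sketch (illustration only; it is not used anywhere in the renderer):

// R = 1.164(Y-16) + 1.596(V-128)
// G = 1.164(Y-16) - 0.813(V-128) - 0.392(U-128)
// B = 1.164(Y-16) + 2.017(U-128)
fun yuvToRgb(yVal: Int, uVal: Int, vVal: Int): Triple<Int, Int, Int> {
    val y = 1.164f * (yVal - 16)
    val u = (uVal - 128).toFloat()
    val v = (vVal - 128).toFloat()
    val r = (y + 1.596f * v).toInt().coerceIn(0, 255)
    val g = (y - 0.813f * v - 0.392f * u).toInt().coerceIn(0, 255)
    val b = (y + 2.017f * u).toInt().coerceIn(0, 255)
    return Triple(r, g, b)
}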
The main texture-rendering class
class MyGLProgram {
    companion object {
        private const val TAG = "MyGLProgram"
        var squareVertices = floatArrayOf(-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f) // full screen
    }

    private var mProgram: Int

    private var mPlanarTextureHandles = IntBuffer.wrap(IntArray(3))
    private val mSampleHandle = IntArray(3)
    // handles
    private var mPositionHandle = -1
    private var mCoordHandle = -1
    private var mVPMatrixHandle: Int = -1

    // vertices buffer
    private var mVertexBuffer: FloatBuffer? = null
    private var mCoordBuffer: FloatBuffer? = null
    // whole-texture
    private val mCoordVertices = floatArrayOf(0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f)

    init {
        val vertexShader: Int = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
        val fragmentShader: Int = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
        Log.d(TAG, "vertexShader = $vertexShader \n fragmentShader = $fragmentShader")

        // create empty OpenGL ES Program
        mProgram = GLES20.glCreateProgram().also {
            checkGlError("glCreateProgram")
            // add the vertex shader to program
            GLES20.glAttachShader(it, vertexShader)

            // add the fragment shader to program
            GLES20.glAttachShader(it, fragmentShader)

            // creates OpenGL ES program executables
            GLES20.glLinkProgram(it)
        }

        val linkStatus = IntArray(1)
        GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0)
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.w(TAG, "Could not link program: ${GLES20.glGetProgramInfoLog(mProgram)}")
            GLES20.glDeleteProgram(mProgram)
            mProgram = 0
        }

        Log.d(TAG, "mProgram = $mProgram")

        checkGlError("glCreateProgram")

        // Generate the texture handles (one per plane)
        GLES20.glGenTextures(3, mPlanarTextureHandles)

        checkGlError("glGenTextures")
    }

    /**
     * Draw the textured quad
     * @param mvpMatrix vertex transformation matrix
     * @param type YUV format type
     */
    fun drawTexture(mvpMatrix: FloatArray, type: Int) {

        GLES20.glUseProgram(mProgram)
        checkGlError("glUseProgram")
        /*
         * get handle for "vPosition" and "a_texCoord"
         */
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition").also {
            GLES20.glVertexAttribPointer(it, 2, GLES20.GL_FLOAT, false, 8, mVertexBuffer)
            GLES20.glEnableVertexAttribArray(it)
        }

        // Pass the texture coordinates to the fragment shader
        mCoordHandle = GLES20.glGetAttribLocation(mProgram, "texCoord").also {
            GLES20.glVertexAttribPointer(it, 2, GLES20.GL_FLOAT, false, 8, mCoordBuffer)
            GLES20.glEnableVertexAttribArray(it)
        }

        // get handle to shape's transformation matrix
        mVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix")

        // Pass the projection and view transformation to the shader
        GLES20.glUniformMatrix4fv(mVPMatrixHandle, 1, false, mvpMatrix, 0)

        // Pass the YUV pixel format to the fragment shader
        val yuvType = GLES20.glGetUniformLocation(mProgram, "yuvType")
        checkGlError("glGetUniformLocation yuvType")
        GLES20.glUniform1i(yuvType, type)

        // type: 0 -> I420, 1 -> NV12, 2 -> NV21
        var planarCount = 0
        if (type == 0) {
            // I420 has three planes
            planarCount = 3
            mSampleHandle[0] = GLES20.glGetUniformLocation(mProgram, "samplerY")
            mSampleHandle[1] = GLES20.glGetUniformLocation(mProgram, "samplerU")
            mSampleHandle[2] = GLES20.glGetUniformLocation(mProgram, "samplerV")
        } else {
            // NV12 and NV21 have two planes
            planarCount = 2
            mSampleHandle[0] = GLES20.glGetUniformLocation(mProgram, "samplerY")
            mSampleHandle[1] = GLES20.glGetUniformLocation(mProgram, "samplerUV")
        }
        (0 until planarCount).forEach { i ->
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i)
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[i])
            GLES20.glUniform1i(mSampleHandle[i], i)
        }

        // After this call the vertex shader runs once per vertex and the fragment shader once per
        // fragment; the resulting image is written to the render buffer
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        GLES20.glFinish()

        GLES20.glDisableVertexAttribArray(mPositionHandle)
        GLES20.glDisableVertexAttribArray(mCoordHandle)
    }

    /**
     * Bind image data to the texture targets, for formats that store U and V in separate planes (I420)
     * @param yPlane Y plane of the YUV data
     * @param uPlane U plane of the YUV data
     * @param vPlane V plane of the YUV data
     * @param width frame width
     * @param height frame height
     */
    fun feedTextureWithImageData(yPlane: ByteBuffer, uPlane: ByteBuffer, vPlane: ByteBuffer, width: Int, height: Int) {
        // Per the 4:2:0 layout, the chroma planes are uploaded at half the luma resolution
        textureYUV(yPlane, width, height, 0)
        textureYUV(uPlane, width / 2, height / 2, 1)
        textureYUV(vPlane, width / 2, height / 2, 2)
    }

    /**
     * Bind image data to the texture targets, for formats with an interleaved UV plane (NV12, NV21)
     * @param yPlane Y plane of the YUV data
     * @param uvPlane interleaved UV plane of the YUV data
     * @param width frame width
     * @param height frame height
     */
    fun feedTextureWithImageData(yPlane: ByteBuffer, uvPlane: ByteBuffer, width: Int, height: Int) {
        // Per the 4:2:0 layout, the UV plane is uploaded at half the luma resolution
        textureYUV(yPlane, width, height, 0)
        textureNV12(uvPlane, width / 2, height / 2, 1)
    }

    /**
     * Upload one plane of image data to a texture target (used for each separately stored I420 plane)
     * @param imageData one Y/U/V plane of the YUV data
     * @param width plane width
     * @param height plane height
     * @param index which of the three texture handles to bind
     */
    private fun textureYUV(imageData: ByteBuffer, width: Int, height: Int, index: Int) {
        // Bind the texture object to the texture target
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[index])
        // Use linear filtering for both minification and magnification
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
        // Clamp texture wrapping on both axes to the edge pixels
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
        // Upload the image data. GL_LUMINANCE appears twice but means different things: the third
        // argument is the texture's internal format, the seventh the pixel format of the source data.
        // In the resulting texture every pixel's r, g, b and a hold the same value - the uploaded
        // luminance, i.e. the value of this YUV plane at that position.
        GLES20.glTexImage2D(
            GLES20.GL_TEXTURE_2D, 0,
            GLES20.GL_LUMINANCE, width, height, 0,
            GLES20.GL_LUMINANCE,
            GLES20.GL_UNSIGNED_BYTE, imageData
        )
    }

    /**
     * Upload the interleaved UV plane to a texture target (used for NV12 and NV21)
     * @param imageData UV plane of the YUV data
     * @param width plane width
     * @param height plane height
     * @param index which texture handle to bind
     */
    private fun textureNV12(imageData: ByteBuffer, width: Int, height: Int, index: Int) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mPlanarTextureHandles[index])
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
        GLES20.glTexImage2D(
            GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, width, height, 0,
            GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, imageData
        )
    }

    /**
     * Create the two buffers that hold the vertices: position vertices and texture-coordinate vertices
     * @param vert position vertex data
     */
    fun createBuffers(vert: FloatArray) {
        mVertexBuffer = ByteBuffer.allocateDirect(vert.size * 4).run {
            // use the device hardware's native byte order
            order(ByteOrder.nativeOrder())

            // create a floating point buffer from the ByteBuffer
            asFloatBuffer().apply {
                // add the coordinates to the FloatBuffer
                put(vert)
                // set the buffer to read the first coordinate
                position(0)
            }
        }

        if (mCoordBuffer == null) {
            mCoordBuffer = ByteBuffer.allocateDirect(mCoordVertices.size * 4).run {
                // use the device hardware's native byte order
                order(ByteOrder.nativeOrder())

                // create a floating point buffer from the ByteBuffer
                asFloatBuffer().apply {
                    // add the coordinates to the FloatBuffer
                    put(mCoordVertices)
                    // set the buffer to read the first coordinate
                    position(0)
                }
            }
        }
        Log.d(TAG, "createBuffers vertice_buffer $mVertexBuffer  coord_buffer $mCoordBuffer")
    }

    /**
     * Check whether a GL operation produced an error
     * @param op description of the operation being checked
     */
    private fun checkGlError(op: String) {
        var error: Int = GLES20.glGetError()
        while (error != GLES20.GL_NO_ERROR) {
            Log.e(TAG, "***** $op: glError $error")
            error = GLES20.glGetError()
        }
    }
}
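One caveat for textureYUV()/textureNV12() above: OpenGL ES unpacks texture rows with a default alignment of 4 bytes, so a GL_LUMINANCE plane whose width is not a multiple of 4 (easy to hit with the half-resolution chroma planes) can come out skewed. If you run into this, a one-line adjustment before the glTexImage2D calls relaxes the alignment; this is an addition to the original code, not part of it:

// Tell GL that rows in the source buffer are tightly packed (1-byte aligned)
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1)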

3. Usage

1. Add the custom GLSurfaceView to a layout file

<com.lkl.opengl.MyGLSurfaceView
        android:id="@+id/openGlSurface"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>

2. Set the width and height of the YUV data

openGlSurface.setYuvDataSize(width, height)

3. Feed data after the GLSurfaceView.Renderer onSurfaceChanged() callback has been invoked

openGlSurface.feedData(yuvData, 2)
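As an illustration of steps 2 and 3 together, here is a hypothetical wiring against the legacy android.hardware.Camera API, whose preview format defaults to NV21 (type 2). The camera variable and its configuration are assumptions for this sketch, not part of the article's code:

// Tell the renderer the frame size before preview frames start arriving
val previewSize = camera.parameters.previewSize
openGlSurface.setYuvDataSize(previewSize.width, previewSize.height)

// Preview frames arrive on a camera thread; the renderer guards its buffers with synchronized blocks
camera.setPreviewCallback { data, _ ->
    openGlSurface.feedData(data, 2) // 2 -> NV21
}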

Source code

Android OpenGL handling of YUV data (I420, NV12, NV21)
