NDK OpenGL ES 3.0 開發(十六):相機預覽

該原創文章首發於微信公衆號:字節流動

OpenGL ES 相機預覽

在這裏插入圖片描述

相機開發是 OpenGL ES 的重要應用,利用 OpenGL 可以很方便地實現相機美顏、濾鏡、塑型以及一些動態特效,其性能顯著優於對應功能的 CPU 實現。

相機的預覽實現一般有 2 種方式,一種是基於 Android 原生 SurfaceTexture 的純 GPU 實現方式,另一種是通過相機的預覽回調接口獲取幀的 YUV 數據,利用 CPU 算法處理完成之後,傳入顯存,再利用 GPU 實現 YUV 轉 RGBA 進行渲染,即 CPU + GPU 的實現方式。

基於 Android 原生 SurfaceTexture 的純 GPU 實現方式,相機可以使用 SurfaceTexture 作爲預覽載體,SurfaceTexture 可來自於 GLSurfaceView、TextureView 或 SurfaceView 這些獨立擁有 Surface 的封裝類,也可以自定義實現。

作爲預覽載體的 SurfaceTexture 綁定的紋理需要是 OES 紋理,即 GLES11Ext.GL_TEXTURE_EXTERNAL_OES 紋理,來自於 GLES 的擴展 #extension GL_OES_EGL_image_external 中,使用 OES 紋理後,我們不需要在片段着色器中自己做 YUV to RGBA 的轉換,因爲 OES 紋理可以直接接收 YUV 數據或者直接輸出 YUV 數據。

類似於普通 2D 紋理的創建,OES 紋理創建的實現如下:

private int createOESTexture(){
    // Create one texture object bound to the external-OES target, the kind
    // of texture a SurfaceTexture camera preview renders into.
    final int target = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    final int textureId = textures[0];
    GLES20.glBindTexture(target, textureId);
    // Linear filtering for both minification and magnification.
    GLES20.glTexParameterf(target, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameterf(target, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    // Clamp coordinates so sampling never wraps at the frame edges.
    GLES20.glTexParameteri(target, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(target, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    return textureId;
}

使用 OES 紋理需要修改片段着色器,在着色器腳本的頭部增加擴展紋理的聲明 #extension GL_OES_EGL_image_external : require ,並且紋理採樣器不再使用 sampler2D ,需要換成 samplerExternalOES 作爲紋理採樣器。

#version 300 es
// GLSL ES 3.00 requires the _essl3 variant of the external-image extension;
// the original GL_OES_EGL_image_external only covers ESSL 1.00 shaders.
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
in vec2 v_texCoord;
uniform samplerExternalOES s_TexSampler;
// gl_FragColor was removed in GLSL ES 3.00; a user-declared output is mandatory.
out vec4 outColor;
void main() {
    outColor = texture(s_TexSampler, v_texCoord);
}

實際上,當使用 TextureView 時,也不需要自己去創建 OES 紋理,只需要綁定相機,配置好變換矩陣後即可實現相機預覽。具體例子可直接參考 Android 官方的 Samples https://github.com/android/camera-samples

相機預覽基於 Android 原生 API 的純 GPU 實現方式,操作簡單,代碼量很少,原生 API 已經做了很多封裝,可以利用片段着色器輕易實現美顏濾鏡等相機特效,缺點是擴展性差,例如要使用傳統的 CPU 算法做一些濾鏡或者美顏特效就很不方便,圖像數據需要多次在內存與顯存之間拷貝,會造成性能和功耗問題。

本文主要介紹將圖像數據取出,傳入 Native 層,然後對數據做一些處理(可選),最後再做渲染的方式,這種方式相對複雜一些。相機預覽數據的常見格式是 YUV420P 或者 YUV420SP(NV21) ,需要將圖像數據對應 YUV 3 個分量使用 3 個紋理傳入顯存,在片段着色器中將 YUV 數據轉爲 RGBA 。

相關原理可參考NDK OpenGL ES 3.0 開發(三):YUV 渲染 章節。

相機預覽數據獲取,以 Camera2 爲例,主要是通過 ImageReader 實現,該類封裝了 Surface :

// Forwards each preview frame's YUV_420_888 data to the registered frame callback.
private ImageReader.OnImageAvailableListener mOnPreviewImageAvailableListener = reader -> {
    // acquireLatestImage() may return null when no new frame is pending.
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return;
    }
    if (mCamera2FrameCallback != null) {
        mCamera2FrameCallback.onPreviewFrame(
                CameraUtil.YUV_420_888_data(image), image.getWidth(), image.getHeight());
    }
    // Close the Image so the ImageReader can recycle its buffer slot.
    image.close();
};

// maxImages = 2: one frame being processed while the next is being filled.
mPreviewImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 2);
// Deliver frame callbacks on the background handler, off the UI thread.
mPreviewImageReader.setOnImageAvailableListener(mOnPreviewImageAvailableListener, mBackgroundHandler);

CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(mPreviewImageReader.getSurface());

// Build the request from the configured builder and start the repeating preview.
// (The original text had a garbled "ession.setRepeatingRequest(mPreviewRequest, ...)"
// with no definition of mPreviewRequest.)
mPreviewRequest = builder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, null, mBackgroundHandler);

// Custom frame callback: receives preview data and hands it to C++ via JNI.
public void onPreviewFrame(byte[] data, int width, int height) {
    final String msg = "onPreviewFrame() called with: data = [" + data + "], width = ["
            + width + "], height = [" + height + "]";
    Log.d(TAG, msg);
    mByteFlowRender.setRenderFrame(IMAGE_FORMAT_I420, data, width, height);

    // A new frame arrived — schedule a redraw.
    mByteFlowRender.requestRender();
}

主要的 JNI :

/**
 * JNI bridge to the native "byteflow_render" library. Subclasses drive the
 * native render context through these declarations; the native side keeps its
 * state behind the opaque handle stored in {@link #mNativeContextHandle}.
 */
public abstract class ByteFlowRender {
    // Render backend selectors passed to native_CreateContext().
    public static final int GL_RENDER_TYPE = 0;
    public static final int CL_RENDER_TYPE = 1;

    // Pixel formats accepted by native_UpdateFrame() / native_LoadFilterData().
    public static final int IMAGE_FORMAT_RGBA = 0x01;
    public static final int IMAGE_FORMAT_NV21 = 0x02;
    public static final int IMAGE_FORMAT_NV12 = 0x03;
    public static final int IMAGE_FORMAT_I420 = 0x04;

    // Parameter key for native_SetParamsInt()/native_GetParamsInt().
    public static final int PARAM_TYPE_SET_SHADER_INDEX = 201;

    static {
        System.loadLibrary("byteflow_render");
    }

    // Opaque pointer to the native render context; written/read by native code.
    private long mNativeContextHandle;

    /** Creates the native context for the given backend (GL_RENDER_TYPE / CL_RENDER_TYPE). */
    protected native void native_CreateContext(int renderType);

    /** Destroys the native context created by native_CreateContext(). */
    protected native void native_DestroyContext();

    /** Initializes the native renderer; returns a native-defined status code. */
    protected native int native_Init(int initType);

    /** Tears down native renderer resources; returns a native-defined status code. */
    protected native int native_UnInit();

    /** Pushes one frame of image data (see IMAGE_FORMAT_*) to the native renderer. */
    protected native void native_UpdateFrame(int format, byte[] data, int width, int height);

    /** Uploads auxiliary filter image data (e.g. a LUT) for the given filter index. */
    protected native void native_LoadFilterData(int index, int format, int width, int height, byte[] bytes);

    /** Replaces the shader script used for the given shader index at runtime. */
    protected native void native_LoadShaderScript(int shaderIndex, String scriptStr);

    /** Sets the render transform: translation, scale, rotation degree and mirror flag. */
    protected native void native_SetTransformMatrix(float translateX, float translateY, float scaleX, float scaleY, int degree, int mirror);

    /** Sets an integer parameter (see PARAM_TYPE_*). */
    protected native void native_SetParamsInt(int paramType, int value);

    /** Reads back an integer parameter (see PARAM_TYPE_*). */
    protected native int native_GetParamsInt(int paramType);

    // GLSurfaceView.Renderer-style lifecycle hooks forwarded to native code.
    protected native void native_OnSurfaceCreated();

    protected native void native_OnSurfaceChanged(int width, int height);

    protected native void native_OnDrawFrame();
}

渲染 YUV 數據用到的着色器腳本,主要是將 3 個紋理對應的 YUV 分量,分別採樣後轉成 RGBA :

// Vertex shader: applies the MVP transform and passes the texture
// coordinate through to the fragment stage.
#version 100
attribute vec4 position;
attribute vec2 texcoord;
uniform mat4 MVP;
varying vec2 v_texcoord;
void main() {
    gl_Position = MVP * position;
    v_texcoord = texcoord;
}

// Fragment shader: samples the three single-channel YUV plane textures and
// converts to RGB with BT.601-style coefficients.
#version 100
precision highp float;
varying vec2 v_texcoord;
uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
void main() {
    float luma = texture2D(s_textureY, v_texcoord).r;
    // Chroma samples are stored offset by 0.5; re-center them around zero.
    float cb = texture2D(s_textureU, v_texcoord).r - 0.5;
    float cr = texture2D(s_textureV, v_texcoord).r - 0.5;
    gl_FragColor = vec4(luma + 1.403 * cr,
                        luma - 0.344 * cb - 0.714 * cr,
                        luma + 1.770 * cb,
                        1.0);
}

C++ 層的主要實現:

//編譯鏈接着色器
int GLByteFlowRender::CreateProgram(const char *pVertexShaderSource, const char *pFragShaderSource)
{
	m_Program = GLUtils::CreateProgram(pVertexShaderSource, pFragShaderSource, m_VertexShader,
									   m_FragShader);
	if (!m_Program)
	{
		GLUtils::CheckGLError("Create Program");
		LOGCATE("GLByteFlowRender::CreateProgram Could not create program.");
		return 0;
	}

	m_YTextureHandle = glGetUniformLocation(m_Program, "s_textureY");
	m_UTextureHandle = glGetUniformLocation(m_Program, "s_textureU");
	m_VTextureHandle = glGetUniformLocation(m_Program, "s_textureV");

	m_VertexCoorHandle = (GLuint) glGetAttribLocation(m_Program, "position");
	m_TextureCoorHandle = (GLuint) glGetAttribLocation(m_Program, "texcoord");
	m_MVPHandle = glGetUniformLocation(m_Program, "MVP");

	return m_Program;
}

//創建 YUV 分量對應的 3 個紋理
bool GLByteFlowRender::CreateTextures()
{
	LOGCATE("GLByteFlowRender::CreateTextures");
	GLsizei yWidth = static_cast<GLsizei>(m_RenderFrame.width);
	GLsizei yHeight = static_cast<GLsizei>(m_RenderFrame.height);

	glActiveTexture(GL_TEXTURE0);
	glGenTextures(1, &m_YTextureId);
	glBindTexture(GL_TEXTURE_2D, m_YTextureId);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yWidth, yHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
				 NULL);

	if (!m_YTextureId)
	{
		GLUtils::CheckGLError("Create Y texture");
		return false;
	}

	GLsizei uWidth = static_cast<GLsizei>(m_RenderFrame.width / 2);
	GLsizei uHeight = yHeight / 2;

	glActiveTexture(GL_TEXTURE1);
	glGenTextures(1, &m_UTextureId);
	glBindTexture(GL_TEXTURE_2D, m_UTextureId);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, uWidth, uHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
				 NULL);

	if (!m_UTextureId)
	{
		GLUtils::CheckGLError("Create U texture");
		return false;
	}

	GLsizei vWidth = static_cast<GLsizei>(m_RenderFrame.width / 2);
	GLsizei vHeight = (GLsizei) yHeight / 2;

	glActiveTexture(GL_TEXTURE2);
	glGenTextures(1, &m_VTextureId);
	glBindTexture(GL_TEXTURE_2D, m_VTextureId);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, vWidth, vHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
				 NULL);

	if (!m_VTextureId)
	{
		GLUtils::CheckGLError("Create V texture");
		return false;
	}

	return true;
}

// Uploads the current frame's Y/U/V planes into the three plane textures,
// creating the textures on first use. Called once per frame before drawing.
// Returns false when no frame data is available or texture creation failed.
bool GLByteFlowRender::UpdateTextures()
{
	LOGCATE("GLByteFlowRender::UpdateTextures");
	// No frame has been set yet — nothing to upload.
	if (m_RenderFrame.ppPlane[0] == NULL)
	{
		return false;
	}

	// NOTE(review): uses && — textures are only (re)created when ALL three ids
	// are 0, i.e. creation is assumed all-or-nothing; a partial failure inside
	// CreateTextures() would not be retried here.
	if (!m_YTextureId && !m_UTextureId && !m_VTextureId && !CreateTextures())
	{
		return false;
	}

	// Y plane: full resolution, one byte per pixel (GL_LUMINANCE) on unit 0.
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_YTextureId);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_RenderFrame.width,
				 (GLsizei) m_RenderFrame.height, 0,
				 GL_LUMINANCE, GL_UNSIGNED_BYTE, m_RenderFrame.ppPlane[0]);

	// U plane: half resolution in both dimensions (YUV420) on unit 1.
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_UTextureId);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_RenderFrame.width >> 1,
				 (GLsizei) m_RenderFrame.height >> 1, 0,
				 GL_LUMINANCE, GL_UNSIGNED_BYTE, m_RenderFrame.ppPlane[1]);

	// V plane: half resolution in both dimensions on unit 2.
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, m_VTextureId);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_RenderFrame.width >> 1,
				 (GLsizei) m_RenderFrame.height >> 1, 0,
				 GL_LUMINANCE, GL_UNSIGNED_BYTE, m_RenderFrame.ppPlane[2]);

	return true;
}

// Binds the YUV textures to the shader program and sets up the vertex and
// texture-coordinate attribute arrays. If the fragment shader script was
// swapped at runtime, rebuilds the program first. Returns the program id,
// or 0 when no valid program exists.
GLuint GLByteFlowRender::UseProgram()
{
	LOGCATE("GLByteFlowRender::UseProgram");
	// Guards m_pFragShaderBuf / m_IsShaderChanged — presumably written from a
	// non-GL thread (shader hot-swap); TODO confirm against the setter side.
	ByteFlowLock lock(&m_ShaderBufLock);
	if (m_IsShaderChanged)
	{
		// Drop the old program and rebuild with the new fragment shader.
		GLUtils::DeleteProgram(m_Program);
		CreateProgram(kVertexShader, m_pFragShaderBuf);
		m_IsShaderChanged = false;
		// Force the attribute/uniform re-binding block below to run.
		m_IsProgramChanged = true;
	}


	if (!m_Program)
	{
		LOGCATE("GLByteFlowRender::UseProgram Could not use program.");
		return 0;
	}

	if (m_IsProgramChanged)
	{
		glUseProgram(m_Program);
		GLUtils::CheckGLError("GLByteFlowRender::UseProgram");

		// 2 floats per vertex, tightly packed (stride = 2 * sizeof(GLfloat)).
		glVertexAttribPointer(m_VertexCoorHandle, 2, GL_FLOAT, GL_FALSE, 2 * 4, VERTICES_COORS);
		glEnableVertexAttribArray(m_VertexCoorHandle);

		// Point the Y/U/V samplers at texture units 0/1/2 (see UpdateTextures()).
		glUniform1i(m_YTextureHandle, 0);
		glUniform1i(m_UTextureHandle, 1);
		glUniform1i(m_VTextureHandle, 2);

		glVertexAttribPointer(m_TextureCoorHandle, 2, GL_FLOAT, GL_FALSE, 2 * 4, TEXTURE_COORS);
		glEnableVertexAttribArray(m_TextureCoorHandle);

		m_IsProgramChanged = false;
	}
	return m_Program;
}

// Renders the current preview frame: uploads the latest YUV planes, binds the
// program, and draws a full-screen quad (4-vertex triangle strip).
void GLByteFlowRender::OnDrawFrame()
{
	LOGCATE("GLByteFlowRender::OnDrawFrame");
	glViewport(0, 0, m_ViewportWidth, m_ViewportHeight);
	// Fix: set the clear color BEFORE clearing. The original called glClear()
	// first, so the very first frame was cleared with whatever clear color was
	// previously in effect (GL default), not the intended opaque black.
	glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glDisable(GL_CULL_FACE);

	// Skip the draw when there is no frame data or no usable program yet.
	if (!UpdateTextures() || !UseProgram())
	{
		LOGCATE("GLByteFlowRender::OnDrawFrame skip frame");
		return;
	}
	glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

後面文章會基於該預覽實現添加一些濾鏡效果。

實現代碼路徑:
OpenGLCamera2

聯繫與交流

我的公衆號
我的公衆號
我的微信
我的微信

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章