The original article was first published on the WeChat official account Byteflow.

YUV rendering principle

The previous article introduced basic YUV image processing and the commonly used YUV formats. This article takes NV21/NV12 rendering as an example.

As mentioned above, a YUV image cannot be displayed directly and must first be converted to RGB. YUV-to-RGB conversion, however, is a time-consuming per-pixel operation, and performing it on the CPU is inefficient. This is exactly the kind of work suited to the GPU's massive parallelism, so we implement the YUV-to-RGB conversion on the GPU.

Conversion formula between YUV and RGB:
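The exact constants depend on the color standard; this article uses a common full-range BT.601 approximation. Writing U and V for the chroma values shifted from [0, 1] down to [-0.5, 0.5], the conversion implemented by the shader below is:

R = Y + 1.403 * V
G = Y - 0.344 * U - 0.714 * V
B = Y + 1.770 * U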

Note that OpenGL ES built-in matrices are constructed column by column (column-major). For example, a YUV-to-RGB conversion matrix:

mat3 convertMat = mat3(1.0,    1.0,    1.0,   // the first column
                       0.0,   -0.338,  1.732, // the second column
                       1.371, -0.698,  0.0);  // the third column

OpenGL ES implements YUV rendering with GL_LUMINANCE and GL_LUMINANCE_ALPHA textures: a GL_LUMINANCE texture holds the NV21 Y plane data, and a GL_LUMINANCE_ALPHA texture holds the UV plane data. Because NV21 interleaves the chroma plane as V,U byte pairs, each GL_LUMINANCE_ALPHA texel maps exactly to one VU pair.

Commonly used OpenGL ES texture format types

When a GL_LUMINANCE texture is sampled in a shader, each texel reads as (L, L, L, 1), where L is the luminance value. When a GL_LUMINANCE_ALPHA texture is sampled, each texel reads as (L, L, L, A), where A is the alpha value.

YUV rendering implementation

YUV rendering steps:

  • Generate 2 textures, then compile and link the shader program;
  • Determine the texture coordinates and corresponding vertex coordinates;
  • Load the NV21 plane data into the 2 textures, and load the texture coordinate and vertex coordinate data into the shader program;
  • Draw.

Fragment shader script

#version 300 es
precision mediump float;
in vec2 v_texCoord;
layout(location = 0) out vec4 outColor;
uniform sampler2D y_texture;
uniform sampler2D uv_texture;
void main()
{
    vec3 yuv;
    yuv.x = texture(y_texture, v_texCoord).r;
    yuv.y = texture(uv_texture, v_texCoord).a - 0.5;
    yuv.z = texture(uv_texture, v_texCoord).r - 0.5;
    vec3 rgb = mat3(1.0,    1.0,    1.0,
                    0.0,   -0.344,  1.770,
                    1.403, -0.714,  0.0) * yuv;
    outColor = vec4(rgb, 1.0);
}

y_texture and uv_texture are the samplers for the NV21 Y plane and UV plane textures respectively. Sampling the two textures yields a (Y, U, V) vector, which is then left-multiplied by the conversion matrix to produce an (R, G, B) vector.
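NV12 differs from NV21 only in that the chroma plane interleaves U,V instead of V,U, so rendering NV12 with the same pipeline just swaps the two chroma swizzles; the rest of the shader is unchanged. A sketch of the two affected lines:

yuv.y = texture(uv_texture, v_texCoord).r - 0.5; // U is the first byte (L channel)
yuv.z = texture(uv_texture, v_texCoord).a - 0.5; // V is the second byte (A channel)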

Load NV21 data at the Java layer

    private void LoadNV21Image() {
        InputStream is = null;
        try {
            is = getAssets().open("YUV_Image_840x1074.NV21");
        } catch (IOException e) {
            e.printStackTrace();
        }

        int length = 0;
        try {
            length = is.available();
            byte[] buffer = new byte[length];
            is.read(buffer);
            mGLSurfaceView.getNativeRender().native_SetImageData(IMAGE_FORMAT_NV21, 840, 1074, buffer);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                is.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
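For reference, an NV21 frame stores 12 bits per pixel, so the 840x1074 image above occupies 840 * 1074 * 3 / 2 bytes. A quick sanity check one might add on the native side (a hypothetical helper, not part of the original code):

// NV21 layout: Y plane (width * height bytes) followed by the interleaved
// VU plane (width * height / 2 bytes).
static bool CheckNV21Size(int width, int height, size_t bufferSize)
{
    return bufferSize == (size_t) width * height * 3 / 2;
}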

At the Native layer, the data is wrapped in a NativeImage

void MyGLRenderContext::SetImageData(int format, int width, int height, uint8_t *pData)
{
	LOGCATE("MyGLRenderContext::SetImageData format=%d, width=%d, height=%d, pData=%p", format, width, height, pData);
	NativeImage nativeImage;
	nativeImage.format = format;
	nativeImage.width = width;
	nativeImage.height = height;
	nativeImage.ppPlane[0] = pData;

	switch (format)
	{
		case IMAGE_FORMAT_NV12:
		case IMAGE_FORMAT_NV21:
			nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height;
			break;
		case IMAGE_FORMAT_I420:
			nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height;
			nativeImage.ppPlane[2] = nativeImage.ppPlane[1] + width * height / 4;
			break;
		default:
			break;
	}

	if (m_Sample)
	{
		m_Sample->LoadImage(&nativeImage); // copy to the sample
	}
}

void NV21TextureMapSample::LoadImage(NativeImage *pImage)
{
	LOGCATE("NV21TextureMapSample::LoadImage pImage = %p", pImage->ppPlane[0]);
	if (pImage)
	{
		m_RenderImage.width = pImage->width;
		m_RenderImage.height = pImage->height;
		m_RenderImage.format = pImage->format;
		NativeImageUtil::CopyNativeImage(pImage, &m_RenderImage);
	}
}

Load the two NV21 planes into the two textures; ppPlane[0] points to the Y plane and ppPlane[1] points to the UV plane. Note the format and dimensions of the two textures: chroma is subsampled 2x2 and each GL_LUMINANCE_ALPHA texel holds one VU pair, so the UV texture is half the width and half the height of the Y texture.

//upload Y plane data
glBindTexture(GL_TEXTURE_2D, m_yTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, m_RenderImage.width, m_RenderImage.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);

//update UV plane data
glBindTexture(GL_TEXTURE_2D, m_uvTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, m_RenderImage.width >> 1, m_RenderImage.height >> 1, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[1]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
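One pitfall worth noting: glTexImage2D unpacks rows with 4-byte alignment by default. The widths here (840 bytes per Y row, 420 VU pairs = 840 bytes per UV row) happen to satisfy it, but for arbitrary image widths it is safer to relax the alignment before uploading:

// Tightly packed plane rows may not be 4-byte aligned; read them byte by
// byte so odd widths do not corrupt the upload.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);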

Simple code implementation

// Compile the link shader program to generate 2 textures
void NV21TextureMapSample::Init()
{
	char vShaderStr[] =
			"#version 300 es \n"
			"layout(location = 0) in vec4 a_position; \n"
			"layout(location = 1) in vec2 a_texCoord; \n"
			"out vec2 v_texCoord; \n"
			"void main() \n"
			"{ \n"
			" gl_Position = a_position; \n"
			" v_texCoord = a_texCoord; \n"
			"} \n";

	char fShaderStr[] =
			"#version 300 es                                     \n"
			"precision mediump float;                            \n"
			"in vec2 v_texCoord;                                 \n"
			"layout(location = 0) out vec4 outColor;             \n"
			"uniform sampler2D y_texture;                        \n"
			"uniform sampler2D uv_texture;                       \n"
			"void main()                                         \n"
			"{                                                   \n"
			"    vec3 yuv;                                       \n"
			"    yuv.x = texture(y_texture, v_texCoord).r;       \n"
			"    yuv.y = texture(uv_texture, v_texCoord).a - 0.5;\n"
			"    yuv.z = texture(uv_texture, v_texCoord).r - 0.5;\n"
			"    highp vec3 rgb = mat3(1.0,    1.0,    1.0,      \n"
			"                          0.0,   -0.344,  1.770,    \n"
			"                          1.403, -0.714,  0.0) * yuv;\n"
			"    outColor = vec4(rgb, 1.0);                      \n"
			"}                                                   \n";

	// Load the shaders and get a linked program object
	m_ProgramObj= GLUtils::CreateProgram(vShaderStr, fShaderStr, m_VertexShader, m_FragmentShader);

	// Get the sampler location
	m_ySamplerLoc = glGetUniformLocation (m_ProgramObj, "y_texture" );
	m_uvSamplerLoc = glGetUniformLocation(m_ProgramObj, "uv_texture");

	//create textures
	GLuint textureIds[2] = {0};
	glGenTextures(2, textureIds);

	m_yTextureId = textureIds[0];
	m_uvTextureId = textureIds[1];
}

// Load the NV21 image data into the textures, load the texture and vertex coordinates into the shader program, then draw
void NV21TextureMapSample::Draw(int screenW, int screenH)
{
	LOGCATE("NV21TextureMapSample::Draw()");

	if(m_ProgramObj == GL_NONE || m_yTextureId == GL_NONE || m_uvTextureId == GL_NONE) return;

	//upload Y plane data
	glBindTexture(GL_TEXTURE_2D, m_yTextureId);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, m_RenderImage.width, m_RenderImage.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glBindTexture(GL_TEXTURE_2D, GL_NONE);

	//update UV plane data
	glBindTexture(GL_TEXTURE_2D, m_uvTextureId);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, m_RenderImage.width >> 1, m_RenderImage.height >> 1, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[1]);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glBindTexture(GL_TEXTURE_2D, GL_NONE);

	//glViewport(0, 0, m_RenderImage.width, m_RenderImage.height);

	GLfloat verticesCoords[] = {
			-1.0f,  0.78f, 0.0f,  // Position 0
			-1.0f, -0.78f, 0.0f,  // Position 1
			 1.0f, -0.78f, 0.0f,  // Position 2
			 1.0f,  0.78f, 0.0f,  // Position 3
	};

	GLfloat textureCoords[] = {
			0.0f, 0.0f,  // TexCoord 0
			0.0f, 1.0f,  // TexCoord 1
			1.0f, 1.0f,  // TexCoord 2
			1.0f, 0.0f   // TexCoord 3
	};

	GLushort indices[] = { 0, 1, 2, 0, 2, 3 };

	// Use the program object
	glUseProgram (m_ProgramObj);

	// Load the vertex position
	glVertexAttribPointer(0, 3, GL_FLOAT,
						  GL_FALSE, 3 * sizeof(GLfloat), verticesCoords);
	// Load the texture coordinate
	glVertexAttribPointer(1, 2, GL_FLOAT,
						  GL_FALSE, 2 * sizeof(GLfloat), textureCoords);

	glEnableVertexAttribArray (0);
	glEnableVertexAttribArray (1);

	// Bind the Y plane map
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_yTextureId);

	// Set the Y plane sampler to texture unit 0
	glUniform1i(m_ySamplerLoc, 0);

	// Bind the UV plane map
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_uvTextureId);

	// Set the UV plane sampler to texture unit 1
	glUniform1i(m_uvSamplerLoc, 1);

	glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
}
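For completeness, a sketch of how Init() and Draw() might be driven from the GLSurfaceView render thread (the callback names and m_ScreenW/m_ScreenH members here are illustrative, not from the article):

// Hypothetical driver: run on the GL thread once the EGL context exists,
// then once per frame.
void MyGLRenderContext::OnSurfaceCreated()
{
	glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
	if (m_Sample) m_Sample->Init();   // compile shaders, create the 2 textures
}

void MyGLRenderContext::OnDrawFrame()
{
	glClear(GL_COLOR_BUFFER_BIT);
	if (m_Sample) m_Sample->Draw(m_ScreenW, m_ScreenH); // upload planes and draw
}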

Rendering result

Implementation code path: github.com/githubhaoha…

Contact and exchange