刚学习了OpenGL的纹理相关知识,终于可以接着写Android音视频系列了。
本篇博客会介绍使用OpenGL ES 3.0相关知识预览Camera,并且提供Camera和Camera2两个版本实现。
整体流程
- 在 GLSurfaceView.Renderer 中创建一个纹理,再使用该纹理创建一个 SurfaceTexture。
- 将该SurfaceTexture 作为相机预览输出
- 使用Camera时直接将该SurfaceTexture传给相机。
- 使用Camera2时使用该SurfaceTexture创建一个 Surface 传给相机。
- 使用 GLSurfaceView.Renderer 将该纹理渲染到 GLSurfaceView 窗口上。
- 使用 SurfaceTexture 的 setOnFrameAvailableListener 方法给 SurfaceTexture 添加一个帧数据可用的监听器,在监听器中调用 GLSurfaceView 的 requestRender 方法渲染该帧数据,这样相机每次输出一帧数据就可以渲染一次,就可以在GLSurfaceView窗口中看到相机的预览数据了。
着色器
顶点着色器
1 2 3 4 5 6 7 8 9 10 11 12
| #version 300 es layout (location = 0) in vec4 vPosition; layout (location = 1) in vec4 aTextureCoord;
uniform mat4 uTextureMatrix; out vec2 yuvTexCoords; void main() { gl_Position = vPosition; gl_PointSize = 10.0; yuvTexCoords = (uTextureMatrix * aTextureCoord).xy; }
|
片段着色器
1 2 3 4 5 6 7 8 9 10
| #version 300 es
#extension GL_OES_EGL_image_external_essl3 : require precision mediump float; uniform samplerExternalOES yuvTexSampler; in vec2 yuvTexCoords; out vec4 vFragColor; void main() { vFragColor = texture(yuvTexSampler, yuvTexCoords); }
|
纹理的类型需要使用 samplerExternalOES ,而不是之前渲染图片的 sampler2D。
我们知道Android相机输出的原始数据一般都为YUV数据,而在OpenGL中使用的绝大部分纹理ID都是RGBA的格式,所以原始数据都是无法直接用OpenGL ES来渲染的。所以我们添加了一个扩展#extension GL_OES_EGL_image_external_essl3 : require
,其中定义了一个纹理的扩展类型GL_TEXTURE_EXTERNAL_OES
。后面绑定纹理时需要绑定到GL_TEXTURE_EXTERNAL_OES
上,而不是类型GL_TEXTURE_2D上。
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
|
public int loadTexture() { int[] tex = new int[1]; GLES30.glGenTextures(1, tex, 0); GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]); GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST); GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR); GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE); GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE); GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); return tex[0]; }
|
Camera
初始化
1 2 3 4 5 6 7 8 9 10 11 12
| public CameraSurfaceRenderer(GLSurfaceView glSurfaceView) { this.mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT; this.mGLSurfaceView = glSurfaceView; mCamera = Camera.open(mCameraId); setCameraDisplayOrientation(mCameraId, mCamera); ...... }
|
onSurfaceCreated
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
| GLES30.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
final int vertexShaderId = RenderUtil.compileShader(GLES30.GL_VERTEX_SHADER,ResReadUtils.readResource(R.raw.vertex_camera_shader)); final int fragmentShaderId = RenderUtil.compileShader(GLES30.GL_FRAGMENT_SHADER,ResReadUtils.readResource(R.raw.fragment_camera_shader));
mProgram = RenderUtil.linkProgram(vertexShaderId, fragmentShaderId);
uTextureMatrixLocation = GLES30.glGetUniformLocation(mProgram, "uTextureMatrix");
uTextureSamplerLocation = GLES30.glGetUniformLocation(mProgram, "yuvTexSampler");
textureId = loadTexture();
loadSurfaceTexture(textureId);
|
其实前面部分和加载图片没有什么区别,最后两行,对应上面流程中的1、2步。创建纹理,绑定外部纹理,然后根据纹理ID创建SurfaceTexture作为相机预览输出。
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21
| public boolean loadSurfaceTexture(int textureId) { mSurfaceTexture = new SurfaceTexture(textureId); mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() { @Override public void onFrameAvailable(SurfaceTexture surfaceTexture) { mGLSurfaceView.requestRender(); } }); try { mCamera.setPreviewTexture(mSurfaceTexture); } catch (IOException e) { e.printStackTrace(); return false; } mCamera.startPreview(); return true; }
|
绘制
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
| @Override public void onDrawFrame(GL10 gl) { GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT);
GLES30.glUseProgram(mProgram);
mSurfaceTexture.updateTexImage(); mSurfaceTexture.getTransformMatrix(transformMatrix);
GLES30.glActiveTexture(GLES30.GL_TEXTURE0); GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId); GLES30.glUniform1i(uTextureSamplerLocation, 0);
GLES30.glUniformMatrix4fv(uTextureMatrixLocation, 1, false, transformMatrix, 0);
GLES30.glEnableVertexAttribArray(0); GLES30.glVertexAttribPointer(0, 3, GLES30.GL_FLOAT, false, 0, vertexBuffer);
GLES30.glEnableVertexAttribArray(1); GLES30.glVertexAttribPointer(1, 2, GLES30.GL_FLOAT, false, 0, mTexVertexBuffer);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, VERTEX_INDEX.length, GLES20.GL_UNSIGNED_SHORT, mVertexIndexBuffer); }
|
Activity显示
1 2 3 4 5 6 7
| private void setupViews() { mGLSurfaceView = new GLSurfaceView(this); mGLSurfaceView.setEGLContextClientVersion(3); mGLSurfaceView.setRenderer(new CameraSurfaceRenderer(mGLSurfaceView)); setContentView(mGLSurfaceView); }
|
Camera2
使用Camera2在OpenGL方面其实是一样的,并没有什么改动。所以只需要看一下Camera2的调用就好。
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
| @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { Log.e("Renderer", "onSurfaceCreated"); textureId = loadTexture(); mSurfaceTexture = new SurfaceTexture(textureId);
GLES30.glClearColor(0.5f, 0.5f, 0.5f, 0.5f); final int vertexShaderId = RenderUtil.compileShader(GLES30.GL_VERTEX_SHADER, ResReadUtils.readResource(R.raw.vertex_camera_shader)); final int fragmentShaderId = RenderUtil.compileShader(GLES30.GL_FRAGMENT_SHADER, ResReadUtils.readResource(R.raw.fragment_camera_shader)); mProgram = RenderUtil.linkProgram(vertexShaderId, fragmentShaderId);
uTextureMatrixLocation = GLES30.glGetUniformLocation(mProgram, "uTextureMatrix"); uTextureSamplerLocation = GLES30.glGetUniformLocation(mProgram, "yuvTexSampler"); }
|
1 2 3 4
| public SurfaceTexture getSurfaceTexture() { return mSurfaceTexture; }
|
初始化
1 2 3 4 5 6
| private void initCamera() { cameraManager = (CameraManager) MyApplication.getApplication().getSystemService(Context.CAMERA_SERVICE); outputSizes = getCameraOutputSizes(cameraId, SurfaceTexture.class); photoSize = outputSizes.get(1); }
|
打开摄像头
1 2 3 4 5 6 7 8 9
| @SuppressLint("MissingPermission") private void openCamera() { try { cameraManager.openCamera(String.valueOf(cameraId), cameraStateCallback, null); } catch (CameraAccessException e) { e.printStackTrace(); Log.e(TAG, "openCamera fail"); } }
|
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39
| CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(CameraDevice camera) { surfaceTexture = camera2SurfaceRenderer.getSurfaceTexture(); if (surfaceTexture == null) { return; } surfaceTexture.setDefaultBufferSize(photoSize.getWidth(), photoSize.getHeight()); surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() { @Override public void onFrameAvailable(final SurfaceTexture surfaceTexture) { mGLSurfaceView.requestRender(); } }); surface = new Surface(surfaceTexture);
try { cameraDevice = camera; previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); previewRequestBuilder.addTarget(surface); previewRequest = previewRequestBuilder.build();
cameraDevice.createCaptureSession(Arrays.asList(surface), sessionsStateCallback, null); } catch (CameraAccessException e) { e.printStackTrace(); } }
@Override public void onDisconnected(CameraDevice camera) { }
@Override public void onError(CameraDevice camera, int error) { Log.e(TAG, "Open onError"); } };
|
最后看我路飞
源码地址