EGLHelper

This article walks through the EGLHelper class and the key operations it performs in an Android application: initializing EGL, creating an OpenGL surface, and swapping buffers, along with the technical details needed for efficient graphics rendering.

```java
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGL11;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import javax.microedition.khronos.opengles.GL;

import android.view.SurfaceHolder;

/**
 * Copyright (C) 2008 Google Inc.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 * 
 * http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 * 
 * EGLHelper class extracted from GLView
 */

public class EGLHelper
{
        public EGLHelper()
        {

        }

        /**
         * Initialize EGL for a given configuration spec.
         * 
         * @param configSpec
         */
        public void start(int[] configSpec)
        {
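                // Illustrative note (not in the original source): callers typically pass an
                // attribute list terminated by EGL_NONE, for example an RGB565 config with a
                // 16-bit depth buffer:
                //   int[] configSpec = { EGL10.EGL_RED_SIZE, 5, EGL10.EGL_GREEN_SIZE, 6,
                //                        EGL10.EGL_BLUE_SIZE, 5, EGL10.EGL_DEPTH_SIZE, 16,
                //                        EGL10.EGL_NONE };
                // eglChooseConfig below requires the EGL_NONE terminator.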
                /*
                 * Get an EGL instance
                 */
                mEgl = (EGL10) EGLContext.getEGL();

                /*
                 * Get to the default display.
                 */
                mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);

                /*
                 * We can now initialize EGL for that display
                 */
                int[] version = new int[2];
                mEgl.eglInitialize(mEglDisplay, version);

                EGLConfig[] configs = new EGLConfig[1];
                int[] num_config = new int[1];
                mEgl.eglChooseConfig(mEglDisplay, configSpec, configs, 1, num_config);
                mEglConfig = configs[0];

                /*
                 * Create an OpenGL ES context. This must be done only once, an OpenGL
                 * context is a somewhat heavy object.
                 */
                mEglContext = mEgl.eglCreateContext(mEglDisplay, mEglConfig,
                                EGL10.EGL_NO_CONTEXT, null);

                mEglSurface = null;
        }

        /*
         * Create and return an OpenGL surface
         */
        public GL createSurface(SurfaceHolder holder)
        {
                /*
                 * The window size has changed, so we need to create a new surface.
                 */
                if (mEglSurface != null)
                {

                        /*
                         * Unbind and destroy the old EGL surface, if there is one.
                         */
                        mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE,
                                        EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
                        mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
                }

                /*
                 * Create an EGL surface we can render into.
                 */
                mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, mEglConfig,
                                holder, null);

                /*
                 * Before we can issue GL commands, we need to make sure the context is
                 * current and bound to a surface.
                 */
                mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext);
                if (mEgl.eglGetError() == EGL11.EGL_CONTEXT_LOST)
                        AngleMainEngine.mDirty = true;

                GL gl = mEglContext.getGL();
                return gl;
        }

        /**
         * Display the current render surface.
         * 
         * @return false if the context has been lost.
         */
        public boolean swap()
        {
                mEgl.eglSwapBuffers(mEglDisplay, mEglSurface);

                /*
                 * Always check for EGL_CONTEXT_LOST, which means the context and all
                 * associated data were lost (For instance because the device went to
                 * sleep). We need to sleep until we get a new surface.
                 */
                return mEgl.eglGetError() != EGL11.EGL_CONTEXT_LOST;
        }

        public void finish()
        {
                boolean success = true;
                if (mEglSurface != null)
                {
                        success &= mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE,
                                        EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
                        success &= mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
                        mEglSurface = null;
                }
                if (mEglContext != null)
                {
                        success &= mEgl.eglDestroyContext(mEglDisplay, mEglContext);
                        mEglContext = null;
                }
                if (mEglDisplay != null)
                {
                        success &= mEgl.eglTerminate(mEglDisplay);
                        mEglDisplay = null;
                }
                AngleMainEngine.mDirty = !success;
        }

        EGL10 mEgl;
        EGLDisplay mEglDisplay;
        EGLSurface mEglSurface;
        EGLConfig mEglConfig;
        EGLContext mEglContext;
}
```
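
As a rough usage sketch (not part of the original post): the helper is meant to be driven from a dedicated render thread that owns a `SurfaceView`'s `SurfaceHolder`. The `RenderLoop` class below and its context-loss recovery logic are illustrative assumptions; only `EGLHelper` itself comes from the code above, and `AngleMainEngine` remains an external class of the Angle engine.

```java
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.opengles.GL10;
import android.view.SurfaceHolder;

// Minimal, hypothetical render loop built around EGLHelper.
public class RenderLoop implements Runnable
{
        private final SurfaceHolder mHolder;
        private final EGLHelper mEglHelper = new EGLHelper();
        private volatile boolean mRunning = true;

        public RenderLoop(SurfaceHolder holder)
        {
                mHolder = holder;
        }

        @Override
        public void run()
        {
                // RGB565 plus a 16-bit depth buffer; the array must end with EGL_NONE.
                int[] configSpec = { EGL10.EGL_RED_SIZE, 5, EGL10.EGL_GREEN_SIZE, 6,
                                EGL10.EGL_BLUE_SIZE, 5, EGL10.EGL_DEPTH_SIZE, 16, EGL10.EGL_NONE };

                mEglHelper.start(configSpec);
                GL10 gl = (GL10) mEglHelper.createSurface(mHolder);

                while (mRunning)
                {
                        // ... issue GL10 draw calls against gl here ...

                        // swap() returns false when the EGL context was lost (for example
                        // after the device slept); surface and context must then be rebuilt.
                        if (!mEglHelper.swap())
                        {
                                mEglHelper.finish();
                                mEglHelper.start(configSpec);
                                gl = (GL10) mEglHelper.createSurface(mHolder);
                        }
                }
                mEglHelper.finish();
        }
}
```

This is essentially the same pattern the framework's GLSurfaceView follows with its own internal EglHelper, which this class was extracted from.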

Contact: weinyzhou86@gmail.com

QQ: 514540005

All rights reserved; please do not repost.

Originally published at: http://blog.youkuaiyun.com/weinyzhou/article/details/8242910


### Integrating the Camera2 API with OpenGL

For devices running Android 9 or later, camera HAL3 is strongly recommended[^1]. To integrate the Camera2 API with OpenGL, one approach is the following:

#### Create a texture view and initialize the OpenGL environment

First, declare a `TextureView` in the layout file to display the camera preview image.

```xml
<TextureView
    android:id="@+id/texture"
    android:layout_width="match_parent"
    android:layout_height="match_parent"/>
```

Then set up the TextureView and configure the OpenGL ES context:

```java
public class MainActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener {
    private TextureView textureView;
    private EglHelper eglHelper;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        textureView = findViewById(R.id.texture);
        textureView.setSurfaceTextureListener(this);

        // Initialize the EGL context
        eglHelper = new EglHelper();
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        openCamera(width, height);
    }
}
```

#### Open the camera and obtain frame data

Open the camera with the given ID by calling `CameraManager.openCamera()`, and register callback listeners to receive the data stream for each frame.

```java
private void openCamera(int width, int height) {
    final Activity activity = this;
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        String cameraId = manager.getCameraIdList()[0];
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Size[] outputSizes = map.getOutputSizes(ImageFormat.YUV_420_888);

        ImageReader reader = ImageReader.newInstance(
                outputSizes[outputSizes.length - 1].getWidth(),
                outputSizes[outputSizes.length - 1].getHeight(),
                ImageFormat.YUV_420_888, /*maxImages*/ 2);

        List<Surface> outputSurfaces = new ArrayList<>(2);
        Surface previewSurface = new Surface(textureView.getSurfaceTexture());
        outputSurfaces.add(previewSurface);
        outputSurfaces.add(reader.getSurface());

        // cameraDevice is assumed to have been obtained earlier from manager.openCamera()
        CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureBuilder.addTarget(previewSurface);
        captureBuilder.addTarget(reader.getSurface());

        cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                updatePreview(session);
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession session) {}
        }, null);

        reader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            ...
        }, null);
    } catch (Exception ex) { ... }
}

// Updates the preview session
private void updatePreview(final CameraCaptureSession session) {
    if (null == cameraDevice || !textureView.isAvailable()) {
        return;
    }
    try {
        CaptureRequest request = createCaptureRequest(session);
        HandlerThread thread = new HandlerThread("CameraPreview");
        thread.start();
        Handler backgroundHandler = new Handler(thread.getLooper());
        session.setRepeatingRequest(request, bgCallback, backgroundHandler);
    } catch (...) { ... }
}
```

The snippets above show how to use the Camera2 API to capture video frames and hand them to the `TextureView` for display.

#### Convert YUV to RGB and render to the screen with OpenGL

When a YUV image is received, it must be converted to RGB before the GPU can process it. This step is usually done on the CPU, and the result is then uploaded to video memory for the shader program to read.

```c++
extern "C" JNIEXPORT jbyteArray JNICALL
Java_com_example_openglcamera_MainActivity_convertYuvToRgb(JNIEnv* env, jobject thiz, jbyteArray yData, jint width, jint height) {
    unsigned char *y, *u, *v, *rgbBuffer;
    jint size = width * height;

    rgbBuffer = (unsigned char *) malloc(size * 3 * sizeof(unsigned char));
    y = jniByteArrayToUnsignedCharArray(env, yData, size);
    u = y + size;
    v = u + (size >> 2);

    for (jint i = 0; i < height; i++) {
        for (jint j = 0; j < width; j += 2) {
            jint pos = i * width + j;
            jint uvPos = ((i >> 1) * (width >> 1) + (j >> 1)) * 2;

            jint r, g, b;
            jint yValue = y[pos] & 0xff;
            jint uValue = u[uvPos] - 128;
            jint vValue = v[uvPos] - 128;

            r = yValue + ((45 * vValue) / 32);
            g = yValue - (((88 * uValue) / 32) + ((183 * vValue) / 64));
            b = yValue + ((112 * uValue) / 32);

            rgbBuffer[(pos) * 3]     = CLAMP(b, 0, 255);
            rgbBuffer[(pos) * 3 + 1] = CLAMP(g, 0, 255);
            rgbBuffer[(pos) * 3 + 2] = CLAMP(r, 0, 255);
            ...
        }
    }
    return byteArrayFromUnsignedChar(env, rgbBuffer, width * height * 3);
}
```

Finally, map the processed RGB data onto a quad as a texture and draw it to see the final result.
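
The section stops at "map the RGB data onto a quad as a texture". As a rough sketch of that last step (not from the original article; the method name `uploadRgbTexture` and the choice of GLES 2.0 here are my own assumptions), the converted buffer can be uploaded as a 2D texture that the quad's fragment shader samples:

```java
import java.nio.ByteBuffer;
import android.opengl.GLES20;

public final class RgbTextureUploader {
    // Sketch: upload a width*height, tightly packed 3-byte RGB buffer as a GL texture.
    // Assumes a GLES 2.0 context is current on the calling thread.
    public static int uploadRgbTexture(byte[] rgb, int width, int height) {
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex[0]);

        // Linear filtering and clamp-to-edge are typical choices for video frames.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        // 3-byte pixels are not 4-byte aligned, so the unpack alignment must be 1.
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, width, height, 0,
                GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(rgb));

        return tex[0];
    }
}
```

The quad is then drawn with this texture bound, and its fragment shader simply samples the texture at the interpolated UV coordinates.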