Code Fragment-提前创建与懒创建。

本文介绍了Java中提前加载和懒加载的概念及其应用实例。通过具体的代码示例,展示了如何实现这两种不同的加载策略,并简单提及了单例模式下懒汉式与饿汉式的区别。

提前加载,可以放在static{}中,当类被加载的时候创建,如:

// Eager creation: this static initializer runs exactly once, when the JVM
// loads the enclosing class — before any instance is constructed.
// sURLMatcher and MMS_SCRAP_SPACE are class members declared elsewhere in
// the original file (not shown in this fragment).
static {
    sURLMatcher.addURI("mms_temp_file", "scrapSpace", MMS_SCRAP_SPACE);
}

懒加载:对象在第一次真正被使用时才创建,如:

/**
 * Lazily builds the ContentValues used to mark a message as read/seen.
 * Created on first use only; subsequent calls are no-ops.
 */
private void buildReadContentValues() {
    if (mReadContentValues != null) {
        return; // already initialized — lazy creation happens only once
    }
    ContentValues values = new ContentValues(2);
    values.put("read", 1);
    values.put("seen", 1);
    mReadContentValues = values;
}


类似的还有单例模式的不同创建方式:懒汉式,饿汉式。

public class MainActivity4 extends AppCompatActivity { private static final String TAG = "camera2api"; // 1. 权限相机相关变量 private static final int REQUEST_CAMERA_PERMISSIONS = 100; private TextureView mTextureView; // 预览显示载体 private Button mToggleFilterBtn; // 滤镜切换按钮 private CameraManager mCameraManager; // Camera2的“相机管理器”(列举相机、获取信息) private StreamConfigurationMap configMap; private String cameraId; // 当前使用的相机ID private CameraDevice mCameraDevice; // 代表物理相机设备(打开后才能操作) private CameraCaptureSession mCaptureSession; // 相机会话(所有预览/拍照操作通过它发起) private HandlerThread mBackgroundThread; // 相机操作后台线程(避免阻塞UI) private Handler mBackgroundHandler; // 2. OpenGL渲染器(连接Camera2和GPU) private CameraRenderer mCameraRenderer; private GLSurfaceView mGLSurfaceView; // 隐藏的GLSurfaceView(调度OpenGL渲染,不直接显示) // 3. Canvas绘制相关(叠加取景框) private Paint mFramePaint; // 画笔(绘制取景框) private volatile boolean isDrawingActive = false; // 核心控制标志 private SurfaceTexture surfaceTexture; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main4); // 初始化UI控件 initViews(); // 初始化Canvas画笔(用于叠加取景框) initCanvasPaint(); // 初始化OpenGL渲染器(隐藏的GLSurfaceView,仅用于调度GPU) initRenderer(); } @Override protected void onResume() { super.onResume(); Log.i(TAG, "判断是否有相机权限"); List<String> permissions=new ArrayList<>(); permissions.add(Manifest.permission.CAMERA); permissions.add(Manifest.permission.WRITE_EXTERNAL_STORAGE); permissions.add(Manifest.permission.READ_EXTERNAL_STORAGE); permissions.add(Manifest.permission.RECORD_AUDIO); if (!checkPermission(permissions)){ Log.i(TAG, "没有相机权限——>开始请求相机权限"); ActivityCompat.requestPermissions(this, permissions.toArray(new String[0]), REQUEST_CAMERA_PERMISSIONS); } Log.e(TAG, "1.1"); Log.i(TAG, "授权判断"); if (mTextureView.isAvailable()) { Log.i(TAG, "授权成功"); openCamera(); } Log.e(TAG, "1"); } // -------------------------- // 初始化UI控件 // -------------------------- private void initViews() { mTextureView = 
findViewById(R.id.texture_view); mToggleFilterBtn = findViewById(R.id.btn_toggle_filter); try { // 初始化相机管理器 mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); cameraId = mCameraManager.getCameraIdList()[0]; CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId); configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); } catch (CameraAccessException e) { throw new RuntimeException(e); } Log.i(TAG, "使用相机ID: " + cameraId); Log.i(TAG, "设置监听: " ); // 滤镜切换按钮点击事件(需在GL线程执行,避免线程安全问题) mToggleFilterBtn.setOnClickListener(v -> { // queueEvent:将任务提交到GL线程执行 mGLSurfaceView.queueEvent(() -> mCameraRenderer.toggleFilter()); }); Log.e(TAG, "1.2"); // TextureView监听:Surface可用时打开相机 mTextureView.setSurfaceTextureListener(surfaceTextureListener); } private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() { @Override public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) { Log.e(TAG, "1.3"); // 检查权限,有权限则打开相机 if (ActivityCompat.checkSelfPermission(MainActivity4.this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) { Log.e(TAG, "1.4"); openCamera(); } else { Log.e(TAG, "1.5"); // 申请相机权限 ActivityCompat.requestPermissions(MainActivity4.this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSIONS); } // 重写TextureView的onDraw方法(演示Canvas绘制) mTextureView.setWillNotDraw(false); // 允许TextureView执行onDraw startDrawingThread(); // 启动绘制线程 // -------------------------- // 演示Canvas绘制(在预览上叠加白色取景框) // -------------------------- // 注意:Canvas绘制需在SurfaceTexture可用后执行,且每次预览刷新后需重绘 mTextureView.postInvalidate(); // 触发onDraw } @Override public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) { // 预览尺寸变化时,重新设置渲染视口(由OpenGL渲染器处理) } @Override public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) { // TextureView销毁时,释放相机资源 closeCamera(); return 
false; } @Override public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) { // 每次预览帧更新后,重绘Canvas(避免取景框消失) mTextureView.postInvalidate(); } }; // 新增辅助方法:检查单权限 private boolean checkPermission(List<String> permissions) { boolean isPermissionFlag=true; for (String permission : permissions) { if (ActivityCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED){ isPermissionFlag=false; } } return isPermissionFlag; } private void startDrawingThread() { new Thread(() -> { while (isDrawingActive) { Canvas canvas = mTextureView.lockCanvas(); if (canvas != null) { drawFrameWithCanvas(canvas); // 执行绘制逻辑 mTextureView.unlockCanvasAndPost(canvas); } try { Thread.sleep(16); } catch (InterruptedException e) {} // 60FPS } }).start(); } // -------------------------- // 初始化Canvas画笔(绘制取景框) // -------------------------- private void initCanvasPaint() { mFramePaint = new Paint(); mFramePaint.setColor(Color.WHITE); // 白色取景框 mFramePaint.setStrokeWidth(5); // 线宽5px mFramePaint.setStyle(Paint.Style.STROKE); // 空心框 } // -------------------------- // Canvas绘制:在预览上叠加取景框(CPU层面) // -------------------------- private void drawFrameWithCanvas(Canvas canvas) { if (canvas == null) return; // 取景框尺寸:居中,宽高为屏幕的80% int screenWidth = canvas.getWidth(); int screenHeight = canvas.getHeight(); int frameSize = Math.min(screenWidth, screenHeight) * 4 / 5; // 80%尺寸 int left = (screenWidth - frameSize) / 2; int top = (screenHeight - frameSize) / 2; int right = left + frameSize; int bottom = top + frameSize; // 绘制取景框(空心矩形) canvas.drawRect(left, top, right, bottom, mFramePaint); } // -------------------------- // 初始化OpenGL渲染器(隐藏的GLSurfaceView) // -------------------------- private void initRenderer() { // 创建隐藏的GLSurfaceView(仅用于调度OpenGL,不添加到布局) mGLSurfaceView = new GLSurfaceView(this); mGLSurfaceView.setEGLContextClientVersion(2); // 使用OpenGL ES 2.0(移动端通用) mCameraRenderer = new CameraRenderer(this); // 注册回调(确保非空) 
mCameraRenderer.setSurfaceTextureListener(this::handleSurfaceTextureReady); // 同步启动GL线程 mGLSurfaceView.setRenderer(mCameraRenderer); mGLSurfaceView.onResume(); // 关键:强制启动渲染线程 // 等待纹理初始化(最多500ms) synchronized (mGLSurfaceView) { try { mGLSurfaceView.wait(500); } catch (InterruptedException e) { Log.w(TAG, "等待纹理初始化超时"); } } // 渲染模式:持续渲染(30帧/秒,保证预览流畅) mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY); } // 回调方法(GL线程触发) private void handleSurfaceTextureReady(SurfaceTexture surfaceTexture) { synchronized (mGLSurfaceView) { mGLSurfaceView.notifyAll(); // 唤醒等待线程 this.surfaceTexture = surfaceTexture; } } // -------------------------- // Camera2核心1:打开相机(原生API流程) // -------------------------- private void openCamera() { Log.e(TAG, "————————>"+ Log.getStackTraceString(new Throwable())); Log.e(TAG, "1.6"); // 启动后台线程(Camera2操作必须在后台执行,避免阻塞UI) startBackgroundThread(); try { // 1. 打开相机(需权限,回调中获取CameraDevice) if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { return; } mCameraManager.openCamera(cameraId, new CameraDevice.StateCallback() { @Override public void onOpened(@NonNull CameraDevice camera) { Log.e(TAG, "相机打开成功"); // 相机打开成功,保存CameraDevice实例 mCameraDevice = camera; // 下一步:配置预览会话(Camera2的核心流程) createPreviewSession(); } @Override public void onDisconnected(@NonNull CameraDevice camera) { // 相机关闭连接,释放资源 camera.close(); mCameraDevice = null; } @Override public void onError(@NonNull CameraDevice camera, int error) { // 相机出错,释放资源并退出 camera.close(); mCameraDevice = null; Toast.makeText(MainActivity4.this, "相机打开失败:" + error, Toast.LENGTH_SHORT).show(); finish(); } }, mBackgroundHandler); // 用后台线程处理回调 } catch (CameraAccessException e) { e.printStackTrace(); Toast.makeText(this, "相机访问异常:" + e.getMessage(), Toast.LENGTH_SHORT).show(); } } // -------------------------- // Camera2核心2:创建预览会话(配置输出目标+发送预览请求) // -------------------------- private void createPreviewSession() { try { // 1. 
获取OpenGL渲染器的SurfaceTexture,包装成Surface(Camera2的输出目标) surfaceTexture = mCameraRenderer.getSurfaceTexture(); if (surfaceTexture == null) { Toast.makeText(this, "SurfaceTexture为空", Toast.LENGTH_SHORT).show(); return; } // 2. 配置预览尺寸(匹配相机支持的尺寸,避免画面变形) if (configMap == null) { Toast.makeText(this, "相机配置异常", Toast.LENGTH_SHORT).show(); return; } // 获取相机支持的SurfaceTexture输出尺寸,选择第一个(简化处理,实际需选屏幕比例匹配的) Size[] previewSizes = configMap.getOutputSizes(SurfaceTexture.class); Size previewSize = previewSizes[0]; // 设置SurfaceTexture的默认缓冲区尺寸(相机输出尺寸一致) surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); // 包装成Surface(Camera2的输出目标必须是Surface) Surface previewSurface = new Surface(surfaceTexture); // 3. 构建预览请求(告诉Camera2:要执行“预览”操作) CaptureRequest.Builder previewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); previewRequestBuilder.addTarget(previewSurface); // 相机输出的YUV数据发送到这个Surface // 4. 配置预览参数(自动对焦、自动曝光,提升体验) previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); // 连续对焦 previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // 自动曝光+闪光灯 // 5. 
创建相机会话(Camera2的“工作模式”,所有操作通过会话发起) mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession session) { // 会话配置成功,保存会话实例 mCaptureSession = session; try { // 发送“重复预览请求”(持续输出YUV数据,30帧/秒) CaptureRequest previewRequest = previewRequestBuilder.build(); // setRepeatingRequest:重复执行请求,直到停止 mCaptureSession.setRepeatingRequest(previewRequest, null, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } } @Override public void onConfigureFailed(@NonNull CameraCaptureSession session) { Toast.makeText(MainActivity4.this, "预览会话配置失败", Toast.LENGTH_SHORT).show(); } }, mBackgroundHandler); // 用后台线程处理回调 } catch (CameraAccessException e) { e.printStackTrace(); Toast.makeText(this, "创建会话异常:" + e.getMessage(), Toast.LENGTH_SHORT).show(); } } // -------------------------- // 启动相机操作后台线程 // -------------------------- private void startBackgroundThread() { mBackgroundThread = new HandlerThread("CameraBackground"); // 线程名(方便调试) mBackgroundThread.start(); mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); // 线程的“遥控器” } // -------------------------- // 停止后台线程(避免内存泄漏) // -------------------------- private void stopBackgroundThread() { if (mBackgroundThread != null) { mBackgroundThread.quitSafely(); // 安全退出线程 try { mBackgroundThread.join(); // 等待线程执行完毕 mBackgroundThread = null; mBackgroundHandler = null; } catch (InterruptedException e) { e.printStackTrace(); } } } // -------------------------- // 关闭相机,释放所有资源 // -------------------------- private void closeCamera() { // 停止预览请求 if (mCaptureSession != null) { try { mCaptureSession.stopRepeating(); // 停止重复请求 mCaptureSession.abortCaptures(); // 终止所有未完成的请求 mCaptureSession.close(); } catch (CameraAccessException e) { e.printStackTrace(); } mCaptureSession = null; } // 关闭相机设备 if (mCameraDevice != null) { mCameraDevice.close(); mCameraDevice = null; } // 停止后台线程 stopBackgroundThread(); } // 
-------------------------- // 权限申请结果回调 // -------------------------- @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); if (requestCode == REQUEST_CAMERA_PERMISSIONS) { if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { } else { // 权限拒绝,提示并退出 Toast.makeText(this, "需要相机权限才能预览", Toast.LENGTH_SHORT).show(); finish(); } } } // -------------------------- // 生命周期:暂停时释放资源 // -------------------------- @Override protected void onPause() { closeCamera(); super.onPause(); } @Override protected void onDestroy() { if (surfaceTexture != null) { surfaceTexture.release(); // 释放纹理资源 surfaceTexture = null; } super.onDestroy(); } }public class CameraRenderer extends AppCompatActivity implements GLSurfaceView.Renderer { private static final String TAG = "camera2api"; // 1. 基础变量 private Context mContext; private SurfaceTexture mSurfaceTexture; // 连接Camera2和GPU的桥梁(接收YUV数据) private int mCameraTextureId; // GPU纹理ID(存储YUV数据的“容器”) private int mProgramNoFilter; // 无滤镜OpenGL程序(顶点+片段着色器) private int mProgramBlackWhite; // 黑白滤镜OpenGL程序 private int mCurrentProgram; // 当前使用的程序(默认无滤镜) private boolean mIsBlackWhite = false; // 滤镜开关 // 2. 顶点坐标和纹理坐标(固定值:覆盖全屏) // 顶点坐标:左下、右下、左上、右上(OpenGL坐标系:-1~1) private float[] mVertexCoords = {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f}; // 纹理坐标:解决相机纹理上下颠倒问题(0~1,对应顶点坐标) private float[] mTexCoords = {0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f}; // 转为GPU可读取的FloatBuffer(避免CPU/GPU数据格式不兼容) private FloatBuffer mVertexBuffer; private FloatBuffer mTexCoordBuffer; // 3. 
构造方法(初始化坐标缓冲区) public CameraRenderer(Context context) { mContext = context; // 初始化顶点坐标缓冲区 mVertexBuffer = ByteBuffer.allocateDirect(mVertexCoords.length * 4) .order(ByteOrder.nativeOrder()) // 按GPU原生字节序排列 .asFloatBuffer() .put(mVertexCoords); mVertexBuffer.position(0); // 重置读取指针 // 初始化纹理坐标缓冲区 mTexCoordBuffer = ByteBuffer.allocateDirect(mTexCoords.length * 4) .order(ByteOrder.nativeOrder()) .asFloatBuffer() .put(mTexCoords); mTexCoordBuffer.position(0); } // 在CameraRenderer类中添加 public interface SurfaceTextureListener { void onSurfaceTextureCreated(SurfaceTexture surfaceTexture); } private SurfaceTextureListener mListener; public void setSurfaceTextureListener(SurfaceTextureListener listener) { mListener = listener; } // -------------------------- // 渲染器核心1:初始化OpenGL环境(只执行1次,在GL线程) // -------------------------- @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { // 1. 创建相机专用纹理(告诉GPU:这是相机YUV数据,不是普通图片) mCameraTextureId = createCameraTexture(); // 2. 绑定纹理到SurfaceTexture(让Camera2的YUV数据流入GPU) mSurfaceTexture = new SurfaceTexture(mCameraTextureId); // 当有新YUV帧时,通知GLSurfaceView刷新渲染(保证预览流畅) GLSurfaceView mGLSurfaceView = (GLSurfaceView) findViewById(R.id.texture_view); mSurfaceTexture.setOnFrameAvailableListener(surfaceTexture -> { ((GLSurfaceView) findViewById(R.id.texture_view)).requestRender(); }); // 修复1:必须设置帧监听器(否则onDrawFrame不触发) mSurfaceTexture.setOnFrameAvailableListener(st -> { if (mGLSurfaceView != null) { mGLSurfaceView.requestRender(); } }); // 修复2:空指针防护 if (mListener != null) { mListener.onSurfaceTextureCreated(mSurfaceTexture); } else { Log.w(TAG, "SurfaceTexture回调未注册!"); } // 3. 
编译并链接两个OpenGL程序(无滤镜+黑白滤镜) String vertexShader = loadShaderFromRaw(R.raw.vertex_shader); // 通用顶点着色器 String fragNoFilter = loadShaderFromRaw(R.raw.frag_shader_no_filter); String fragBlackWhite = loadShaderFromRaw(R.raw.frag_shader_black_white); mProgramNoFilter = createOpenGLProgram(vertexShader, fragNoFilter); mProgramBlackWhite = createOpenGLProgram(vertexShader, fragBlackWhite); // 默认使用无滤镜程序 mCurrentProgram = mProgramNoFilter; } // -------------------------- // 渲染器核心2:每帧渲染(约30次/秒,在GL线程) // -------------------------- @Override public void onDrawFrame(GL10 gl) { // 1. 清空屏幕(避免上一帧画面残留,黑色背景) GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // 2. 更新纹理数据(从Camera2获取最新YUV帧,同步到GPU纹理) mSurfaceTexture.updateTexImage(); // 3. 切换当前OpenGL程序(根据滤镜开关选择无滤镜/黑白) mCurrentProgram = mIsBlackWhite ? mProgramBlackWhite : mProgramNoFilter; GLES20.glUseProgram(mCurrentProgram); // 激活当前程序(GPU开始执行该程序的着色器) // 4. 绑定顶点坐标(告诉GPU:画面要画在屏幕的哪个位置) int vPositionLoc = GLES20.glGetAttribLocation(mCurrentProgram, "vPosition"); GLES20.glEnableVertexAttribArray(vPositionLoc); // 启用顶点属性 // 传递顶点坐标给顶点着色器:2个值为1组(x,y),float类型,不归一化,无偏移,从mVertexBuffer读取 GLES20.glVertexAttribPointer(vPositionLoc, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer); // 5. 绑定纹理坐标(告诉GPU:纹理如何映射到屏幕顶点) int vTexCoordLoc = GLES20.glGetAttribLocation(mCurrentProgram, "vTexCoord"); GLES20.glEnableVertexAttribArray(vTexCoordLoc); GLES20.glVertexAttribPointer(vTexCoordLoc, 2, GLES20.GL_FLOAT, false, 0, mTexCoordBuffer); // 6. 绑定相机纹理(告诉GPU:要处理的YUV数据在这个纹理中) GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // 激活纹理单元0(GPU有多个纹理单元,这里用第0个) GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mCameraTextureId); // 绑定相机纹理 // 传递纹理单元0给片段着色器的sTexture变量(告诉片段着色器:采样这个纹理) int sTextureLoc = GLES20.glGetUniformLocation(mCurrentProgram, "sTexture"); GLES20.glUniform1i(sTextureLoc, 0); // 7. 绘制画面(GPU执行着色器,将处理后的RGB数据写入Surface) // GL_TRIANGLE_STRIP:用4个顶点画2个三角形,覆盖全屏(效率最高) GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); // 8. 
禁用顶点属性(避免GPU资源泄漏) GLES20.glDisableVertexAttribArray(vPositionLoc); GLES20.glDisableVertexAttribArray(vTexCoordLoc); // -------------------------- // 演示Canvas绘制(CPU层面叠加简单UI:如白色取景框) // -------------------------- // 注意:Canvas绘制需在UI线程或SurfaceTexture的回调中执行,这里仅演示逻辑 // 实际代码需在MainActivity的SurfaceTextureListener中处理(见步骤4) } // -------------------------- // 渲染器核心3:视图尺寸变化(如屏幕旋转,执行1次) // -------------------------- @Override public void onSurfaceChanged(GL10 gl, int width, int height) { // 设置渲染视口(画面显示的区域:左上角(0,0),宽width,高height,即全屏) GLES20.glViewport(0, 0, width, height); } // -------------------------- // 辅助方法1:创建相机专用纹理 // -------------------------- private int createCameraTexture() { int[] textures = new int[1]; // 1. 向GPU申请1个纹理ID(类似“分配GPU内存地址”) GLES20.glGenTextures(1, textures, 0); int textureId = textures[0]; // 2. 绑定纹理(指定纹理类型为“相机专用纹理”GL_TEXTURE_EXTERNAL_OES) GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId); // 3. 设置纹理过滤参数(避免拉伸时画面模糊/失真) // 缩小过滤:线性插值(画面缩小时平滑) GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); // 放大过滤:线性插值(画面放大时平滑) GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); // 纹理边缘处理: clamp_to_edge(边缘像素不重复,避免黑边) GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); // 4. 
解绑纹理(避免后续操作污染当前纹理) GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); return textureId; } // -------------------------- // 辅助方法2:从raw目录加载着色器代码 // -------------------------- private String loadShaderFromRaw(int rawId) { try { InputStream is = mContext.getResources().openRawResource(rawId); byte[] buffer = new byte[is.available()]; is.read(buffer); is.close(); return new String(buffer, "UTF-8"); // 转为字符串格式的着色器代码 } catch (Exception e) { e.printStackTrace(); throw new RuntimeException("加载着色器失败:" + e.getMessage()); } } // -------------------------- // 辅助方法3:创建OpenGL程序(编译+链接着色器) // -------------------------- private int createOpenGLProgram(String vertexShaderCode, String fragmentShaderCode) { // 1. 编译顶点着色器 int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode); // 2. 编译片段着色器 int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode); // 3. 链接程序(将两个着色器组合为GPU可执行的“指令集”) int program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); // 附加顶点着色器 GLES20.glAttachShader(program, fragmentShader); // 附加片段着色器 GLES20.glLinkProgram(program); // 链接程序 // 4. 
检查链接结果(避免编译失败) int[] linkStatus = new int[1]; GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); if (linkStatus[0] == 0) { String errorLog = GLES20.glGetProgramInfoLog(program); GLES20.glDeleteProgram(program); // 删除无效程序 throw new RuntimeException("OpenGL程序链接失败:" + errorLog); } return program; } // -------------------------- // 辅助方法4:编译单个着色器 // -------------------------- private int compileShader(int type, String shaderCode) { int shader = GLES20.glCreateShader(type); // 创建着色器(顶点/片段) GLES20.glShaderSource(shader, shaderCode); // 设置着色器代码 GLES20.glCompileShader(shader); // 编译着色器 // 检查编译结果 int[] compileStatus = new int[1]; GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); if (compileStatus[0] == 0) { String errorLog = GLES20.glGetShaderInfoLog(shader); GLES20.glDeleteShader(shader); // 删除无效着色器 throw new RuntimeException("着色器编译失败:" + errorLog); } return shader; } // -------------------------- // 对外提供的方法:切换滤镜(需在GL线程执行) // -------------------------- public void toggleFilter() { mIsBlackWhite = !mIsBlackWhite; } // -------------------------- // 对外提供的方法:获取SurfaceTexture(供Camera2绑定输出目标) // -------------------------- public SurfaceTexture getSurfaceTexture() { return mSurfaceTexture; } }看一下是什么问题,仍然不能正常预览
09-18
package com.android.example.cameraappxjava; import android.Manifest; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Intent; import android.content.pm.PackageManager; import android.graphics.ImageFormat; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CaptureFailure; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.params.StreamConfigurationMap; import android.media.Image; import android.media.ImageReader; import android.net.Uri; import android.opengl.GLSurfaceView; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.HandlerThread; import android.os.SystemClock; import android.provider.MediaStore; import android.util.Log; import android.util.Size; import android.view.Surface; import android.widget.Button; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import androidx.core.app.ActivityCompat; import com.android.example.cameraappxjava.util.CameraGLRenderer; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; /** * 修复后:Camera2 + GLSurfaceView 自定义YUV预览Demo */ public class MainActivity2 extends AppCompatActivity implements CameraGLRenderer.SurfaceSizeCallback { private static final String TAG = "camera2api_fixed"; private static final int REQUEST_CAMERA_PERMISSION = 100; private static final long MIN_CLICK_INTERVAL = 1000; // GLSurfaceView 相关 private GLSurfaceView glSurfaceView; private CameraGLRenderer cameraGLRenderer; private int glSurfaceWidth 
= 0; private int glSurfaceHeight = 0; // Camera2 核心组件 private Button captureButton; private CameraDevice cameraDevice; private CameraCaptureSession cameraCaptureSession; private CaptureRequest.Builder captureRequestBuilder; private String cameraId; private Handler backgroundHandler; private HandlerThread backgroundThread; private CameraManager cameraManager; private volatile boolean isCapturing = false; private long lastClickTime = 0; // ImageReader:预览(YUV)+ 拍照(JPEG) private ImageReader previewImageReader; private ImageReader captureImageReader; private Size previewSize; // GLSurfaceView匹配的预览尺寸 private Size captureSize; // 拍照尺寸 // 图片保存相关 private ContentResolver contentResolver; private ContentValues mediaValues; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); Log.d(TAG, "onCreate ——————————————————————"); // 1. 初始化视图组件 glSurfaceView = findViewById(R.id.glsurfaceView); captureButton = findViewById(R.id.btnCapture); contentResolver = getContentResolver(); mediaValues = new ContentValues(); mediaValues.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg"); mediaValues.put(MediaStore.Images.Media.RELATIVE_PATH, Environment.DIRECTORY_PICTURES); // 2. 初始化GL渲染器(设置尺寸回调) initGLRenderer(); // 3. 拍照按钮点击事件(防连点) captureButton.setOnClickListener(v -> { long currentTime = SystemClock.elapsedRealtime(); if (currentTime - lastClickTime > MIN_CLICK_INTERVAL) { lastClickTime = currentTime; takePicture(); } else { Log.d(TAG, "点击过快,已忽略"); } }); // 4. 
初始化CameraManager(提前获取,避免重复创建) cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE); } /** * 初始化GLSurfaceView和自定义渲染器(核心) */ private void initGLRenderer() { // 设置OpenGL ES 2.0版本 glSurfaceView.setEGLContextClientVersion(2); // 创建渲染器并设置尺寸回调(监听GLSurfaceView实际尺寸) cameraGLRenderer = new CameraGLRenderer(this); glSurfaceView.setRenderer(cameraGLRenderer); // 按需渲染(有新帧才重绘,节省性能) glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); } /** * 初始化相机核心配置(预览/拍照ImageReader) * 必须在GLSurfaceView尺寸确定后调用(避免尺寸不匹配) */ private void initCamera() { if (glSurfaceWidth == 0 || glSurfaceHeight == 0 || backgroundHandler == null) { Log.w(TAG, "initCamera: 条件不满足(GL尺寸未确定/后台线程未启动)"); Log.w(TAG, "glSurfaceWidth:"+glSurfaceWidth+"glSurfaceHeight:"+glSurfaceHeight+"backgroundHandler:"+backgroundHandler); return; } Log.d(TAG, "initCamera: 开始初始化,GL尺寸=" + glSurfaceWidth + "x" + glSurfaceHeight); try { // 1. 获取相机ID(默认后置相机,0为后置,1为前置) String[] cameraIds = cameraManager.getCameraIdList(); if (cameraIds.length == 0) { Log.e(TAG, "无可用相机设备"); return; } cameraId = cameraIds[0]; // 优先后置相机 // 2. 获取相机支持的配置(预览/拍照尺寸) CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId); StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); if (configMap == null) { Log.e(TAG, "StreamConfigurationMap为空,相机不支持该配置"); return; } // 3. 
初始化预览ImageReader(YUV_420_888格式,GLSurfaceView尺寸匹配) Size[] yuvSizes = configMap.getOutputSizes(ImageFormat.YUV_420_888); previewSize = chooseOptimalSize(yuvSizes, glSurfaceWidth, glSurfaceHeight); if (previewSize == null) { Log.e(TAG, "未找到合适的预览尺寸"); return; } // 关闭旧的ImageReader(避免内存泄漏) if (previewImageReader != null) { previewImageReader.close(); } // 创建预览ImageReader(2个缓冲区,避免帧丢失) previewImageReader = ImageReader.newInstance( previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 4 ); // 设置YUV帧回调(Camera后台线程执行,避免阻塞UI) previewImageReader.setOnImageAvailableListener(reader -> { Image image = reader.acquireLatestImage(); if (image == null) return; try { if (cameraGLRenderer != null) { // 传递YUV数据给渲染器,并触发渲染 cameraGLRenderer.setYUVData(image); glSurfaceView.requestRender(); }else { image.close(); } } catch (Exception e) { Log.e(TAG, "预览帧处理失败: " + e.getMessage(), e); } }, backgroundHandler); // 4. 初始化拍照ImageReader(JPEG格式,选择最大支持尺寸) Size[] jpegSizes = configMap.getOutputSizes(ImageFormat.JPEG); if (jpegSizes.length == 0) { Log.e(TAG, "相机不支持JPEG拍照"); return; } // 选择最大的拍照尺寸 captureSize = Collections.max(Arrays.asList(jpegSizes), new CompareSizesByArea()); // 关闭旧的ImageReader if (captureImageReader != null) { captureImageReader.close(); } // 创建拍照ImageReader captureImageReader = ImageReader.newInstance( captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 1 // 拍照一次一张,1个缓冲区足够 ); Log.d(TAG, "initCamera: 完成,预览尺寸=" + previewSize + ",拍照尺寸=" + captureSize); } catch (CameraAccessException e) { Log.e(TAG, "相机访问异常: " + e.getMessage(), e); } catch (SecurityException e) { Log.e(TAG, "相机权限异常: " + e.getMessage(), e); } } /** * 打开相机(需权限已授予) */ private void openCamera() { if (cameraId == null || previewImageReader == null || backgroundHandler == null) { Log.w(TAG, "openCamera: 条件不满足,延迟1000ms重试"); backgroundHandler.postDelayed(this::openCamera, 1000); return; } Log.d(TAG, "openCamera: 尝试打开相机,ID=" + cameraId); try { if (ActivityCompat.checkSelfPermission(this, 
Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) { // 打开相机(传入状态回调和后台线程Handler) cameraManager.openCamera(cameraId, cameraStateCallback, backgroundHandler); } else { Log.w(TAG, "相机权限未授予,无法打开"); } } catch (CameraAccessException e) { Log.e(TAG, "打开相机失败: " + e.getMessage(), e); } } /** * 创建相机预览会话(核心:绑定预览/拍照Surface) */ private void createCaptureSession() { if (cameraDevice == null || previewImageReader == null || captureImageReader == null) { Log.e(TAG, "createCaptureSession: 核心组件为空(相机/ImageReader)"); return; } try { // 1. 获取预览和拍照的Surface Surface previewSurface = previewImageReader.getSurface(); Surface captureSurface = captureImageReader.getSurface(); List<Surface> outputSurfaces = new ArrayList<>(); outputSurfaces.add(previewSurface); // 预览Surface(YUV输出) outputSurfaces.add(captureSurface); // 拍照Surface(JPEG输出) // 2. 创建CaptureSession(配置输出Surface) cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession session) { Log.i(TAG, "CaptureSession配置成功"); cameraCaptureSession = session; // 配置预览请求(持续输出YUV帧到预览Surface) try { captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); captureRequestBuilder.addTarget(previewSurface); // 预览目标:YUV ImageReader // 开启自动对焦和自动曝光(预览必备) captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // 下发持续预览请求 cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler); Log.i(TAG, "预览请求已下发,开始预览"); } catch (CameraAccessException e) { Log.e(TAG, "配置预览请求失败: " + e.getMessage(), e); } } @Override public void onConfigureFailed(@NonNull CameraCaptureSession session) { Log.e(TAG, "CaptureSession配置失败"); runOnUiThread(() -> Toast.makeText(MainActivity2.this, "相机预览配置失败", Toast.LENGTH_SHORT).show()); } }, 
backgroundHandler); } catch (CameraAccessException e) { Log.e(TAG, "创建CaptureSession异常: " + e.getMessage(), e); } } /** * 拍照逻辑(停止预览→下发拍照请求→恢复预览) */ private void takePicture() { if (cameraDevice == null || cameraCaptureSession == null || captureImageReader == null) { Log.w(TAG, "takePicture: 核心组件未就绪,无法拍照"); runOnUiThread(() -> Toast.makeText(this, "相机未就绪", Toast.LENGTH_SHORT).show()); return; } Log.i(TAG, "takePicture: 开始拍照流程"); isCapturing = true; try { // 1. 配置拍照请求(JPEG格式,输出到拍照ImageReader) CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); captureBuilder.addTarget(captureImageReader.getSurface()); // 配置拍照参数(自动对焦、曝光、旋转角度) captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // 修正照片旋转角度(匹配屏幕方向) int rotation = getWindowManager().getDefaultDisplay().getRotation(); captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getJpegOrientation(rotation)); // 2. 
设置拍照ImageReader回调(保存照片) captureImageReader.setOnImageAvailableListener(reader -> { try (Image image = reader.acquireLatestImage()) { if (image == null) { Log.w(TAG, "拍照Image为空,保存失败"); return; } // 提取JPEG数据(Image的planes[0]为JPEG数据) Image.Plane[] planes = image.getPlanes(); ByteBuffer jpegBuffer = planes[0].getBuffer(); byte[] jpegData = new byte[jpegBuffer.remaining()]; jpegBuffer.get(jpegData); // 保存照片到相册 savePhotoToGallery(jpegData); } catch (Exception e) { Log.e(TAG, "保存照片失败: " + e.getMessage(), e); runOnUiThread(() -> Toast.makeText(MainActivity2.this, "照片保存失败", Toast.LENGTH_SHORT).show()); } finally { isCapturing = false; // 恢复预览(重新下发持续预览请求) if (cameraCaptureSession != null && captureRequestBuilder != null) { try { cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler); Log.i(TAG, "拍照完成,已恢复预览"); } catch (CameraAccessException e) { Log.e(TAG, "恢复预览失败: " + e.getMessage(), e); } } } }, backgroundHandler); // 3. 停止预览→下发拍照请求 cameraCaptureSession.stopRepeating(); cameraCaptureSession.capture(captureBuilder.build(), new CameraCaptureSession.CaptureCallback() { @Override public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) { super.onCaptureFailed(session, request, failure); Log.e(TAG, "拍照失败,原因: " + failure.getReason()); isCapturing = false; runOnUiThread(() -> Toast.makeText(MainActivity2.this, "拍照失败", Toast.LENGTH_SHORT).show()); // 恢复预览 if (cameraCaptureSession != null && captureRequestBuilder != null) { try { cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler); } catch (CameraAccessException e) { Log.e(TAG, "恢复预览失败: " + e.getMessage(), e); } } } }, backgroundHandler); } catch (CameraAccessException e) { Log.e(TAG, "拍照流程异常: " + e.getMessage(), e); isCapturing = false; } } /** * 保存照片到系统相册 */ private void savePhotoToGallery(byte[] jpegData) { if (jpegData == null || jpegData.length == 0) { Log.w(TAG, 
"JPEG数据为空,无法保存"); return; } try { // 1. 生成唯一文件名(时间戳) String fileName = "Camera2_" + System.currentTimeMillis() + ".jpg"; mediaValues.put(MediaStore.Images.Media.DISPLAY_NAME, fileName); // 2. 插入到媒体库(获取Uri) Uri imageUri = contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, mediaValues); if (imageUri == null) { Log.e(TAG, "插入媒体库失败,无法获取Uri"); return; } // 3. 写入文件(通过ContentResolver避免存储权限问题) try (FileOutputStream outputStream = (FileOutputStream) contentResolver.openOutputStream(imageUri)) { outputStream.write(jpegData); outputStream.flush(); Log.i(TAG, "照片保存成功,路径: " + imageUri); // 通知媒体库扫描(刷新相册) sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, imageUri)); // 显示成功提示 runOnUiThread(() -> Toast.makeText(this, "照片已保存到相册", Toast.LENGTH_SHORT).show()); } } catch (IOException e) { Log.e(TAG, "写入照片文件失败: " + e.getMessage(), e); } } /** * 计算JPEG照片的旋转角度(匹配屏幕方向) */ private int getJpegOrientation(int screenRotation) { try { CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId); int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); // 根据相机传感器方向和屏幕旋转计算最终角度 int orientation = (sensorOrientation + screenRotation * 90) % 360; return orientation; } catch (CameraAccessException e) { Log.e(TAG, "获取传感器方向失败: " + e.getMessage(), e); return 0; } } /** * 选择最优尺寸(匹配View尺寸比例,避免拉伸) */ private Size chooseOptimalSize(Size[] sizes, int viewWidth, int viewHeight) { if (sizes == null || sizes.length == 0) return null; List<Size> candidateSizes = new ArrayList<>(); float viewRatio = (float) viewWidth / viewHeight; // 筛选:比例接近View(误差≤0.1),且尺寸不超过View for (Size size : sizes) { float sizeRatio = (float) size.getWidth() / size.getHeight(); if (Math.abs(sizeRatio - viewRatio) <= 0.1 && size.getWidth() <= viewWidth && size.getHeight() <= viewHeight) { candidateSizes.add(size); } } // 有符合条件的尺寸:选最大的(画质最好) if (!candidateSizes.isEmpty()) { return Collections.max(candidateSizes, new CompareSizesByArea()); } // 
无符合条件:选最大的尺寸(降级处理) return Collections.max(Arrays.asList(sizes), new CompareSizesByArea()); } /** * 启动相机后台线程(处理相机回调和ImageReader回调) */ private void startBackgroundThread() { if (backgroundThread == null) { backgroundThread = new HandlerThread("Camera2_Background"); backgroundThread.start(); backgroundHandler = new Handler(backgroundThread.getLooper()); Log.d(TAG, "后台线程已启动"); } } /** * 停止相机后台线程(避免内存泄漏) */ private void stopBackgroundThread() { if (backgroundThread != null) { backgroundThread.quitSafely(); try { backgroundThread.join(); backgroundThread = null; backgroundHandler = null; Log.d(TAG, "后台线程已停止"); } catch (InterruptedException e) { Log.e(TAG, "停止后台线程异常: " + e.getMessage(), e); } } } /** * 释放相机资源(避免内存泄漏) */ private void releaseCameraResources() { Log.d(TAG, "releaseCameraResources: 开始释放"); // 停止预览请求 if (cameraCaptureSession != null) { try { cameraCaptureSession.stopRepeating(); } catch (CameraAccessException e) { Log.e(TAG, "停止预览失败: " + e.getMessage(), e); } cameraCaptureSession.close(); cameraCaptureSession = null; } // 关闭相机设备 if (cameraDevice != null) { cameraDevice.close(); cameraDevice = null; } // 关闭ImageReader if (previewImageReader != null) { previewImageReader.close(); previewImageReader = null; } if (captureImageReader != null) { captureImageReader.close(); captureImageReader = null; } Log.d(TAG, "releaseCameraResources: 完成释放"); } // ---------------------- 生命周期方法 ---------------------- @Override protected void onResume() { super.onResume(); Log.d(TAG, "onResume ——————————————————————"); // 恢复GLSurfaceView(必须调用,否则OpenGL上下文丢失) glSurfaceView.onResume(); // 启动后台线程 startBackgroundThread(); // 检查相机权限 if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { Log.i(TAG, "请求相机权限"); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION); return; } // 初始化相机(GL尺寸确定后会自动调用openCamera) initCamera(); } @Override protected void onPause() { super.onPause(); 
Log.d(TAG, "onPause ——————————————————————"); // 暂停GLSurfaceView(保存OpenGL上下文) glSurfaceView.onPause(); // 停止预览并释放相机资源 if (!isCapturing) { releaseCameraResources(); } else { Log.w(TAG, "拍照中,延迟1000ms释放资源"); backgroundHandler.postDelayed(this::releaseCameraResources, 1000); } // 停止后台线程 stopBackgroundThread(); } @Override protected void onDestroy() { super.onDestroy(); Log.d(TAG, "onDestroy ——————————————————————"); // 释放渲染器资源 if (cameraGLRenderer != null) { cameraGLRenderer.release(); } // 置空引用(帮助GC) glSurfaceView = null; cameraGLRenderer = null; captureButton = null; cameraManager = null; contentResolver = null; mediaValues = null; } // ---------------------- 权限回调 ---------------------- @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); if (requestCode == REQUEST_CAMERA_PERMISSION) { if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { Log.i(TAG, "相机权限已授予"); // 初始化相机并打开 if (glSurfaceWidth == 0 || glSurfaceHeight == 0 || backgroundHandler == null) { Log.w(TAG, "相机权限授权后 initCamera: 条件不满足(GL尺寸未确定/后台线程未启动)"); Log.w(TAG, "glSurfaceWidth:"+glSurfaceWidth+"glSurfaceHeight:"+glSurfaceHeight+"backgroundHandler:"+backgroundHandler); } initCamera(); } else { Log.w(TAG, "相机权限被拒绝,无法预览"); runOnUiThread(() -> { Toast.makeText(this, "需要相机权限才能使用", Toast.LENGTH_SHORT).show(); finish(); // 无权限则退出 }); } } } // ---------------------- CameraDevice状态回调 ---------------------- private final CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(@NonNull CameraDevice camera) { Log.i(TAG, "相机已打开,ID=" + camera.getId()); cameraDevice = camera; // 相机打开后,创建预览会话 createCaptureSession(); } @Override public void onDisconnected(@NonNull CameraDevice camera) { Log.w(TAG, "相机已断开连接"); camera.close(); cameraDevice = null; } @Override public void onError(@NonNull 
CameraDevice camera, int error) { Log.e(TAG, "相机错误,代码=" + error); camera.close(); cameraDevice = null; runOnUiThread(() -> Toast.makeText(MainActivity2.this, "相机初始化错误", Toast.LENGTH_SHORT).show()); } }; // ---------------------- GLSurface尺寸回调(来自CameraGLRenderer) ---------------------- @Override public void onSurfaceSizeChanged(int width, int height) { Log.d(TAG, "onSurfaceSizeChanged: GL尺寸=" + width + "x" + height); // 更新GLSurface尺寸 glSurfaceWidth = width; glSurfaceHeight = height; // 重新初始化相机并打开 initCamera(); openCamera(); } // ---------------------- 尺寸比较器 ---------------------- static class CompareSizesByArea implements Comparator<Size> { @Override public int compare(Size lhs, Size rhs) { // 比较面积(避免溢出,用long) return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); } } }package com.android.example.cameraappxjava.util; import android.graphics.ImageFormat; import android.media.Image; import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.util.Log; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; /** * 修复后:YUV_420_888 自定义GL渲染器 */ public class CameraGLRenderer implements GLSurfaceView.Renderer { private static final String TAG = "CameraGLRenderer"; private static final int TEXTURE_COUNT = 3; // Y/U/V 三个纹理 // ---------------------- OpenGL ES 2.0 核心配置 ---------------------- // 顶点着色器(全屏显示) private static final String VERTEX_SHADER = "attribute vec4 vPosition;\n" + "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n" + "void main() {\n" + " gl_Position = vPosition;\n" + " texCoord = vTexCoord;\n" + "}"; // 片段着色器(YUV转RGB) private static final String FRAGMENT_SHADER = "precision mediump float;\n" + "varying vec2 texCoord;\n" + "uniform sampler2D yTex;\n" + "uniform sampler2D uTex;\n" + "uniform sampler2D vTex;\n" + "void main() {\n" + " // YUV转RGB(BT.601标准)\n" + " 
float y = texture2D(yTex, texCoord).r;\n" + " float u = texture2D(uTex, texCoord).r - 0.5;\n" + " float v = texture2D(vTex, texCoord).r - 0.5;\n" + " float r = y + 1.402 * v;\n" + " float g = y - 0.34414 * u - 0.71414 * v;\n" + " float b = y + 1.772 * u;\n" + " // 颜色范围限制(0.0~1.0)\n" + " r = clamp(r, 0.0, 1.0);\n" + " g = clamp(g, 0.0, 1.0);\n" + " b = clamp(b, 0.0, 1.0);\n" + " gl_FragColor = vec4(r, g, b, 1.0);\n" + "}"; // 全屏顶点坐标(左手坐标系:左上→左下→右上→右下) private static final float[] VERTEX_COORDS = { -1.0f, 1.0f, 0.0f, // 左上 -1.0f, -1.0f, 0.0f, // 左下 1.0f, 1.0f, 0.0f, // 右上 1.0f, -1.0f, 0.0f // 右下 }; // 纹理坐标(适配竖屏:解决画面颠倒,顶点坐标对应) private static final float[] TEX_COORDS = { 0.0f, 1.0f, // 左上(对应顶点左上) 1.0f, 1.0f, // 左下(对应顶点左下) 0.0f, 0.0f, // 右上(对应顶点右上) 1.0f, 0.0f // 右下(对应顶点右下) }; // ---------------------- 动态变量 ---------------------- private final SurfaceSizeCallback sizeCallback; // GL尺寸回调 private int shaderProgram; // 着色器程序ID private int[] textureIds = new int[TEXTURE_COUNT]; // Y/U/V纹理ID private FloatBuffer vertexBuffer; // 顶点坐标缓冲区 private FloatBuffer texCoordBuffer; // 纹理坐标缓冲区 // YUV数据(线程安全管理) private final Object yuvLock = new Object(); private Image pendingImage; // 待处理的YUV图像 private byte[] yData, uData, vData; // 提取后的Y/U/V数据 private int yuvWidth, yuvHeight; // YUV图像尺寸 // 纹理尺寸记录(避免重复创建纹理) private int yTexWidth = 0, yTexHeight = 0; private int uvTexWidth = 0, uvTexHeight = 0; private ByteBuffer yBuffer; private ByteBuffer uBuffer; private ByteBuffer vBuffer; private boolean hasNewFrame = false; // 新帧标志 // ---------------------- 构造方法(传入尺寸回调) ---------------------- public CameraGLRenderer(SurfaceSizeCallback callback) { this.sizeCallback = callback; } // ---------------------- 对外接口 ---------------------- /** * 设置待处理的YUV图像(从Camera2 ImageReader回调调用) */ public void setYUVData(Image image) { Log.d(TAG, "acquire image: " + image + " @ " + System.identityHashCode(image)); if (image == null || image.getFormat() != ImageFormat.YUV_420_888) { Log.w(TAG, 
"无效Image:格式非YUV_420_888或为空"); if (image != null) image.close(); return; } synchronized (yuvLock) { // 关闭未处理的旧图像(避免内存泄漏) if (pendingImage != null) { pendingImage.close(); // 关闭旧图像 Log.d(TAG, "关闭未处理的旧Image"); } // 仅当无待处理帧时才更新 if (pendingImage == null) { pendingImage = image; hasNewFrame = true; } else { image.close(); // 直接丢弃过载帧 } Log.e(TAG, "调用setYUVData,pendingImage:" + pendingImage); } } /** * 释放渲染器资源(Activity销毁时调用) */ public void release() { synchronized (yuvLock) { // 关闭待处理图像 if (pendingImage != null) { pendingImage.close(); pendingImage = null; } // 清空YUV数据 yData = uData = vData = null; yuvWidth = yuvHeight = 0; } // 释放OpenGL资源(必须在GL线程调用,此处通过GLSurfaceView队列) GLES20.glDeleteTextures(TEXTURE_COUNT, textureIds, 0); GLES20.glDeleteProgram(shaderProgram); Log.d(TAG, "渲染器资源已释放"); } // ---------------------- OpenGL生命周期回调 ---------------------- @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { Log.d(TAG, "onSurfaceCreated:初始化OpenGL"); // 初始化OpenGL状态 GLES20.glDisable(GLES20.GL_BLEND); // 关闭混合(避免透明) GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // 背景黑色 // 初始化坐标缓冲区(native内存,避免GC) vertexBuffer = createFloatBuffer(VERTEX_COORDS); texCoordBuffer = createFloatBuffer(TEX_COORDS); // 编译着色器程序 shaderProgram = compileShaderProgram(VERTEX_SHADER, FRAGMENT_SHADER); if (shaderProgram == 0) { Log.e(TAG, "着色器程序创建失败,预览不可用"); return; } // 创建Y/U/V三个纹理 GLES20.glGenTextures(TEXTURE_COUNT, textureIds, 0); initTexture(textureIds[0]); // Y纹理 initTexture(textureIds[1]); // U纹理 initTexture(textureIds[2]); // V纹理 // 检查OpenGL错误 int glError = GLES20.glGetError(); if (glError != GLES20.GL_NO_ERROR) { Log.e(TAG, "onSurfaceCreated OpenGL错误: " + glError); } } @Override public void onSurfaceChanged(GL10 gl, int width, int height) { Log.d(TAG, "onSurfaceChanged:GL尺寸=" + width + "x" + height); // 设置视口(全屏显示) GLES20.glViewport(0, 0, width, height); // 通知Activity更新相机预览尺寸 if (sizeCallback != null) { sizeCallback.onSurfaceSizeChanged(width, height); } // 重置纹理尺寸记录(避免尺寸变化导致纹理不匹配) yTexWidth = 
yTexHeight = uvTexWidth = uvTexHeight = 0; } @Override public void onDrawFrame(GL10 gl) { Log.e(TAG, "调用着色器onDrawFrame"); Log.d(TAG, "PendingImage: " + (pendingImage != null) + " | YUV尺寸: " + yuvWidth + "x" + yuvHeight + " | 纹理尺寸: Y=" + yTexWidth + "x" + yTexHeight + " UV=" + uvTexWidth + "x" + uvTexHeight); // 1. 处理待处理的YUV数据 boolean hasNewData = processPendingYUV(); if (!hasNewData) { // 无新数据:清空屏幕(黑色背景) GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); return; } // 2. 检查着色器程序是否有效 if (shaderProgram == 0 || textureIds == null) { Log.e(TAG, "着色器程序或纹理无效,跳过渲染"); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); return; } // 3. 清空上一帧 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // 4. 使用着色器程序 GLES20.glUseProgram(shaderProgram); // 5. 上传Y/U/V纹理数据 uploadTexture(textureIds[0], yData, yuvWidth, yuvHeight, true); // Y纹理 uploadTexture(textureIds[1], uData, uvTexWidth, uvTexHeight, false); // U纹理 uploadTexture(textureIds[2], vData, uvTexWidth, uvTexHeight, false); // V纹理 // 6. 绑定纹理到着色器采样器 bindTexturesToSamplers(); // 7. 传递顶点坐标和纹理坐标 passVertexAndTexCoord(); // 8. 绘制(三角形带:4个顶点→2个三角形→全屏) GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_COORDS.length / 3); // 9. 
禁用顶点属性(避免后续干扰) int vPositionLoc = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); int vTexCoordLoc = GLES20.glGetAttribLocation(shaderProgram, "vTexCoord"); GLES20.glDisableVertexAttribArray(vPositionLoc); GLES20.glDisableVertexAttribArray(vTexCoordLoc); // 检查渲染错误 int glError = GLES20.glGetError(); if (glError != GLES20.GL_NO_ERROR) { Log.e(TAG, "onDrawFrame OpenGL错误: " + glError); } } // ---------------------- OpenGL辅助方法 ---------------------- /** * 创建Float缓冲区(native内存,避免Java堆内存拷贝) */ private FloatBuffer createFloatBuffer(float[] data) { if (data == null || data.length == 0) return null; ByteBuffer byteBuffer = ByteBuffer.allocateDirect(data.length * 4); // float占4字节 byteBuffer.order(ByteOrder.nativeOrder()); // 匹配 native 字节序 FloatBuffer floatBuffer = byteBuffer.asFloatBuffer(); floatBuffer.put(data); floatBuffer.position(0); // 重置读取位置 return floatBuffer; } /** * 编译着色器程序(顶点+片段) */ private int compileShaderProgram(String vertexCode, String fragmentCode) { // 1. 编译顶点着色器 int vertexShader = compileSingleShader(GLES20.GL_VERTEX_SHADER, vertexCode); if (vertexShader == 0) return 0; // 2. 编译片段着色器 int fragmentShader = compileSingleShader(GLES20.GL_FRAGMENT_SHADER, fragmentCode); if (fragmentShader == 0) { GLES20.glDeleteShader(vertexShader); // 清理已创建的顶点着色器 return 0; } // 3. 链接着色器程序 int program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); GLES20.glAttachShader(program, fragmentShader); GLES20.glLinkProgram(program); // 4. 检查链接错误 int[] linkStatus = new int[1]; GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); if (linkStatus[0] != GLES20.GL_TRUE) { String errorLog = GLES20.glGetProgramInfoLog(program); Log.e(TAG, "着色器程序链接失败: " + errorLog); GLES20.glDeleteProgram(program); program = 0; } // 5. 
清理临时着色器(链接后不再需要) GLES20.glDeleteShader(vertexShader); GLES20.glDeleteShader(fragmentShader); return program; } /** * 编译单个着色器(顶点或片段) */ private int compileSingleShader(int shaderType, String shaderCode) { int shader = GLES20.glCreateShader(shaderType); if (shader == 0) { Log.e(TAG, "创建着色器失败,类型: " + (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段")); return 0; } // 加载着色器代码并编译 GLES20.glShaderSource(shader, shaderCode); GLES20.glCompileShader(shader); // 检查编译错误 int[] compileStatus = new int[1]; GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); if (compileStatus[0] != GLES20.GL_TRUE) { String errorLog = GLES20.glGetShaderInfoLog(shader); Log.e(TAG, (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段") + "着色器编译失败: " + errorLog); GLES20.glDeleteShader(shader); shader = 0; } return shader; } /** * 初始化纹理参数(Y/U/V通用) */ private void initTexture(int textureId) { if (textureId == 0) return; GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); // 纹理过滤:线性插值(画质更平滑) GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); // 纹理包裹:边缘拉伸(避免黑边) GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); // 解绑纹理 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); } /** * 处理待处理的YUV图像(提取Y/U/V数据,考虑内存对齐) */ private boolean processPendingYUV() { Image image = null; Log.e(TAG, "调用着色器onDrawFrame2,pendingImage:" + pendingImage); synchronized (yuvLock) { if (pendingImage == null) { return false; // 无新数据 } Log.e(TAG, "调用着色器onDrawFrame4"); // 取出待处理图像(释放锁,避免长时间占用) image = pendingImage; pendingImage = null; Log.e(TAG, "调用着色器onDrawFrame4"); } Log.e(TAG, "调用着色器onDrawFrame3"); try { Log.e(TAG, "image是否可用:" + image); // 1. 
获取YUV图像尺寸 yuvWidth = image.getWidth(); yuvHeight = image.getHeight(); Image.Plane[] planes = image.getPlanes(); if (planes.length < 3) { Log.e(TAG, "YUV平面数量不足3,无法提取数据"); return false; } // 2. 提取Y数据(Plane 0:Y通道,pixelStride=1,rowStride可能有对齐) Image.Plane yPlane = planes[0]; int yRowStride = yPlane.getRowStride(); int yPixelStride = yPlane.getPixelStride(); yBuffer = yPlane.getBuffer(); yData = extractPlaneData(yBuffer, yRowStride, yPixelStride, yuvWidth, yuvHeight); if (yData == null || yData.length != yuvWidth * yuvHeight) { Log.e(TAG, "Y数据提取失败,长度不匹配: " + (yData != null ? yData.length : 0) + " vs " + (yuvWidth * yuvHeight)); return false; } // 3. 提取U/V数据(Plane 1:U通道,Plane 2:V通道,或交错) Image.Plane uPlane = planes[1]; Image.Plane vPlane = planes[2]; int uvRowStride = uPlane.getRowStride(); int uvPixelStride = uPlane.getPixelStride(); int uvWidth = yuvWidth / 2; // YUV_420:U/V尺寸是Y的1/2 int uvHeight = yuvHeight / 2; uvTexWidth = uvWidth; uvTexHeight = uvHeight; // 处理Planar(U/V分离)或Semi-Planar(UV交错) if (uvPixelStride == 2) { ByteBuffer uvBuffer = uPlane.getBuffer(); int uvBufferSize = uvBuffer.remaining(); uData = new byte[uvWidth * uvHeight]; vData = new byte[uvWidth * uvHeight]; uvBuffer.rewind(); // 确保从0开始 // 使用批量复制以提高效率 byte[] rowBuffer = new byte[uvRowStride]; for (int row = 0; row < uvHeight; row++) { int rowStart = row * uvRowStride; if (rowStart >= uvBufferSize) break; int bytesToRead = Math.min(uvRowStride, uvBufferSize - rowStart); uvBuffer.position(rowStart); uvBuffer.get(rowBuffer, 0, bytesToRead); // 从rowBuffer中提取UV for (int col = 0; col < uvWidth; col++) { int offset = col * 2; // 每列占2字节 if (offset >= bytesToRead) break; // 防止行内越界 vData[row * uvWidth + col] = rowBuffer[offset + 1]; uData[row * uvWidth + col] = rowBuffer[offset]; } } } else { // Planar(U/V分离,如I420):U和V各自在独立Plane uBuffer = uPlane.getBuffer(); vBuffer = vPlane.getBuffer(); uData = extractPlaneData(uBuffer, uvRowStride, uvPixelStride, uvWidth, uvHeight); vData = extractPlaneData(vBuffer, 
uvRowStride, uvPixelStride, uvWidth, uvHeight); } // 4. 验证U/V数据长度 if (uData == null || vData == null || uData.length != uvWidth * uvHeight || vData.length != uvWidth * uvHeight) { Log.e(TAG, "U/V数据提取失败,长度不匹配"); return false; } hasNewFrame = false;//处理完帧后需要重置标志 Log.d(TAG, "YUV数据处理成功: " + yuvWidth + "x" + yuvHeight + ",Y长度=" + yData.length + ",U/V长度=" + uData.length); return true; } catch (Exception e) { Log.e(TAG, "处理YUV数据异常: " + e.getMessage(), e); return false; } finally { // 必须关闭Image,否则内存泄漏 if (image != null) { image.close(); } } } /** * 提取平面数据(处理rowStride和pixelStride,避免读取padding字节) */ private byte[] extractPlaneData(ByteBuffer buffer, int rowStride, int pixelStride, int width, int height) { if (buffer == null || rowStride <= 0 || pixelStride <= 0 || width <= 0 || height <= 0) { Log.w(TAG, "提取平面数据参数无效"); return null; } byte[] data = new byte[width * height]; int dataIdx = 0; // 按行读取(跳过rowStride中的padding字节) for (int row = 0; row < height; row++) { // 每行的起始位置 int bufferRowStart = row * rowStride; // 读取当前行的有效数据(width个像素,每个像素pixelStride字节) for (int col = 0; col < width; col++) { int bufferPos = bufferRowStart + col * pixelStride; data[dataIdx++] = buffer.get(bufferPos); } } return data; } /** * 上传数据到纹理(首次创建纹理,后续更新数据) */ private void uploadTexture(int textureId, byte[] data, int width, int height, boolean isYTexture) { if (textureId == 0 || data == null || width <= 0 || height <= 0) { Log.w(TAG, "上传纹理参数无效"); return; } // 绑定纹理 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); // 设置像素对齐(YUV数据是1字节对齐,默认是4字节,必须修改) GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); // 检查纹理是否已创建(尺寸匹配则更新,否则重新创建) boolean textureCreated = false; if (isYTexture) { textureCreated = (yTexWidth == width && yTexHeight == height); } else { textureCreated = (uvTexWidth == width && uvTexHeight == height); } ByteBuffer dataBuffer = ByteBuffer.wrap(data); if (!textureCreated) { // 首次创建纹理(GL_LUMINANCE:单通道亮度数据) GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0, 
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, dataBuffer ); // 更新纹理尺寸记录 if (isYTexture) { yTexWidth = width; yTexHeight = height; } else { uvTexWidth = width; uvTexHeight = height; } Log.d(TAG, "创建纹理: " + (isYTexture ? "Y" : "UV") + ",尺寸=" + width + "x" + height); } else { // 纹理已存在,更新数据(只更新像素,不重新创建纹理) GLES20.glTexSubImage2D( GLES20.GL_TEXTURE_2D, 0, 0, 0, // 起始坐标(x,y) width, height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, dataBuffer ); } // 解绑纹理 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); } /** * 绑定纹理到着色器的采样器(yTex/uTex/vTex) */ private void bindTexturesToSamplers() { // 绑定Y纹理到TEXTURE0,对应着色器的yTex GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[0]); int yTexLoc = GLES20.glGetUniformLocation(shaderProgram, "yTex"); GLES20.glUniform1i(yTexLoc, 0); // 绑定U纹理到TEXTURE1,对应着色器的uTex GLES20.glActiveTexture(GLES20.GL_TEXTURE1); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[1]); int uTexLoc = GLES20.glGetUniformLocation(shaderProgram, "uTex"); GLES20.glUniform1i(uTexLoc, 1); // 绑定V纹理到TEXTURE2,对应着色器的vTex GLES20.glActiveTexture(GLES20.GL_TEXTURE2); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[2]); int vTexLoc = GLES20.glGetUniformLocation(shaderProgram, "vTex"); GLES20.glUniform1i(vTexLoc, 2); // 检查采样器位置是否有效 if (yTexLoc == -1 || uTexLoc == -1 || vTexLoc == -1) { Log.e(TAG, "着色器采样器位置无效: y=" + yTexLoc + ", u=" + uTexLoc + ", v=" + vTexLoc); } } /** * 传递顶点坐标和纹理坐标到着色器 */ private void passVertexAndTexCoord() { // 传递顶点坐标(vPosition) int vPositionLoc = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); GLES20.glEnableVertexAttribArray(vPositionLoc); GLES20.glVertexAttribPointer( vPositionLoc, 3, // 每个顶点3个坐标(x,y,z) GLES20.GL_FLOAT, false, // 不归一化 3 * 4, // 顶点步长(3个float,每个4字节) vertexBuffer ); // 传递纹理坐标(vTexCoord) int vTexCoordLoc = GLES20.glGetAttribLocation(shaderProgram, "vTexCoord"); GLES20.glEnableVertexAttribArray(vTexCoordLoc); GLES20.glVertexAttribPointer( vTexCoordLoc, 2, // 每个纹理坐标2个值(s,t) 
GLES20.GL_FLOAT, false, 2 * 4, // 纹理坐标步长(2个float,每个4字节) texCoordBuffer ); // 检查坐标位置是否有效 if (vPositionLoc == -1 || vTexCoordLoc == -1) { Log.e(TAG, "着色器坐标位置无效: vPosition=" + vPositionLoc + ", vTexCoord=" + vTexCoordLoc); } } // ---------------------- GLSurface尺寸回调接口 ---------------------- public interface SurfaceSizeCallback { void onSurfaceSizeChanged(int width, int height); } } 你看看为什么美颜调用
—— 以下为该问题下最新发布(09-24)的另一版 CameraGLRenderer 参考实现 ——
package com.android.example.cameraappxjava.util; import android.graphics.ImageFormat; import android.media.Image; import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.util.Log; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; /** * GLES 2.0 相机渲染器:处理 YUV_420_888 预览 * */ public class CameraGLRenderer implements GLSurfaceView.Renderer { private static final String TAG = "CameraGLRenderer"; private static final int TEXTURE_COUNT = 3; // Y/U/V 3个纹理(GLES 2.0 支持) // -------------------------- 1. GLES 2.0 兼容配置(无任何不支持API) -------------------------- /** * 顶点着色器(GLES 2.0 标准语法,必加精度声明) */ private static final String VERTEX_SHADER = "attribute vec4 vPosition;\n" + // 顶点坐标(输入) "attribute vec2 vTexCoord;\n" + // 纹理坐标(输入) "varying vec2 texCoord;\n" + // 传递纹理坐标到片段着色器 "void main() {\n" + " gl_Position = vPosition;\n" + // 全屏顶点位置(-1~1 覆盖屏幕) " texCoord = vTexCoord;\n" + // 传递纹理坐标 "}"; /** * 片段着色器(GLES 2.0 兼容:用 GL_LUMINANCE 单通道格式,无 GL_RED) */ private static final String FRAGMENT_SHADER = "precision mediump float;\n" + // GLES 2.0 必须声明精度(中等精度平衡性能) "varying vec2 texCoord;\n" + // 从顶点着色器接收的纹理坐标 "uniform sampler2D yTex;\n" + // Y通道纹理采样器(纹理单元0) "uniform sampler2D uTex;\n" + // U通道纹理采样器(纹理单元1) "uniform sampler2D vTex;\n" + // V通道纹理采样器(纹理单元2) "void main() {\n" + // GLES 2.0 兼容:读取 GL_LUMINANCE 纹理的 r 通道(亮度值) " float y = texture2D(yTex, texCoord).r;\n" + " float u = texture2D(uTex, texCoord).r - 0.5;\n" + // U/V 偏移 0.5(YUV 标准) " float v = texture2D(vTex, texCoord).r - 0.5;\n" + // BT.601 YUV转RGB 公式(手机相机通用,避免偏色) " float r = y + 1.402 * v;\n" + " float g = y - 0.34414 * u - 0.71414 * v;\n" + " float b = y + 1.772 * u;\n" + // 限制 RGB 范围 0~1(避免颜色溢出,GLES 2.0 支持 clamp 函数) " r = clamp(r, 0.0, 1.0);\n" + " g = clamp(g, 0.0, 1.0);\n" + " b = clamp(b, 0.0, 1.0);\n" + " gl_FragColor = vec4(r, g, b, 1.0);\n" + // 输出 RGB 颜色(不透明) "}"; /** * 
全屏顶点坐标(GLES 2.0 标准坐标,顺序:左上→左下→右上→右下) */ private static final float[] VERTEX_COORDS = { -1.0f, 1.0f, 0.0f, // 左上 -1.0f, -1.0f, 0.0f, // 左下 1.0f, 1.0f, 0.0f, // 右上 1.0f, -1.0f, 0.0f // 右下 }; /** * 纹理坐标(GLES 2.0 兼容,适配竖屏预览,解决画面颠倒) * 映射规则:纹理坐标 → 屏幕坐标(确保竖屏显示正常) */ private static final float[] TEX_COORDS = { 0.0f, 1.0f, // 纹理左上 → 屏幕左上 1.0f, 1.0f, // 纹理左下 → 屏幕左下 0.0f, 0.0f, // 纹理右上 → 屏幕右上 1.0f, 0.0f // 纹理右下 → 屏幕右下 }; // -------------------------- 2. 动态变量(新增:手动记录纹理尺寸,替代GL查询) -------------------------- private int mShaderProgram; // GLES 2.0 着色器程序ID private int[] mTextureIds = new int[TEXTURE_COUNT]; // Y/U/V 纹理ID(GPU资源) private FloatBuffer mVertexBuffer; // 顶点坐标缓冲区(GLES 2.0 要求Buffer格式) private FloatBuffer mTexBuffer; // 纹理坐标缓冲区(GLES 2.0 要求Buffer格式) private int mViewWidth, mViewHeight; // GLSurfaceView 宽高(渲染视口尺寸) // 关键:手动记录 Y/U/V 纹理的宽高(替代 GLES 2.0 不支持的 glGetTexLevelParameteriv) private int mYTexWidth = 0, mYTexHeight = 0; // Y纹理尺寸 private int mUTexWidth = 0, mUTexHeight = 0; // U纹理尺寸(Y的1/2) private int mVTexWidth = 0, mVTexHeight = 0; // V纹理尺寸(Y的1/2) // YUV 数据线程安全管理(避免相机线程渲染线程竞争) private final Object mYuvLock = new Object(); private Image mPendingImage; // 待处理的相机Image(从Camera2接收) private byte[] mYData, mUData, mVData; // 提取后的 Y/U/V 字节数据 private int mYuvWidth, mYuvHeight; // 相机输出的 YUV 帧宽高 // -------------------------- 3. 
对外接口(无修改,直接复用) -------------------------- /** * 设置相机预览Image(线程安全,GLES 2.0/3.0 通用) * * @param image 相机输出的 YUV_420_888 格式Image(必须关闭,避免内存泄漏) */ public void setYUVData(Image image) { Log.w(TAG, "调用setYUVData方法"); if (image == null || image.getFormat() != ImageFormat.YUV_420_888) { Log.w(TAG, "无效Image:格式非 YUV_420_888 或 Image为空"); if (image != null) image.close(); // 必须关闭,避免相机缓冲区泄漏 return; } synchronized (mYuvLock) { // 先关闭之前未处理的Image(防止缓冲区堆积导致卡顿) if (mPendingImage != null) { mPendingImage.close(); Log.d(TAG, "关闭未处理的PendingImage,避免内存泄漏"); } mPendingImage = image; // 存储新的待处理Image } } /** * 释放所有资源(Activity/Fragment 销毁时调用,避免内存泄漏) */ public void release() { synchronized (mYuvLock) { // 1. 关闭待处理的Image if (mPendingImage != null) { mPendingImage.close(); mPendingImage = null; } // 2. 释放CPU端 YUV 数据 mYData = null; mUData = null; mVData = null; mYuvWidth = 0; mYuvHeight = 0; // 3. 重置手动记录的纹理尺寸 mYTexWidth = mYTexHeight = 0; mUTexWidth = mUTexHeight = 0; mVTexWidth = mVTexHeight = 0; } // 4. 释放 GLES 2.0 GPU 资源(纹理+着色器程序) if (mTextureIds != null) { GLES20.glDeleteTextures(TEXTURE_COUNT, mTextureIds, 0); mTextureIds = null; } if (mShaderProgram != 0) { GLES20.glDeleteProgram(mShaderProgram); mShaderProgram = 0; } // 5. 释放缓冲区(帮助GC回收) mVertexBuffer = null; mTexBuffer = null; Log.d(TAG, "所有资源释放完成(GLES 2.0 兼容)"); } // -------------------------- 4. 
GLES 2.0 生命周期回调(无任何不支持API) -------------------------- /** * 初始化回调:GLSurfaceView 首次创建时调用(仅1次) * 作用:初始化OpenGL环境、编译着色器、创建纹理、准备坐标缓冲区 */ @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { Log.d(TAG, "onSurfaceCreated(GLES 2.0):初始化OpenGL环境"); // GLES 2.0 基础配置:禁用混合(避免透明层干扰预览)、黑色背景 GLES20.glDisable(GLES20.GL_BLEND); GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // 准备坐标缓冲区(GLES 2.0 仅支持 Buffer 格式,不支持直接用数组) mVertexBuffer = createFloatBuffer(VERTEX_COORDS); mTexBuffer = createFloatBuffer(TEX_COORDS); // 编译 GLES 2.0 着色器程序(创建渲染"画笔") mShaderProgram = compileShaderProgram(VERTEX_SHADER, FRAGMENT_SHADER); if (mShaderProgram == 0) { Log.e(TAG, "着色器程序创建失败(GLES 2.0),预览不可用"); return; } // 创建 Y/U/V 3个纹理(GLES 2.0 2D纹理),配置基础参数 GLES20.glGenTextures(TEXTURE_COUNT, mTextureIds, 0); initTexture(mTextureIds[0]); // 初始化 Y 纹理 initTexture(mTextureIds[1]); // 初始化 U 纹理 initTexture(mTextureIds[2]); // 初始化 V 纹理 Log.d(TAG, "GLES 2.0 初始化完成,纹理ID:Y=" + mTextureIds[0] + ", U=" + mTextureIds[1] + ", V=" + mTextureIds[2]); } /** * 尺寸变化回调:GLSurfaceView 宽高改变时调用(如屏幕旋转) * 作用:设置渲染视口(画面显示范围),确保全屏渲染 */ @Override public void onSurfaceChanged(GL10 gl, int width, int height) { mViewWidth = width; mViewHeight = height; // GLES 2.0 设置视口:渲染范围 = GLSurfaceView 全屏(左上角(0,0),宽高=View宽高) GLES20.glViewport(0, 0, width, height); Log.d(TAG, "onSurfaceChanged(GLES 2.0):视口尺寸=" + width + "x" + height); } /** * 帧渲染回调:每帧调用1次(渲染线程执行,核心渲染逻辑) * 流程:处理待处理Image → 上传YUV数据到纹理 → 绑定着色器 → 执行渲染 */ @Override public void onDrawFrame(GL10 gl) { Log.e(TAG, "调用onDrawFrame方法"); // 1. 处理待处理的Image(线程安全,提取Y/U/V数据) boolean hasNewData = processPendingImage(); if (!hasNewData) { // 无新数据:清除屏幕为黑色,避免显示上一帧残留 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); return; } // 2. 清除上一帧画面(避免画面重叠) GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // 3. 激活 GLES 2.0 着色器程序(使用"画笔") GLES20.glUseProgram(mShaderProgram); // 4. 
上传 Y/U/V 数据到对应纹理(手动判断纹理尺寸,替代GL查询) uploadTexture(mTextureIds[0], mYData, mYuvWidth, mYuvHeight, true); // Y纹理 uploadTexture(mTextureIds[1], mUData, mYuvWidth / 2, mYuvHeight / 2, false); // U纹理(1/2尺寸) uploadTexture(mTextureIds[2], mVData, mYuvWidth / 2, mYuvHeight / 2, false); // V纹理(1/2尺寸) // 5. 绑定纹理到着色器采样器(让"画笔"找到"画布") bindTextureToSampler(); // 6. 传递顶点/纹理坐标(告诉"画笔"画在哪里) passVertexAndTexCoord(); // 7. 执行渲染:GLES 2.0 支持 GL_TRIANGLE_STRIP,4个顶点画2个三角形覆盖全屏 GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_COORDS.length / 3); // 8. 禁用顶点/纹理坐标输入(避免后续渲染干扰) int vPositionLoc = GLES20.glGetAttribLocation(mShaderProgram, "vPosition"); int vTexCoordLoc = GLES20.glGetAttribLocation(mShaderProgram, "vTexCoord"); GLES20.glDisableVertexAttribArray(vPositionLoc); GLES20.glDisableVertexAttribArray(vTexCoordLoc); } private void passVertexAndTexCoord() { int vPositionLoc = GLES20.glGetAttribLocation(mShaderProgram, "vPosition"); GLES20.glEnableVertexAttribArray(vPositionLoc); GLES20.glVertexAttribPointer( vPositionLoc, 3, GLES20.GL_FLOAT, false, 3 * 4, mVertexBuffer ); int vTexCoordLoc = GLES20.glGetAttribLocation(mShaderProgram, "vTexCoord"); GLES20.glEnableVertexAttribArray(vTexCoordLoc); GLES20.glVertexAttribPointer( vTexCoordLoc, 2, GLES20.GL_FLOAT, false, 2 * 4, mTexBuffer ); } // -------------------------- 5. GLES 2.0 辅助方法(无任何不支持API) -------------------------- /** * 创建 FloatBuffer:将 Java float 数组转为 GLES 2.0 支持的 Buffer 格式 * * @param array 原始 float 数组(顶点/纹理坐标) * @return GLES 2.0 可识别的 FloatBuffer */ private FloatBuffer createFloatBuffer(float[] array) { if (array == null || array.length == 0) return null; // 1. 分配直接内存(避免JVM GC移动,提升OpenGL访问效率) ByteBuffer byteBuffer = ByteBuffer.allocateDirect(array.length * 4); // 1float=4字节 // 2. 设置字节序(必须硬件一致,否则数据错乱) byteBuffer.order(ByteOrder.nativeOrder()); // 3. 转换为 FloatBuffer 并写入数据 FloatBuffer floatBuffer = byteBuffer.asFloatBuffer(); floatBuffer.put(array); // 4. 
重置读指针(从缓冲区开头开始读取) floatBuffer.position(0); return floatBuffer; } /** * 编译 GLES 2.0 着色器程序:编译顶点+片段着色器,链接为可执行程序 * * @param vertexCode 顶点着色器代码 * @param fragmentCode 片段着色器代码 * @return 着色器程序ID(0 表示失败) */ private int compileShaderProgram(String vertexCode, String fragmentCode) { // 1. 编译顶点着色器(GLES 2.0) int vertexShader = compileSingleShader(GLES20.GL_VERTEX_SHADER, vertexCode); if (vertexShader == 0) return 0; // 2. 编译片段着色器(GLES 2.0) int fragmentShader = compileSingleShader(GLES20.GL_FRAGMENT_SHADER, fragmentCode); if (fragmentShader == 0) { GLES20.glDeleteShader(vertexShader); // 清理已编译的顶点着色器 return 0; } // 3. 链接着色器程序(GLES 2.0) int program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); // 绑定顶点着色器 GLES20.glAttachShader(program, fragmentShader); // 绑定片段着色器 GLES20.glLinkProgram(program); // 执行链接 // 4. 检查链接结果(GLES 2.0) int[] linkStatus = new int[1]; GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); if (linkStatus[0] != GLES20.GL_TRUE) { Log.e(TAG, "着色器链接失败(GLES 2.0):" + GLES20.glGetProgramInfoLog(program)); GLES20.glDeleteProgram(program); // 清理无效程序 program = 0; } // 5. 清理中间着色器(程序已链接,单个着色器可删除) GLES20.glDeleteShader(vertexShader); GLES20.glDeleteShader(fragmentShader); return program; } /** * 编译单个 GLES 2.0 着色器:编译顶点/片段着色器代码 * * @param shaderType 着色器类型(GL_VERTEX_SHADER / GL_FRAGMENT_SHADER) * @param shaderCode 着色器代码 * @return 着色器ID(0 表示失败) */ private int compileSingleShader(int shaderType, String shaderCode) { // 1. 创建着色器对象(GLES 2.0) int shader = GLES20.glCreateShader(shaderType); if (shader == 0) { Log.e(TAG, "创建着色器失败(GLES 2.0),类型=" + (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段")); return 0; } // 2. 绑定着色器代码并编译(GLES 2.0) GLES20.glShaderSource(shader, shaderCode); GLES20.glCompileShader(shader); // 3. 检查编译结果(GLES 2.0) int[] compileStatus = new int[1]; GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); if (compileStatus[0] != GLES20.GL_TRUE) { Log.e(TAG, (shaderType == GLES20.GL_VERTEX_SHADER ? 
"顶点" : "片段") + "着色器编译失败(GLES 2.0):" + GLES20.glGetShaderInfoLog(shader)); GLES20.glDeleteShader(shader); // 清理无效着色器 shader = 0; } return shader; } /** * 初始化 GLES 2.0 纹理参数:配置过滤、边缘处理,确保画面清晰无重复 * * @param textureId 纹理ID(Y/U/V 纹理) */ private void initTexture(int textureId) { if (textureId == 0) return; // 绑定纹理(选中GPU"画布",GLES 2.0 必须先绑定再配置) GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); // 1. 纹理过滤:缩小时线性过滤(画面平滑,避免锯齿) GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); // 2. 纹理过滤:放大时线性过滤(画面平滑,避免像素块) GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); // 3. 纹理边缘:水平方向超出范围时"夹紧"(不重复显示,避免边缘错乱) GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); // 4. 纹理边缘:垂直方向超出范围时"夹紧" GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); // 解绑纹理(避免后续误操作其他纹理) GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); } /** * 处理待处理Image:从 mPendingImage 提取 Y/U/V 数据(线程安全) * * @return true=有新数据,false=无新数据 */ private boolean processPendingImage() { Image image = null; synchronized (mYuvLock) { if (mPendingImage == null) { return false; // 无待处理数据 } // 取出待处理Image(释放锁,避免长时间占用) image = mPendingImage; mPendingImage = null; } try { // 1. 提取Image的宽高和Planes(YUV_420_888 格式固定3个Planes) mYuvWidth = image.getWidth(); mYuvHeight = image.getHeight(); Log.e(TAG, "YUV的宽高:"+mYuvWidth+"×"+mYuvHeight); Image.Plane[] planes = image.getPlanes(); if (planes.length < 3) { Log.e(TAG, "Image Planes 数量不足3,无法提取 YUV 数据"); return false; } // 2. 提取 Y 通道数据(Plane[0]:Y通道,无交错) ByteBuffer yBuffer = planes[0].getBuffer(); mYData = byteBufferToByteArray(yBuffer); // 3. 
提取 U/V 通道数据(区分 Semi-Planar 和 Planar 模式) if (planes[1].getPixelStride() == 2) { // 模式1:Semi-Planar(UV 交错存储在 Plane[1],Plane[2] 无数据) ByteBuffer uvBuffer = planes[1].getBuffer(); int uvLength = uvBuffer.remaining() / 2; // UV 总长度 = Y 长度 / 2 mUData = new byte[uvLength]; mVData = new byte[uvLength]; // 提取 U(偶数索引)和 V(奇数索引) for (int i = 0; i < uvLength; i++) { mUData[i] = uvBuffer.get(i * 2); // U:第0、2、4...字节 mVData[i] = uvBuffer.get(i * 2 + 1); // V:第1、3、5...字节 } } else { // 模式2:Planar(UV 分别存储在 Plane[1] 和 Plane[2],无交错) ByteBuffer uBuffer = planes[1].getBuffer(); ByteBuffer vBuffer = planes[2].getBuffer(); mUData = byteBufferToByteArray(uBuffer); mVData = byteBufferToByteArray(vBuffer); } // 4. 验证 YUV 数据长度(避免后续渲染错误) int expectedYLength = mYuvWidth * mYuvHeight; int expectedUVLength = (mYuvWidth / 2) * (mYuvHeight / 2); if (mYData.length != expectedYLength || mUData.length != expectedUVLength || mVData.length != expectedUVLength) { Log.w(TAG, "YUV 数据长度不匹配,重置为正确长度"); mYData = new byte[expectedYLength]; mUData = new byte[expectedUVLength]; mVData = new byte[expectedUVLength]; return false; } Log.d(TAG, "处理 Image 完成(GLES 2.0):YUV 尺寸=" + mYuvWidth + "x" + mYuvHeight + ",数据长度 Y=" + mYData.length + ", U=" + mUData.length); return true; } catch (Exception e) { Log.e(TAG, "处理 Image 异常(GLES 2.0):" + e.getMessage(), e); return false; } finally { // 必须关闭 Image(释放相机缓冲区,避免卡顿的核心!) 
if (image != null) { image.close(); } } } private byte[] byteBufferToByteArray(ByteBuffer buffer) { if (buffer == null | buffer.remaining() == 0) return new byte[0]; int originalPos = buffer.position(); byte[] data = new byte[buffer.remaining()]; buffer.get(data); buffer.position(originalPos); return data; } /** * 上传数据到 GLES 2.0 纹理(核心:手动记录纹理尺寸,替代 GL 查询) * * @param textureId 纹理ID * @param data 待上传的字节数据(Y/U/V) * @param width 纹理宽度 * @param height 纹理高度 * @param isYTexture 是否为 Y 纹理(用于区分尺寸记录变量) */ private void uploadTexture(int textureId, byte[] data, int width, int height, boolean isYTexture) { if (textureId == 0 || data == null || width <= 0 || height <= 0) { Log.w(TAG, "上传纹理参数无效(GLES 2.0):textureId=" + textureId + ", width=" + width + ", height=" + height); return; } // 绑定纹理(GLES 2.0 必须先绑定再操作) GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); // 关键:设置像素对齐为 1(YUV 数据无字节对齐,避免数据错位) GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); // 手动判断纹理是否已创建(替代 GLES 2.0 不支持的 glGetTexLevelParameteriv) boolean isTextureCreated = false; if (isYTexture) { isTextureCreated = (mYTexWidth == width && mYTexHeight == height); } else { // U/V 纹理尺寸相同,共用一套判断 isTextureCreated = (mUTexWidth == width && mUTexHeight == height); } ByteBuffer dataBuffer = ByteBuffer.wrap(data); if (!isTextureCreated) { // 首次创建纹理:调用 glTexImage2D(分配GPU内存) GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, // 2D纹理,基础层级(固定为0) GLES20.GL_LUMINANCE, // GLES 2.0 核心:单通道亮度格式 width, height, 0, // 纹理宽高,边界宽度(必须为0) GLES20.GL_LUMINANCE, // 数据格式:内部格式一致 GLES20.GL_UNSIGNED_BYTE, // 数据类型:无符号字节(YUV 数据类型) dataBuffer // 待上传的 Y/U/V 数据 ); // 更新手动记录的纹理尺寸(下次判断用) if (isYTexture) { mYTexWidth = width; mYTexHeight = height; Log.d(TAG, "创建 Y 纹理(GLES 2.0):尺寸=" + width + "x" + height); } else { mUTexWidth = width; mUTexHeight = height; Log.d(TAG, "创建 U/V 纹理(GLES 2.0):尺寸=" + width + "x" + height); } } else { // 复用纹理:调用 glTexSubImage2D(仅更新数据,不重新分配GPU内存,效率更高) GLES20.glTexSubImage2D( GLES20.GL_TEXTURE_2D, 0, // 2D纹理,基础层级 0, 0, // 数据起始坐标(x=0, y=0,全屏更新) 
width, height, // 数据宽高(纹理尺寸一致) GLES20.GL_LUMINANCE, // 数据格式:创建时一致 GLES20.GL_UNSIGNED_BYTE, // 数据类型:创建时一致 dataBuffer // 待更新的 Y/U/V 数据 ); } // 解绑纹理(避免后续误操作) GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); } /** * 绑定纹理到 GLES 2.0 着色器采样器:将 Y/U/V 纹理着色器的 uniform 变量关联 */ private void bindTextureToSampler() { // 1. 绑定 Y 纹理到采样器 yTex(纹理单元0) GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // 激活纹理单元0(GLES 2.0 必须先激活) GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[0]); // 绑定 Y 纹理 // 关联采样器:将纹理单元0 着色器的 yTex 变量绑定 int yTexLoc = GLES20.glGetUniformLocation(mShaderProgram, "yTex"); GLES20.glUniform1i(yTexLoc, 0); // 2. 绑定 U 纹理到采样器 uTex(纹理单元1) GLES20.glActiveTexture(GLES20.GL_TEXTURE1); // 激活纹理单元1 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[1]); // 绑定 U 纹理 int uTexLoc = GLES20.glGetUniformLocation(mShaderProgram, "uTex"); GLES20.glUniform1i(uTexLoc, 1); // 3. 绑定 V 纹理到采样器 vTex(纹理单元2) GLES20.glActiveTexture(GLES20.GL_TEXTURE2); //激活纹理单元2 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[2]); int vTexLoc = GLES20.glGetUniformLocation(mShaderProgram, "vTex"); GLES20.glUniform1i(vTexLoc, 2); // 添加错误检查 if (yTexLoc == -1 || uTexLoc == -1 || vTexLoc == -1) { Log.e(TAG, "纹理采样器绑定失败: " + "yTex=" + yTexLoc + " uTex=" + uTexLoc + " vTex=" + vTexLoc); } } } 我给代码发给你,结合这两部分代码你来找出问题
09-23
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额 3.43,前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值