package com.android.example.cameraappxjava;
import android.Manifest;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.SystemClock;
import android.provider.MediaStore;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.widget.Button;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.android.example.cameraappxjava.util.CameraGLRenderer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* 调用自定义渲染器的Demo:模拟YUV预览渲染
*/
public class MainActivity2 extends AppCompatActivity {
private static final String TAG = "camera2api";
private static final int REQUEST_CAMERA_PERMISSION = 100;
// 1. 删除 TextureView 相关变量
// private TextureView textureView;
// private boolean isTextureAvailable = false;
// 2. 新增 GLSurfaceView + 自定义渲染器
private GLSurfaceView glSurfaceView;
private CameraGLRenderer cameraGLRenderer; // 之前定义的自定义YUV渲染器
// 3. 新增:预览用 ImageReader(接收 Camera2 输出的 YUV 帧,给渲染器用)
private ImageReader previewImageReader;
// 4. 保留原有拍照用 ImageReader(JPEG格式,不修改)
private ImageReader captureImageReader;
// 5. 保留其他原有变量(相机设备、会话、按钮等)
private Button captureButton;
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSession;
private CaptureRequest.Builder captureRequestBuilder;
private String cameraId;
private Handler backgroundHandler;
private boolean isSessionClosed;
private HandlerThread backgroundThread;
private CameraManager manager;
private volatile boolean isCapturing = false;
private StreamConfigurationMap map;
private long lastClickTime = 0;
private static final long MIN_CLICK_INTERVAL = 1000;
private File file;
private ContentResolver resolver;
private ContentValues values;
private Uri imageUri;
// ---------------------- Step 3: onCreate (wires up GLSurfaceView + renderer) ----------------------
/**
 * Activity entry point: binds views, attaches the GLES renderer, prepares the
 * camera configuration, and installs a debounced capture-button listener.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Log.d(TAG, "onCreate ——————————————————————");
// GLSurfaceView replaces the former TextureView as the preview target.
glSurfaceView = findViewById(R.id.glsurfaceView);
// Capture button (behavior unchanged from the TextureView version).
captureButton = findViewById(R.id.btnCapture);
// Attach the GLES 2.0 renderer before the camera starts producing frames.
initGLRenderer();
// NOTE(review): this runs before startBackgroundThread() (called in onResume),
// so initCamera() registers its ImageReader callback with a null handler and
// the callback is dispatched on the main thread. The view is also not laid out
// yet, so its width/height are 0 when preview sizes are chosen — confirm both
// are intended.
initCamera();
// Debounce: ignore taps closer together than MIN_CLICK_INTERVAL (1s),
// measured with elapsedRealtime() (monotonic, immune to wall-clock changes).
captureButton.setOnClickListener(v -> {
long currentTime = SystemClock.elapsedRealtime();
if (currentTime - lastClickTime > MIN_CLICK_INTERVAL) {
lastClickTime = currentTime;
takePicture();
} else {
Log.d(TAG, "点击过快,已忽略");
}
});
}
// ---------------------- GLSurfaceView + custom renderer wiring ----------------------
/**
 * Configures the GLSurfaceView with an OpenGL ES 2.0 context and attaches the
 * custom YUV renderer in on-demand render mode.
 */
private void initGLRenderer() {
    final GLSurfaceView view = glSurfaceView;
    // Must match the GLES 2.0 shaders used by CameraGLRenderer.
    view.setEGLContextClientVersion(2);
    cameraGLRenderer = new CameraGLRenderer();
    // The renderer must be attached before the render mode can be changed.
    view.setRenderer(cameraGLRenderer);
    // Redraw only when requestRender() is called, i.e. when a new camera frame
    // arrives — saves power compared to continuous rendering.
    view.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
// ---------------------- Step 4: initCamera (preview ImageReader feeds the GL renderer) ----------------------
/**
 * Initializes the camera configuration:
 * - picks the first reported camera and reads its StreamConfigurationMap
 * - creates the YUV_420_888 preview ImageReader whose frames feed CameraGLRenderer
 * - creates/reuses the JPEG capture ImageReader
 * - prepares the MediaStore ContentValues used when saving photos
 *
 * BUG FIX (black preview): the preview Image must be handed to the renderer
 * WITHOUT closing it here. setYUVData() stores the Image and the renderer
 * closes it itself after extracting the YUV planes on the GL thread. The old
 * try-with-resources closed the Image immediately after the hand-off, so the
 * renderer only ever saw already-closed Images and the preview stayed black.
 */
private void initCamera() {
    Log.d(TAG, "initCamera: 初始化相机配置");
    try {
        manager = (CameraManager) getSystemService(CAMERA_SERVICE);
        cameraId = manager.getCameraIdList()[0]; // first reported camera (usually back-facing)
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            Log.e(TAG, "错误: StreamConfigurationMap为空!!");
            return;
        }
        // Preview reader: YUV frames for the renderer. NOTE(review): at onCreate
        // time the GLSurfaceView may not be laid out yet (width/height == 0);
        // chooseOptimalSize then falls back to the first supported size.
        Size[] yuvSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
        Size previewSize = chooseOptimalSize(yuvSizes, glSurfaceView.getWidth(), glSurfaceView.getHeight());
        previewImageReader = ImageReader.newInstance(
                previewSize.getWidth(),
                previewSize.getHeight(),
                ImageFormat.YUV_420_888,
                2 // small buffer depth; acquireLatestImage() drops stale frames
        );
        // NOTE(review): backgroundHandler is still null when this runs from
        // onCreate(), so the callback is dispatched on the calling (main)
        // thread's looper — confirm this is intended.
        previewImageReader.setOnImageAvailableListener(reader -> {
            Image image = reader.acquireLatestImage();
            if (image == null) return; // no frame ready yet
            if (cameraGLRenderer == null) {
                image.close(); // nobody will consume it — release the camera buffer
                return;
            }
            // Ownership transfer: the renderer is responsible for closing the Image.
            cameraGLRenderer.setYUVData(image);
            glSurfaceView.requestRender(); // schedule onDrawFrame (RENDERMODE_WHEN_DIRTY)
        }, backgroundHandler);
        // Capture reader: JPEG stills (behavior unchanged).
        Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
        Size captureSize = chooseOptimalSize(jpegSizes, glSurfaceView.getWidth(), glSurfaceView.getHeight());
        if (captureImageReader == null || captureImageReader.getWidth() != captureSize.getWidth()) {
            if (captureImageReader != null) captureImageReader.close();
            captureImageReader = ImageReader.newInstance(
                    captureSize.getWidth(),
                    captureSize.getHeight(),
                    ImageFormat.JPEG,
                    2
            );
        }
        // MediaStore metadata used by saveImage().
        resolver = getContentResolver();
        values = new ContentValues();
        values.put(MediaStore.Images.Media.DISPLAY_NAME, "pic_" + System.currentTimeMillis() + ".jpg");
        values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
        values.put(MediaStore.Images.Media.RELATIVE_PATH, Environment.DIRECTORY_PICTURES);
    } catch (CameraAccessException e) {
        Log.e(TAG, "相机访问异常: " + e.getMessage());
    } catch (NullPointerException e) {
        Log.e(TAG, "NPE: " + e.getMessage());
    }
}
// ---------------------- Utility: snapshot an Image plane's buffer ----------------------
/**
 * Copies the remaining bytes of a plane's buffer verbatim.
 * NOTE(review): this ignores the plane's rowStride/pixelStride, so on devices
 * with padded rows the copy includes padding bytes — confirm callers expect
 * that. (This helper currently appears to be unused.)
 */
private byte[] extractPlaneData(Image.Plane plane) {
    final ByteBuffer source = plane.getBuffer();
    final byte[] copy = new byte[source.remaining()];
    source.get(copy);
    return copy;
}
// ---------------------- Step 5: openCamera (ImageReader readiness check replaces TextureView check) ----------------------
/**
 * Opens the camera once the preview ImageReader and the background handler are
 * ready; otherwise retries after 1s. Requires the CAMERA permission.
 */
private void openCamera() {
    if (previewImageReader == null || backgroundHandler == null) {
        Log.w(TAG, "预览ImageReader未就绪,延迟打开相机,1000ms后重试");
        // BUG FIX: the old retry used backgroundHandler.postDelayed(), which is
        // a guaranteed NPE precisely when backgroundHandler is null — one of the
        // two conditions that lead into this branch. Retry on the main looper.
        new Handler(Looper.getMainLooper()).postDelayed(this::openCamera, 1000);
        return;
    }
    Log.d(TAG, "openCamera: 尝试打开相机");
    try {
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
            Log.i(TAG, "1.打开相机: " + cameraId);
            manager.openCamera(cameraId, stateCallback, backgroundHandler);
        } else {
            Log.w(TAG, "相机权限未授予");
        }
    } catch (CameraAccessException e) {
        Log.e(TAG, "打开相机失败: " + e.getMessage());
    } catch (SecurityException e) {
        Log.e(TAG, "安全异常: " + e.getMessage());
    }
}
// ---------------------- Step 6: createCameraPreviewSession (ImageReader surface replaces TextureView) ----------------------
/**
 * Creates a capture session with two outputs — the preview YUV ImageReader
 * (consumed by the GL renderer) and the JPEG capture ImageReader — then starts
 * the repeating preview request with continuous AF and auto-flash AE.
 * NOTE(review): createCaptureSession(List, ...) is deprecated on newer APIs;
 * onConfigureFailed shows a Toast from the backgroundHandler thread — confirm
 * that is acceptable.
 */
private void createCameraPreviewSession() {
if (cameraDevice == null || previewImageReader == null) {
Log.e(TAG, "创建预览会话失败: 相机或预览ImageReader不可用");
return;
}
try {
// Preview output: the YUV ImageReader feeding the renderer.
Surface previewSurface = previewImageReader.getSurface();
// Still-capture output: the JPEG ImageReader.
Surface captureSurface = captureImageReader.getSurface();
// Every surface a request may target must be declared at session creation.
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(previewSurface);
outputSurfaces.add(captureSurface);
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
Log.i(TAG, "2.2 预览会话配置成功");
cameraCaptureSession = session;
try {
// Repeating preview request targets only the preview surface.
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(previewSurface);
// Continuous autofocus + auto-exposure with automatic flash.
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
Log.i(TAG, "3.开始下发预览请求");
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "设置预览请求失败: " + e.getMessage());
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "预览会话配置失败");
Toast.makeText(MainActivity2.this, "配置失败", Toast.LENGTH_SHORT).show();
}
}, backgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "创建预览会话异常: " + e.getMessage());
}
}
// ---------------------- Step 7: lifecycle methods (GLSurfaceView management added) ----------------------
/**
 * Resumes rendering and (re)opens the camera. Requests the CAMERA permission
 * first when it has not been granted yet; the rest of the flow then continues
 * from onRequestPermissionsResult().
 */
@Override
protected void onResume() {
    Log.d(TAG, "onResume —————————————————————— ");
    super.onResume();
    final boolean hasCameraPermission = ActivityCompat.checkSelfPermission(
            this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
    if (!hasCameraPermission) {
        Log.i(TAG, "没有相机权限——>开始请求相机权限");
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
        return;
    }
    // Camera background thread must exist before the camera is opened.
    startBackgroundThread();
    // Mandatory: resumes the GL render thread; without it rendering stays paused.
    glSurfaceView.onResume();
    openCamera();
}
/**
 * Pauses rendering and the repeating preview request. The camera device and
 * session stay open; they are only released in onDestroy().
 */
@Override
protected void onPause() {
super.onPause();
Log.d(TAG, "onPause ——————————————————————");
// Mandatory: pauses the GL render thread / EGL context.
glSurfaceView.onPause();
// Not capturing: stop the repeating preview request (core resources kept).
if (!isCapturing && cameraCaptureSession != null) {
try {
cameraCaptureSession.stopRepeating();
Log.d(TAG, "onPause: 暂停预览重复请求(核心资源未释放)");
} catch (CameraAccessException e) {
Log.e(TAG, "onPause: 停止预览失败", e);
}
}
// Mid-capture: retry stopping the preview 1s later, once the shot finished.
// NOTE(review): `new Handler()` binds to the current (main) looper and is a
// deprecated constructor; the fixed 1s delay is a heuristic race against the
// capture callback — confirm it is acceptable.
if (isCapturing) {
Log.w(TAG, "onPause: 拍照中,暂不处理预览暂停");
new Handler().postDelayed(() -> {
if (!isCapturing && cameraCaptureSession != null) {
try {
cameraCaptureSession.stopRepeating();
Log.d(TAG, "onPause: 拍照完成后,暂停预览");
} catch (CameraAccessException e) {
Log.e(TAG, "onPause: 延迟停止预览失败", e);
}
}
}, 1000);
}
}
/**
 * Final teardown: releases the preview reader and renderer, closes the camera
 * pipeline (session, device, capture reader), stops the background thread, and
 * drops every remaining reference so the Activity can be garbage collected.
 */
@Override
protected void onDestroy() {
    super.onDestroy();
    Log.d(TAG, "onDestroy: Activity 彻底销毁,释放所有资源");
    // Renderer-side resources first: preview reader, then the GL renderer.
    if (previewImageReader != null) previewImageReader.close();
    if (cameraGLRenderer != null) cameraGLRenderer.release();
    // Camera pipeline: session before device, then the capture reader.
    if (cameraCaptureSession != null) {
        cameraCaptureSession.close();
        cameraCaptureSession = null;
    }
    if (cameraDevice != null) {
        cameraDevice.close();
        cameraDevice = null;
    }
    if (captureImageReader != null) {
        captureImageReader.close();
        captureImageReader = null;
    }
    stopBackgroundThread();
    // Null out everything else (views, helpers, handlers).
    glSurfaceView = null;
    cameraGLRenderer = null;
    previewImageReader = null;
    captureButton = null;
    manager = null;
    resolver = null;
    values = null;
    imageUri = null;
    backgroundHandler = null;
    backgroundThread = null;
    Log.d(TAG, "onDestroy: 所有资源释放完成");
}
/**
 * Validates the preconditions for takePicture().
 *
 * @return true only when the camera, session, and background handler are all
 *         usable and the session has not been closed
 */
private boolean checkTakePicture() {
    if (cameraDevice == null) {
        Log.w(TAG, "拍照失败: 相机未初始化");
        return false;
    }
    if (cameraCaptureSession == null) {
        Log.e(TAG, "拍照错误: CameraCaptureSession为空");
        return false;
    }
    if (backgroundHandler == null) {
        Log.e(TAG, "拍照错误: backgroundHandler未初始化");
        startBackgroundThread(); // recover so the next attempt can succeed
        return false;
    }
    if (isSessionClosed) {
        // BUG FIX: the old code only logged here and still returned true,
        // letting takePicture() call capture() on a closed session
        // (IllegalStateException at runtime).
        Log.e(TAG, "当前会话已关闭");
        return false;
    }
    return true;
}
// ---------------------- Step 8: takePicture (JPEG still capture) ----------------------
/**
 * Captures a JPEG still: validates preconditions, builds a STILL_CAPTURE
 * request targeting captureImageReader, registers the save callback, stops the
 * repeating preview, and issues the capture. The preview is resumed in the
 * image callback's finally block regardless of save success.
 */
private void takePicture() {
Log.i(TAG, "4.开始拍照流程——————————");
try {
// Abort early when camera/session/handler are not ready.
boolean checkFlag = checkTakePicture();
if (!checkFlag) {
Log.i(TAG, "拍照流程————检查未通过!退出拍照!");
return;
}
// Still-capture request aimed at the JPEG ImageReader surface.
CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(captureImageReader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
// NOTE(review): JPEG_ORIENTATION expects degrees (0/90/180/270, normally
// derived from SENSOR_ORIENTATION + display rotation), but getRotation()
// returns a Surface.ROTATION_* constant (0..3) — saved photos may carry a
// wrong EXIF orientation. Confirm on device.
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, rotation);
// Save callback: writes the JPEG, notifies the media scanner, then
// restores the repeating preview request. Re-registered on every shot.
captureImageReader.setOnImageAvailableListener(reader -> {
Log.d(TAG, "拍照图像数据可用");
try (Image image = reader.acquireLatestImage()) {
if (image != null) {
file = new File(
Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"pic_" + System.currentTimeMillis() + ".jpg"
);
// A JPEG Image carries its entire payload in plane 0.
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
saveImage(bytes, file);
// Ask the media scanner to index the new file.
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaScanIntent.setData(Uri.fromFile(file));
sendBroadcast(mediaScanIntent);
runOnUiThread(() ->
Toast.makeText(MainActivity2.this, "保存至: " + file, Toast.LENGTH_SHORT).show()
);
}
} catch (Exception e) {
Log.e(TAG, "保存拍照图像错误: " + e.getMessage());
} finally {
isCapturing = false;
// Resume the preview stream whether or not the save succeeded.
if (cameraCaptureSession != null && captureRequestBuilder != null) {
try {
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "恢复预览失败: " + e.getMessage());
}
}
}
}, backgroundHandler);
// Stop the repeating preview before issuing the still capture.
Log.d(TAG, "停止预览");
cameraCaptureSession.stopRepeating();
Log.d(TAG, "4.下发拍照");
isCapturing = true;
cameraCaptureSession.capture(captureBuilder.build(), new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
Log.e(TAG, "拍照失败: " + failure.getReason());
isCapturing = false;
}
}, backgroundHandler);
} catch (CameraAccessException | IllegalStateException | SecurityException e) {
Log.e(TAG, "拍照过程异常: " + e.getClass().getSimpleName(), e);
isCapturing = false;
}
}
// ---------------------- 保留原有未修改的方法 ----------------------
// (包括:chooseOptimalSize、CompareSizesByArea、stateCallback、saveImage、onRequestPermissionsResult、startBackgroundThread、stopBackgroundThread、closeCamera、checkTakePicture)
/** Orders Size values by pixel area (width * height), ascending. */
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Widen to long before multiplying to avoid int overflow on large sizes.
        long leftArea = (long) lhs.getWidth() * lhs.getHeight();
        long rightArea = (long) rhs.getWidth() * rhs.getHeight();
        return Long.compare(leftArea, rightArea);
    }
}
/**
 * Camera open/close lifecycle callback. onOpened stores the device and starts
 * the preview session; disconnect/error paths close the device and clear the
 * field so later code cannot touch a dead device.
 */
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice camera) {
        Log.i(TAG, "相机已打开");
        cameraDevice = camera;
        Log.i(TAG, "2.1 开始配置预览流");
        createCameraPreviewSession();
    }
    @Override
    public void onDisconnected(@NonNull CameraDevice camera) {
        Log.w(TAG, "相机断开连接");
        // BUG FIX: close via the callback parameter and null the field. The old
        // code closed the field but left it non-null (unlike onError), leaving
        // a dangling reference to a closed device.
        camera.close();
        cameraDevice = null;
    }
    @Override
    public void onError(@NonNull CameraDevice camera, int error) {
        Log.e(TAG, "相机错误: " + error);
        // BUG FIX: use the parameter — the field may still be null if onError
        // fires before onOpened (the old code would NPE in that case).
        camera.close();
        cameraDevice = null;
    }
};
/**
 * Writes JPEG bytes to {@code file} after inserting a MediaStore record.
 * NOTE(review): the bytes are written to the raw File path, not to the
 * inserted {@code imageUri}, so the MediaStore row stays empty (orphaned);
 * direct file writes to the public Pictures directory are also rejected under
 * scoped storage (Android 10+) — consider resolver.openOutputStream(imageUri).
 * Also {@code values} carries the DISPLAY_NAME chosen once in initCamera(),
 * not this file's name. Confirm against target devices.
 *
 * @param bytes JPEG payload from the capture ImageReader
 * @param file  destination in the public Pictures directory
 */
private void saveImage(byte[] bytes, File file) {
Log.d(TAG, "保存图像: " + file.getAbsolutePath());
imageUri = resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
if (imageUri != null) {
try (FileOutputStream output = new FileOutputStream(file)) {
output.write(bytes);
Log.i(TAG, "图像保存成功, 大小: " + bytes.length + " bytes");
} catch (IOException e) {
Log.e(TAG, "保存文件失败: " + e.getMessage());
}
}
}
// Invoked after the user responds to the runtime permission dialog.
/**
 * Handles the CAMERA permission result: finishes the Activity on denial,
 * otherwise starts the background thread and opens the camera.
 */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    Log.d(TAG, "权限请求结果: " + requestCode);
    if (requestCode == REQUEST_CAMERA_PERMISSION) {
        // BUG FIX: grantResults is empty when the request is interrupted; the
        // old code indexed [0] unconditionally (ArrayIndexOutOfBoundsException).
        if (grantResults.length == 0) {
            Log.w(TAG, "权限请求被中断,未获得结果");
            return;
        }
        if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
            Log.w(TAG, "用户拒绝相机权限");
            Toast.makeText(this, "需要相机权限", Toast.LENGTH_SHORT).show();
            finish();
        } else {
            Log.i(TAG, "用户授予相机权限");
            startBackgroundThread();
            openCamera();
        }
    }
}
/**
 * Quits the camera background thread and waits for it to finish, then clears
 * the thread/handler references.
 */
private void stopBackgroundThread() {
    if (backgroundThread != null) {
        Log.d(TAG, "停止后台线程");
        backgroundThread.quitSafely();
        try {
            backgroundThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, "停止线程失败: " + e.getMessage());
            // BUG FIX: preserve the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        } finally {
            // BUG FIX: the old code cleared these only when join() succeeded,
            // leaving a dead thread/handler pair referenced after an interrupt.
            backgroundThread = null;
            backgroundHandler = null;
        }
    }
}
/**
 * Starts the camera background HandlerThread (idempotent — does nothing when
 * the thread already exists) and creates its Handler.
 */
private void startBackgroundThread() {
    if (backgroundThread != null) {
        return; // already running
    }
    backgroundThread = new HandlerThread("CameraBackground");
    backgroundThread.start();
    backgroundHandler = new Handler(backgroundThread.getLooper());
    Log.d(TAG, "后台线程启动");
}
/**
 * Closes the capture session (aborting in-flight captures first when one is
 * running) and marks the session closed. The camera device itself is released
 * in onDestroy().
 */
private void closeCamera() {
    Log.d(TAG, "关闭相机资源");
    if (isCapturing && cameraCaptureSession != null) {
        // BUG FIX: the old code dereferenced the session without a null check
        // and re-threw CameraAccessException as RuntimeException, crashing on a
        // best-effort cleanup path. Log and continue instead.
        Log.w(TAG, "正在拍照中,等待完成或取消...");
        try {
            cameraCaptureSession.abortCaptures(); // cancel in-flight captures
        } catch (CameraAccessException | IllegalStateException e) {
            Log.e(TAG, "取消拍照请求失败: " + e.getMessage());
        }
    }
    if (cameraCaptureSession != null) {
        cameraCaptureSession.close();
        cameraCaptureSession = null;
    }
    isSessionClosed = true;
}
/**
 * Picks the largest supported size whose aspect ratio is within 0.1 of the
 * view's and that fits inside width x height; falls back to choices[0] when
 * nothing matches (or the view has not been laid out yet).
 *
 * @param choices supported output sizes (must be non-empty)
 * @param width   target view width in pixels
 * @param height  target view height in pixels
 * @throws IllegalArgumentException if choices is null or empty
 */
private Size chooseOptimalSize(Size[] choices, int width, int height) {
    // BUG FIX: the old code indexed choices[0] without checking, throwing an
    // opaque ArrayIndexOutOfBoundsException for an empty array.
    if (choices == null || choices.length == 0) {
        throw new IllegalArgumentException("chooseOptimalSize: choices must not be empty");
    }
    // View not laid out yet (e.g. called from onCreate): the ratio below would
    // be NaN and nothing could match — make the fallback explicit.
    if (width <= 0 || height <= 0) {
        Log.w(TAG, "视图尺寸无效(" + width + "x" + height + "),使用默认尺寸");
        return choices[0];
    }
    float viewRatio = (float) width / height; // hoisted out of the loop
    List<Size> bigEnough = new ArrayList<>();
    for (Size option : choices) {
        float ratio = (float) option.getWidth() / option.getHeight();
        if (Math.abs(ratio - viewRatio) <= 0.1
                && option.getWidth() <= width
                && option.getHeight() <= height) {
            bigEnough.add(option);
        }
    }
    if (!bigEnough.isEmpty()) {
        return Collections.max(bigEnough, new CompareSizesByArea());
    }
    Log.w(TAG, "未找到完美匹配尺寸,使用默认");
    return choices[0];
}
}
// ==== NOTE(review): a second compilation unit starts below. CameraGLRenderer is declared
// in package com.android.example.cameraappxjava.util and must be moved to its own source
// file (util/CameraGLRenderer.java) — Java does not allow two package declarations in one
// file, so this will not compile as-is. ====
package com.android.example.cameraappxjava.util;
import android.graphics.ImageFormat;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* 无错误版 GLES 2.0 相机渲染器:处理 YUV_420_888 预览,彻底兼容低版本
* 核心优化:移除所有 GLES 2.0 不支持的 API,手动管理纹理尺寸
*/
public class CameraGLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "CameraGLRenderer";
private static final int TEXTURE_COUNT = 3; // Y/U/V 3个纹理(GLES 2.0 支持)
// -------------------------- 1. GLES 2.0 兼容配置(无任何不支持API) --------------------------
/**
* 顶点着色器(GLES 2.0 标准语法,必加精度声明)
*/
private static final String VERTEX_SHADER =
"attribute vec4 vPosition;\n" + // 顶点坐标(输入)
"attribute vec2 vTexCoord;\n" + // 纹理坐标(输入)
"varying vec2 texCoord;\n" + // 传递纹理坐标到片段着色器
"void main() {\n" +
" gl_Position = vPosition;\n" + // 全屏顶点位置(-1~1 覆盖屏幕)
" texCoord = vTexCoord;\n" + // 传递纹理坐标
"}";
/**
* 片段着色器(GLES 2.0 兼容:用 GL_LUMINANCE 单通道格式,无 GL_RED)
*/
private static final String FRAGMENT_SHADER =
"precision mediump float;\n" + // GLES 2.0 必须声明精度(中等精度平衡性能)
"varying vec2 texCoord;\n" + // 从顶点着色器接收的纹理坐标
"uniform sampler2D yTex;\n" + // Y通道纹理采样器(纹理单元0)
"uniform sampler2D uTex;\n" + // U通道纹理采样器(纹理单元1)
"uniform sampler2D vTex;\n" + // V通道纹理采样器(纹理单元2)
"void main() {\n" +
// GLES 2.0 兼容:读取 GL_LUMINANCE 纹理的 r 通道(亮度值)
" float y = texture2D(yTex, texCoord).r;\n" +
" float u = texture2D(uTex, texCoord).r - 0.5;\n" + // U/V 偏移 0.5(YUV 标准)
" float v = texture2D(vTex, texCoord).r - 0.5;\n" +
// BT.601 YUV转RGB 公式(手机相机通用,避免偏色)
" float r = y + 1.402 * v;\n" +
" float g = y - 0.34414 * u - 0.71414 * v;\n" +
" float b = y + 1.772 * u;\n" +
// 限制 RGB 范围 0~1(避免颜色溢出,GLES 2.0 支持 clamp 函数)
" r = clamp(r, 0.0, 1.0);\n" +
" g = clamp(g, 0.0, 1.0);\n" +
" b = clamp(b, 0.0, 1.0);\n" +
" gl_FragColor = vec4(r, g, b, 1.0);\n" + // 输出 RGB 颜色(不透明)
"}";
/**
* 全屏顶点坐标(GLES 2.0 标准坐标,顺序:左上→左下→右上→右下)
*/
private static final float[] VERTEX_COORDS = {
-1.0f, 1.0f, 0.0f, // 左上
-1.0f, -1.0f, 0.0f, // 左下
1.0f, 1.0f, 0.0f, // 右上
1.0f, -1.0f, 0.0f // 右下
};
/**
* 纹理坐标(GLES 2.0 兼容,适配竖屏预览,解决画面颠倒)
* 映射规则:纹理坐标 → 屏幕坐标(确保竖屏显示正常)
*/
private static final float[] TEX_COORDS = {
0.0f, 1.0f, // 纹理左上 → 屏幕左上
1.0f, 1.0f, // 纹理左下 → 屏幕左下
0.0f, 0.0f, // 纹理右上 → 屏幕右上
1.0f, 0.0f // 纹理右下 → 屏幕右下
};
// -------------------------- 2. 动态变量(新增:手动记录纹理尺寸,替代GL查询) --------------------------
private int mShaderProgram; // GLES 2.0 着色器程序ID
private int[] mTextureIds = new int[TEXTURE_COUNT]; // Y/U/V 纹理ID(GPU资源)
private FloatBuffer mVertexBuffer; // 顶点坐标缓冲区(GLES 2.0 要求Buffer格式)
private FloatBuffer mTexBuffer; // 纹理坐标缓冲区(GLES 2.0 要求Buffer格式)
private int mViewWidth, mViewHeight; // GLSurfaceView 宽高(渲染视口尺寸)
// 关键:手动记录 Y/U/V 纹理的宽高(替代 GLES 2.0 不支持的 glGetTexLevelParameteriv)
private int mYTexWidth = 0, mYTexHeight = 0; // Y纹理尺寸
private int mUTexWidth = 0, mUTexHeight = 0; // U纹理尺寸(Y的1/2)
private int mVTexWidth = 0, mVTexHeight = 0; // V纹理尺寸(Y的1/2)
// YUV 数据线程安全管理(避免相机线程与渲染线程竞争)
private final Object mYuvLock = new Object();
private Image mPendingImage; // 待处理的相机Image(从Camera2接收)
private byte[] mYData, mUData, mVData; // 提取后的 Y/U/V 字节数据
private int mYuvWidth, mYuvHeight; // 相机输出的 YUV 帧宽高
// -------------------------- 3. Public API --------------------------
/**
 * Hands a camera preview frame to the renderer (thread-safe).
 * Ownership transfer: this renderer closes the Image — immediately on
 * rejection, when it is replaced by a newer frame, or after extraction on the
 * render thread.
 *
 * @param image a YUV_420_888 Image from Camera2 (may be null)
 */
public void setYUVData(Image image) {
    final boolean rejected = (image == null) || (image.getFormat() != ImageFormat.YUV_420_888);
    if (rejected) {
        Log.w(TAG, "无效Image:格式非 YUV_420_888 或 Image为空");
        if (image != null) image.close(); // release the camera buffer
        return;
    }
    synchronized (mYuvLock) {
        final Image stale = mPendingImage;
        mPendingImage = image;
        // Drop any frame the render thread never got to — keeping it would
        // starve the camera's buffer queue.
        if (stale != null) {
            stale.close();
            Log.d(TAG, "关闭未处理的PendingImage,避免内存泄漏");
        }
    }
}
/**
 * Releases CPU-side YUV state and GPU resources. Call when the owning
 * Activity/Fragment is destroyed.
 * NOTE(review): the GLES20 delete calls below are effective only on the thread
 * that owns the GL context (the GLSurfaceView render thread). Called from
 * onDestroy() on the main thread there is no current context — consider
 * GLSurfaceView.queueEvent(), though the driver reclaims everything when the
 * context itself is destroyed. Confirm.
 */
public void release() {
synchronized (mYuvLock) {
// Drop the frame that was handed over but never rendered.
if (mPendingImage != null) {
mPendingImage.close();
mPendingImage = null;
}
// Free the CPU-side plane copies.
mYData = null;
mUData = null;
mVData = null;
mYuvWidth = 0;
mYuvHeight = 0;
// Reset the manually tracked texture sizes (see uploadTexture()).
mYTexWidth = mYTexHeight = 0;
mUTexWidth = mUTexHeight = 0;
mVTexWidth = mVTexHeight = 0;
}
// GPU resources: textures + shader program (see thread note above).
if (mTextureIds != null) {
GLES20.glDeleteTextures(TEXTURE_COUNT, mTextureIds, 0);
mTextureIds = null;
}
if (mShaderProgram != 0) {
GLES20.glDeleteProgram(mShaderProgram);
mShaderProgram = 0;
}
// Let the coordinate buffers be garbage collected.
mVertexBuffer = null;
mTexBuffer = null;
Log.d(TAG, "所有资源释放完成(GLES 2.0 兼容)");
}
// -------------------------- 4. GLSurfaceView.Renderer lifecycle --------------------------
/**
 * Called whenever the EGL surface/context is (re)created: configures basic GL
 * state, builds coordinate buffers, compiles the shader program, and creates
 * the three Y/U/V textures.
 *
 * BUG FIX (black preview after pause/resume): this callback also fires when
 * the EGL context is RE-created, which destroys every previous texture. The
 * manually cached texture sizes must be reset here, otherwise uploadTexture()
 * thinks storage already exists and calls glTexSubImage2D on unallocated
 * textures — nothing is ever displayed again.
 */
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    Log.d(TAG, "onSurfaceCreated(GLES 2.0):初始化OpenGL环境");
    // Opaque video: blending off, black clear color.
    GLES20.glDisable(GLES20.GL_BLEND);
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    // GLES 2.0 takes coordinates only via (direct) Buffers.
    mVertexBuffer = createFloatBuffer(VERTEX_COORDS);
    mTexBuffer = createFloatBuffer(TEX_COORDS);
    mShaderProgram = compileShaderProgram(VERTEX_SHADER, FRAGMENT_SHADER);
    if (mShaderProgram == 0) {
        Log.e(TAG, "着色器程序创建失败(GLES 2.0),预览不可用");
        return;
    }
    // BUG FIX: release() nulls mTextureIds; re-allocate if the surface comes
    // back afterwards instead of crashing with an NPE.
    if (mTextureIds == null) {
        mTextureIds = new int[TEXTURE_COUNT];
    }
    GLES20.glGenTextures(TEXTURE_COUNT, mTextureIds, 0);
    // Force uploadTexture() to re-allocate storage with glTexImage2D for the
    // fresh textures (see BUG FIX note above).
    mYTexWidth = mYTexHeight = 0;
    mUTexWidth = mUTexHeight = 0;
    mVTexWidth = mVTexHeight = 0;
    initTexture(mTextureIds[0]); // Y
    initTexture(mTextureIds[1]); // U
    initTexture(mTextureIds[2]); // V
    Log.d(TAG, "GLES 2.0 初始化完成,纹理ID:Y=" + mTextureIds[0] + ", U=" + mTextureIds[1] + ", V=" + mTextureIds[2]);
}
/**
 * Called when the GLSurfaceView size changes (e.g. rotation). Records the new
 * dimensions and maps the GL viewport to the full view so the quad covers the
 * entire surface.
 */
@Override
public void onSurfaceChanged(GL10 gl, int newWidth, int newHeight) {
    mViewWidth = newWidth;
    mViewHeight = newHeight;
    GLES20.glViewport(0, 0, newWidth, newHeight);
    Log.d(TAG, "onSurfaceChanged(GLES 2.0):视口尺寸=" + newWidth + "x" + newHeight);
}
/**
 * Per-frame render: drains any pending camera Image, uploads the Y/U/V planes
 * to their textures, and draws the full-screen quad through the YUV→RGB shader.
 *
 * BUG FIX: the old code drew solid black whenever processPendingImage()
 * returned false — i.e. on every redraw without a brand-new frame (resize,
 * a single failed extraction, spurious render). Now the last good frame is
 * re-rendered; black is shown only before the first frame arrives or when the
 * shader program failed to build.
 */
@Override
public void onDrawFrame(GL10 gl) {
    // Pull in a newly arrived frame, if any (thread-safe hand-off).
    processPendingImage();
    // Always start from a cleared framebuffer.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // Nothing renderable yet → leave the cleared (black) screen.
    if (mShaderProgram == 0 || mTextureIds == null
            || mYData == null || mUData == null || mVData == null
            || mYuvWidth <= 0 || mYuvHeight <= 0) {
        return;
    }
    GLES20.glUseProgram(mShaderProgram);
    // Upload the planes; chroma planes are half resolution on both axes.
    // (Re-uploading the cached frame also repopulates textures after an EGL
    // context re-creation.)
    uploadTexture(mTextureIds[0], mYData, mYuvWidth, mYuvHeight, true);
    uploadTexture(mTextureIds[1], mUData, mYuvWidth / 2, mYuvHeight / 2, false);
    uploadTexture(mTextureIds[2], mVData, mYuvWidth / 2, mYuvHeight / 2, false);
    // Wire textures to the shader samplers, feed the coordinates, and draw.
    bindTextureToSampler();
    passVertexAndTexCoord();
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_COORDS.length / 3);
    // Disable the attribute arrays so later GL work is unaffected.
    int vPositionLoc = GLES20.glGetAttribLocation(mShaderProgram, "vPosition");
    int vTexCoordLoc = GLES20.glGetAttribLocation(mShaderProgram, "vTexCoord");
    GLES20.glDisableVertexAttribArray(vPositionLoc);
    GLES20.glDisableVertexAttribArray(vTexCoordLoc);
}
/**
 * Feeds the vertex and texture-coordinate buffers into the shader's
 * attributes; called once per frame before glDrawArrays().
 */
private void passVertexAndTexCoord() {
    final int posLoc = GLES20.glGetAttribLocation(mShaderProgram, "vPosition");
    final int texLoc = GLES20.glGetAttribLocation(mShaderProgram, "vTexCoord");
    // Positions: 3 floats per vertex, tightly packed (stride = 3 * 4 bytes).
    GLES20.glEnableVertexAttribArray(posLoc);
    GLES20.glVertexAttribPointer(posLoc, 3, GLES20.GL_FLOAT, false, 3 * 4, mVertexBuffer);
    // Texture coords: 2 floats per vertex, tightly packed (stride = 2 * 4 bytes).
    GLES20.glEnableVertexAttribArray(texLoc);
    GLES20.glVertexAttribPointer(texLoc, 2, GLES20.GL_FLOAT, false, 2 * 4, mTexBuffer);
}
// -------------------------- 5. GLES 2.0 helpers --------------------------
/**
 * Wraps a float[] in a native-order direct FloatBuffer ready for GLES use.
 *
 * @param array coordinate data; null or empty yields null
 * @return a rewound FloatBuffer backed by direct memory, or null
 */
private FloatBuffer createFloatBuffer(float[] array) {
    if (array == null || array.length == 0) return null;
    // Direct allocation: the GL driver reads this memory without JVM copies.
    final int byteCount = array.length * Float.BYTES;
    FloatBuffer floats = ByteBuffer.allocateDirect(byteCount)
            .order(ByteOrder.nativeOrder()) // must match hardware byte order
            .asFloatBuffer()
            .put(array);
    floats.position(0); // rewind so GL reads from the start
    return floats;
}
/**
 * Builds a linked GLES 2.0 program from vertex + fragment shader sources.
 *
 * @param vertexCode   vertex shader GLSL
 * @param fragmentCode fragment shader GLSL
 * @return the program id, or 0 on compile/link failure
 */
private int compileShaderProgram(String vertexCode, String fragmentCode) {
    final int vs = compileSingleShader(GLES20.GL_VERTEX_SHADER, vertexCode);
    if (vs == 0) return 0;
    final int fs = compileSingleShader(GLES20.GL_FRAGMENT_SHADER, fragmentCode);
    if (fs == 0) {
        GLES20.glDeleteShader(vs); // don't leak the compiled vertex shader
        return 0;
    }
    int program = GLES20.glCreateProgram();
    GLES20.glAttachShader(program, vs);
    GLES20.glAttachShader(program, fs);
    GLES20.glLinkProgram(program);
    int[] linkStatus = new int[1];
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
    if (linkStatus[0] != GLES20.GL_TRUE) {
        Log.e(TAG, "着色器链接失败(GLES 2.0):" + GLES20.glGetProgramInfoLog(program));
        GLES20.glDeleteProgram(program);
        program = 0;
    }
    // The individual shader objects are no longer needed once linked.
    GLES20.glDeleteShader(vs);
    GLES20.glDeleteShader(fs);
    return program;
}
/**
 * Compiles one GLES 2.0 shader stage.
 *
 * @param shaderType GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
 * @param shaderCode GLSL source
 * @return the shader id, or 0 on failure
 */
private int compileSingleShader(int shaderType, String shaderCode) {
    final String stageName = (shaderType == GLES20.GL_VERTEX_SHADER) ? "顶点" : "片段";
    int shader = GLES20.glCreateShader(shaderType);
    if (shader == 0) {
        Log.e(TAG, "创建着色器失败(GLES 2.0),类型=" + stageName);
        return 0;
    }
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
    if (compileStatus[0] == GLES20.GL_TRUE) {
        return shader;
    }
    Log.e(TAG, stageName + "着色器编译失败(GLES 2.0):" + GLES20.glGetShaderInfoLog(shader));
    GLES20.glDeleteShader(shader); // discard the broken shader object
    return 0;
}
/**
 * Applies standard sampling parameters to one 2D texture: linear min/mag
 * filtering and clamp-to-edge wrapping on both axes.
 *
 * @param textureId texture name from glGenTextures (0 is ignored)
 */
private void initTexture(int textureId) {
    if (textureId == 0) return;
    // A texture must be bound before its parameters can be set.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    final int[][] params = {
            {GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR},
            {GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR},
            {GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE},
            {GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE},
    };
    for (int[] p : params) {
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, p[0], p[1]);
    }
    // Unbind so later GL calls cannot accidentally touch this texture.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
/**
 * Drains mPendingImage (thread-safe hand-off from the camera callback) and
 * extracts tightly packed Y/U/V byte arrays for texture upload.
 *
 * BUG FIX (black preview): the old code copied each plane's ByteBuffer
 * verbatim, ignoring rowStride/pixelStride. On devices where
 * rowStride > width the copies contained padding, the length validation
 * failed for EVERY frame, and nothing was ever rendered. Planes are now
 * copied row by row honoring both strides, which also covers semi-planar
 * (pixelStride == 2) chroma layouts via the U/V plane views.
 *
 * @return true when a new frame was extracted, false otherwise
 */
private boolean processPendingImage() {
    Image image;
    synchronized (mYuvLock) {
        if (mPendingImage == null) {
            return false; // nothing new
        }
        // Take the frame out under the lock, process it outside.
        image = mPendingImage;
        mPendingImage = null;
    }
    try {
        mYuvWidth = image.getWidth();
        mYuvHeight = image.getHeight();
        Image.Plane[] planes = image.getPlanes();
        if (planes.length < 3) {
            Log.e(TAG, "Image Planes 数量不足3,无法提取 YUV 数据");
            return false;
        }
        int chromaWidth = mYuvWidth / 2;
        int chromaHeight = mYuvHeight / 2;
        // Plane 0 is Y; planes 1/2 are U/V (possibly interleaved views of the
        // same memory in semi-planar layouts — copyPlane handles both).
        mYData = copyPlane(planes[0], mYuvWidth, mYuvHeight);
        mUData = copyPlane(planes[1], chromaWidth, chromaHeight);
        mVData = copyPlane(planes[2], chromaWidth, chromaHeight);
        Log.d(TAG, "处理 Image 完成(GLES 2.0):YUV 尺寸=" + mYuvWidth + "x" + mYuvHeight + ",数据长度 Y=" + mYData.length + ", U=" + mUData.length);
        return true;
    } catch (Exception e) {
        Log.e(TAG, "处理 Image 异常(GLES 2.0):" + e.getMessage(), e);
        return false;
    } finally {
        // Mandatory: return the buffer to the camera, or the pipeline stalls.
        image.close();
    }
}
/**
 * Copies one Image plane into a tightly packed width*height byte array,
 * honoring the plane's rowStride (row padding) and pixelStride (interleaving).
 */
private static byte[] copyPlane(Image.Plane plane, int width, int height) {
    ByteBuffer buffer = plane.getBuffer();
    int rowStride = plane.getRowStride();
    int pixelStride = plane.getPixelStride();
    byte[] out = new byte[width * height];
    int base = buffer.position();
    if (pixelStride == 1) {
        // Contiguous pixels: bulk-copy one row at a time, skipping padding.
        for (int row = 0; row < height; row++) {
            buffer.position(base + row * rowStride);
            buffer.get(out, row * width, width);
        }
    } else {
        // Interleaved pixels (semi-planar chroma): pick every pixelStride-th byte.
        for (int row = 0; row < height; row++) {
            int rowStart = base + row * rowStride;
            for (int col = 0; col < width; col++) {
                out[row * width + col] = buffer.get(rowStart + col * pixelStride);
            }
        }
    }
    buffer.position(base); // leave the buffer as we found it
    return out;
}
/**
 * Snapshots a ByteBuffer's remaining bytes without disturbing its position.
 *
 * BUG FIX: the original used the non-short-circuiting `|`, so a null buffer
 * still evaluated buffer.remaining() and threw an NPE; `||` short-circuits.
 *
 * @param buffer source buffer (may be null)
 * @return the copied bytes, or an empty array for a null/empty buffer
 */
private byte[] byteBufferToByteArray(ByteBuffer buffer) {
    if (buffer == null || buffer.remaining() == 0) return new byte[0];
    int originalPos = buffer.position();
    byte[] data = new byte[buffer.remaining()];
    buffer.get(data);
    buffer.position(originalPos); // restore so callers see an untouched buffer
    return data;
}
/**
 * Uploads one plane's bytes into a GLES 2.0 GL_LUMINANCE texture. The first
 * upload for a given size allocates storage with glTexImage2D; subsequent
 * frames reuse it via glTexSubImage2D. Sizes are tracked manually because
 * GLES 2.0 has no glGetTexLevelParameteriv.
 *
 * @param textureId  GL texture name (from glGenTextures)
 * @param data       tightly packed single-channel bytes (width*height expected)
 * @param width      texture width in pixels
 * @param height     texture height in pixels
 * @param isYTexture true for the Y plane; U and V share one size record
 *
 * NOTE(review): the cached sizes are reset only in release(); after an EGL
 * context loss (pause/resume) the re-created textures have no storage but the
 * cache still matches, so glTexSubImage2D would hit an unallocated texture —
 * verify the sizes are also cleared in onSurfaceCreated().
 */
private void uploadTexture(int textureId, byte[] data, int width, int height, boolean isYTexture) {
if (textureId == 0 || data == null || width <= 0 || height <= 0) {
Log.w(TAG, "上传纹理参数无效(GLES 2.0):textureId=" + textureId + ", width=" + width + ", height=" + height);
return;
}
// Select the destination texture.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
// YUV rows are byte-packed; the default 4-byte unpack alignment would skew rows.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
// Decide between allocate (glTexImage2D) and in-place update (glTexSubImage2D).
boolean isTextureCreated = false;
if (isYTexture) {
isTextureCreated = (mYTexWidth == width && mYTexHeight == height);
} else {
// U and V have identical dimensions, so they share one size record.
isTextureCreated = (mUTexWidth == width && mUTexHeight == height);
}
ByteBuffer dataBuffer = ByteBuffer.wrap(data);
if (!isTextureCreated) {
// First upload at this size: allocate GPU storage (single-channel luminance).
GLES20.glTexImage2D(
GLES20.GL_TEXTURE_2D, 0, // 2D texture, base mip level
GLES20.GL_LUMINANCE, // internal format: single channel (GLES 2.0)
width, height, 0, // dimensions; border must be 0
GLES20.GL_LUMINANCE, // data format: matches internal format
GLES20.GL_UNSIGNED_BYTE, // one unsigned byte per texel
dataBuffer
);
// Remember the allocated size for subsequent frames.
if (isYTexture) {
mYTexWidth = width;
mYTexHeight = height;
Log.d(TAG, "创建 Y 纹理(GLES 2.0):尺寸=" + width + "x" + height);
} else {
mUTexWidth = width;
mUTexHeight = height;
Log.d(TAG, "创建 U/V 纹理(GLES 2.0):尺寸=" + width + "x" + height);
}
} else {
// Same size as the last frame: update in place without reallocating.
GLES20.glTexSubImage2D(
GLES20.GL_TEXTURE_2D, 0, // 2D texture, base mip level
0, 0, // update from the top-left corner
width, height, // full-texture update
GLES20.GL_LUMINANCE, // must match the creation format
GLES20.GL_UNSIGNED_BYTE, // must match the creation type
dataBuffer
);
}
// Unbind so later GL calls cannot accidentally touch this texture.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
/**
 * Binds the Y/U/V textures to texture units 0/1/2 and points the shader's
 * yTex/uTex/vTex samplers at those units; logs when any sampler is missing.
 */
private void bindTextureToSampler() {
    int yTexLoc = bindSamplerUnit(0, mTextureIds[0], "yTex");
    int uTexLoc = bindSamplerUnit(1, mTextureIds[1], "uTex");
    int vTexLoc = bindSamplerUnit(2, mTextureIds[2], "vTex");
    // A location of -1 means the uniform was optimized out or misnamed.
    if (yTexLoc == -1 || uTexLoc == -1 || vTexLoc == -1) {
        Log.e(TAG, "纹理采样器绑定失败: "
                + "yTex=" + yTexLoc + " uTex=" + uTexLoc + " vTex=" + vTexLoc);
    }
}
/**
 * Activates texture unit {@code unit}, binds {@code textureId} to it, and
 * wires the sampler uniform {@code samplerName} to that unit.
 *
 * @return the sampler's uniform location (-1 when not found)
 */
private int bindSamplerUnit(int unit, int textureId, String samplerName) {
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + unit); // units are contiguous constants
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    int location = GLES20.glGetUniformLocation(mShaderProgram, samplerName);
    GLES20.glUniform1i(location, unit);
    return location;
}
}
// NOTE(review): original author note — "能够正常存储图片,但是预览黑屏不能显示"
// (photos are saved correctly, but the preview shows a black screen). This trailing
// prose is not valid Java and must not remain as bare text after the class body.
// Likely black-screen causes found in review: the preview Image was closed before the
// renderer consumed it; plane row/pixel stride was ignored when extracting YUV data;
// cached texture sizes were never reset after the EGL context was re-created.