public class MainActivity4 extends AppCompatActivity {
    private static final String TAG = "camera2api";
    // 1. Permission and camera-related fields
    private static final int REQUEST_CAMERA_PERMISSIONS = 100;
    private TextureView mTextureView; // View that hosts the preview
    private Button mToggleFilterBtn; // Filter toggle button
    private CameraManager mCameraManager; // Camera2 "camera manager" (enumerates cameras, queries characteristics)
    private StreamConfigurationMap configMap;
    private String cameraId; // ID of the camera currently in use
    private CameraDevice mCameraDevice; // Represents the physical camera (usable only after it has been opened)
    private CameraCaptureSession mCaptureSession; // Capture session (all preview/capture requests go through it)
    private HandlerThread mBackgroundThread; // Background thread for camera work (keeps the UI thread free)
    private Handler mBackgroundHandler;
    // 2. OpenGL renderer (bridges Camera2 and the GPU)
    private CameraRenderer mCameraRenderer;
    private GLSurfaceView mGLSurfaceView; // Hidden GLSurfaceView (drives OpenGL rendering, never displayed directly)
    // 3. Canvas drawing (viewfinder frame overlay)
    private Paint mFramePaint; // Paint used for the viewfinder frame
    private volatile boolean isDrawingActive = false; // Controls the drawing loop
    private SurfaceTexture surfaceTexture;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main4);
        // Initialize UI widgets
        initViews();
        // Initialize the Canvas paint (for the viewfinder overlay)
        initCanvasPaint();
        // Initialize the OpenGL renderer (hidden GLSurfaceView used only to drive the GPU)
        initRenderer();
    }
    @Override
    protected void onResume() {
        super.onResume();
        Log.i(TAG, "Checking camera permissions");
        List<String> permissions = new ArrayList<>();
        permissions.add(Manifest.permission.CAMERA);
        permissions.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
        permissions.add(Manifest.permission.READ_EXTERNAL_STORAGE);
        permissions.add(Manifest.permission.RECORD_AUDIO);
        if (!checkPermission(permissions)) {
            Log.i(TAG, "Camera permission missing -> requesting it");
            ActivityCompat.requestPermissions(this, permissions.toArray(new String[0]), REQUEST_CAMERA_PERMISSIONS);
        }
        Log.e(TAG, "1.1");
        Log.i(TAG, "Permission check done");
        if (mTextureView.isAvailable()) {
            Log.i(TAG, "TextureView available, opening camera");
            openCamera();
        }
        Log.e(TAG, "1");
    }
    // --------------------------
    // Initialize UI widgets
    // --------------------------
    private void initViews() {
        mTextureView = findViewById(R.id.texture_view);
        mToggleFilterBtn = findViewById(R.id.btn_toggle_filter);
        try {
            // Initialize the camera manager
            mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
            cameraId = mCameraManager.getCameraIdList()[0];
            CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
            configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        } catch (CameraAccessException e) {
            throw new RuntimeException(e);
        }
        Log.i(TAG, "Using camera ID: " + cameraId);
        Log.i(TAG, "Setting listeners");
        // Filter toggle click handler (must run on the GL thread to stay thread-safe)
        mToggleFilterBtn.setOnClickListener(v -> {
            // queueEvent: post the task onto the GL thread
            mGLSurfaceView.queueEvent(() -> mCameraRenderer.toggleFilter());
        });
        Log.e(TAG, "1.2");
        // TextureView listener: open the camera once its surface becomes available
        mTextureView.setSurfaceTextureListener(surfaceTextureListener);
    }
    private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
            Log.e(TAG, "1.3");
            // Check the camera permission; open the camera if it is granted
            if (ActivityCompat.checkSelfPermission(MainActivity4.this, Manifest.permission.CAMERA)
                    == PackageManager.PERMISSION_GRANTED) {
                Log.e(TAG, "1.4");
                openCamera();
            } else {
                Log.e(TAG, "1.5");
                // Request the camera permission
                ActivityCompat.requestPermissions(MainActivity4.this,
                        new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSIONS);
            }
            // Allow the TextureView to run onDraw (Canvas drawing demo)
            mTextureView.setWillNotDraw(false);
            startDrawingThread(); // Start the drawing thread
            // --------------------------
            // Canvas drawing demo (overlay a white viewfinder frame on the preview)
            // --------------------------
            // Note: Canvas drawing can only start once the SurfaceTexture is available, and the overlay
            // must be redrawn after every preview refresh
            mTextureView.postInvalidate(); // Trigger onDraw
        }
        @Override
        public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
            // When the preview size changes, the render viewport is updated by the OpenGL renderer
        }
        @Override
        public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
            // Release camera resources when the TextureView is destroyed
            closeCamera();
            return false;
        }
        @Override
        public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {
            // Redraw the Canvas overlay after every preview frame (so the viewfinder frame does not disappear)
            mTextureView.postInvalidate();
        }
    };
    // Helper: check whether every permission in the list has been granted
    private boolean checkPermission(List<String> permissions) {
        boolean isPermissionFlag = true;
        for (String permission : permissions) {
            if (ActivityCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
                isPermissionFlag = false;
            }
        }
        return isPermissionFlag;
    }
    private void startDrawingThread() {
        isDrawingActive = true; // Without this the loop below never runs (the flag defaults to false)
        new Thread(() -> {
            while (isDrawingActive) {
                Canvas canvas = mTextureView.lockCanvas();
                if (canvas != null) {
                    drawFrameWithCanvas(canvas); // Draw the overlay
                    mTextureView.unlockCanvasAndPost(canvas);
                }
                try { Thread.sleep(16); } catch (InterruptedException e) { } // ~60 FPS
            }
        }).start();
    }
    // --------------------------
    // Initialize the Canvas paint (viewfinder frame)
    // --------------------------
    private void initCanvasPaint() {
        mFramePaint = new Paint();
        mFramePaint.setColor(Color.WHITE); // White viewfinder frame
        mFramePaint.setStrokeWidth(5); // 5 px stroke width
        mFramePaint.setStyle(Paint.Style.STROKE); // Outline only
    }
    // --------------------------
    // Canvas drawing: overlay the viewfinder frame on the preview (CPU side)
    // --------------------------
    private void drawFrameWithCanvas(Canvas canvas) {
        if (canvas == null) return;
        // Viewfinder frame: centered, 80% of the shorter screen dimension
        int screenWidth = canvas.getWidth();
        int screenHeight = canvas.getHeight();
        int frameSize = Math.min(screenWidth, screenHeight) * 4 / 5; // 80% size
        int left = (screenWidth - frameSize) / 2;
        int top = (screenHeight - frameSize) / 2;
        int right = left + frameSize;
        int bottom = top + frameSize;
        // Draw the viewfinder frame (hollow rectangle)
        canvas.drawRect(left, top, right, bottom, mFramePaint);
    }
    // --------------------------
    // Initialize the OpenGL renderer (hidden GLSurfaceView)
    // --------------------------
    private void initRenderer() {
        // Create a hidden GLSurfaceView (only used to drive OpenGL; not added to the layout)
        mGLSurfaceView = new GLSurfaceView(this);
        mGLSurfaceView.setEGLContextClientVersion(2); // OpenGL ES 2.0 (widely supported on mobile)
        mCameraRenderer = new CameraRenderer(this);
        // Hand the renderer a reference to this GLSurfaceView so it can request renders on new frames
        mCameraRenderer.setGLSurfaceView(mGLSurfaceView);
        // Register the callback (must not be null)
        mCameraRenderer.setSurfaceTextureListener(this::handleSurfaceTextureReady);
        // Start the GL thread
        mGLSurfaceView.setRenderer(mCameraRenderer);
        mGLSurfaceView.onResume(); // Force the render thread to start
        // Wait for the texture to be initialized (at most 500 ms)
        synchronized (mGLSurfaceView) {
            try {
                mGLSurfaceView.wait(500);
            } catch (InterruptedException e) {
                Log.w(TAG, "Timed out waiting for texture initialization");
            }
        }
        // Render mode: continuous rendering (keeps the preview updating)
        mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }
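    // Note (not part of the original post): a GLSurfaceView that is never attached to a window never gets an
    // EGL surface, so GLSurfaceView.Renderer#onSurfaceCreated never runs and the renderer's SurfaceTexture
    // stays null. Below is a minimal sketch of one way to attach the view while keeping it effectively
    // invisible; the 1x1 px size and the method name attachHiddenGLSurfaceView are illustrative assumptions.
    private void attachHiddenGLSurfaceView() {
        // Attach the GLSurfaceView to the window at 1x1 px so its EGL surface is created,
        // while the visible preview is handled elsewhere.
        addContentView(mGLSurfaceView, new ViewGroup.LayoutParams(1, 1));
    }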
    // Callback (invoked on the GL thread)
    private void handleSurfaceTextureReady(SurfaceTexture surfaceTexture) {
        synchronized (mGLSurfaceView) {
            this.surfaceTexture = surfaceTexture;
            mGLSurfaceView.notifyAll(); // Wake up the waiting thread
        }
    }
    // --------------------------
    // Camera2 step 1: open the camera (plain Camera2 API flow)
    // --------------------------
    private void openCamera() {
        Log.e(TAG, "————————>" + Log.getStackTraceString(new Throwable()));
        Log.e(TAG, "1.6");
        // Start the background thread (Camera2 callbacks should run off the UI thread)
        startBackgroundThread();
        try {
            // 1. Open the camera (requires the permission; the CameraDevice arrives in the callback)
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            mCameraManager.openCamera(cameraId, new CameraDevice.StateCallback() {
                @Override
                public void onOpened(@NonNull CameraDevice camera) {
                    Log.e(TAG, "Camera opened");
                    // Camera opened successfully; keep the CameraDevice instance
                    mCameraDevice = camera;
                    // Next: configure the preview session (the core Camera2 flow)
                    createPreviewSession();
                }
                @Override
                public void onDisconnected(@NonNull CameraDevice camera) {
                    // Camera disconnected; release resources
                    camera.close();
                    mCameraDevice = null;
                }
                @Override
                public void onError(@NonNull CameraDevice camera, int error) {
                    // Camera error; release resources and exit
                    camera.close();
                    mCameraDevice = null;
                    Toast.makeText(MainActivity4.this, "Failed to open camera: " + error, Toast.LENGTH_SHORT).show();
                    finish();
                }
            }, mBackgroundHandler); // Handle the callbacks on the background thread
        } catch (CameraAccessException e) {
            e.printStackTrace();
            Toast.makeText(this, "Camera access exception: " + e.getMessage(), Toast.LENGTH_SHORT).show();
        }
    }
    // --------------------------
    // Camera2 step 2: create the preview session (configure the output target + issue the preview request)
    // --------------------------
    private void createPreviewSession() {
        try {
            // 1. Get the renderer's SurfaceTexture and wrap it in a Surface (Camera2's output target)
            surfaceTexture = mCameraRenderer.getSurfaceTexture();
            if (surfaceTexture == null) {
                Toast.makeText(this, "SurfaceTexture is null", Toast.LENGTH_SHORT).show();
                return;
            }
            // 2. Configure the preview size (must be a size supported by the camera to avoid distortion)
            if (configMap == null) {
                Toast.makeText(this, "Camera configuration error", Toast.LENGTH_SHORT).show();
                return;
            }
            // Query the SurfaceTexture output sizes the camera supports and take the first one
            // (simplified; a real app should pick a size matching the view's aspect ratio,
            // see the helper sketch after this method)
            Size[] previewSizes = configMap.getOutputSizes(SurfaceTexture.class);
            Size previewSize = previewSizes[0];
            // Set the SurfaceTexture's default buffer size to match the camera output
            surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
            // Wrap it in a Surface (Camera2 output targets must be Surfaces)
            Surface previewSurface = new Surface(surfaceTexture);
            // 3. Build the preview request (tells Camera2 this is a preview stream)
            CaptureRequest.Builder previewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            previewRequestBuilder.addTarget(previewSurface); // Camera frames are delivered to this Surface
            // 4. Preview parameters (auto focus and auto exposure)
            previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); // Continuous focus
            previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // Auto exposure + auto flash
            // 5. Create the capture session (all Camera2 work is issued through the session)
            mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            // Session configured; keep the instance
                            mCaptureSession = session;
                            try {
                                // Issue a repeating preview request (streams frames continuously)
                                CaptureRequest previewRequest = previewRequestBuilder.build();
                                // setRepeatingRequest: keeps repeating the request until stopped
                                mCaptureSession.setRepeatingRequest(previewRequest, null, mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }
                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            Toast.makeText(MainActivity4.this, "Failed to configure the preview session", Toast.LENGTH_SHORT).show();
                        }
                    }, mBackgroundHandler); // Handle the callbacks on the background thread
        } catch (CameraAccessException e) {
            e.printStackTrace();
            Toast.makeText(this, "Exception while creating session: " + e.getMessage(), Toast.LENGTH_SHORT).show();
        }
    }
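    // Sketch (not part of the original post): one common way to pick a preview size whose aspect ratio
    // matches the target view instead of blindly taking previewSizes[0]. The method name
    // chooseOptimalPreviewSize and its parameters are illustrative assumptions; whatever it returns must
    // still come from configMap.getOutputSizes(SurfaceTexture.class).
    private Size chooseOptimalPreviewSize(Size[] choices, int viewWidth, int viewHeight) {
        double targetRatio = (double) Math.max(viewWidth, viewHeight) / Math.min(viewWidth, viewHeight);
        Size best = choices[0];
        double bestDiff = Double.MAX_VALUE;
        for (Size option : choices) {
            double ratio = (double) option.getWidth() / option.getHeight(); // camera sizes are landscape-oriented
            double diff = Math.abs(ratio - targetRatio);
            // Prefer the closest aspect ratio; among equals, prefer the larger area
            if (diff < bestDiff || (diff == bestDiff
                    && option.getWidth() * option.getHeight() > best.getWidth() * best.getHeight())) {
                best = option;
                bestDiff = diff;
            }
        }
        return best;
    }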
    // --------------------------
    // Start the camera background thread
    // --------------------------
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground"); // Thread name (useful when debugging)
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); // Handler bound to that thread's looper
    }
    // --------------------------
    // Stop the background thread (avoid leaks)
    // --------------------------
    private void stopBackgroundThread() {
        if (mBackgroundThread != null) {
            mBackgroundThread.quitSafely(); // Quit the thread safely
            try {
                mBackgroundThread.join(); // Wait for the thread to finish
                mBackgroundThread = null;
                mBackgroundHandler = null;
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
    // --------------------------
    // Close the camera and release all resources
    // --------------------------
    private void closeCamera() {
        isDrawingActive = false; // Stop the Canvas drawing loop
        // Stop the preview request
        if (mCaptureSession != null) {
            try {
                mCaptureSession.stopRepeating(); // Stop the repeating request
                mCaptureSession.abortCaptures(); // Abort any in-flight requests
                mCaptureSession.close();
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
            mCaptureSession = null;
        }
        // Close the camera device
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        // Stop the background thread
        stopBackgroundThread();
    }
    // --------------------------
    // Permission request result
    // --------------------------
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_CAMERA_PERMISSIONS) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // Granted: the camera is opened from onResume / onSurfaceTextureAvailable
            } else {
                // Denied: explain and exit
                Toast.makeText(this, "The camera permission is required for the preview", Toast.LENGTH_SHORT).show();
                finish();
            }
        }
    }
    // --------------------------
    // Lifecycle: release resources on pause
    // --------------------------
    @Override
    protected void onPause() {
        closeCamera();
        super.onPause();
    }
    @Override
    protected void onDestroy() {
        if (surfaceTexture != null) {
            surfaceTexture.release(); // Release the texture
            surfaceTexture = null;
        }
        super.onDestroy();
    }
}

public class CameraRenderer implements GLSurfaceView.Renderer {
    private static final String TAG = "camera2api";
    // 1. Basic fields
    private Context mContext;
    private SurfaceTexture mSurfaceTexture; // Bridge between Camera2 and the GPU (receives the camera frames)
    private int mCameraTextureId; // GPU texture ID (the "container" holding the camera frames)
    private int mProgramNoFilter; // OpenGL program without a filter (vertex + fragment shader)
    private int mProgramBlackWhite; // OpenGL program with the black-and-white filter
    private int mCurrentProgram; // Program currently in use (no filter by default)
    private boolean mIsBlackWhite = false; // Filter switch
    private GLSurfaceView mGLSurfaceView; // Set by the hosting Activity; used to request renders when a new frame arrives
    // 2. Vertex and texture coordinates (fixed values: cover the full screen)
    // Vertex coordinates: bottom-left, bottom-right, top-left, top-right (OpenGL coordinates: -1..1)
    private float[] mVertexCoords = {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f};
    // Texture coordinates: flipped vertically to compensate for the camera texture orientation (0..1, matching the vertices)
    private float[] mTexCoords = {0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f};
    // Converted to FloatBuffers the GPU can read (avoids CPU/GPU data-format mismatches)
    private FloatBuffer mVertexBuffer;
    private FloatBuffer mTexCoordBuffer;
    // 3. Constructor (initializes the coordinate buffers)
    public CameraRenderer(Context context) {
        mContext = context;
        // Vertex coordinate buffer
        mVertexBuffer = ByteBuffer.allocateDirect(mVertexCoords.length * 4)
                .order(ByteOrder.nativeOrder()) // Native byte order
                .asFloatBuffer()
                .put(mVertexCoords);
        mVertexBuffer.position(0); // Rewind the read position
        // Texture coordinate buffer
        mTexCoordBuffer = ByteBuffer.allocateDirect(mTexCoords.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(mTexCoords);
        mTexCoordBuffer.position(0);
    }
    // Added in the CameraRenderer class
    public interface SurfaceTextureListener {
        void onSurfaceTextureCreated(SurfaceTexture surfaceTexture);
    }
    private SurfaceTextureListener mListener;
    public void setSurfaceTextureListener(SurfaceTextureListener listener) {
        mListener = listener;
    }
    // Called by the Activity so the renderer knows which GLSurfaceView to request renders on
    public void setGLSurfaceView(GLSurfaceView glSurfaceView) {
        mGLSurfaceView = glSurfaceView;
    }
    // --------------------------
    // Renderer core 1: initialize the OpenGL environment (runs once, on the GL thread)
    // --------------------------
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // 1. Create the camera texture (an external OES texture, not an ordinary 2D image texture)
        mCameraTextureId = createCameraTexture();
        // 2. Bind the texture to a SurfaceTexture (lets the Camera2 frames flow into the GPU)
        mSurfaceTexture = new SurfaceTexture(mCameraTextureId);
        // When a new frame arrives, ask the GLSurfaceView to render it (otherwise onDrawFrame never shows it);
        // mGLSurfaceView is supplied by the Activity via setGLSurfaceView (a renderer cannot call findViewById)
        mSurfaceTexture.setOnFrameAvailableListener(st -> {
            if (mGLSurfaceView != null) {
                mGLSurfaceView.requestRender();
            }
        });
        // Null-pointer guard for the callback
        if (mListener != null) {
            mListener.onSurfaceTextureCreated(mSurfaceTexture);
        } else {
            Log.w(TAG, "SurfaceTexture listener was not registered!");
        }
        // 3. Compile and link the two OpenGL programs (no filter + black-and-white filter)
        String vertexShader = loadShaderFromRaw(R.raw.vertex_shader); // Shared vertex shader
        String fragNoFilter = loadShaderFromRaw(R.raw.frag_shader_no_filter);
        String fragBlackWhite = loadShaderFromRaw(R.raw.frag_shader_black_white);
        mProgramNoFilter = createOpenGLProgram(vertexShader, fragNoFilter);
        mProgramBlackWhite = createOpenGLProgram(vertexShader, fragBlackWhite);
        // Use the no-filter program by default
        mCurrentProgram = mProgramNoFilter;
    }
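    // Sketch (assumption, not the poster's actual files): the three raw shader resources loaded above are not
    // shown in this post. Minimal GLSL that matches the attribute/uniform names used in onDrawFrame
    // (vPosition, vTexCoord, sTexture) and samples the external OES camera texture could look like the
    // string constants below; they are included only for reference.
    private static final String SKETCH_VERTEX_SHADER =
            "attribute vec4 vPosition;\n" +
            "attribute vec2 vTexCoord;\n" +
            "varying vec2 texCoord;\n" +
            "void main() {\n" +
            "    gl_Position = vPosition;\n" +
            "    texCoord = vTexCoord;\n" +
            "}\n";
    private static final String SKETCH_FRAG_NO_FILTER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 texCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "    gl_FragColor = texture2D(sTexture, texCoord);\n" +
            "}\n";
    private static final String SKETCH_FRAG_BLACK_WHITE =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 texCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "    vec4 color = texture2D(sTexture, texCoord);\n" +
            "    float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));\n" +
            "    gl_FragColor = vec4(gray, gray, gray, 1.0);\n" +
            "}\n";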
    // --------------------------
    // Renderer core 2: per-frame rendering (on the GL thread)
    // --------------------------
    @Override
    public void onDrawFrame(GL10 gl) {
        // 1. Clear the screen (avoid leftovers from the previous frame; black background)
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        // 2. Update the texture (pull the latest camera frame into the GPU texture)
        mSurfaceTexture.updateTexImage();
        // 3. Pick the current OpenGL program (no filter or black-and-white, depending on the switch)
        mCurrentProgram = mIsBlackWhite ? mProgramBlackWhite : mProgramNoFilter;
        GLES20.glUseProgram(mCurrentProgram); // Activate the program (the GPU runs its shaders)
        // 4. Bind the vertex coordinates (where on screen the image is drawn)
        int vPositionLoc = GLES20.glGetAttribLocation(mCurrentProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(vPositionLoc); // Enable the vertex attribute
        // Pass the vertices to the vertex shader: 2 floats per vertex (x, y), not normalized, no stride, read from mVertexBuffer
        GLES20.glVertexAttribPointer(vPositionLoc, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
        // 5. Bind the texture coordinates (how the texture maps onto the vertices)
        int vTexCoordLoc = GLES20.glGetAttribLocation(mCurrentProgram, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vTexCoordLoc);
        GLES20.glVertexAttribPointer(vTexCoordLoc, 2, GLES20.GL_FLOAT, false, 0, mTexCoordBuffer);
        // 6. Bind the camera texture (the data the shaders sample from)
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // Activate texture unit 0
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mCameraTextureId); // Bind the camera texture
        // Point the fragment shader's sTexture uniform at texture unit 0
        int sTextureLoc = GLES20.glGetUniformLocation(mCurrentProgram, "sTexture");
        GLES20.glUniform1i(sTextureLoc, 0);
        // 7. Draw (the GPU runs the shaders and writes the processed pixels to the surface)
        // GL_TRIANGLE_STRIP: 4 vertices form 2 triangles covering the full viewport
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        // 8. Disable the vertex attributes
        GLES20.glDisableVertexAttribArray(vPositionLoc);
        GLES20.glDisableVertexAttribArray(vTexCoordLoc);
        // --------------------------
        // Canvas overlay (CPU side, e.g. the white viewfinder frame)
        // --------------------------
        // Note: Canvas drawing must run on the UI thread or in the SurfaceTexture callbacks; it is only
        // mentioned here and is actually handled in MainActivity's SurfaceTextureListener (see step 4)
    }
    // --------------------------
    // Renderer core 3: view size changed (e.g. screen rotation)
    // --------------------------
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        // Set the render viewport (origin (0,0), width x height, i.e. the full surface)
        GLES20.glViewport(0, 0, width, height);
    }
    // --------------------------
    // Helper 1: create the camera texture
    // --------------------------
    private int createCameraTexture() {
        int[] textures = new int[1];
        // 1. Ask the GPU for one texture ID
        GLES20.glGenTextures(1, textures, 0);
        int textureId = textures[0];
        // 2. Bind it as an external OES texture (the type used for camera frames)
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
        // 3. Filtering parameters (avoid blur/artifacts when scaling)
        // Minification: linear interpolation (smooth when the image is shrunk)
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        // Magnification: linear interpolation (smooth when the image is enlarged)
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Edge handling: clamp to edge (edge pixels are not repeated, avoids black borders)
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        // 4. Unbind (so later texture calls do not affect this texture)
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        return textureId;
    }
    // --------------------------
    // Helper 2: load shader source from the raw resources
    // --------------------------
    private String loadShaderFromRaw(int rawId) {
        try {
            InputStream is = mContext.getResources().openRawResource(rawId);
            byte[] buffer = new byte[is.available()];
            is.read(buffer);
            is.close();
            return new String(buffer, "UTF-8"); // Shader source as a string
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException("Failed to load shader: " + e.getMessage());
        }
    }
    // --------------------------
    // Helper 3: create an OpenGL program (compile + link shaders)
    // --------------------------
    private int createOpenGLProgram(String vertexShaderCode, String fragmentShaderCode) {
        // 1. Compile the vertex shader
        int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        // 2. Compile the fragment shader
        int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        // 3. Link the program (combines the two shaders into something the GPU can execute)
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader); // Attach the vertex shader
        GLES20.glAttachShader(program, fragmentShader); // Attach the fragment shader
        GLES20.glLinkProgram(program); // Link
        // 4. Check the link status
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] == 0) {
            String errorLog = GLES20.glGetProgramInfoLog(program);
            GLES20.glDeleteProgram(program); // Delete the broken program
            throw new RuntimeException("Failed to link OpenGL program: " + errorLog);
        }
        return program;
    }
    // --------------------------
    // Helper 4: compile a single shader
    // --------------------------
    private int compileShader(int type, String shaderCode) {
        int shader = GLES20.glCreateShader(type); // Create the shader (vertex or fragment)
        GLES20.glShaderSource(shader, shaderCode); // Set its source
        GLES20.glCompileShader(shader); // Compile
        // Check the compile status
        int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
        if (compileStatus[0] == 0) {
            String errorLog = GLES20.glGetShaderInfoLog(shader);
            GLES20.glDeleteShader(shader); // Delete the broken shader
            throw new RuntimeException("Failed to compile shader: " + errorLog);
        }
        return shader;
    }
    // --------------------------
    // Public API: toggle the filter (must run on the GL thread)
    // --------------------------
    public void toggleFilter() {
        mIsBlackWhite = !mIsBlackWhite;
    }
    // --------------------------
    // Public API: expose the SurfaceTexture (so Camera2 can bind it as an output target)
    // --------------------------
    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }
}

Can you take a look at what the problem is? The preview still does not work.