Code Fragment-当前程序结束的时候,做一些清理的操作。

本文详细解析了Android中DockService类的onDestroy方法实现细节,包括资源释放和监听器移除等关键步骤。

在程序结束的时候——这里的"程序"可以是Service,也可以是Activity或Application——我们可以主动调用一些清理的操作。

如:packages/apps/Settings/src/com/android/settings/bluetooth/DockService.java中的onDestroy代码处理如下:

// Cleanup pattern quoted from AOSP:
// packages/apps/Settings/src/com/android/settings/bluetooth/DockService.java.
// Release everything the service acquired (dialog, listener registration,
// worker looper), then null out the references so the objects can be GC'd.
@Override
public void onDestroy() {
    if (DEBUG) Log.d(TAG, "onDestroy");
    mRunnable = null;
    if (mDialog != null) {
        mDialog.dismiss();// object-specific cleanup: dismiss the dialog before dropping it
        mDialog = null;
    }
    if (mProfileManager != null) {
        // unregister so no callbacks arrive after the service is destroyed
        mProfileManager.removeServiceListener(this);
    }
    if (mServiceLooper != null) {
        mServiceLooper.quit();// object-specific cleanup: stop the worker thread's message loop

    }

    // Drop the remaining references to help garbage collection.
    mLocalAdapter = null;
    mDeviceManager = null;
    mProfileManager = null;
    mServiceLooper = null;
    mServiceHandler = null;
}


package com.android.example.cameraappxjava; import android.Manifest; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Intent; import android.content.pm.PackageManager; import android.graphics.ImageFormat; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CaptureFailure; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.params.StreamConfigurationMap; import android.media.Image; import android.media.ImageReader; import android.net.Uri; import android.opengl.GLSurfaceView; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.HandlerThread; import android.os.SystemClock; import android.provider.MediaStore; import android.util.Log; import android.util.Size; import android.view.Surface; import android.widget.Button; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import androidx.core.app.ActivityCompat; import com.android.example.cameraappxjava.util.CameraGLRenderer; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; /** * 修复后:Camera2 + GLSurfaceView 自定义YUV预览Demo */ public class MainActivity2 extends AppCompatActivity implements CameraGLRenderer.SurfaceSizeCallback { private static final String TAG = "camera2api_fixed"; private static final int REQUEST_CAMERA_PERMISSION = 100; private static final long MIN_CLICK_INTERVAL = 1000; // GLSurfaceView 相关 private GLSurfaceView glSurfaceView; private CameraGLRenderer cameraGLRenderer; private int glSurfaceWidth 
= 0; private int glSurfaceHeight = 0; // Camera2 核心组件 private Button captureButton; private CameraDevice cameraDevice; private CameraCaptureSession cameraCaptureSession; private CaptureRequest.Builder captureRequestBuilder; private String cameraId; private Handler backgroundHandler; private HandlerThread backgroundThread; private CameraManager cameraManager; private volatile boolean isCapturing = false; private long lastClickTime = 0; // ImageReader:预览(YUV)+ 拍照(JPEG) private ImageReader previewImageReader; private ImageReader captureImageReader; private Size previewSize; // 与GLSurfaceView匹配的预览尺寸 private Size captureSize; // 拍照尺寸 // 图片保存相关 private ContentResolver contentResolver; private ContentValues mediaValues; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); Log.d(TAG, "onCreate ——————————————————————"); // 1. 初始化视图组件 glSurfaceView = findViewById(R.id.glsurfaceView); captureButton = findViewById(R.id.btnCapture); contentResolver = getContentResolver(); mediaValues = new ContentValues(); mediaValues.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg"); mediaValues.put(MediaStore.Images.Media.RELATIVE_PATH, Environment.DIRECTORY_PICTURES); // 2. 初始化GL渲染器(设置尺寸回调) initGLRenderer(); // 3. 拍照按钮点击事件(防连点) captureButton.setOnClickListener(v -> { long currentTime = SystemClock.elapsedRealtime(); if (currentTime - lastClickTime > MIN_CLICK_INTERVAL) { lastClickTime = currentTime; takePicture(); } else { Log.d(TAG, "点击过快,已忽略"); } }); // 4. 
初始化CameraManager(提前获取,避免重复创建) cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE); } /** * 初始化GLSurfaceView和自定义渲染器(核心) */ private void initGLRenderer() { // 设置OpenGL ES 2.0版本 glSurfaceView.setEGLContextClientVersion(2); // 创建渲染器并设置尺寸回调(监听GLSurfaceView实际尺寸) cameraGLRenderer = new CameraGLRenderer(this); glSurfaceView.setRenderer(cameraGLRenderer); // 按需渲染(有新帧才重绘,节省性能) glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); } /** * 初始化相机核心配置(预览/拍照ImageReader) * 必须在GLSurfaceView尺寸确定后调用(避免尺寸不匹配) */ private void initCamera() { if (glSurfaceWidth == 0 || glSurfaceHeight == 0 || backgroundHandler == null) { Log.w(TAG, "initCamera: 条件不满足(GL尺寸未确定/后台线程未启动)"); Log.w(TAG, "glSurfaceWidth:"+glSurfaceWidth+"glSurfaceHeight:"+glSurfaceHeight+"backgroundHandler:"+backgroundHandler); return; } Log.d(TAG, "initCamera: 开始初始化,GL尺寸=" + glSurfaceWidth + "x" + glSurfaceHeight); try { // 1. 获取相机ID(默认后置相机,0为后置,1为前置) String[] cameraIds = cameraManager.getCameraIdList(); if (cameraIds.length == 0) { Log.e(TAG, "无可用相机设备"); return; } cameraId = cameraIds[0]; // 优先后置相机 // 2. 获取相机支持的配置(预览/拍照尺寸) CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId); StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); if (configMap == null) { Log.e(TAG, "StreamConfigurationMap为空,相机不支持该配置"); return; } // 3. 
初始化预览ImageReader(YUV_420_888格式,与GLSurfaceView尺寸匹配) Size[] yuvSizes = configMap.getOutputSizes(ImageFormat.YUV_420_888); previewSize = chooseOptimalSize(yuvSizes, glSurfaceWidth, glSurfaceHeight); if (previewSize == null) { Log.e(TAG, "未找到合适的预览尺寸"); return; } // 关闭旧的ImageReader(避免内存泄漏) if (previewImageReader != null) { previewImageReader.close(); } // 创建预览ImageReader(2个缓冲区,避免帧丢失) previewImageReader = ImageReader.newInstance( previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 4 ); // 设置YUV帧回调(Camera后台线程执行,避免阻塞UI) previewImageReader.setOnImageAvailableListener(reader -> { Image image = reader.acquireLatestImage(); if (image == null) return; try { if (cameraGLRenderer != null) { // 传递YUV数据给渲染器,并触发渲染 cameraGLRenderer.setYUVData(image); glSurfaceView.requestRender(); }else { image.close(); } } catch (Exception e) { Log.e(TAG, "预览帧处理失败: " + e.getMessage(), e); } }, backgroundHandler); // 4. 初始化拍照ImageReader(JPEG格式,选择最大支持尺寸) Size[] jpegSizes = configMap.getOutputSizes(ImageFormat.JPEG); if (jpegSizes.length == 0) { Log.e(TAG, "相机不支持JPEG拍照"); return; } // 选择最大的拍照尺寸 captureSize = Collections.max(Arrays.asList(jpegSizes), new CompareSizesByArea()); // 关闭旧的ImageReader if (captureImageReader != null) { captureImageReader.close(); } // 创建拍照ImageReader captureImageReader = ImageReader.newInstance( captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 1 // 拍照一次一张,1个缓冲区足够 ); Log.d(TAG, "initCamera: 完成,预览尺寸=" + previewSize + ",拍照尺寸=" + captureSize); } catch (CameraAccessException e) { Log.e(TAG, "相机访问异常: " + e.getMessage(), e); } catch (SecurityException e) { Log.e(TAG, "相机权限异常: " + e.getMessage(), e); } } /** * 打开相机(需权限已授予) */ private void openCamera() { if (cameraId == null || previewImageReader == null || backgroundHandler == null) { Log.w(TAG, "openCamera: 条件不满足,延迟1000ms重试"); backgroundHandler.postDelayed(this::openCamera, 1000); return; } Log.d(TAG, "openCamera: 尝试打开相机,ID=" + cameraId); try { if (ActivityCompat.checkSelfPermission(this, 
Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) { // 打开相机(传入状态回调和后台线程Handler) cameraManager.openCamera(cameraId, cameraStateCallback, backgroundHandler); } else { Log.w(TAG, "相机权限未授予,无法打开"); } } catch (CameraAccessException e) { Log.e(TAG, "打开相机失败: " + e.getMessage(), e); } } /** * 创建相机预览会话(核心:绑定预览/拍照Surface) */ private void createCaptureSession() { if (cameraDevice == null || previewImageReader == null || captureImageReader == null) { Log.e(TAG, "createCaptureSession: 核心组件为空(相机/ImageReader)"); return; } try { // 1. 获取预览和拍照的Surface Surface previewSurface = previewImageReader.getSurface(); Surface captureSurface = captureImageReader.getSurface(); List<Surface> outputSurfaces = new ArrayList<>(); outputSurfaces.add(previewSurface); // 预览Surface(YUV输出) outputSurfaces.add(captureSurface); // 拍照Surface(JPEG输出) // 2. 创建CaptureSession(配置输出Surface) cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession session) { Log.i(TAG, "CaptureSession配置成功"); cameraCaptureSession = session; // 配置预览请求(持续输出YUV帧到预览Surface) try { captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); captureRequestBuilder.addTarget(previewSurface); // 预览目标:YUV ImageReader // 开启自动对焦和自动曝光(预览必备) captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // 下发持续预览请求 cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler); Log.i(TAG, "预览请求已下发,开始预览"); } catch (CameraAccessException e) { Log.e(TAG, "配置预览请求失败: " + e.getMessage(), e); } } @Override public void onConfigureFailed(@NonNull CameraCaptureSession session) { Log.e(TAG, "CaptureSession配置失败"); runOnUiThread(() -> Toast.makeText(MainActivity2.this, "相机预览配置失败", Toast.LENGTH_SHORT).show()); } }, 
backgroundHandler); } catch (CameraAccessException e) { Log.e(TAG, "创建CaptureSession异常: " + e.getMessage(), e); } } /** * 拍照逻辑(停止预览→下发拍照请求→恢复预览) */ private void takePicture() { if (cameraDevice == null || cameraCaptureSession == null || captureImageReader == null) { Log.w(TAG, "takePicture: 核心组件未就绪,无法拍照"); runOnUiThread(() -> Toast.makeText(this, "相机未就绪", Toast.LENGTH_SHORT).show()); return; } Log.i(TAG, "takePicture: 开始拍照流程"); isCapturing = true; try { // 1. 配置拍照请求(JPEG格式,输出到拍照ImageReader) CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); captureBuilder.addTarget(captureImageReader.getSurface()); // 配置拍照参数(自动对焦、曝光、旋转角度) captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // 修正照片旋转角度(匹配屏幕方向) int rotation = getWindowManager().getDefaultDisplay().getRotation(); captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getJpegOrientation(rotation)); // 2. 
设置拍照ImageReader回调(保存照片) captureImageReader.setOnImageAvailableListener(reader -> { try (Image image = reader.acquireLatestImage()) { if (image == null) { Log.w(TAG, "拍照Image为空,保存失败"); return; } // 提取JPEG数据(Image的planes[0]为JPEG数据) Image.Plane[] planes = image.getPlanes(); ByteBuffer jpegBuffer = planes[0].getBuffer(); byte[] jpegData = new byte[jpegBuffer.remaining()]; jpegBuffer.get(jpegData); // 保存照片到相册 savePhotoToGallery(jpegData); } catch (Exception e) { Log.e(TAG, "保存照片失败: " + e.getMessage(), e); runOnUiThread(() -> Toast.makeText(MainActivity2.this, "照片保存失败", Toast.LENGTH_SHORT).show()); } finally { isCapturing = false; // 恢复预览(重新下发持续预览请求) if (cameraCaptureSession != null && captureRequestBuilder != null) { try { cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler); Log.i(TAG, "拍照完成,已恢复预览"); } catch (CameraAccessException e) { Log.e(TAG, "恢复预览失败: " + e.getMessage(), e); } } } }, backgroundHandler); // 3. 停止预览→下发拍照请求 cameraCaptureSession.stopRepeating(); cameraCaptureSession.capture(captureBuilder.build(), new CameraCaptureSession.CaptureCallback() { @Override public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) { super.onCaptureFailed(session, request, failure); Log.e(TAG, "拍照失败,原因: " + failure.getReason()); isCapturing = false; runOnUiThread(() -> Toast.makeText(MainActivity2.this, "拍照失败", Toast.LENGTH_SHORT).show()); // 恢复预览 if (cameraCaptureSession != null && captureRequestBuilder != null) { try { cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler); } catch (CameraAccessException e) { Log.e(TAG, "恢复预览失败: " + e.getMessage(), e); } } } }, backgroundHandler); } catch (CameraAccessException e) { Log.e(TAG, "拍照流程异常: " + e.getMessage(), e); isCapturing = false; } } /** * 保存照片到系统相册 */ private void savePhotoToGallery(byte[] jpegData) { if (jpegData == null || jpegData.length == 0) { Log.w(TAG, 
"JPEG数据为空,无法保存"); return; } try { // 1. 生成唯一文件名(时间戳) String fileName = "Camera2_" + System.currentTimeMillis() + ".jpg"; mediaValues.put(MediaStore.Images.Media.DISPLAY_NAME, fileName); // 2. 插入到媒体库(获取Uri) Uri imageUri = contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, mediaValues); if (imageUri == null) { Log.e(TAG, "插入媒体库失败,无法获取Uri"); return; } // 3. 写入文件(通过ContentResolver避免存储权限问题) try (FileOutputStream outputStream = (FileOutputStream) contentResolver.openOutputStream(imageUri)) { outputStream.write(jpegData); outputStream.flush(); Log.i(TAG, "照片保存成功,路径: " + imageUri); // 通知媒体库扫描(刷新相册) sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, imageUri)); // 显示成功提示 runOnUiThread(() -> Toast.makeText(this, "照片已保存到相册", Toast.LENGTH_SHORT).show()); } } catch (IOException e) { Log.e(TAG, "写入照片文件失败: " + e.getMessage(), e); } } /** * 计算JPEG照片的旋转角度(匹配屏幕方向) */ private int getJpegOrientation(int screenRotation) { try { CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId); int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); // 根据相机传感器方向和屏幕旋转计算最终角度 int orientation = (sensorOrientation + screenRotation * 90) % 360; return orientation; } catch (CameraAccessException e) { Log.e(TAG, "获取传感器方向失败: " + e.getMessage(), e); return 0; } } /** * 选择最优尺寸(匹配View尺寸比例,避免拉伸) */ private Size chooseOptimalSize(Size[] sizes, int viewWidth, int viewHeight) { if (sizes == null || sizes.length == 0) return null; List<Size> candidateSizes = new ArrayList<>(); float viewRatio = (float) viewWidth / viewHeight; // 筛选:比例接近View(误差≤0.1),且尺寸不超过View for (Size size : sizes) { float sizeRatio = (float) size.getWidth() / size.getHeight(); if (Math.abs(sizeRatio - viewRatio) <= 0.1 && size.getWidth() <= viewWidth && size.getHeight() <= viewHeight) { candidateSizes.add(size); } } // 有符合条件的尺寸:选最大的(画质最好) if (!candidateSizes.isEmpty()) { return Collections.max(candidateSizes, new CompareSizesByArea()); } // 
无符合条件:选最大的尺寸(降级处理) return Collections.max(Arrays.asList(sizes), new CompareSizesByArea()); } /** * 启动相机后台线程(处理相机回调和ImageReader回调) */ private void startBackgroundThread() { if (backgroundThread == null) { backgroundThread = new HandlerThread("Camera2_Background"); backgroundThread.start(); backgroundHandler = new Handler(backgroundThread.getLooper()); Log.d(TAG, "后台线程已启动"); } } /** * 停止相机后台线程(避免内存泄漏) */ private void stopBackgroundThread() { if (backgroundThread != null) { backgroundThread.quitSafely(); try { backgroundThread.join(); backgroundThread = null; backgroundHandler = null; Log.d(TAG, "后台线程已停止"); } catch (InterruptedException e) { Log.e(TAG, "停止后台线程异常: " + e.getMessage(), e); } } } /** * 释放相机资源(避免内存泄漏) */ private void releaseCameraResources() { Log.d(TAG, "releaseCameraResources: 开始释放"); // 停止预览请求 if (cameraCaptureSession != null) { try { cameraCaptureSession.stopRepeating(); } catch (CameraAccessException e) { Log.e(TAG, "停止预览失败: " + e.getMessage(), e); } cameraCaptureSession.close(); cameraCaptureSession = null; } // 关闭相机设备 if (cameraDevice != null) { cameraDevice.close(); cameraDevice = null; } // 关闭ImageReader if (previewImageReader != null) { previewImageReader.close(); previewImageReader = null; } if (captureImageReader != null) { captureImageReader.close(); captureImageReader = null; } Log.d(TAG, "releaseCameraResources: 完成释放"); } // ---------------------- 生命周期方法 ---------------------- @Override protected void onResume() { super.onResume(); Log.d(TAG, "onResume ——————————————————————"); // 恢复GLSurfaceView(必须调用,否则OpenGL上下文丢失) glSurfaceView.onResume(); // 启动后台线程 startBackgroundThread(); // 检查相机权限 if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { Log.i(TAG, "请求相机权限"); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION); return; } // 初始化相机(GL尺寸确定后会自动调用openCamera) initCamera(); } @Override protected void onPause() { super.onPause(); 
Log.d(TAG, "onPause ——————————————————————"); // 暂停GLSurfaceView(保存OpenGL上下文) glSurfaceView.onPause(); // 停止预览并释放相机资源 if (!isCapturing) { releaseCameraResources(); } else { Log.w(TAG, "拍照中,延迟1000ms释放资源"); backgroundHandler.postDelayed(this::releaseCameraResources, 1000); } // 停止后台线程 stopBackgroundThread(); } @Override protected void onDestroy() { super.onDestroy(); Log.d(TAG, "onDestroy ——————————————————————"); // 释放渲染器资源 if (cameraGLRenderer != null) { cameraGLRenderer.release(); } // 置空引用(帮助GC) glSurfaceView = null; cameraGLRenderer = null; captureButton = null; cameraManager = null; contentResolver = null; mediaValues = null; } // ---------------------- 权限回调 ---------------------- @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); if (requestCode == REQUEST_CAMERA_PERMISSION) { if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { Log.i(TAG, "相机权限已授予"); // 初始化相机并打开 if (glSurfaceWidth == 0 || glSurfaceHeight == 0 || backgroundHandler == null) { Log.w(TAG, "相机权限授权后 initCamera: 条件不满足(GL尺寸未确定/后台线程未启动)"); Log.w(TAG, "glSurfaceWidth:"+glSurfaceWidth+"glSurfaceHeight:"+glSurfaceHeight+"backgroundHandler:"+backgroundHandler); } initCamera(); } else { Log.w(TAG, "相机权限被拒绝,无法预览"); runOnUiThread(() -> { Toast.makeText(this, "需要相机权限才能使用", Toast.LENGTH_SHORT).show(); finish(); // 无权限则退出 }); } } } // ---------------------- CameraDevice状态回调 ---------------------- private final CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(@NonNull CameraDevice camera) { Log.i(TAG, "相机已打开,ID=" + camera.getId()); cameraDevice = camera; // 相机打开后,创建预览会话 createCaptureSession(); } @Override public void onDisconnected(@NonNull CameraDevice camera) { Log.w(TAG, "相机已断开连接"); camera.close(); cameraDevice = null; } @Override public void onError(@NonNull 
CameraDevice camera, int error) { Log.e(TAG, "相机错误,代码=" + error); camera.close(); cameraDevice = null; runOnUiThread(() -> Toast.makeText(MainActivity2.this, "相机初始化错误", Toast.LENGTH_SHORT).show()); } }; // ---------------------- GLSurface尺寸回调(来自CameraGLRenderer) ---------------------- @Override public void onSurfaceSizeChanged(int width, int height) { Log.d(TAG, "onSurfaceSizeChanged: GL尺寸=" + width + "x" + height); // 更新GLSurface尺寸 glSurfaceWidth = width; glSurfaceHeight = height; // 重新初始化相机并打开 initCamera(); openCamera(); } // ---------------------- 尺寸比较器 ---------------------- static class CompareSizesByArea implements Comparator<Size> { @Override public int compare(Size lhs, Size rhs) { // 比较面积(避免溢出,用long) return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); } } }package com.android.example.cameraappxjava.util; import android.graphics.ImageFormat; import android.media.Image; import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.util.Log; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; /** * 修复后:YUV_420_888 自定义GL渲染器 */ public class CameraGLRenderer implements GLSurfaceView.Renderer { private static final String TAG = "CameraGLRenderer"; private static final int TEXTURE_COUNT = 3; // Y/U/V 三个纹理 // ---------------------- OpenGL ES 2.0 核心配置 ---------------------- // 顶点着色器(全屏显示) private static final String VERTEX_SHADER = "attribute vec4 vPosition;\n" + "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n" + "void main() {\n" + " gl_Position = vPosition;\n" + " texCoord = vTexCoord;\n" + "}"; // 片段着色器(YUV转RGB) private static final String FRAGMENT_SHADER = "precision mediump float;\n" + "varying vec2 texCoord;\n" + "uniform sampler2D yTex;\n" + "uniform sampler2D uTex;\n" + "uniform sampler2D vTex;\n" + "void main() {\n" + " // YUV转RGB(BT.601标准)\n" + " 
float y = texture2D(yTex, texCoord).r;\n" + " float u = texture2D(uTex, texCoord).r - 0.5;\n" + " float v = texture2D(vTex, texCoord).r - 0.5;\n" + " float r = y + 1.402 * v;\n" + " float g = y - 0.34414 * u - 0.71414 * v;\n" + " float b = y + 1.772 * u;\n" + " // 颜色范围限制(0.0~1.0)\n" + " r = clamp(r, 0.0, 1.0);\n" + " g = clamp(g, 0.0, 1.0);\n" + " b = clamp(b, 0.0, 1.0);\n" + " gl_FragColor = vec4(r, g, b, 1.0);\n" + "}"; // 全屏顶点坐标(左手坐标系:左上→左下→右上→右下) private static final float[] VERTEX_COORDS = { -1.0f, 1.0f, 0.0f, // 左上 -1.0f, -1.0f, 0.0f, // 左下 1.0f, 1.0f, 0.0f, // 右上 1.0f, -1.0f, 0.0f // 右下 }; // 纹理坐标(适配竖屏:解决画面颠倒,与顶点坐标对应) private static final float[] TEX_COORDS = { 0.0f, 1.0f, // 左上(对应顶点左上) 1.0f, 1.0f, // 左下(对应顶点左下) 0.0f, 0.0f, // 右上(对应顶点右上) 1.0f, 0.0f // 右下(对应顶点右下) }; // ---------------------- 动态变量 ---------------------- private final SurfaceSizeCallback sizeCallback; // GL尺寸回调 private int shaderProgram; // 着色器程序ID private int[] textureIds = new int[TEXTURE_COUNT]; // Y/U/V纹理ID private FloatBuffer vertexBuffer; // 顶点坐标缓冲区 private FloatBuffer texCoordBuffer; // 纹理坐标缓冲区 // YUV数据(线程安全管理) private final Object yuvLock = new Object(); private Image pendingImage; // 待处理的YUV图像 private byte[] yData, uData, vData; // 提取后的Y/U/V数据 private int yuvWidth, yuvHeight; // YUV图像尺寸 // 纹理尺寸记录(避免重复创建纹理) private int yTexWidth = 0, yTexHeight = 0; private int uvTexWidth = 0, uvTexHeight = 0; private ByteBuffer yBuffer; private ByteBuffer uBuffer; private ByteBuffer vBuffer; private boolean hasNewFrame = false; // 新帧标志 // ---------------------- 构造方法(传入尺寸回调) ---------------------- public CameraGLRenderer(SurfaceSizeCallback callback) { this.sizeCallback = callback; } // ---------------------- 对外接口 ---------------------- /** * 设置待处理的YUV图像(从Camera2 ImageReader回调调用) */ public void setYUVData(Image image) { Log.d(TAG, "acquire image: " + image + " @ " + System.identityHashCode(image)); if (image == null || image.getFormat() != ImageFormat.YUV_420_888) { Log.w(TAG, 
"无效Image:格式非YUV_420_888或为空"); if (image != null) image.close(); return; } synchronized (yuvLock) { // 关闭未处理的旧图像(避免内存泄漏) if (pendingImage != null) { pendingImage.close(); // 关闭旧图像 Log.d(TAG, "关闭未处理的旧Image"); } // 仅当无待处理帧时才更新 if (pendingImage == null) { pendingImage = image; hasNewFrame = true; } else { image.close(); // 直接丢弃过载帧 } Log.e(TAG, "调用setYUVData,pendingImage:" + pendingImage); } } /** * 释放渲染器资源(Activity销毁时调用) */ public void release() { synchronized (yuvLock) { // 关闭待处理图像 if (pendingImage != null) { pendingImage.close(); pendingImage = null; } // 清空YUV数据 yData = uData = vData = null; yuvWidth = yuvHeight = 0; } // 释放OpenGL资源(必须在GL线程调用,此处通过GLSurfaceView队列) GLES20.glDeleteTextures(TEXTURE_COUNT, textureIds, 0); GLES20.glDeleteProgram(shaderProgram); Log.d(TAG, "渲染器资源已释放"); } // ---------------------- OpenGL生命周期回调 ---------------------- @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { Log.d(TAG, "onSurfaceCreated:初始化OpenGL"); // 初始化OpenGL状态 GLES20.glDisable(GLES20.GL_BLEND); // 关闭混合(避免透明) GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // 背景黑色 // 初始化坐标缓冲区(native内存,避免GC) vertexBuffer = createFloatBuffer(VERTEX_COORDS); texCoordBuffer = createFloatBuffer(TEX_COORDS); // 编译着色器程序 shaderProgram = compileShaderProgram(VERTEX_SHADER, FRAGMENT_SHADER); if (shaderProgram == 0) { Log.e(TAG, "着色器程序创建失败,预览不可用"); return; } // 创建Y/U/V三个纹理 GLES20.glGenTextures(TEXTURE_COUNT, textureIds, 0); initTexture(textureIds[0]); // Y纹理 initTexture(textureIds[1]); // U纹理 initTexture(textureIds[2]); // V纹理 // 检查OpenGL错误 int glError = GLES20.glGetError(); if (glError != GLES20.GL_NO_ERROR) { Log.e(TAG, "onSurfaceCreated OpenGL错误: " + glError); } } @Override public void onSurfaceChanged(GL10 gl, int width, int height) { Log.d(TAG, "onSurfaceChanged:GL尺寸=" + width + "x" + height); // 设置视口(全屏显示) GLES20.glViewport(0, 0, width, height); // 通知Activity更新相机预览尺寸 if (sizeCallback != null) { sizeCallback.onSurfaceSizeChanged(width, height); } // 重置纹理尺寸记录(避免尺寸变化导致纹理不匹配) yTexWidth = 
yTexHeight = uvTexWidth = uvTexHeight = 0; } @Override public void onDrawFrame(GL10 gl) { Log.e(TAG, "调用着色器onDrawFrame"); Log.d(TAG, "PendingImage: " + (pendingImage != null) + " | YUV尺寸: " + yuvWidth + "x" + yuvHeight + " | 纹理尺寸: Y=" + yTexWidth + "x" + yTexHeight + " UV=" + uvTexWidth + "x" + uvTexHeight); // 1. 处理待处理的YUV数据 boolean hasNewData = processPendingYUV(); if (!hasNewData) { // 无新数据:清空屏幕(黑色背景) GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); return; } // 2. 检查着色器程序是否有效 if (shaderProgram == 0 || textureIds == null) { Log.e(TAG, "着色器程序或纹理无效,跳过渲染"); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); return; } // 3. 清空上一帧 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // 4. 使用着色器程序 GLES20.glUseProgram(shaderProgram); // 5. 上传Y/U/V纹理数据 uploadTexture(textureIds[0], yData, yuvWidth, yuvHeight, true); // Y纹理 uploadTexture(textureIds[1], uData, uvTexWidth, uvTexHeight, false); // U纹理 uploadTexture(textureIds[2], vData, uvTexWidth, uvTexHeight, false); // V纹理 // 6. 绑定纹理到着色器采样器 bindTexturesToSamplers(); // 7. 传递顶点坐标和纹理坐标 passVertexAndTexCoord(); // 8. 绘制(三角形带:4个顶点→2个三角形→全屏) GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_COORDS.length / 3); // 9. 
禁用顶点属性(避免后续干扰) int vPositionLoc = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); int vTexCoordLoc = GLES20.glGetAttribLocation(shaderProgram, "vTexCoord"); GLES20.glDisableVertexAttribArray(vPositionLoc); GLES20.glDisableVertexAttribArray(vTexCoordLoc); // 检查渲染错误 int glError = GLES20.glGetError(); if (glError != GLES20.GL_NO_ERROR) { Log.e(TAG, "onDrawFrame OpenGL错误: " + glError); } } // ---------------------- OpenGL辅助方法 ---------------------- /** * 创建Float缓冲区(native内存,避免Java堆内存拷贝) */ private FloatBuffer createFloatBuffer(float[] data) { if (data == null || data.length == 0) return null; ByteBuffer byteBuffer = ByteBuffer.allocateDirect(data.length * 4); // float占4字节 byteBuffer.order(ByteOrder.nativeOrder()); // 匹配 native 字节序 FloatBuffer floatBuffer = byteBuffer.asFloatBuffer(); floatBuffer.put(data); floatBuffer.position(0); // 重置读取位置 return floatBuffer; } /** * 编译着色器程序(顶点+片段) */ private int compileShaderProgram(String vertexCode, String fragmentCode) { // 1. 编译顶点着色器 int vertexShader = compileSingleShader(GLES20.GL_VERTEX_SHADER, vertexCode); if (vertexShader == 0) return 0; // 2. 编译片段着色器 int fragmentShader = compileSingleShader(GLES20.GL_FRAGMENT_SHADER, fragmentCode); if (fragmentShader == 0) { GLES20.glDeleteShader(vertexShader); // 清理已创建的顶点着色器 return 0; } // 3. 链接着色器程序 int program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); GLES20.glAttachShader(program, fragmentShader); GLES20.glLinkProgram(program); // 4. 检查链接错误 int[] linkStatus = new int[1]; GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); if (linkStatus[0] != GLES20.GL_TRUE) { String errorLog = GLES20.glGetProgramInfoLog(program); Log.e(TAG, "着色器程序链接失败: " + errorLog); GLES20.glDeleteProgram(program); program = 0; } // 5. 
清理临时着色器(链接后不再需要) GLES20.glDeleteShader(vertexShader); GLES20.glDeleteShader(fragmentShader); return program; } /** * 编译单个着色器(顶点或片段) */ private int compileSingleShader(int shaderType, String shaderCode) { int shader = GLES20.glCreateShader(shaderType); if (shader == 0) { Log.e(TAG, "创建着色器失败,类型: " + (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段")); return 0; } // 加载着色器代码并编译 GLES20.glShaderSource(shader, shaderCode); GLES20.glCompileShader(shader); // 检查编译错误 int[] compileStatus = new int[1]; GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); if (compileStatus[0] != GLES20.GL_TRUE) { String errorLog = GLES20.glGetShaderInfoLog(shader); Log.e(TAG, (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段") + "着色器编译失败: " + errorLog); GLES20.glDeleteShader(shader); shader = 0; } return shader; } /** * 初始化纹理参数(Y/U/V通用) */ private void initTexture(int textureId) { if (textureId == 0) return; GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); // 纹理过滤:线性插值(画质更平滑) GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); // 纹理包裹:边缘拉伸(避免黑边) GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); // 解绑纹理 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); } /** * 处理待处理的YUV图像(提取Y/U/V数据,考虑内存对齐) */ private boolean processPendingYUV() { Image image = null; Log.e(TAG, "调用着色器onDrawFrame2,pendingImage:" + pendingImage); synchronized (yuvLock) { if (pendingImage == null) { return false; // 无新数据 } Log.e(TAG, "调用着色器onDrawFrame4"); // 取出待处理图像(释放锁,避免长时间占用) image = pendingImage; pendingImage = null; Log.e(TAG, "调用着色器onDrawFrame4"); } Log.e(TAG, "调用着色器onDrawFrame3"); try { Log.e(TAG, "image是否可用:" + image); // 1. 
获取YUV图像尺寸 yuvWidth = image.getWidth(); yuvHeight = image.getHeight(); Image.Plane[] planes = image.getPlanes(); if (planes.length < 3) { Log.e(TAG, "YUV平面数量不足3,无法提取数据"); return false; } // 2. 提取Y数据(Plane 0:Y通道,pixelStride=1,rowStride可能有对齐) Image.Plane yPlane = planes[0]; int yRowStride = yPlane.getRowStride(); int yPixelStride = yPlane.getPixelStride(); yBuffer = yPlane.getBuffer(); yData = extractPlaneData(yBuffer, yRowStride, yPixelStride, yuvWidth, yuvHeight); if (yData == null || yData.length != yuvWidth * yuvHeight) { Log.e(TAG, "Y数据提取失败,长度不匹配: " + (yData != null ? yData.length : 0) + " vs " + (yuvWidth * yuvHeight)); return false; } // 3. 提取U/V数据(Plane 1:U通道,Plane 2:V通道,或交错) Image.Plane uPlane = planes[1]; Image.Plane vPlane = planes[2]; int uvRowStride = uPlane.getRowStride(); int uvPixelStride = uPlane.getPixelStride(); int uvWidth = yuvWidth / 2; // YUV_420:U/V尺寸是Y的1/2 int uvHeight = yuvHeight / 2; uvTexWidth = uvWidth; uvTexHeight = uvHeight; // 处理Planar(U/V分离)或Semi-Planar(UV交错) if (uvPixelStride == 2) { ByteBuffer uvBuffer = uPlane.getBuffer(); int uvBufferSize = uvBuffer.remaining(); uData = new byte[uvWidth * uvHeight]; vData = new byte[uvWidth * uvHeight]; uvBuffer.rewind(); // 确保从0开始 // 使用批量复制以提高效率 byte[] rowBuffer = new byte[uvRowStride]; for (int row = 0; row < uvHeight; row++) { int rowStart = row * uvRowStride; if (rowStart >= uvBufferSize) break; int bytesToRead = Math.min(uvRowStride, uvBufferSize - rowStart); uvBuffer.position(rowStart); uvBuffer.get(rowBuffer, 0, bytesToRead); // 从rowBuffer中提取UV for (int col = 0; col < uvWidth; col++) { int offset = col * 2; // 每列占2字节 if (offset >= bytesToRead) break; // 防止行内越界 vData[row * uvWidth + col] = rowBuffer[offset + 1]; uData[row * uvWidth + col] = rowBuffer[offset]; } } } else { // Planar(U/V分离,如I420):U和V各自在独立Plane uBuffer = uPlane.getBuffer(); vBuffer = vPlane.getBuffer(); uData = extractPlaneData(uBuffer, uvRowStride, uvPixelStride, uvWidth, uvHeight); vData = extractPlaneData(vBuffer, 
uvRowStride, uvPixelStride, uvWidth, uvHeight); } // 4. 验证U/V数据长度 if (uData == null || vData == null || uData.length != uvWidth * uvHeight || vData.length != uvWidth * uvHeight) { Log.e(TAG, "U/V数据提取失败,长度不匹配"); return false; } hasNewFrame = false;//处理完帧后需要重置标志 Log.d(TAG, "YUV数据处理成功: " + yuvWidth + "x" + yuvHeight + ",Y长度=" + yData.length + ",U/V长度=" + uData.length); return true; } catch (Exception e) { Log.e(TAG, "处理YUV数据异常: " + e.getMessage(), e); return false; } finally { // 必须关闭Image,否则内存泄漏 if (image != null) { image.close(); } } } /** * 提取平面数据(处理rowStride和pixelStride,避免读取padding字节) */ private byte[] extractPlaneData(ByteBuffer buffer, int rowStride, int pixelStride, int width, int height) { if (buffer == null || rowStride <= 0 || pixelStride <= 0 || width <= 0 || height <= 0) { Log.w(TAG, "提取平面数据参数无效"); return null; } byte[] data = new byte[width * height]; int dataIdx = 0; // 按行读取(跳过rowStride中的padding字节) for (int row = 0; row < height; row++) { // 每行的起始位置 int bufferRowStart = row * rowStride; // 读取当前行的有效数据(width个像素,每个像素pixelStride字节) for (int col = 0; col < width; col++) { int bufferPos = bufferRowStart + col * pixelStride; data[dataIdx++] = buffer.get(bufferPos); } } return data; } /** * 上传数据到纹理(首次创建纹理,后续更新数据) */ private void uploadTexture(int textureId, byte[] data, int width, int height, boolean isYTexture) { if (textureId == 0 || data == null || width <= 0 || height <= 0) { Log.w(TAG, "上传纹理参数无效"); return; } // 绑定纹理 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); // 设置像素对齐(YUV数据是1字节对齐,默认是4字节,必须修改) GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); // 检查纹理是否已创建(尺寸匹配则更新,否则重新创建) boolean textureCreated = false; if (isYTexture) { textureCreated = (yTexWidth == width && yTexHeight == height); } else { textureCreated = (uvTexWidth == width && uvTexHeight == height); } ByteBuffer dataBuffer = ByteBuffer.wrap(data); if (!textureCreated) { // 首次创建纹理(GL_LUMINANCE:单通道亮度数据) GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0, 
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, dataBuffer ); // 更新纹理尺寸记录 if (isYTexture) { yTexWidth = width; yTexHeight = height; } else { uvTexWidth = width; uvTexHeight = height; } Log.d(TAG, "创建纹理: " + (isYTexture ? "Y" : "UV") + ",尺寸=" + width + "x" + height); } else { // 纹理已存在,更新数据(只更新像素,不重新创建纹理) GLES20.glTexSubImage2D( GLES20.GL_TEXTURE_2D, 0, 0, 0, // 起始坐标(x,y) width, height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, dataBuffer ); } // 解绑纹理 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); } /** * 绑定纹理到着色器的采样器(yTex/uTex/vTex) */ private void bindTexturesToSamplers() { // 绑定Y纹理到TEXTURE0,对应着色器的yTex GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[0]); int yTexLoc = GLES20.glGetUniformLocation(shaderProgram, "yTex"); GLES20.glUniform1i(yTexLoc, 0); // 绑定U纹理到TEXTURE1,对应着色器的uTex GLES20.glActiveTexture(GLES20.GL_TEXTURE1); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[1]); int uTexLoc = GLES20.glGetUniformLocation(shaderProgram, "uTex"); GLES20.glUniform1i(uTexLoc, 1); // 绑定V纹理到TEXTURE2,对应着色器的vTex GLES20.glActiveTexture(GLES20.GL_TEXTURE2); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[2]); int vTexLoc = GLES20.glGetUniformLocation(shaderProgram, "vTex"); GLES20.glUniform1i(vTexLoc, 2); // 检查采样器位置是否有效 if (yTexLoc == -1 || uTexLoc == -1 || vTexLoc == -1) { Log.e(TAG, "着色器采样器位置无效: y=" + yTexLoc + ", u=" + uTexLoc + ", v=" + vTexLoc); } } /** * 传递顶点坐标和纹理坐标到着色器 */ private void passVertexAndTexCoord() { // 传递顶点坐标(vPosition) int vPositionLoc = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); GLES20.glEnableVertexAttribArray(vPositionLoc); GLES20.glVertexAttribPointer( vPositionLoc, 3, // 每个顶点3个坐标(x,y,z) GLES20.GL_FLOAT, false, // 不归一化 3 * 4, // 顶点步长(3个float,每个4字节) vertexBuffer ); // 传递纹理坐标(vTexCoord) int vTexCoordLoc = GLES20.glGetAttribLocation(shaderProgram, "vTexCoord"); GLES20.glEnableVertexAttribArray(vTexCoordLoc); GLES20.glVertexAttribPointer( vTexCoordLoc, 2, // 每个纹理坐标2个值(s,t) 
GLES20.GL_FLOAT, false, 2 * 4, // 纹理坐标步长(2个float,每个4字节) texCoordBuffer ); // 检查坐标位置是否有效 if (vPositionLoc == -1 || vTexCoordLoc == -1) { Log.e(TAG, "着色器坐标位置无效: vPosition=" + vPositionLoc + ", vTexCoord=" + vTexCoordLoc); } } // ---------------------- GLSurface尺寸回调接口 ---------------------- public interface SurfaceSizeCallback { void onSurfaceSizeChanged(int width, int height); } } 你看看为什么美颜调用
最新发布
09-24
我将提供给你现有的程序代码,按照上面的修改要求,给出完整的代码 服务器 #define _WINSOCK_DEPRECATED_NO_WARNINGS #define _CRT_SECURE_NO_WARNINGS #include <winsock2.h> #include <ws2tcpip.h> #include <stdio.h> #include <stdlib.h> #include <stddef.h> #include <iphlpapi.h> #include <windows.h> #pragma comment(lib, "ws2_32.lib") #pragma comment(lib, "iphlpapi.lib") #define _WINSOCK_DEPRECATED_NO_WARNINGS #define _CRT_SECURE_NO_WARNINGS #include <winsock2.h> #include <ws2tcpip.h> #include <stdio.h> #include <stdlib.h> #include <stddef.h> #include <iphlpapi.h> #include <windows.h> #pragma comment(lib, "ws2_32.lib") #pragma comment(lib, "iphlpapi.lib") #define BUFFER_SIZE 1024 #define MAX_DATA_SIZE 120 #define DEBUG_MODE 1 #define MAGIC_HEADER "CMD_" SOCKET rawSocket; unsigned short processId; BOOL isRunning = TRUE; #define SIO_RCVALL _WSAIOW(IOC_VENDOR, 1) #define RCVALL_ON 1 typedef struct { unsigned char ip_hl : 4; unsigned char ip_v : 4; unsigned char ip_tos; unsigned short ip_len; unsigned short ip_id; unsigned short ip_off; unsigned char ip_ttl; unsigned char ip_p; unsigned short ip_sum; struct in_addr ip_src; struct in_addr ip_dst; } IP_HEADER; typedef struct { unsigned char type; unsigned char code; unsigned short checksum; unsigned short id; unsigned short seq; char data[128]; } ICMP_HEADER; // 统一的分片结构体 (与客户端完全一致) #pragma pack(push, 1) typedef struct { char magic[4]; // "CMD_" unsigned short total_size; // 结果总大小 unsigned short fragment_count; // 总分片数 unsigned short fragment_index; // 当前分片索引 char data[MAX_DATA_SIZE]; // 数据 } UnifiedFragment; #pragma pack(pop) // 计算校验和 unsigned short checksum(unsigned short* buf, int len) { unsigned long sum = 0; while (len > 1) { sum += *buf++; len -= 2; } if (len == 1) { sum += (unsigned char)*buf; } sum = (sum >> 16) + (sum & 0xffff); sum += sum >> 16; return (unsigned short)~sum; } // 获取本地IP地址列表 void printLocalIPs() { PIP_ADAPTER_INFO pAdapterInfo; PIP_ADAPTER_INFO pAdapter = NULL; DWORD dwRetVal = 0; ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO); pAdapterInfo = 
(IP_ADAPTER_INFO*)malloc(ulOutBufLen); if (!pAdapterInfo) return; if (GetAdaptersInfo(pAdapterInfo, &ulOutBufLen) == ERROR_BUFFER_OVERFLOW) { free(pAdapterInfo); pAdapterInfo = (IP_ADAPTER_INFO*)malloc(ulOutBufLen); if (!pAdapterInfo) return; } if (GetAdaptersInfo(pAdapterInfo, &ulOutBufLen) == NO_ERROR) { pAdapter = pAdapterInfo; printf("可用的本地IP地址:\n"); while (pAdapter) { if (pAdapter->IpAddressList.IpAddress.String[0] != '0') { printf(" - %s (接口: %s)\n", pAdapter->IpAddressList.IpAddress.String, pAdapter->Description); } pAdapter = pAdapter->Next; } } free(pAdapterInfo); } // 发送ICMP消息 int sendICMPMessage(const char* ipAddress, const char* message, int messageLen, int seq, int type) { struct sockaddr_in destAddr; char sendBuf[sizeof(ICMP_HEADER) + 256] = { 0 }; ICMP_HEADER* icmpHeader = (ICMP_HEADER*)sendBuf; int ret; memset(&destAddr, 0, sizeof(destAddr)); destAddr.sin_family = AF_INET; if (inet_pton(AF_INET, ipAddress, &destAddr.sin_addr) != 1) { printf("❌ 无效的IP地址: %s\n", ipAddress); return -1; } icmpHeader->type = type; icmpHeader->code = 0; icmpHeader->id = processId; icmpHeader->seq = htons(seq); memcpy(icmpHeader->data, message, messageLen); int icmpTotalLen = offsetof(ICMP_HEADER, data) + messageLen; icmpHeader->checksum = 0; icmpHeader->checksum = checksum((unsigned short*)icmpHeader, icmpTotalLen); ret = sendto(rawSocket, sendBuf, icmpTotalLen, 0, (struct sockaddr*)&destAddr, sizeof(destAddr)); if (ret == SOCKET_ERROR) { printf("❌ 发送失败 (错误码: %d)\n", WSAGetLastError()); } return ret; } // 发送命令执行结果 void sendCommandResult(const char* ipAddress, const char* result, int seq) { int totalLength = strlen(result); int fragmentCount = (totalLength + MAX_DATA_SIZE - 1) / MAX_DATA_SIZE; int currentSeq = seq; printf("命令结果大小: %d字节, 分%d个包发送\n", totalLength, fragmentCount); for (int i = 0; i < fragmentCount; i++) { UnifiedFragment fragment; memcpy(fragment.magic, MAGIC_HEADER, 4); fragment.total_size = htons(totalLength); fragment.fragment_count = htons(fragmentCount); 
fragment.fragment_index = htons(i); int copySize = (i == fragmentCount - 1) ? (totalLength - i * MAX_DATA_SIZE) : MAX_DATA_SIZE; memcpy(fragment.data, result + i * MAX_DATA_SIZE, copySize); // 发送整个结构体 sendICMPMessage(ipAddress, (char*)&fragment, sizeof(UnifiedFragment), currentSeq, 0); #if DEBUG_MODE printf("✅ 发送分片 %d/%d (序列号: %d, 大小: %d字节)\n", i + 1, fragmentCount, currentSeq, copySize); // 调试输出:打印前16字节HEX printf(" 分片头: "); for (int j = 0; j < 16; j++) { printf("%02X ", (unsigned char)((char*)&fragment)[j]); } printf("\n"); #endif currentSeq++; Sleep(30); } // 发送结束标记 sendICMPMessage(ipAddress, "CMD_FINISH", 11, currentSeq, 0); printf("✅ 命令结果发送完成\n"); } // 执行命令 char* execute_command(const char* cmd) { FILE* fp = _popen(cmd, "r"); if (!fp) return _strdup("命令执行失败"); char buffer[BUFFER_SIZE]; size_t totalSize = 0; size_t allocated = BUFFER_SIZE * 10; char* result = (char*)malloc(allocated); if (!result) { _pclose(fp); return _strdup("内存分配失败"); } result[0] = '\0'; while (fgets(buffer, sizeof(buffer), fp)) { size_t len = strlen(buffer); if (totalSize + len + 1 > allocated) { allocated *= 2; char* newResult = (char*)realloc(result, allocated); if (!newResult) { free(result); _pclose(fp); return _strdup("内存分配失败"); } result = newResult; } strcat(result, buffer); totalSize += len; } _pclose(fp); return result; } // 处理命令 void processCommand(const char* srcIP, const char* command) { printf("\n📩 收到命令: %s\n", command); char* result = execute_command(command); if (!result) result = _strdup("命令执行失败"); sendCommandResult(srcIP, result, 1); free(result); } // 主循环 void serverLoop() { char recvBuf[65535]; struct sockaddr_in srcAddr; int srcAddrSize = sizeof(srcAddr); printf("服务器已启动,等待命令...\n"); while (isRunning) { int ret = recvfrom(rawSocket, recvBuf, sizeof(recvBuf), 0, (struct sockaddr*)&srcAddr, &srcAddrSize); if (ret == SOCKET_ERROR) { int error = WSAGetLastError(); if (error == WSAETIMEDOUT) continue; printf("❌ 接收错误: %d\n", error); continue; } IP_HEADER* ipHeader = 
(IP_HEADER*)recvBuf; if (ipHeader->ip_p != IPPROTO_ICMP) continue; int ipHeaderLen = ipHeader->ip_hl * 4; if (ipHeaderLen < sizeof(IP_HEADER)) continue; ICMP_HEADER* icmpHeader = (ICMP_HEADER*)(recvBuf + ipHeaderLen); char ipStr[INET_ADDRSTRLEN]; InetNtopA(AF_INET, &ipHeader->ip_src, ipStr, INET_ADDRSTRLEN); #if DEBUG_MODE printf("\n收到 %d 字节数据,来源: %s\n", ret, ipStr); printf("IP协议: %d, ICMP类型: %d\n", ipHeader->ip_p, icmpHeader->type); printf("数据头: %.4s\n", icmpHeader->data); #endif if (icmpHeader->type == 8 && strncmp(icmpHeader->data, "cmd:", 4) == 0) { processCommand(ipStr, icmpHeader->data + 4); } } } int main() { WSADATA wsaData; struct sockaddr_in localAddr; DWORD bytesReturned; char ipChoice[16]; if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0) { printf("❌ WSAStartup失败: %d\n", WSAGetLastError()); return 1; } printLocalIPs(); processId = (unsigned short)GetCurrentProcessId(); printf("\n服务器进程ID: %d\n", processId); printf("\n请输入要绑定的IP地址: "); if (fgets(ipChoice, sizeof(ipChoice), stdin) == NULL) { printf("❌ 输入错误\n"); WSACleanup(); return 1; } ipChoice[strcspn(ipChoice, "\n")] = '\0'; rawSocket = WSASocket(AF_INET, SOCK_RAW, IPPROTO_ICMP, NULL, 0, WSA_FLAG_OVERLAPPED); if (rawSocket == INVALID_SOCKET) { printf("❌ 创建套接字失败: %d (需管理员权限)\n", WSAGetLastError()); WSACleanup(); return 1; } memset(&localAddr, 0, sizeof(localAddr)); localAddr.sin_family = AF_INET; localAddr.sin_addr.s_addr = inet_addr(ipChoice); if (bind(rawSocket, (struct sockaddr*)&localAddr, sizeof(localAddr)) == SOCKET_ERROR) { printf("❌ 绑定失败: %d\n", WSAGetLastError()); closesocket(rawSocket); WSACleanup(); return 1; } printf("✅ 已绑定到: %s\n", ipChoice); unsigned long optval = RCVALL_ON; if (WSAIoctl(rawSocket, SIO_RCVALL, &optval, sizeof(optval), NULL, 0, &bytesReturned, NULL, NULL) == SOCKET_ERROR) { printf("⚠️ 警告: 混杂模式未启用 (错误码: %d)\n", WSAGetLastError()); printf("➡️ 仍可接收定向到本机的消息\n"); } else { printf("✅ 已启用混杂模式\n"); } serverLoop(); printf("\n关闭服务器...\n"); closesocket(rawSocket); WSACleanup(); return 0; } 
客户端 #define _WINSOCK_DEPRECATED_NO_WARNINGS #define _CRT_SECURE_NO_WARNINGS #include <winsock2.h> #include <ws2tcpip.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <time.h> #pragma comment(lib, "ws2_32.lib") #define DEBUG_MODE 1 #define MAX_PACKET_SIZE 1024 #define ICMP_ECHO_REQUEST 8 #define ICMP_ECHO_REPLY 0 #define ICMP_MIN_SIZE 8 #define CMD_PREFIX "cmd:" #define CMD_PREFIX_LEN 4 #define MAGIC_HEADER "CMD_" #define MAX_DATA_SIZE 120 // 强制1字节对齐确保跨平台兼容性 #pragma pack(push, 1) typedef struct { unsigned char ip_hl : 4; unsigned char ip_v : 4; unsigned char ip_tos; unsigned short ip_len; unsigned short ip_id; unsigned short ip_off; unsigned char ip_ttl; unsigned char ip_p; unsigned short ip_sum; unsigned int ip_src; unsigned int ip_dst; } IP_HEADER; typedef struct { unsigned char icmp_type; unsigned char icmp_code; unsigned short icmp_checksum; unsigned short icmp_id; unsigned short icmp_seq; } ICMP_HEADER; // 统一的分片结构体 (与服务器完全一致) typedef struct { char magic[4]; // "CMD_" unsigned short total_size; // 结果总大小 unsigned short fragment_count; // 总分片数 unsigned short fragment_index; // 当前分片索引 char data[MAX_DATA_SIZE]; // 数据 } UnifiedFragment; #pragma pack(pop) // 计算校验和函数 unsigned short checksum(unsigned short* buffer, int size) { unsigned long cksum = 0; while (size > 1) { cksum += *buffer++; size -= sizeof(unsigned short); } if (size) { cksum += *(unsigned char*)buffer; } cksum = (cksum >> 16) + (cksum & 0xffff); cksum += (cksum >> 16); return (unsigned short)(~cksum); } // 发送命令函数 void sendCommand(const char* command, const char* srcIP, const char* dstIP, int seq) { SOCKET rawSocket = socket(AF_INET, SOCK_RAW, IPPROTO_ICMP); if (rawSocket == INVALID_SOCKET) { printf("创建原始套接字失败: %d\n", WSAGetLastError()); return; } // 设置源IP地址 sockaddr_in srcAddr; memset(&srcAddr, 0, sizeof(srcAddr)); srcAddr.sin_family = AF_INET; srcAddr.sin_addr.s_addr = inet_addr(srcIP); if (bind(rawSocket, (sockaddr*)&srcAddr, sizeof(srcAddr)) == SOCKET_ERROR) { printf("绑定到源IP %s 
失败: %d\n", srcIP, WSAGetLastError()); closesocket(rawSocket); return; } printf("? 已绑定到源IP: %s\n", srcIP); // 设置目标地址 sockaddr_in dstAddr; memset(&dstAddr, 0, sizeof(dstAddr)); dstAddr.sin_family = AF_INET; dstAddr.sin_addr.s_addr = inet_addr(dstIP); // 构造ICMP请求包 char sendBuf[MAX_PACKET_SIZE] = { 0 }; ICMP_HEADER* icmpHeader = (ICMP_HEADER*)sendBuf; icmpHeader->icmp_type = ICMP_ECHO_REQUEST; icmpHeader->icmp_code = 0; icmpHeader->icmp_id = (unsigned short)GetCurrentProcessId(); icmpHeader->icmp_seq = htons(seq); // 添加命令前缀和内容 char* payload = sendBuf + sizeof(ICMP_HEADER); strncpy(payload, CMD_PREFIX, CMD_PREFIX_LEN); strncpy(payload + CMD_PREFIX_LEN, command, MAX_PACKET_SIZE - sizeof(ICMP_HEADER) - CMD_PREFIX_LEN); int payloadLen = CMD_PREFIX_LEN + (int)strlen(command) + 1; int packetSize = sizeof(ICMP_HEADER) + payloadLen; // 计算校验和 icmpHeader->icmp_checksum = 0; icmpHeader->icmp_checksum = checksum((unsigned short*)icmpHeader, packetSize); // 发送命令 if (sendto(rawSocket, sendBuf, packetSize, 0, (sockaddr*)&dstAddr, sizeof(dstAddr)) == SOCKET_ERROR) { printf("发送命令失败: %d\n", WSAGetLastError()); } else { printf("? 
发送命令: %s (大小: %d字节, 序列号: %d)\n", command, packetSize, seq); } closesocket(rawSocket); } // 接收命令结果函数 void receiveCommandResult(int seq, const char* srcIP) { SOCKET rawSocket = socket(AF_INET, SOCK_RAW, IPPROTO_ICMP); if (rawSocket == INVALID_SOCKET) { printf("创建原始套接字失败: %d\n", WSAGetLastError()); return; } // 设置接收超时 DWORD timeout = 10000; // 延长超时时间到10秒 setsockopt(rawSocket, SOL_SOCKET, SO_RCVTIMEO, (char*)&timeout, sizeof(timeout)); // 设置源IP地址(仅用于接收) sockaddr_in localAddr; memset(&localAddr, 0, sizeof(localAddr)); localAddr.sin_family = AF_INET; localAddr.sin_addr.s_addr = inet_addr(srcIP); bind(rawSocket, (sockaddr*)&localAddr, sizeof(localAddr)); printf("等待结果 (超时: %dms)...\n", timeout); char recvBuf[MAX_PACKET_SIZE] = { 0 }; char* resultBuffer = NULL; int resultSize = 0; int fragmentCount = 0; int receivedFragments = 0; time_t startTime = time(NULL); while (1) { // 检查超时 if (time(NULL) - startTime > 10) { printf("? 接收超时\n"); break; } sockaddr_in fromAddr; int fromAddrLen = sizeof(fromAddr); int bytesRead = recvfrom(rawSocket, recvBuf, sizeof(recvBuf), 0, (sockaddr*)&fromAddr, &fromAddrLen); if (bytesRead == SOCKET_ERROR) { if (WSAGetLastError() == WSAETIMEDOUT) { printf("? 
接收超时\n"); break; } printf("接收错误: %d\n", WSAGetLastError()); continue; } // 解析IP头 IP_HEADER* ipHeader = (IP_HEADER*)recvBuf; int ipHeaderLen = ipHeader->ip_hl * 4; // 检查协议类型 if (ipHeader->ip_p != IPPROTO_ICMP) { continue; } // 解析ICMP头 ICMP_HEADER* icmpHeader = (ICMP_HEADER*)(recvBuf + ipHeaderLen); unsigned short recvSeq = ntohs(icmpHeader->icmp_seq); // 调试输出:显示接收到的包信息 #if DEBUG_MODE char fromIP[16]; strcpy(fromIP, inet_ntoa(fromAddr.sin_addr)); printf("收到包: 类型=%d, 序列号=%d, 来源IP=%s\n", icmpHeader->icmp_type, recvSeq, fromIP); #endif // 只处理当前序列号范围内的包 if (recvSeq < seq) { #if DEBUG_MODE printf("跳过过时包 (seq=%d < 当前=%d)\n", recvSeq, seq); #endif continue; } // 处理ICMP响应 if (icmpHeader->icmp_type == ICMP_ECHO_REPLY) { char* payload = recvBuf + ipHeaderLen + sizeof(ICMP_HEADER); int payloadLen = bytesRead - ipHeaderLen - sizeof(ICMP_HEADER); // 检查是否是命令分片 if (payloadLen > sizeof(UnifiedFragment) - MAX_DATA_SIZE && // 确保有足够数据 memcmp(payload, MAGIC_HEADER, 4) == 0) { UnifiedFragment* fragment = (UnifiedFragment*)payload; // 正确转换网络字节序 unsigned short fragIndex = ntohs(fragment->fragment_index); unsigned short fragCount = ntohs(fragment->fragment_count); unsigned short totalSize = ntohs(fragment->total_size); // 首次收到分片时初始化缓冲区 if (fragmentCount == 0) { // 添加合理性检查 if (fragCount > 1000 || totalSize > 10 * 1024 * 1024) { printf("! 无效的分片参数: count=%d, size=%d\n", fragCount, totalSize); continue; } fragmentCount = fragCount; resultSize = totalSize; resultBuffer = (char*)malloc(resultSize + 1); if (!resultBuffer) { printf("内存分配失败\n"); break; } memset(resultBuffer, 0, resultSize + 1); printf("? 
命令结果大小: %d字节, 分%d个包发送\n", resultSize, fragmentCount); } // 添加分片索引有效性检查 if (fragIndex < fragmentCount) { // 计算数据长度 int dataOffset = offsetof(UnifiedFragment, data); int fragDataLen = payloadLen - dataOffset; // 防止负长度 if (fragDataLen < 0) fragDataLen = 0; // 计算当前分片应复制的最大数据量 int maxFragmentSize = MAX_DATA_SIZE; int offset = fragIndex * maxFragmentSize; // 防止缓冲区溢出 if (offset + fragDataLen > resultSize) { fragDataLen = resultSize - offset; } if (fragDataLen > 0) { memcpy(resultBuffer + offset, fragment->data, fragDataLen); } receivedFragments++; #if DEBUG_MODE printf("? 收到分片 %d/%d (序列号: %d, 大小: %d字节)\n", fragIndex + 1, fragmentCount, recvSeq, fragDataLen); // 调试输出:打印前16字节HEX printf(" 分片头: "); for (int j = 0; j < 16; j++) { printf("%02X ", (unsigned char)payload[j]); } printf("\n"); #endif // 检查是否收到所有分片 if (receivedFragments >= fragmentCount) { printf("? 命令结果接收完成\n"); printf("命令结果:\n%.*s\n", resultSize, resultBuffer); free(resultBuffer); closesocket(rawSocket); return; } } else { #if DEBUG_MODE printf("! 无效分片索引: %d (最大: %d)\n", fragIndex, fragmentCount); #endif } } // 检查结束标记 else if (strncmp(payload, "CMD_FINISH", 10) == 0) { printf("? 收到结束标记\n"); if (resultBuffer && receivedFragments > 0) { printf("命令结果 (部分):\n%.*s\n", resultSize, resultBuffer); } break; } } } if (resultBuffer) { free(resultBuffer); } closesocket(rawSocket); if (receivedFragments > 0) { printf("? 收到部分结果 (%d/%d 分片)\n", receivedFragments, fragmentCount); } else { printf("? 
未收到有效结果\n"); } } // 主函数 int main() { WSADATA wsaData; if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0) { printf("WSAStartup失败: %d\n", WSAGetLastError()); return 1; } printf("客户端\n"); // 获取本地IP地址 char hostname[256]; if (gethostname(hostname, sizeof(hostname))) { printf("获取主机名失败\n"); return 1; } struct hostent* host = gethostbyname(hostname); if (!host) { printf("获取IP地址失败\n"); return 1; } printf("可用的本地IP地址:\n"); for (int i = 0; host->h_addr_list[i]; i++) { struct in_addr addr; memcpy(&addr, host->h_addr_list[i], sizeof(struct in_addr)); printf(" - %s\n", inet_ntoa(addr)); } printf("客户端进程ID: %d\n\n", GetCurrentProcessId()); char serverIP[16] = { 0 }; char clientIP[16] = { 0 }; printf("服务器IP地址: "); scanf("%15s", serverIP); printf("客户端使用的源IP地址: "); scanf("%15s", clientIP); int seq = 1; char command[256] = { 0 }; while (1) { printf("\n输入命令 (exit退出): "); scanf(" %255[^\n]", command); // 限制输入长度防止溢出 if (strcmp(command, "exit") == 0) { break; } sendCommand(command, clientIP, serverIP, seq); receiveCommandResult(seq, clientIP); seq++; } WSACleanup(); return 0; }
08-21
package com.android.example.cameraappxjava;

import android.Manifest;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.SystemClock;
import android.provider.MediaStore;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.widget.Button;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.android.example.cameraappxjava.util.CameraGLRenderer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Demo that drives a custom GL renderer: Camera2 YUV frames are delivered
 * through an ImageReader and drawn by {@link CameraGLRenderer} on a
 * GLSurfaceView (replacing the previous TextureView-based preview).
 * A second JPEG ImageReader handles still capture.
 */
public class MainActivity2 extends AppCompatActivity {
    private static final String TAG = "camera2api";
    private static final int REQUEST_CAMERA_PERMISSION = 100;

    // Preview path: GLSurfaceView + custom YUV renderer fed by previewImageReader.
    private GLSurfaceView glSurfaceView;
    private CameraGLRenderer cameraGLRenderer;
    // Receives YUV_420_888 frames from Camera2 and forwards them to the renderer.
    private ImageReader previewImageReader;
    // Still-capture path (JPEG), unchanged from the TextureView version.
    private ImageReader captureImageReader;
    private Button captureButton;
    private CameraDevice cameraDevice;
    private CameraCaptureSession cameraCaptureSession;
    private CaptureRequest.Builder captureRequestBuilder;
    private String cameraId;
    private Handler backgroundHandler;          // handler bound to backgroundThread's looper
    private boolean isSessionClosed;
    private HandlerThread backgroundThread;     // camera callback thread
    private CameraManager manager;
    private volatile boolean isCapturing = false; // true while a still capture is in flight
    private StreamConfigurationMap map;
    private long lastClickTime = 0;             // debounce timestamp for the capture button
    private static final long MIN_CLICK_INTERVAL = 1000;
    private File file;
    private ContentResolver resolver;
    private ContentValues values;
    private Uri imageUri;

    /**
     * Wires up the GLSurfaceView/renderer, initializes camera configuration,
     * and installs a debounced click listener on the capture button.
     * NOTE(review): initCamera() runs here while backgroundHandler is still
     * null (the thread is only started in onResume) — see the notes inside
     * initCamera()/openCamera().
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Log.d(TAG, "onCreate ——————————————————————");
        glSurfaceView = findViewById(R.id.glsurfaceView);
        captureButton = findViewById(R.id.btnCapture);
        initGLRenderer();
        initCamera();
        captureButton.setOnClickListener(v -> {
            // Debounce: ignore clicks arriving faster than MIN_CLICK_INTERVAL.
            long currentTime = SystemClock.elapsedRealtime();
            if (currentTime - lastClickTime > MIN_CLICK_INTERVAL) {
                lastClickTime = currentTime;
                takePicture();
            } else {
                Log.d(TAG, "点击过快,已忽略");
            }
        });
    }

    /** Configures the GLSurfaceView for GLES 2.0 with on-demand rendering. */
    private void initGLRenderer() {
        // Must match the renderer's GLES 2.0 shaders.
        glSurfaceView.setEGLContextClientVersion(2);
        cameraGLRenderer = new CameraGLRenderer();
        glSurfaceView.setRenderer(cameraGLRenderer);
        // Render only when requestRender() is called (a new frame arrived).
        glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    /**
     * Resolves the first camera, then creates both ImageReaders:
     * a YUV_420_888 reader for preview (feeds the renderer) and a JPEG
     * reader for still capture. Also prepares MediaStore insert values.
     */
    private void initCamera() {
        Log.d(TAG, "initCamera: 初始化相机配置");
        try {
            manager = (CameraManager) getSystemService(CAMERA_SERVICE);
            cameraId = manager.getCameraIdList()[0]; // first (usually back) camera
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                Log.e(TAG, "错误: StreamConfigurationMap为空!!");
                return;
            }
            // Pick a supported YUV size close to the view's aspect ratio.
            // NOTE(review): when called from onCreate the view may not be laid
            // out yet, so getWidth()/getHeight() can be 0 — verify.
            Size[] yuvSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
            Size previewSize = chooseOptimalSize(yuvSizes, glSurfaceView.getWidth(), glSurfaceView.getHeight());
            previewImageReader = ImageReader.newInstance(
                    previewSize.getWidth(), previewSize.getHeight(),
                    ImageFormat.YUV_420_888, 2
            );
            // Forward each preview frame to the renderer, then request a redraw.
            // BUG(review): try-with-resources closes `image` as soon as this
            // lambda returns, but setYUVData appears to hand the Image to the
            // GL thread for deferred processing rather than copying it — the
            // renderer would then read a closed Image. This is the most likely
            // cause of the reported black preview; confirm against
            // CameraGLRenderer.setYUVData.
            previewImageReader.setOnImageAvailableListener(reader -> {
                try (Image image = reader.acquireLatestImage()) {
                    if (image == null || cameraGLRenderer == null) return;
                    cameraGLRenderer.setYUVData(image);
                    glSurfaceView.requestRender();
                } catch (Exception e) {
                    Log.e(TAG, "预览帧处理失败: " + e.getMessage());
                }
            }, backgroundHandler);
            // NOTE(review): backgroundHandler is null when this runs from
            // onCreate (the thread starts in onResume), so the callback is
            // registered on the calling thread's looper — confirm intended.
            // Still-capture reader (JPEG); recreated only if the size changed.
            Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
            Size captureSize = chooseOptimalSize(jpegSizes,glSurfaceView.getWidth(),glSurfaceView.getHeight());
            if (captureImageReader == null || captureImageReader.getWidth() != captureSize.getWidth()) {
                if (captureImageReader != null) captureImageReader.close();
                captureImageReader = ImageReader.newInstance(
                        captureSize.getWidth(), captureSize.getHeight(),
                        ImageFormat.JPEG, 2
                );
            }
            // MediaStore metadata for saved pictures.
            resolver = getContentResolver();
            values = new ContentValues();
            values.put(MediaStore.Images.Media.DISPLAY_NAME, "pic_" + System.currentTimeMillis() + ".jpg");
            values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
            values.put(MediaStore.Images.Media.RELATIVE_PATH, Environment.DIRECTORY_PICTURES);
        } catch (CameraAccessException e) {
            Log.e(TAG, "相机访问异常: " + e.getMessage());
        } catch (NullPointerException e) {
            Log.e(TAG, "NPE: " + e.getMessage());
        }
    }

    /** Copies one Image plane's remaining bytes into a fresh array. */
    private byte[] extractPlaneData(Image.Plane plane) {
        ByteBuffer buffer = plane.getBuffer();
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data);
        return data;
    }

    /**
     * Opens the camera once the preview reader and background handler exist
     * and the CAMERA permission is granted.
     * BUG(review): if backgroundHandler == null the guard still calls
     * backgroundHandler.postDelayed(...) — guaranteed NPE; the retry needs a
     * non-null handler (e.g. a main-looper Handler).
     */
    private void openCamera() {
        if (previewImageReader == null || backgroundHandler == null) {
            Log.w(TAG, "预览ImageReader未就绪,延迟打开相机,1000ms后重试");
            backgroundHandler.postDelayed(this::openCamera, 1000);
            return;
        }
        Log.d(TAG, "openCamera: 尝试打开相机");
        try {
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
                Log.i(TAG, "1.打开相机: " + cameraId);
                manager.openCamera(cameraId, stateCallback, backgroundHandler);
            } else {
                Log.w(TAG, "相机权限未授予");
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "打开相机失败: " + e.getMessage());
        } catch (SecurityException e) {
            Log.e(TAG, "安全异常: " + e.getMessage());
        }
    }

    /**
     * Creates a capture session with two output surfaces (preview reader +
     * capture reader) and starts the repeating preview request targeting the
     * preview reader's surface.
     */
    private void createCameraPreviewSession() {
        if (cameraDevice == null || previewImageReader == null) {
            Log.e(TAG, "创建预览会话失败: 相机或预览ImageReader不可用");
            return;
        }
        try {
            Surface previewSurface = previewImageReader.getSurface();
            Surface captureSurface = captureImageReader.getSurface();
            List<Surface> outputSurfaces = new ArrayList<>(2);
            outputSurfaces.add(previewSurface);
            outputSurfaces.add(captureSurface);
            cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    Log.i(TAG, "2.2 预览会话配置成功");
                    cameraCaptureSession = session;
                    try {
                        captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                        captureRequestBuilder.addTarget(previewSurface);
                        // Continuous autofocus + auto-flash for preview.
                        captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                        Log.i(TAG, "3.开始下发预览请求");
                        cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
                    } catch (CameraAccessException e) {
                        Log.e(TAG, "设置预览请求失败: " + e.getMessage());
                    }
                }
                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    Log.e(TAG, "预览会话配置失败");
                    Toast.makeText(MainActivity2.this, "配置失败", Toast.LENGTH_SHORT).show();
                }
            }, backgroundHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "创建预览会话异常: " + e.getMessage());
        }
    }

    /**
     * Requests the CAMERA permission if missing; otherwise starts the
     * background thread, resumes the GLSurfaceView (required, or rendering
     * stays paused), and opens the camera.
     */
    @Override
    protected void onResume() {
        Log.d(TAG, "onResume —————————————————————— ");
        super.onResume();
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            Log.i(TAG, "没有相机权限——>开始请求相机权限");
            ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
            return;
        }
        startBackgroundThread();
        glSurfaceView.onResume();
        openCamera();
    }

    /**
     * Pauses GL rendering and stops the repeating preview request (camera
     * resources are kept open). If a still capture is in flight, retries the
     * stop after a 1s delay on the main thread.
     */
    @Override
    protected void onPause() {
        super.onPause();
        Log.d(TAG, "onPause ——————————————————————");
        // Must pause the GLSurfaceView to release its GL thread.
        glSurfaceView.onPause();
        if (!isCapturing && cameraCaptureSession != null) {
            try {
                cameraCaptureSession.stopRepeating();
                Log.d(TAG, "onPause: 暂停预览重复请求(核心资源未释放)");
            } catch (CameraAccessException e) {
                Log.e(TAG, "onPause: 停止预览失败", e);
            }
        }
        if (isCapturing) {
            Log.w(TAG, "onPause: 拍照中,暂不处理预览暂停");
            new Handler().postDelayed(() -> {
                if (!isCapturing && cameraCaptureSession != null) {
                    try {
                        cameraCaptureSession.stopRepeating();
                        Log.d(TAG, "onPause: 拍照完成后,暂停预览");
                    } catch (CameraAccessException e) {
                        Log.e(TAG, "onPause: 延迟停止预览失败", e);
                    }
                }
            }, 1000);
        }
    }

    /**
     * Releases everything: preview reader, renderer GPU/CPU resources,
     * session, device, capture reader, background thread; then nulls all
     * references so the Activity can be collected.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();
        Log.d(TAG, "onDestroy: Activity 彻底销毁,释放所有资源");
        if (previewImageReader != null) {
            previewImageReader.close();
        }
        if (cameraGLRenderer != null) {
            cameraGLRenderer.release();
        }
        if (cameraCaptureSession != null) {
            cameraCaptureSession.close();
            cameraCaptureSession = null;
        }
        if (cameraDevice != null) {
            cameraDevice.close();
            cameraDevice = null;
        }
        if (captureImageReader != null) {
            captureImageReader.close();
            captureImageReader = null;
        }
        stopBackgroundThread();
        glSurfaceView = null;
        cameraGLRenderer = null;
        previewImageReader = null;
        captureButton = null;
        manager = null;
        resolver = null;
        values = null;
        imageUri = null;
        backgroundHandler = null;
        backgroundThread = null;
        Log.d(TAG, "onDestroy: 所有资源释放完成");
    }

    /**
     * Preconditions for takePicture(): device, session and handler must
     * exist. Returns false (and logs) when any is missing.
     * NOTE(review): isSessionClosed is only logged, not acted upon.
     */
    private boolean checkTakePicture() {
        if (cameraDevice == null) {
            Log.w(TAG, "拍照失败: 相机未初始化");
            return false;
        }
        if (cameraCaptureSession == null) {
            Log.e(TAG, "拍照错误: CameraCaptureSession为空");
            return false;
        }
        if (backgroundHandler == null) {
            Log.e(TAG, "拍照错误: backgroundHandler未初始化");
            startBackgroundThread();
            return false;
        }
        if (isSessionClosed) {
            Log.e(TAG, "当前会话已关闭");
        }
        return true;
    }

    /**
     * Still-capture flow: build a STILL_CAPTURE request targeting the JPEG
     * reader, install a save-to-disk listener, stop the repeating preview,
     * issue the capture, and resume preview in the listener's finally block.
     * NOTE(review): getRotation() yields a Surface.ROTATION_* constant (0-3),
     * but JPEG_ORIENTATION expects clockwise degrees — saved JPEGs may be
     * mis-rotated; verify against the Camera2 reference.
     */
    private void takePicture() {
        Log.i(TAG, "4.开始拍照流程——————————");
        try {
            boolean checkFlag = checkTakePicture();
            if (!checkFlag) {
                Log.i(TAG, "拍照流程————检查未通过!退出拍照!");
                return;
            }
            CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureBuilder.addTarget(captureImageReader.getSurface());
            captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
            int rotation = getWindowManager().getDefaultDisplay().getRotation();
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, rotation);
            // Listener: extract the JPEG bytes, save to Pictures, broadcast a
            // media-scan intent, toast the path, then resume preview.
            captureImageReader.setOnImageAvailableListener(reader -> {
                Log.d(TAG, "拍照图像数据可用");
                try (Image image = reader.acquireLatestImage()) {
                    if (image != null) {
                        file = new File(
                                Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
                                "pic_" + System.currentTimeMillis() + ".jpg"
                        );
                        // JPEG images carry all data in plane 0.
                        Image.Plane[] planes = image.getPlanes();
                        ByteBuffer buffer = planes[0].getBuffer();
                        byte[] bytes = new byte[buffer.remaining()];
                        buffer.get(bytes);
                        saveImage(bytes, file);
                        Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
                        mediaScanIntent.setData(Uri.fromFile(file));
                        sendBroadcast(mediaScanIntent);
                        runOnUiThread(() ->
                                Toast.makeText(MainActivity2.this, "保存至: " + file, Toast.LENGTH_SHORT).show()
                        );
                    }
                } catch (Exception e) {
                    Log.e(TAG, "保存拍照图像错误: " + e.getMessage());
                } finally {
                    isCapturing = false;
                    // Resume the repeating preview request.
                    if (cameraCaptureSession != null && captureRequestBuilder != null) {
                        try {
                            cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
                        } catch (CameraAccessException e) {
                            Log.e(TAG, "恢复预览失败: " + e.getMessage());
                        }
                    }
                }
            }, backgroundHandler);
            Log.d(TAG, "停止预览");
            cameraCaptureSession.stopRepeating();
            Log.d(TAG, "4.下发拍照");
            isCapturing = true;
            cameraCaptureSession.capture(captureBuilder.build(), new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
                    super.onCaptureFailed(session, request, failure);
                    Log.e(TAG, "拍照失败: " + failure.getReason());
                    isCapturing = false;
                }
            }, backgroundHandler);
        } catch (CameraAccessException | IllegalStateException | SecurityException e) {
            Log.e(TAG, "拍照过程异常: " + e.getClass().getSimpleName(), e);
            isCapturing = false;
        }
    }

    /** Orders Sizes by pixel area; long math avoids int overflow. */
    static class CompareSizesByArea implements Comparator<Size> {
        @Override
        public int compare(Size lhs, Size rhs) {
            return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    // Device state callbacks: on open, start the preview session; on
    // disconnect/error, close the device.
    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            Log.i(TAG, "相机已打开");
            cameraDevice = camera;
            Log.i(TAG, "2.1 开始配置预览流");
            createCameraPreviewSession();
        }
        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            Log.w(TAG, "相机断开连接");
            cameraDevice.close();
        }
        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            Log.e(TAG, "相机错误: " + error);
            cameraDevice.close();
            cameraDevice = null;
        }
    };

    /**
     * Inserts a MediaStore record, then writes the JPEG bytes to the given
     * file. NOTE(review): the bytes are written to `file`, not to the
     * MediaStore Uri's output stream — the inserted record and the file on
     * disk are separate; verify this is intended.
     */
    private void saveImage(byte[] bytes, File file) {
        Log.d(TAG, "保存图像: " + file.getAbsolutePath());
        imageUri = resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
        if (imageUri != null) {
            try (FileOutputStream output = new FileOutputStream(file)) {
                output.write(bytes);
                Log.i(TAG, "图像保存成功, 大小: " + bytes.length + " bytes");
            } catch (IOException e) {
                Log.e(TAG, "保存文件失败: " + e.getMessage());
            }
        }
    }

    /**
     * Permission dialog result: finish the Activity on denial; on grant,
     * start the background thread and open the camera.
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        Log.d(TAG, "权限请求结果: " + requestCode);
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
                Log.w(TAG, "用户拒绝相机权限");
                Toast.makeText(this, "需要相机权限", Toast.LENGTH_SHORT).show();
                finish();
            } else {
                Log.i(TAG, "用户授予相机权限");
                startBackgroundThread();
                openCamera();
            }
        }
    }

    /** Quits the camera thread and joins it; nulls thread + handler. */
    private void stopBackgroundThread() {
        if (backgroundThread != null) {
            Log.d(TAG, "停止后台线程");
            backgroundThread.quitSafely();
            try {
                backgroundThread.join();
                backgroundThread = null;
                backgroundHandler = null;
            } catch (InterruptedException e) {
                Log.e(TAG, "停止线程失败: " + e.getMessage());
            }
        }
    }

    /** Starts the camera HandlerThread once and binds a Handler to it. */
    private void startBackgroundThread() {
        if (backgroundThread == null) {
            backgroundThread = new HandlerThread("CameraBackground");
            backgroundThread.start();
            backgroundHandler = new Handler(backgroundThread.getLooper());
            Log.d(TAG, "后台线程启动");
        }
    }

    /**
     * Closes the capture session (aborting in-flight captures first) and
     * marks it closed. NOTE(review): nothing in this file calls closeCamera —
     * possibly dead code; verify against callers elsewhere.
     */
    private void closeCamera() {
        Log.d(TAG, "关闭相机资源");
        if (isCapturing) {
            Log.w(TAG, "正在拍照中,等待完成或取消...");
            try {
                cameraCaptureSession.abortCaptures();
            } catch (CameraAccessException e) {
                throw new RuntimeException(e);
            }
        }
        if (cameraCaptureSession != null) {
            cameraCaptureSession.close();
            cameraCaptureSession = null;
        }
        isSessionClosed = true;
    }

    /**
     * Picks the largest size whose aspect ratio is within 0.1 of the view's
     * and that fits inside width x height; falls back to choices[0].
     */
    private Size chooseOptimalSize(Size[] choices, int width, int height) {
        List<Size> bigEnough = new ArrayList<>();
        for (Size option : choices) {
            float ratio = (float) option.getWidth() / option.getHeight();
            float viewRatio = (float) width / height;
            if (Math.abs(ratio - viewRatio) <= 0.1 && option.getWidth() <= width && option.getHeight() <= height) {
                bigEnough.add(option);
            }
        }
        if (!bigEnough.isEmpty()) {
            return Collections.max(bigEnough, new CompareSizesByArea());
        }
        Log.w(TAG, "未找到完美匹配尺寸,使用默认");
        return choices[0];
    }
}
package com.android.example.cameraappxjava.util;

import android.graphics.ImageFormat;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * GLES 2.0 camera renderer: converts YUV_420_888 preview frames to RGB in a
 * fragment shader using three GL_LUMINANCE textures (Y/U/V). Texture sizes
 * are tracked manually because GLES 2.0 lacks glGetTexLevelParameteriv.
 */
public class CameraGLRenderer implements GLSurfaceView.Renderer {
    private static final String TAG = "CameraGLRenderer";
    private static final int TEXTURE_COUNT = 3; // one texture each for Y, U, V

    // Vertex shader: passes position through and forwards texture coords.
    private static final String VERTEX_SHADER =
            "attribute vec4 vPosition;\n" +
            "attribute vec2 vTexCoord;\n" +
            "varying vec2 texCoord;\n" +
            "void main() {\n" +
            " gl_Position = vPosition;\n" +
            " texCoord = vTexCoord;\n" +
            "}";

    // Fragment shader: samples Y/U/V luminance textures and converts to RGB
    // (BT.601); mediump precision is mandatory in GLES 2.0 fragment shaders.
    private static final String FRAGMENT_SHADER =
            "precision mediump float;\n" +
            "varying vec2 texCoord;\n" +
            "uniform sampler2D yTex;\n" +
            "uniform sampler2D uTex;\n" +
            "uniform sampler2D vTex;\n" +
            "void main() {\n" +
            " float y = texture2D(yTex, texCoord).r;\n" +
            " float u = texture2D(uTex, texCoord).r - 0.5;\n" +
            " float v = texture2D(vTex, texCoord).r - 0.5;\n" +
            " float r = 
y + 1.402 * v;\n" +
            " float g = y - 0.34414 * u - 0.71414 * v;\n" +
            " float b = y + 1.772 * u;\n" +
            // Clamp to [0,1] to avoid channel overflow.
            " r = clamp(r, 0.0, 1.0);\n" +
            " g = clamp(g, 0.0, 1.0);\n" +
            " b = clamp(b, 0.0, 1.0);\n" +
            " gl_FragColor = vec4(r, g, b, 1.0);\n" +
            "}";

    // Full-screen quad in NDC, triangle-strip order (x, y, z per vertex).
    private static final float[] VERTEX_COORDS = {
            -1.0f, 1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            1.0f, 1.0f, 0.0f,
            1.0f, -1.0f, 0.0f
    };
    // Texture coordinates matching VERTEX_COORDS order (s, t per vertex);
    // t is flipped relative to the vertex y axis to compensate for the
    // texture origin.
    private static final float[] TEX_COORDS = {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f
    };

    private int mShaderProgram;                          // linked GLES program id
    private int[] mTextureIds = new int[TEXTURE_COUNT];  // Y/U/V texture names
    private FloatBuffer mVertexBuffer;                   // vertex coords as direct buffer
    private FloatBuffer mTexBuffer;                      // texture coords as direct buffer
    private int mViewWidth, mViewHeight;                 // GLSurfaceView viewport size
    // Manually tracked texture sizes (GLES 2.0 cannot query level params).
    private int mYTexWidth = 0, mYTexHeight = 0;
    private int mUTexWidth = 0, mUTexHeight = 0;
    // NOTE(review): mVTexWidth/mVTexHeight are never written — the U fields
    // are reused for both chroma textures in uploadTexture(); dead fields.
    private int mVTexWidth = 0, mVTexHeight = 0;
    // Guards the hand-off of frames from the camera thread to the GL thread.
    private final Object mYuvLock = new Object();
    private Image mPendingImage;                         // latest undrawn camera frame
    private byte[] mYData, mUData, mVData;               // extracted plane bytes
    private int mYuvWidth, mYuvHeight;                   // dimensions of the current frame

    /**
     * Hands a camera Image to the renderer (thread-safe). The Image is kept
     * until the GL thread consumes it in processPendingImage(); a previously
     * pending frame is closed and dropped.
     * NOTE(review): this takes ownership of the Image — the caller must NOT
     * close it itself (e.g. via try-with-resources), or the GL thread will
     * read a closed Image and the preview stays black. Verify the caller.
     */
    public void setYUVData(Image image) {
        if (image == null || image.getFormat() != ImageFormat.YUV_420_888) {
            Log.w(TAG, "无效Image:格式非 YUV_420_888 或 Image为空");
            if (image != null) image.close(); // still must close to free the camera buffer
            return;
        }
        synchronized (mYuvLock) {
            // Drop any frame the GL thread has not consumed yet.
            if (mPendingImage != null) {
                mPendingImage.close();
                Log.d(TAG, "关闭未处理的PendingImage,避免内存泄漏");
            }
            mPendingImage = image;
        }
    }

    /**
     * Releases every resource: pending Image, CPU-side YUV arrays, GPU
     * textures and the shader program, and the coordinate buffers.
     * NOTE(review): the GLES20 delete calls here run on whichever thread
     * calls release() (the Activity's onDestroy), not the GL thread with the
     * EGL context current — they are likely no-ops there; verify.
     */
    public void release() {
        synchronized (mYuvLock) {
            if (mPendingImage != null) {
                mPendingImage.close();
                mPendingImage = null;
            }
            mYData = null;
            mUData = null;
            mVData = null;
            mYuvWidth = 0;
            mYuvHeight = 0;
            mYTexWidth = mYTexHeight = 0;
            mUTexWidth = mUTexHeight = 0;
            mVTexWidth = mVTexHeight = 0;
        }
        if (mTextureIds != null) {
            GLES20.glDeleteTextures(TEXTURE_COUNT, mTextureIds, 0);
            mTextureIds = null;
        }
        if (mShaderProgram != 0) {
            GLES20.glDeleteProgram(mShaderProgram);
            mShaderProgram = 0;
        }
        mVertexBuffer = null;
        mTexBuffer = null;
        Log.d(TAG, "所有资源释放完成(GLES 2.0 兼容)");
    }

    /**
     * One-time GL setup on the GL thread: clear color, coordinate buffers,
     * shader program, and the three Y/U/V textures with their parameters.
     */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.d(TAG, "onSurfaceCreated(GLES 2.0):初始化OpenGL环境");
        GLES20.glDisable(GLES20.GL_BLEND);            // opaque preview, no blending
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);  // black background
        mVertexBuffer = createFloatBuffer(VERTEX_COORDS);
        mTexBuffer = createFloatBuffer(TEX_COORDS);
        mShaderProgram = compileShaderProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (mShaderProgram == 0) {
            Log.e(TAG, "着色器程序创建失败(GLES 2.0),预览不可用");
            return;
        }
        GLES20.glGenTextures(TEXTURE_COUNT, mTextureIds, 0);
        initTexture(mTextureIds[0]);
        initTexture(mTextureIds[1]);
        initTexture(mTextureIds[2]);
        Log.d(TAG, "GLES 2.0 初始化完成,纹理ID:Y=" + mTextureIds[0] + ", U=" + mTextureIds[1] + ", V=" + mTextureIds[2]);
    }

    /** Tracks the view size and maps the viewport to the full surface. */
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        mViewWidth = width;
        mViewHeight = height;
        GLES20.glViewport(0, 0, width, height);
        Log.d(TAG, "onSurfaceChanged(GLES 2.0):视口尺寸=" + width + "x" + height);
    }

    /**
     * Per-frame render: consume the pending Image, upload Y/U/V planes to
     * their textures, bind samplers and attributes, draw the full-screen
     * quad. With no new frame, just clears to black.
     */
    @Override
    public void onDrawFrame(GL10 gl) {
        boolean hasNewData = processPendingImage();
        if (!hasNewData) {
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            return;
        }
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(mShaderProgram);
        // Chroma planes are half-resolution in both dimensions (4:2:0).
        uploadTexture(mTextureIds[0], mYData, mYuvWidth, mYuvHeight, true);
        uploadTexture(mTextureIds[1], mUData, mYuvWidth / 2, mYuvHeight / 2, false);
        uploadTexture(mTextureIds[2], mVData, mYuvWidth / 2, mYuvHeight / 2, false);
        bindTextureToSampler();
        passVertexAndTexCoord();
        // 4 vertices as a triangle strip cover the screen.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_COORDS.length / 3);
        int vPositionLoc = GLES20.glGetAttribLocation(mShaderProgram, "vPosition");
        int vTexCoordLoc = GLES20.glGetAttribLocation(mShaderProgram, "vTexCoord");
        GLES20.glDisableVertexAttribArray(vPositionLoc);
        GLES20.glDisableVertexAttribArray(vTexCoordLoc);
    }

    /** Binds vertex and texture coordinate buffers to the shader attributes. */
    private void passVertexAndTexCoord() {
        int vPositionLoc = GLES20.glGetAttribLocation(mShaderProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(vPositionLoc);
        GLES20.glVertexAttribPointer(vPositionLoc, 3, GLES20.GL_FLOAT, false, 3 * 4, mVertexBuffer);
        int vTexCoordLoc = GLES20.glGetAttribLocation(mShaderProgram, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vTexCoordLoc);
        GLES20.glVertexAttribPointer(vTexCoordLoc, 2, GLES20.GL_FLOAT, false, 2 * 4, mTexBuffer);
    }

    /**
     * Wraps a float array in a native-order direct FloatBuffer (required by
     * glVertexAttribPointer), rewound to position 0. Returns null for
     * null/empty input.
     */
    private FloatBuffer createFloatBuffer(float[] array) {
        if (array == null || array.length == 0) return null;
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(array.length * 4); // 4 bytes per float
        byteBuffer.order(ByteOrder.nativeOrder());
        FloatBuffer floatBuffer = byteBuffer.asFloatBuffer();
        floatBuffer.put(array);
        floatBuffer.position(0);
        return floatBuffer;
    }

    /**
     * Compiles and links the vertex + fragment shaders into a program.
     * Returns the program id, or 0 on any compile/link failure; intermediate
     * shader objects are always deleted.
     */
    private int compileShaderProgram(String vertexCode, String fragmentCode) {
        int vertexShader = compileSingleShader(GLES20.GL_VERTEX_SHADER, vertexCode);
        if (vertexShader == 0) return 0;
        int fragmentShader = compileSingleShader(GLES20.GL_FRAGMENT_SHADER, fragmentCode);
        if (fragmentShader == 0) {
            GLES20.glDeleteShader(vertexShader);
            return 0;
        }
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "着色器链接失败(GLES 2.0):" + GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        // Shaders can be deleted once the program is linked.
        GLES20.glDeleteShader(vertexShader);
        GLES20.glDeleteShader(fragmentShader);
        return program;
    }

    /**
     * Compiles one shader of the given type; returns its id or 0 on failure
     * (logging the info log).
     */
    private int compileSingleShader(int shaderType, String shaderCode) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader == 0) {
            Log.e(TAG, "创建着色器失败(GLES 2.0),类型=" + (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段"));
            return 0;
        }
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
        if (compileStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段") + "着色器编译失败(GLES 2.0):" + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    /**
     * Sets sampling parameters for one texture: linear min/mag filtering and
     * clamp-to-edge wrapping on both axes; unbinds afterwards.
     */
    private void initTexture(int textureId) {
        if (textureId == 0) return;
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    }

    /**
     * Takes the pending Image (under the lock), extracts Y/U/V plane bytes
     * into mYData/mUData/mVData, validates their lengths, and closes the
     * Image in all paths. Handles both semi-planar (pixelStride == 2, UV
     * interleaved in plane 1) and planar layouts.
     *
     * @return true when a complete, length-consistent frame was extracted.
     */
    private boolean processPendingImage() {
        Image image = null;
        synchronized (mYuvLock) {
            if (mPendingImage == null) {
                return false;
            }
            // Detach under the lock, process outside it.
            image = mPendingImage;
            mPendingImage = null;
        }
        try {
            mYuvWidth = image.getWidth();
            mYuvHeight = image.getHeight();
            Image.Plane[] planes = image.getPlanes();
            if (planes.length < 3) {
                Log.e(TAG, "Image Planes 数量不足3,无法提取 YUV 数据");
                return false;
            }
            // Plane 0 is always the Y channel.
            ByteBuffer yBuffer = planes[0].getBuffer();
            mYData = byteBufferToByteArray(yBuffer);
            if (planes[1].getPixelStride() == 2) {
                // Semi-planar: U and V interleaved in plane 1 (U at even,
                // V at odd byte offsets).
                ByteBuffer uvBuffer = planes[1].getBuffer();
                int uvLength = uvBuffer.remaining() / 2;
                mUData = new byte[uvLength];
                mVData = new byte[uvLength];
                for (int i = 0; i < uvLength; i++) {
                    mUData[i] = uvBuffer.get(i * 2);
                    mVData[i] = uvBuffer.get(i * 2 + 1);
                }
            } else {
                // Planar: separate U and V planes.
                ByteBuffer uBuffer = planes[1].getBuffer();
                ByteBuffer vBuffer = planes[2].getBuffer();
                mUData = byteBufferToByteArray(uBuffer);
                mVData = byteBufferToByteArray(vBuffer);
            }
            // Sanity-check lengths before uploading to GL.
            // NOTE(review): this assumes rowStride == width (no row padding);
            // devices with padded rows will fail this check every frame and
            // the preview would stay black — verify on the target device.
            int expectedYLength = mYuvWidth * mYuvHeight;
            int expectedUVLength = (mYuvWidth / 2) * (mYuvHeight / 2);
            if (mYData.length != expectedYLength || mUData.length != expectedUVLength || mVData.length != expectedUVLength) {
                Log.w(TAG, "YUV 数据长度不匹配,重置为正确长度");
                mYData = new byte[expectedYLength];
                mUData = new byte[expectedUVLength];
                mVData = new byte[expectedUVLength];
                return false;
            }
            Log.d(TAG, "处理 Image 完成(GLES 2.0):YUV 尺寸=" + mYuvWidth + "x" + mYuvHeight + ",数据长度 Y=" + mYData.length + ", U=" + mUData.length);
            return true;
        } catch (Exception e) {
            Log.e(TAG, "处理 Image 异常(GLES 2.0):" + e.getMessage(), e);
            return false;
        } finally {
            // Always release the camera buffer.
            if (image != null) {
                image.close();
            }
        }
    }

    /**
     * Copies a buffer's remaining bytes into a new array without moving its
     * position.
     * BUG(review): '|' is the non-short-circuit OR — buffer.remaining() is
     * still evaluated when buffer == null, so a null buffer throws NPE
     * instead of returning the empty array; should be '||'.
     */
    private byte[] byteBufferToByteArray(ByteBuffer buffer) {
        if (buffer == null | buffer.remaining() == 0) return new byte[0];
        int originalPos = buffer.position();
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data);
        buffer.position(originalPos);
        return data;
    }

    /**
     * Uploads one plane to a GL_LUMINANCE texture. Allocates with
     * glTexImage2D on first use (or size change) and updates with
     * glTexSubImage2D afterwards, using the manually tracked sizes since
     * GLES 2.0 cannot query them. U and V share the mUTex* size record.
     */
    private void uploadTexture(int textureId, byte[] data, int width, int height, boolean isYTexture) {
        if (textureId == 0 || data == null || width <= 0 || height <= 0) {
            Log.w(TAG, "上传纹理参数无效(GLES 2.0):textureId=" + textureId + ", width=" + width + ", height=" + height);
            return;
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        // YUV rows are tightly packed; default 4-byte alignment would skew them.
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        boolean isTextureCreated = false;
        if (isYTexture) {
            isTextureCreated = (mYTexWidth == width && mYTexHeight == height);
        } else {
            isTextureCreated = (mUTexWidth == width && mUTexHeight == height);
        }
        ByteBuffer dataBuffer = ByteBuffer.wrap(data);
        if (!isTextureCreated) {
            // First upload at this size: allocate GPU storage.
            GLES20.glTexImage2D(
                    GLES20.GL_TEXTURE_2D, 0,
                    GLES20.GL_LUMINANCE,
                    width, height, 0,
                    GLES20.GL_LUMINANCE,
                    GLES20.GL_UNSIGNED_BYTE,
                    dataBuffer
            );
            if (isYTexture) {
                mYTexWidth = width;
                mYTexHeight = height;
                Log.d(TAG, "创建 Y 纹理(GLES 2.0):尺寸=" + width + "x" + height);
            } else {
                mUTexWidth = width;
                mUTexHeight = height;
                Log.d(TAG, "创建 U/V 纹理(GLES 2.0):尺寸=" + width + "x" + height);
            }
        } else {
            // Same size: update in place without reallocating.
            GLES20.glTexSubImage2D(
                    GLES20.GL_TEXTURE_2D, 0,
                    0, 0,
                    width, height,
                    GLES20.GL_LUMINANCE,
                    GLES20.GL_UNSIGNED_BYTE,
                    dataBuffer
            );
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    }

    /**
     * Binds the Y/U/V textures to texture units 0/1/2 and points the
     * yTex/uTex/vTex samplers at them; logs if any uniform is missing.
     */
    private void bindTextureToSampler() {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[0]);
        int yTexLoc = GLES20.glGetUniformLocation(mShaderProgram, "yTex");
        GLES20.glUniform1i(yTexLoc, 0);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[1]);
        int uTexLoc = GLES20.glGetUniformLocation(mShaderProgram, "uTex");
        GLES20.glUniform1i(uTexLoc, 1);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[2]);
        int vTexLoc = GLES20.glGetUniformLocation(mShaderProgram, "vTex");
        GLES20.glUniform1i(vTexLoc, 2);
        if (yTexLoc == -1 || uTexLoc == -1 || vTexLoc == -1) {
            Log.e(TAG, "纹理采样器绑定失败: " + "yTex=" + yTexLoc + " uTex=" + uTexLoc + " vTex=" + vTexLoc);
        }
    }
}
能够正常存储图片,但是预览黑屏不能显示
09-21
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值