I. GLSurfaceView
GLSurfaceView extends SurfaceView. Compared with SurfaceView, it adds EGL management and comes with its own render thread. It also defines a Renderer interface for users to implement: client code only needs to hand an implementation of Renderer, containing the rendering functions, to the GLSurfaceView (the strategy pattern).
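As a minimal sketch of that contract (variable names here are illustrative, not from the code below), the client side boils down to:

GLSurfaceView glView = new GLSurfaceView(context);
glView.setEGLContextClientVersion(2); // request an OpenGL ES 2.0 context
glView.setRenderer(new GLSurfaceView.Renderer() { // the injected "strategy"
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) { /* create GL resources */ }
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }
    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // runs on GLSurfaceView's render thread
    }
});
glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); // draw only on requestRender()

Note that setRenderMode must be called after setRenderer; RENDERMODE_WHEN_DIRTY means a frame is drawn only when requestRender() is called.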
II. Camera Development Workflow
We choose to separate the Camera from the View: all Camera operations are handled by a CameraProxy class, and the View holds a CameraProxy object. This also makes CameraProxy reusable.
1. Opening the camera
Opening the camera takes a cameraId. In the old API there are only two values to choose from, CAMERA_FACING_BACK and CAMERA_FACING_FRONT, and open() returns a Camera object.
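In this demo mCameraId simply defaults to the back camera, but for reference, the id for a given facing can be found by enumerating the devices; a small sketch:

// Sketch: find the id of the back-facing camera with the old Camera API
int cameraId = -1;
Camera.CameraInfo info = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
    Camera.getCameraInfo(i, info);
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
        cameraId = i;
        break;
    }
}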
// Open the camera
public void openCamera() {
mCamera = Camera.open(mCameraId); // open the camera
Camera.getCameraInfo(mCameraId, mCameraInfo); // query camera info
initConfig(); // configure parameters
setDisplayOrientation(); // set the preview display orientation
mOrientationEventListener.enable();
}
2. Initializing the camera configuration
In the old Camera API, all camera configuration goes through the Parameters class.
We can set the flash mode, focus mode, exposure compensation, preview format and size, picture format and size, and so on.
private void initConfig() {
try {
mParameters = mCamera.getParameters();
// Setting an unsupported value throws an exception, so always check support before setting.
List<String> supportedFlashModes = mParameters.getSupportedFlashModes();
if (supportedFlashModes != null && supportedFlashModes.contains(Parameters.FLASH_MODE_OFF)) {
mParameters.setFlashMode(Parameters.FLASH_MODE_OFF); // flash mode (off)
}
List<String> supportedFocusModes = mParameters.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Parameters.FOCUS_MODE_AUTO)) {
mParameters.setFocusMode(Parameters.FOCUS_MODE_AUTO); // focus mode (auto)
}
mParameters.setPreviewFormat(ImageFormat.NV21); // preview frame format
mParameters.setPictureFormat(ImageFormat.JPEG); // captured picture format
mParameters.setExposureCompensation(0); // exposure compensation
// pick a suitable preview size
Size previewSize = getSuitableSize(mParameters.getSupportedPreviewSizes());
mPreviewWidth = previewSize.width;
mPreviewHeight = previewSize.height;
mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight); // set the preview size
Log.d(TAG, "previewWidth: " + mPreviewWidth + ", previewHeight: " + mPreviewHeight);
// pick a suitable picture size
Size pictureSize = getSuitableSize(mParameters.getSupportedPictureSizes());
mParameters.setPictureSize(pictureSize.width, pictureSize.height);
Log.d(TAG, "pictureWidth: " + pictureSize.width + ", pictureHeight: " + pictureSize.height);
mCamera.setParameters(mParameters); // apply the configured parameters to the camera
} catch (Exception e) {
e.printStackTrace();
}
}
The getSuitableSize method used here picks a suitable preview/picture size.
// Find the closest matching size
private Size getSuitableSize(List<Size> sizes) {
int minDelta = Integer.MAX_VALUE; // smallest width delta so far
int index = 0; // index of the size with the smallest delta
for (int i = 0; i < sizes.size(); i++) {
Size size = sizes.get(i);
Log.v(TAG, "SupportedSize, width: " + size.width + ", height: " + size.height);
// check that the aspect ratio matches first
if (size.width * mPreviewScale == size.height) {
int delta = Math.abs(mPreviewWidth - size.width);
if (delta == 0) { // exact match
return size;
}
if (minDelta > delta) { // track the smallest delta
minDelta = delta;
index = i;
}
}
}
return sizes.get(index);
}
3. Setting the preview display orientation
This method matters a lot: it determines whether your preview looks right. Under the hood the camera's preview frames are always wider than they are tall; for the image to appear upright in portrait, a display orientation has to be set through this method.
private void setDisplayOrientation() {
int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { // front-facing camera
result = (mCameraInfo.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate for the front camera's mirroring
} else { // back-facing camera
result = (mCameraInfo.orientation - degrees + 360) % 360;
}
mCamera.setDisplayOrientation(result);
}
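As a quick sanity check of the formula: a typical back camera sensor reports orientation = 90. In portrait (ROTATION_0, so degrees = 0) this gives result = (90 - 0 + 360) % 360 = 90, i.e. the preview is rotated 90° to appear upright; in landscape (ROTATION_90) it gives result = (90 - 90 + 360) % 360 = 0.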
4. Starting and stopping the preview
You need to create the SurfaceTexture yourself and hand it to the camera.
// Start the preview
public void startPreview(SurfaceTexture surface) {
if (mCamera != null) {
try {
mCamera.setPreviewTexture(surface); // the preview texture must be set before starting the preview
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
}
}
// Stop the preview
public void stopPreview() {
if (mCamera != null) {
mCamera.stopPreview();
}
}
5. Releasing the camera
The camera is a heavyweight system resource, so always release it when you are done. This is the counterpart of openCamera.
public void releaseCamera() {
if (mCamera != null) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
6. Tap to focus
Based on the point the user touches on the view, the camera performs a single focus operation at that point. The complete implementation is the focusOnPoint() method in the CameraProxy class below; a condensed sketch of the coordinate mapping follows.
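A condensed sketch of just the mapping step (viewWidth/viewHeight stand in for the view's dimensions):

// Map a view-space touch point (x, y) into the driver's focus coordinate
// space, which spans (-1000, -1000) to (1000, 1000) regardless of view size
int focusX = x * 2000 / viewWidth - 1000;
int focusY = y * 2000 / viewHeight - 1000;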
7. Pinch to zoom
We implement only the zoom logic here; the touch handling itself is left to the View class.
public void handleZoom(boolean isZoomIn) {
if (mParameters.isZoomSupported()) { // first check whether zoom is supported at all
int maxZoom = mParameters.getMaxZoom();
int zoom = mParameters.getZoom();
if (isZoomIn && zoom < maxZoom) {
zoom++;
} else if (zoom > 0) {
zoom--;
}
mParameters.setZoom(zoom); // apply the zoom level via setZoom
mCamera.setParameters(mParameters);
} else {
Log.w(TAG, "zoom not supported");
}
}
8. Taking a picture
The picture-taking logic itself is left to the upper layer; here we only wrap the original API thinly. The commonly used callback is Camera.PictureCallback, which hands us ready-to-use JPEG data.
public void takePicture(Camera.PictureCallback pictureCallback) {
mCamera.takePicture(null, null, pictureCallback);
}
9. Miscellaneous
Operations such as setting the preview callback and switching between front and back cameras can be found directly in the complete implementation below.
10. The CameraProxy class
The code below also uses an OrientationEventListener, which obtains the device's current orientation from the sensors; it is used to set the picture rotation when taking a photo.
public class CameraProxy implements Camera.AutoFocusCallback {
private static final String TAG = "CameraProxy";
private Activity mActivity;
private Camera mCamera;
private Parameters mParameters; // camera parameters
private CameraInfo mCameraInfo = new CameraInfo(); // camera info (facing, sensor orientation, etc.)
private int mCameraId = CameraInfo.CAMERA_FACING_BACK; // id of the back or front camera
private int mPreviewWidth = 1440; // default width 1440
private int mPreviewHeight = 1080; // default height 1080
private float mPreviewScale = mPreviewHeight * 1f / mPreviewWidth; // preview aspect ratio: height / width
private PreviewCallback mPreviewCallback; // preview frame data callback
private OrientationEventListener mOrientationEventListener;
private int mLatestRotation = 0; // rotation to apply to captured images
private byte[] mPreviewBuffer;
// constructor
public CameraProxy(Activity activity) {
mActivity = activity;
// listen for device orientation changes via the sensors
mOrientationEventListener = new OrientationEventListener(mActivity) {
@Override
public void onOrientationChanged(int orientation) {
setPictureRotate(orientation);
}
};
}
// Open the camera
public void openCamera() {
Log.d(TAG, "openCamera cameraId: " + mCameraId);
mCamera = Camera.open(mCameraId); // open the camera
Camera.getCameraInfo(mCameraId, mCameraInfo); // query camera info
initConfig(); // configure parameters
setDisplayOrientation(); // set the preview display orientation
Log.d(TAG, "openCamera enable mOrientationEventListener");
mOrientationEventListener.enable();
}
// The camera is a heavyweight system resource; always release it when done.
public void releaseCamera() {
if (mCamera != null) {
Log.v(TAG, "releaseCamera");
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release(); // release the resource
mCamera = null;
}
mOrientationEventListener.disable();
}
// Used with SurfaceView
public void startPreview(SurfaceHolder holder) {
if (mCamera != null) {
Log.v(TAG, "startPreview");
try {
mCamera.setPreviewDisplay(holder); // bind the display surface
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview(); // the preview actually starts here
}
}
// Used with GLSurfaceView
public void startPreview(SurfaceTexture surface) {
if (mCamera != null) {
Log.v(TAG, "startPreview");
try {
mCamera.setPreviewTexture(surface);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview(); // start the preview
}
}
// Stop the preview
public void stopPreview() {
if (mCamera != null) {
Log.v(TAG, "stopPreview");
mCamera.stopPreview();
}
}
// Whether the front camera is active
public boolean isFrontCamera() {
return mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
}
// Configure the camera parameters
private void initConfig() {
Log.v(TAG, "initConfig");
try {
mParameters = mCamera.getParameters();
// Setting an unsupported value throws an exception, so always check support before setting.
List<String> supportedFlashModes = mParameters.getSupportedFlashModes();
if (supportedFlashModes != null && supportedFlashModes.contains(Parameters.FLASH_MODE_OFF)) {
mParameters.setFlashMode(Parameters.FLASH_MODE_OFF); // flash mode (off)
}
List<String> supportedFocusModes = mParameters.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Parameters.FOCUS_MODE_AUTO)) {
mParameters.setFocusMode(Parameters.FOCUS_MODE_AUTO); // focus mode (auto)
}
mParameters.setPreviewFormat(ImageFormat.NV21); // preview frame format
mParameters.setPictureFormat(ImageFormat.JPEG); // captured picture format
mParameters.setExposureCompensation(0); // exposure compensation
// pick a suitable preview size
Size previewSize = getSuitableSize(mParameters.getSupportedPreviewSizes());
mPreviewWidth = previewSize.width;
mPreviewHeight = previewSize.height;
mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight); // set the preview size
Log.d(TAG, "previewWidth: " + mPreviewWidth + ", previewHeight: " + mPreviewHeight);
// pick a suitable picture size
Size pictureSize = getSuitableSize(mParameters.getSupportedPictureSizes());
mParameters.setPictureSize(pictureSize.width, pictureSize.height);
Log.d(TAG, "pictureWidth: " + pictureSize.width + ", pictureHeight: " + pictureSize.height);
mCamera.setParameters(mParameters); // apply the configured parameters to the camera
} catch (Exception e) {
e.printStackTrace();
}
}
// Find the closest matching size
private Size getSuitableSize(List<Size> sizes) {
int minDelta = Integer.MAX_VALUE; // smallest width delta so far; start large so the first match overwrites it
int index = 0; // index of the size with the smallest delta
for (int i = 0; i < sizes.size(); i++) {
Size size = sizes.get(i);
Log.v(TAG, "SupportedSize, width: " + size.width + ", height: " + size.height);
// check that the aspect ratio matches first
if (size.width * mPreviewScale == size.height) {
int delta = Math.abs(mPreviewWidth - size.width);
if (delta == 0) {
return size;
}
if (minDelta > delta) {
minDelta = delta;
index = i;
}
}
}
return sizes.get(index);
}
/**
 * Set the camera's preview display orientation. This must be set, otherwise the displayed image is rotated incorrectly.
 */
private void setDisplayOrientation() {
int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { // front-facing camera
result = (mCameraInfo.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing camera
result = (mCameraInfo.orientation - degrees + 360) % 360;
}
mCamera.setDisplayOrientation(result);
}
// Compute the rotation (orientation) for captured images
private void setPictureRotate(int orientation) {
if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) return;
orientation = (orientation + 45) / 90 * 90; // snap to the nearest multiple of 90 degrees
int rotation;
if (mCameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
rotation = (mCameraInfo.orientation - orientation + 360) % 360;
} else { // back camera
rotation = (mCameraInfo.orientation + orientation) % 360;
}
mLatestRotation = rotation;
}
// Get the rotation for captured images
public int getLatestRotation() {
return mLatestRotation;
}
// Set the preview callback
public void setPreviewCallback(PreviewCallback previewCallback) {
mPreviewCallback = previewCallback;
if (mPreviewBuffer == null) {
mPreviewBuffer = new byte[mPreviewWidth * mPreviewHeight * 3 / 2]; // NV21 uses 1.5 bytes per pixel
}
mCamera.addCallbackBuffer(mPreviewBuffer);
mCamera.setPreviewCallbackWithBuffer(mPreviewCallback);
}
// Take a picture
public void takePicture(Camera.PictureCallback pictureCallback) {
mCamera.takePicture(null, null, pictureCallback);
}
// Flip the camera facing, then reopen
public void switchCamera() {
mCameraId ^= 1; // toggle between the two ids (0 and 1)
releaseCamera(); // release the old camera
openCamera(); // reopen with the new id
}
// Tap-to-focus: focus once at the given touch point
public void focusOnPoint(int x, int y, int width, int height) {
Log.v(TAG, "touch point (" + x + ", " + y + ")");
if (mCamera == null) {
return;
}
Parameters parameters = mCamera.getParameters();
// 1. Check that focus areas are supported at all
if (parameters.getMaxNumFocusAreas() > 0) {
// 2. Center the focus area on the touch point, with a side length defaulting to 1/4 of the view's shorter edge
int length = Math.min(width, height) >> 3; // 1/8 of the shorter edge, i.e. half the side length
int left = x - length;
int top = y - length;
int right = x + length;
int bottom = y + length;
// 3. Map into the camera's focus coordinate space, which spans (-1000,-1000) to (1000,1000)
left = left * 2000 / width - 1000;
top = top * 2000 / height - 1000;
right = right * 2000 / width - 1000;
bottom = bottom * 2000 / height - 1000;
// 4. Clamp the rectangle to the valid range
left = left < -1000 ? -1000 : left;
top = top < -1000 ? -1000 : top;
right = right > 1000 ? 1000 : right;
bottom = bottom > 1000 ? 1000 : bottom;
Log.d(TAG, "focus area (" + left + ", " + top + ", " + right + ", " + bottom + ")");
ArrayList<Camera.Area> areas = new ArrayList<>();
areas.add(new Camera.Area(new Rect(left, top, right, bottom), 600));
parameters.setFocusAreas(areas);
}
try {
mCamera.cancelAutoFocus(); // cancel any in-progress focus operation first
mCamera.setParameters(parameters);
mCamera.autoFocus(this); // trigger autofocus
} catch (Exception e) {
e.printStackTrace();
}
}
// Pinch zoom
public void handleZoom(boolean isZoomIn) {
if (mParameters.isZoomSupported()) { // zoom must be supported
int maxZoom = mParameters.getMaxZoom();
int zoom = mParameters.getZoom();
if (isZoomIn && zoom < maxZoom) {
zoom++;
} else if (zoom > 0) {
zoom--;
}
Log.d(TAG, "handleZoom: zoom: " + zoom);
mParameters.setZoom(zoom); // apply the zoom level
mCamera.setParameters(mParameters);
} else {
Log.i(TAG, "zoom not supported");
}
}
public Camera getCamera() {
return mCamera;
}
public int getPreviewWidth() {
return mPreviewWidth;
}
public int getPreviewHeight() {
return mPreviewHeight;
}
// Autofocus result callback
@Override
public void onAutoFocus(boolean success, Camera camera) {
Log.d(TAG, "onAutoFocus: " + success);
}
}
III. A Custom CameraGLSurfaceView
Requirements:
1. CameraGLSurfaceView must extend GLSurfaceView.
2. Override onMeasure so that the width and height of CameraGLSurfaceView match the camera preview size; that way the picture never looks stretched.
3. Open and close the camera inside CameraGLSurfaceView; fortunately the CameraProxy above makes this straightforward.
4. Override onTouchEvent to implement tap-to-focus and pinch-to-zoom.
5. Implement GLSurfaceView.Renderer and render the camera frames in it.
Implementation:
CameraGLSurfaceView
The main work is opening and releasing the camera in the GLSurfaceView.Renderer callbacks, plus overriding the onMeasure and onTouchEvent methods.
public class CameraGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private static final String TAG = "CameraGLSurfaceView";
private CameraProxy mCameraProxy;
private SurfaceTexture mSurfaceTexture;
private CameraDrawer mDrawer;
private int mRatioWidth = 0;
private int mRatioHeight = 0;
private float mOldDistance;
private int mTextureId = -1;
public CameraGLSurfaceView(Context context) {
this(context, null);
}
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
private void init(Context context) {
mCameraProxy = new CameraProxy((Activity) context);
setEGLContextClientVersion(2); // OpenGL ES 2.0
setRenderer(this); // this view is its own Renderer
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
mTextureId = OpenGLUtils.getExternalOESTextureID();
mSurfaceTexture = new SurfaceTexture(mTextureId);
mSurfaceTexture.setOnFrameAvailableListener(this);
mCameraProxy.openCamera(); // open the camera
mDrawer = new CameraDrawer();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.d(TAG, "onSurfaceChanged. thread: " + Thread.currentThread().getName());
Log.d(TAG, "onSurfaceChanged. width: " + width + ", height: " + height);
int previewWidth = mCameraProxy.getPreviewWidth();
int previewHeight = mCameraProxy.getPreviewHeight();
if (width > height) {
setAspectRatio(previewWidth, previewHeight);
} else {
setAspectRatio(previewHeight, previewWidth);
}
GLES20.glViewport(0, 0, width, height);
mCameraProxy.startPreview(mSurfaceTexture);
}
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClearColor(0, 0, 0, 0); // clear the background (the canvas) to transparent black
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
mSurfaceTexture.updateTexImage();
mDrawer.draw(mTextureId, mCameraProxy.isFrontCamera());
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
// Set the target aspect ratio
public void setAspectRatio(int width, int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
mRatioWidth = width;
mRatioHeight = height;
post(new Runnable() {
@Override
public void run() {
requestLayout(); // must run in UI thread
}
});
}
public CameraProxy getCameraProxy() {
return mCameraProxy;
}
public SurfaceTexture getSurfaceTexture() {
return mSurfaceTexture;
}
// Measure so the view matches the preview aspect ratio (no stretching)
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == mRatioWidth || 0 == mRatioHeight) {
setMeasuredDimension(width, height);
} else { // fit the view to the preview aspect ratio
if (width < height * mRatioWidth / mRatioHeight) {
setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
} else {
setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
}
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (event.getPointerCount() == 1) {
// tap to focus
mCameraProxy.focusOnPoint((int) event.getX(), (int) event.getY(), getWidth(), getHeight());
return true;
}
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_POINTER_DOWN:
mOldDistance = getFingerSpacing(event);
break;
case MotionEvent.ACTION_MOVE:
float newDistance = getFingerSpacing(event);
if (newDistance > mOldDistance) {
mCameraProxy.handleZoom(true); // zoom in
} else if (newDistance < mOldDistance) {
mCameraProxy.handleZoom(false); // zoom out
}
mOldDistance = newDistance;
break;
default:
break;
}
return super.onTouchEvent(event);
}
private static float getFingerSpacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return (float) Math.sqrt(x * x + y * y);
}
}
CameraDrawer
A custom class into which the drawing code has been factored out; the core is the draw() method.
It involves a fair amount of OpenGL ES knowledge.
public class CameraDrawer {
public final String VERTEX_SHADER = "" +
"attribute vec4 vPosition;" +
"attribute vec2 inputTextureCoordinate;" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{"+
"gl_Position = vPosition;"+
"textureCoordinate = inputTextureCoordinate;" +
"}";
public final String FRAGMENT_SHADER = "" +
"#extension GL_OES_EGL_image_external : require\n"+
"precision mediump float;" +
"varying vec2 textureCoordinate;\n" +
"uniform samplerExternalOES s_texture;\n" +
"void main() {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
"}";
private FloatBuffer mVertexBuffer;
private FloatBuffer mBackTextureBuffer;
private FloatBuffer mFrontTextureBuffer;
private ByteBuffer mDrawListBuffer;
private int mProgram;
private int mPositionHandle;
private int mTextureHandle;
private static final float VERTEXES[] = {
-1.0f, 1.0f,
-1.0f,-1.0f,
1.0f, -1.0f,
1.0f, 1.0f,
};
// texture coordinates for the back camera
private static final float TEXTURE_BACK[] = {
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 0.0f,
};
// texture coordinates for the front camera (horizontally mirrored)
private static final float TEXTURE_FRONT[] = {
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
private static final byte VERTEX_ORDER[] = { 0, 1, 2, 3 }; // order to draw vertices
private final int VERTEX_SIZE = 2;
private final int VERTEX_STRIDE = VERTEX_SIZE * 4;
public CameraDrawer() {
// init float buffer for vertex coordinates
mVertexBuffer = ByteBuffer.allocateDirect(VERTEXES.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mVertexBuffer.put(VERTEXES).position(0);
// init float buffer for texture coordinates
mBackTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_BACK.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mBackTextureBuffer.put(TEXTURE_BACK).position(0);
mFrontTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_FRONT.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mFrontTextureBuffer.put(TEXTURE_FRONT).position(0);
// init byte buffer for draw list
mDrawListBuffer = ByteBuffer.allocateDirect(VERTEX_ORDER.length).order(ByteOrder.nativeOrder());
mDrawListBuffer.put(VERTEX_ORDER).position(0);
mProgram = OpenGLUtils.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
mTextureHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
}
public void draw(int texture, boolean isFrontCamera) {
GLES20.glUseProgram(mProgram); // select the program to use
GLES20.glEnable(GLES20.GL_CULL_FACE); // enable face culling
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture); // bind the external OES texture
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, VERTEX_SIZE, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mVertexBuffer);
GLES20.glEnableVertexAttribArray(mTextureHandle);
if (isFrontCamera) {
GLES20.glVertexAttribPointer(mTextureHandle, VERTEX_SIZE, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mFrontTextureBuffer);
} else {
GLES20.glVertexAttribPointer(mTextureHandle, VERTEX_SIZE, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mBackTextureBuffer);
}
// the actual draw call
GLES20.glDrawElements(GLES20.GL_TRIANGLE_FAN, VERTEX_ORDER.length, GLES20.GL_UNSIGNED_BYTE, mDrawListBuffer);
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureHandle);
}
}
OpenGLUtils
The OpenGLUtils class used above wraps a few common operations such as creating a program and loading shaders.
public class OpenGLUtils {
private static final String TAG = "OpenGLUtils";
public static int getExternalOESTextureID() {
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
public static int loadShader(int type, String source) {
// 1. create shader
int shader = GLES20.glCreateShader(type);
if (shader == GLES20.GL_NONE) {
Log.e(TAG, "create shared failed! type: " + type);
return GLES20.GL_NONE;
}
// 2. load shader source
GLES20.glShaderSource(shader, source);
// 3. compile shader source
GLES20.glCompileShader(shader);
// 4. check compile status
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == GLES20.GL_FALSE) { // compile failed
Log.e(TAG, "Error compiling shader. type: " + type + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader); // delete shader
shader = GLES20.GL_NONE;
}
return shader;
}
public static int createProgram(String vertexSource, String fragmentSource) {
// 1. load shader
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == GLES20.GL_NONE) {
Log.e(TAG, "load vertex shader failed! ");
return GLES20.GL_NONE;
}
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (fragmentShader == GLES20.GL_NONE) {
Log.e(TAG, "load fragment shader failed! ");
return GLES20.GL_NONE;
}
// 2. create gl program
int program = GLES20.glCreateProgram();
if (program == GLES20.GL_NONE) {
Log.e(TAG, "create program failed! ");
return GLES20.GL_NONE;
}
// 3. attach shader
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
// we can delete shader after attach
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
// 4. link program
GLES20.glLinkProgram(program);
// 5. check link status
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == GLES20.GL_FALSE) { // link failed
Log.e(TAG, "Error link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program); // delete program
return GLES20.GL_NONE;
}
return program;
}
public static String loadFromAssets(String fileName, Resources resources) {
String result = null;
try {
InputStream is = resources.getAssets().open(fileName);
int length = is.available();
byte[] data = new byte[length];
is.read(data);
is.close();
result = new String(data, "UTF-8");
result = result.replace("\r\n", "\n");
} catch (IOException e) {
e.printStackTrace();
}
return result;
}
}
IV. CameraActivity
Next, we simply place the finished CameraGLSurfaceView in an Activity or a Fragment.
Note that before using the camera, the relevant permissions must be declared, and dangerous permissions must also be requested at runtime.
1. AndroidManifest.xml
The camera-related permissions are as follows; a runtime-request sketch follows the manifest snippet.
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-feature android:name="android.hardware.camera"/>
<uses-feature android:name="android.hardware.camera.autofocus"/>
2. The capture feature
Earlier we only stubbed out the capture entry point and left the caller to pass in a callback; here we simply trigger the capture when the shutter button is pressed.
Saving the image is a time-consuming operation and must not run on the main thread. When the capture completes, onPictureTaken is invoked automatically, and we perform the save inside that callback.
public class GLSurfaceCameraActivity extends AppCompatActivity implements View.OnClickListener {
private static final String TAG = "SurfaceCameraActivity";
private ImageView mCloseIv;
private ImageView mSwitchCameraIv;
private ImageView mTakePictureIv;
private ImageView mPictureIv;
private CameraGLSurfaceView mCameraView;
private CameraProxy mCameraProxy;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_glsurface_camera);
initView();
}
private void initView() {
mCloseIv = findViewById(R.id.toolbar_close_iv);
mCloseIv.setOnClickListener(this);
mSwitchCameraIv = findViewById(R.id.toolbar_switch_iv);
mSwitchCameraIv.setOnClickListener(this);
mTakePictureIv = findViewById(R.id.take_picture_iv);
mTakePictureIv.setOnClickListener(this);
mPictureIv = findViewById(R.id.picture_iv);
mPictureIv.setOnClickListener(this);
mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
mCameraView = findViewById(R.id.camera_view);
mCameraProxy = mCameraView.getCameraProxy();
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.toolbar_close_iv:
finish();
break;
case R.id.toolbar_switch_iv:
mCameraProxy.switchCamera(); // switch between front and back cameras
mCameraProxy.startPreview(mCameraView.getSurfaceTexture()); // restart the preview
break;
case R.id.take_picture_iv:
mCameraProxy.takePicture(mPictureCallback); // take a picture
break;
case R.id.picture_iv: // open the gallery
Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivity(intent);
break;
}
}
private final Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.d(TAG, "onPictureTaken: callback");
mCameraProxy.startPreview(mCameraView.getSurfaceTexture()); // resume the preview after the capture
new ImageSaveTask().execute(data); // save the image off the main thread
}
};
// Save the image on a background thread
private class ImageSaveTask extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... bytes) {
long time = System.currentTimeMillis();
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes[0], 0, bytes[0].length);
Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
int rotation = mCameraProxy.getLatestRotation();
time = System.currentTimeMillis();
Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, rotation, mCameraProxy.isFrontCamera(), true);
Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
time = System.currentTimeMillis();
ImageUtils.saveBitmap(rotateBitmap);
Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
return null;
}
// Update the gallery thumbnail with the latest photo
@Override
protected void onPostExecute(Void aVoid) {
mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
}
}
}
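One caveat worth noting: GLSurfaceView expects the host Activity to forward lifecycle events so its render thread can be paused and resumed, which the Activity above omits. A minimal sketch (assuming mCameraView is the field above):

@Override
protected void onResume() {
    super.onResume();
    mCameraView.onResume(); // resume the GL render thread
}

@Override
protected void onPause() {
    mCameraView.onPause(); // pause the GL thread; the EGL surface may be released
    super.onPause();
}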
The picture rotation is handled by manually rotating the Bitmap before saving it. The ImageUtils code:
public class ImageUtils {
private static final String TAG = "ImageUtils";
// gallery directory
private static final String GALLERY_PATH = Environment.getExternalStoragePublicDirectory(Environment
.DIRECTORY_DCIM) + File.separator + "Camera";
// projection for querying image ids
private static final String[] STORE_IMAGES = {MediaStore.Images.Thumbnails._ID,};
// timestamp format used for file names
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMdd_HHmmss");
// Rotate (and optionally mirror) a bitmap before saving
public static Bitmap rotateBitmap(Bitmap source, int degree, boolean flipHorizontal, boolean recycle) {
if (degree == 0) {
return source;
}
Matrix matrix = new Matrix();
matrix.postRotate(degree);
if (flipHorizontal) {
matrix.postScale(-1, 1); // the front camera image is horizontally mirrored; flip it back when requested
}
Bitmap rotateBitmap = Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, false);
if (recycle) {
source.recycle();
}
return rotateBitmap;
}
// Save a bitmap into the gallery directory
public static void saveBitmap(Bitmap bitmap) {
String fileName = DATE_FORMAT.format(new Date(System.currentTimeMillis())) + ".jpg";
File outFile = new File(GALLERY_PATH, fileName);
Log.d(TAG, "saveImage. filepath: " + outFile.getAbsolutePath());
FileOutputStream os = null;
try {
os = new FileOutputStream(outFile);
boolean success = bitmap.compress(Bitmap.CompressFormat.JPEG, 100, os); // JPEG-encode the bitmap
if (success) {
// register the image with the media database
insertToDB(outFile.getAbsolutePath());
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (os != null) {
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
// Insert the image record via the MediaStore content provider
public static void insertToDB(String picturePath) {
ContentValues values = new ContentValues();
ContentResolver resolver = MyApp.getInstance().getContentResolver();
values.put(MediaStore.Images.ImageColumns.DATA, picturePath);
values.put(MediaStore.Images.ImageColumns.TITLE, picturePath.substring(picturePath.lastIndexOf("/") + 1));
values.put(MediaStore.Images.ImageColumns.DATE_TAKEN, System.currentTimeMillis());
values.put(MediaStore.Images.ImageColumns.MIME_TYPE, "image/jpeg");
resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
}
// Get a thumbnail of the most recent image in the gallery
public static Bitmap getLatestThumbBitmap() {
Bitmap bitmap = null;
// query sorted by date, newest first
Cursor cursor = MediaStore.Images.Media.query(MyApp.getInstance().getContentResolver(), MediaStore.Images.Media
.EXTERNAL_CONTENT_URI, STORE_IMAGES, null, null, MediaStore.Files.FileColumns.DATE_MODIFIED + " DESC");
boolean first = cursor.moveToFirst();
if (first) {
long id = cursor.getLong(0);
bitmap = MediaStore.Images.Thumbnails.getThumbnail(MyApp.getInstance().getContentResolver(),
id, MediaStore.Images.Thumbnails.MICRO_KIND, null);
Log.d(TAG, "bitmap width: " + bitmap.getWidth());
Log.d(TAG, "bitmap height: " + bitmap.getHeight());
}
cursor.close();
return bitmap;
}
}
Reference: https://blog.youkuaiyun.com/afei__/article/details/86603587