Update: the demo has been added to GitHub.
The camera still has quite a few pitfalls. While optimizing the code I found that the original width/height settings were wrong, so I looked into it carefully.
When displaying camera data you have to use the dimensions the camera itself reports. The sizes it returns always have width greater than height, i.e. landscape dimensions, and on top of that the camera's orientation differs from the current screen orientation, which is why the picture was badly stretched at first.
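To see why the buffers come out in landscape, you can query the sensor orientation before picking a preview size. This is a minimal sketch (not part of the demo code below), assuming a Context named context is in scope:
// Sketch only: check how far the sensor is rotated relative to the device's
// natural orientation. On most phones this returns 90 or 270, which is why
// the reported preview sizes are landscape.
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
    CameraCharacteristics characteristics =
            manager.getCameraCharacteristics(manager.getCameraIdList()[0]);
    Integer sensorOrientation =
            characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    Log.d("Camera2", "sensor orientation = " + sensorOrientation);
} catch (CameraAccessException e) {
    e.printStackTrace();
}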
Here is the code.
Camera2SurfaceView
public class Camera2SurfaceView extends SurfaceView {
private SurfaceHolder mHolder;
private EGLUtils mEglUtils;
private GLFramebuffer mFramebuffer;
private GLRenderer mRenderer;
private SurfaceTexture mSurfaceTexture;
private Surface mSurface;
private final Object mObject = new Object();
private String mCameraId;
private CameraManager mCameraManager;
private CameraCaptureSession mCameraCaptureSession;
private CameraDevice mCameraDevice;
private Handler mHandler;
private int screenWidth, screenHeight;
public Camera2SurfaceView(Context context) {
super(context);
init(context);
}
public Camera2SurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
private void init(Context context){
mHolder = getHolder();
mFramebuffer = new GLFramebuffer(context);
mRenderer = new GLRenderer(context);
initCamera2();
mHolder.addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int w, int h) {
screenWidth = w;
screenHeight = h;
Thread thread = new Thread(){
@Override
public void run() {
super.run();
mEglUtils = new EGLUtils();
mEglUtils.initEGL(EGL14.EGL_NO_CONTEXT,mHolder.getSurface());
mRenderer.initShader();
// get the current display rotation
WindowManager windowManager = (WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE);
int rotation = windowManager.getDefaultDisplay().getRotation();
Size mPreviewSize = getPreferredPreviewSize(mSizes, screenWidth, screenHeight);
final int previewWidth = mPreviewSize.getWidth();
final int previewHeight = mPreviewSize.getHeight();
// compute the display (letterbox) area
int left = 0, top = 0, viewWidth = 0, viewHeight = 0;
switch (rotation) {
case Surface.ROTATION_0:
left = 0;
viewWidth = screenWidth;
viewHeight = (int) (previewWidth * 1.0f / previewHeight * viewWidth);
top = (screenHeight - viewHeight) / 2;
break;
case Surface.ROTATION_90:
left = 0;
viewWidth = screenWidth;
viewHeight = (int) (previewHeight * 1.0f / previewWidth * viewWidth);
top = (screenHeight - viewHeight) / 2;
break;
case Surface.ROTATION_180:
break;
case Surface.ROTATION_270:
left = 0;
viewWidth = screenWidth;
viewHeight = (int) (previewHeight * 1.0f / previewWidth * viewWidth);
top = (screenHeight - viewHeight) / 2;
break;
}
Rect rect = new Rect();
rect.left = left;
rect.top = top;
rect.right = left + viewWidth;
rect.bottom = top + viewHeight;
mFramebuffer.initFramebuffer(previewWidth,previewHeight);
mSurfaceTexture = mFramebuffer.getSurfaceTexture();
mSurfaceTexture.setDefaultBufferSize(previewWidth, previewHeight);
mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (mObject) {
mObject.notifyAll();
}
}
});
openCamera2();
while (true){
synchronized (mObject) {
try {
mObject.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if(mSurfaceTexture == null){
break;
}
mFramebuffer.drawFrameBuffer(previewWidth,previewHeight,rotation);
GLES30.glClear(GLES30.GL_DEPTH_BUFFER_BIT | GLES30.GL_COLOR_BUFFER_BIT);
GLES30.glViewport(rect.left,rect.top,rect.width(),rect.height());
mFramebuffer.drawFrame();
mRenderer.drawFrame();
mEglUtils.swap();
}
mEglUtils.release();
}
};
thread.start();
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
if(mCameraCaptureSession != null){
mCameraCaptureSession.getDevice().close();
mCameraCaptureSession.close();
mCameraCaptureSession = null;
}
if(mSurface != null){
mSurface.release();
mSurface = null;
}
if(mSurfaceTexture != null){
mSurfaceTexture.release();
mSurfaceTexture = null;
synchronized (mObject) {
mObject.notifyAll();
}
}
}
});
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
}
private Size[] mSizes;
private void initCamera2() {
HandlerThread handlerThread = new HandlerThread("Camera2");
handlerThread.start();
mHandler = new Handler(handlerThread.getLooper());
mCameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String[] CameraIdList = mCameraManager.getCameraIdList();
mCameraId = CameraIdList[0];
CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(mCameraId);
characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if(map != null){
mSizes = map.getOutputSizes(SurfaceTexture.class);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera2(){
if (PermissionChecker.checkSelfPermission(getContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
try {
mCameraManager.openCamera(mCameraId, stateCallback, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
private CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
takePreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
}
};
private void takePreview() {
try {
mSurface = new Surface(mSurfaceTexture);
final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(mSurface);
mCameraDevice.createCaptureSession(Arrays.asList(mSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) return;
mCameraCaptureSession = cameraCaptureSession;
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
CaptureRequest previewRequest = builder.build();
try {
mCameraCaptureSession.setRepeatingRequest(previewRequest, null, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
// pick the preview size closest to the given width/height
private Size getPreferredPreviewSize(Size[] sizes, int width, int height) {
Size s = null;
for (Size option : sizes) {
int w = option.getWidth();
int h = option.getHeight();
if(width >= height){
if(s == null){
s = option;
}else{
if(w <= width){
if(w > s.getWidth()){
s = option;
}else{
int a = Math.abs(height - h) - Math.abs(height - s.getHeight());
if(a < 0){
s = option;
}else if (a == 0 && h < s.getHeight()){
s = option;
}
}
}
}
}else{
if(s == null){
s = option;
}else{
if(h <= width){
if(h > s.getHeight()){
s = option;
}else{
int a = Math.abs(height - w) - Math.abs(height - s.getWidth());
if(a < 0){
s = option;
}else if(a == 0 && w < s.getWidth()){
s = option;
}
}
}
}
}
}
if(s != null){
return s;
}
return sizes[0];
}
}
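For reference, using the view is just a matter of making it the content view once the CAMERA permission has been granted. A minimal sketch of a hosting Activity (the class name and request code here are illustrative, not from the demo):
// Illustrative host Activity: requests the CAMERA permission and then shows
// the preview by setting Camera2SurfaceView as the content view.
public class PreviewActivity extends Activity {
    private static final int REQUEST_CAMERA = 1; // hypothetical request code

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (PermissionChecker.checkSelfPermission(this, Manifest.permission.CAMERA)
                == PackageManager.PERMISSION_GRANTED) {
            setContentView(new Camera2SurfaceView(this));
        } else {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_CAMERA && grantResults.length > 0
                && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            setContentView(new Camera2SurfaceView(this));
        }
    }
}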
GLFramebuffer
public class GLFramebuffer {
private Context context;
private final float[] vertexData = {
1f, -1f, 0f,
-1f, -1f, 0f,
1f, 1f, 0f,
-1f, 1f, 0f
};
private FloatBuffer vertexBuffer;
private FloatBuffer textureVertexBuffer;
private int programId;
private int aPositionHandle;
private int uTextureSamplerHandle;
private int aTextureCoordHandle;
private int uSTMMatrixHandle;
private float[] mSTMatrix = new float[16];
private int[] textures;
private int[] frameBuffers;
private int[] vertexBuffers;
private SurfaceTexture surfaceTexture;
public GLFramebuffer(Context context){
this.context = context;
final float[] textureVertexData = {
1f, 0f,
0f, 0f,
1f, 1f,
0f, 1f
};
vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);
textureVertexBuffer = ByteBuffer.allocateDirect(textureVertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureVertexData);
textureVertexBuffer.position(0);
}
public void initFramebuffer(int width,int height){
String vertexShader = ShaderUtils.readRawTextFile(context, R.raw.vertext_shader);
String fragmentShader = ShaderUtils.readRawTextFile(context, R.raw.fragment_sharder);
programId = ShaderUtils.createProgram(vertexShader, fragmentShader);
aPositionHandle = GLES30.glGetAttribLocation(programId, "aPosition");
uSTMMatrixHandle = GLES30.glGetUniformLocation(programId, "uSTMatrix");
uTextureSamplerHandle = GLES30.glGetUniformLocation(programId, "sTexture");
aTextureCoordHandle = GLES30.glGetAttribLocation(programId, "aTexCoord");
vertexBuffers = new int[1];
GLES30.glGenBuffers(1,vertexBuffers,0);
GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, vertexBuffers[0]);
GLES30.glBufferData(GLES30.GL_ARRAY_BUFFER, vertexData.length*4, vertexBuffer,GLES30.GL_STATIC_DRAW);
frameBuffers = new int[1];
GLES30.glGenFramebuffers(1, frameBuffers, 0);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBuffers[0]);
textures = new int[2];
GLES30.glGenTextures(2, textures, 0);
GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textures[0]);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER,
GLES30.GL_NEAREST);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER,
GLES30.GL_LINEAR);
GLES30.glTexImage2D(GLES30.GL_TEXTURE_2D, 0, GLES30.GL_RGBA, width, height, 0, GLES30.GL_RGBA, GLES30.GL_UNSIGNED_BYTE, null);
GLES30.glFramebufferTexture2D(GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, textures[0], 0);
GLES30.glActiveTexture(GLES30.GL_TEXTURE1);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[1]);
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_MIN_FILTER,
GLES30.GL_NEAREST);
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_MAG_FILTER,
GLES30.GL_LINEAR);
GLES30.glUseProgram(programId);
GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, vertexBuffers[0]);
GLES30.glEnableVertexAttribArray(aPositionHandle);
GLES30.glVertexAttribPointer(aPositionHandle, 3, GLES30.GL_FLOAT, false,
12, 0);
GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, 0);
GLES30.glEnableVertexAttribArray(aTextureCoordHandle);
GLES30.glVertexAttribPointer(aTextureCoordHandle, 2, GLES30.GL_FLOAT, false, 8, textureVertexBuffer);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
}
public SurfaceTexture getSurfaceTexture(){
surfaceTexture = new SurfaceTexture(textures[1]);
return surfaceTexture;
}
public void drawFrameBuffer(int width,int height,int rotation){
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(mSTMatrix);
// rotate the texture to match the display orientation
switch (rotation) {
case Surface.ROTATION_0:
break;
case Surface.ROTATION_90:
Matrix.rotateM(mSTMatrix,0,-90,0,0,1);// rotate around the z axis about the bottom-left corner; positive angles are clockwise, negative counter-clockwise
Matrix.translateM(mSTMatrix,0,-1,0,0);// positive x shifts left, negative shifts right; positive y shifts down, negative shifts up
break;
case Surface.ROTATION_180:
break;
case Surface.ROTATION_270:
Matrix.rotateM(mSTMatrix,0,90,0,0,1);
Matrix.translateM(mSTMatrix,0,0,-1,0);
break;
}
GLES30.glUseProgram(programId);
GLES30.glActiveTexture(GLES30.GL_TEXTURE1);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[1]);// textures[1] backs the SurfaceTexture, so it must be bound to the external OES target, not GL_TEXTURE_2D
GLES30.glUniform1i(uTextureSamplerHandle,1);
GLES30.glUniformMatrix4fv(uSTMMatrixHandle, 1, false, mSTMatrix, 0);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBuffers[0]);
GLES30.glClear(GLES30.GL_DEPTH_BUFFER_BIT | GLES30.GL_COLOR_BUFFER_BIT);
GLES30.glViewport(0, 0, width, height);
GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 4);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
}
public void drawFrame(){
GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textures[0]);
}
}
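The R.raw.vertext_shader / R.raw.fragment_sharder sources are not shown above. Judging from the handles the code queries (aPosition, aTexCoord, uSTMatrix, sTexture) and the external OES texture it samples, they would need to look roughly like the following; this is an inferred sketch written inline as Java strings for brevity, while in the demo they live in the raw resource files and may differ:
// Rough sketch of what the raw shader resources would have to contain,
// inferred from the handles queried in initFramebuffer().
private static final String VERTEX_SHADER =
        "attribute vec4 aPosition;\n" +
        "attribute vec4 aTexCoord;\n" +
        "uniform mat4 uSTMatrix;\n" +
        "varying vec2 vTexCoord;\n" +
        "void main() {\n" +
        "    vTexCoord = (uSTMatrix * aTexCoord).xy;\n" +
        "    gl_Position = aPosition;\n" +
        "}\n";

private static final String FRAGMENT_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "varying vec2 vTexCoord;\n" +
        "void main() {\n" +
        "    gl_FragColor = texture2D(sTexture, vTexCoord);\n" +
        "}\n";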
Camera2 still has plenty of pitfalls, especially getting the frame data into CPU memory, which is a real hassle.
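If you do need the frames on the CPU (for encoding or processing), the usual route is to add an ImageReader surface as a second capture target. A minimal sketch under that assumption, reusing the previewWidth/previewHeight/mHandler names from the code above:
// Sketch only: an extra YUV_420_888 target that delivers frames to the CPU.
// The ImageReader's surface would also have to be added to the CaptureRequest
// and to the surface list passed to createCaptureSession().
ImageReader imageReader = ImageReader.newInstance(previewWidth, previewHeight,
        ImageFormat.YUV_420_888, 2);
imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image == null) return;
        // Y plane only, as an example; the U/V planes have their own row/pixel strides.
        ByteBuffer yPlane = image.getPlanes()[0].getBuffer();
        byte[] y = new byte[yPlane.remaining()];
        yPlane.get(y);
        image.close(); // always close the Image, or the camera will stall
    }
}, mHandler);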