First, let's look at a utility class inside this library:
implementation "androidx.camera:camera-core:1.1.0-alpha11"
The ImageUtil.java utility class is very useful; it contains the methods we will rely on later.
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.camera.core.internal.utils;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapRegionDecoder;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.YuvImage;
import android.util.Rational;
import android.util.Size;
import androidx.annotation.IntRange;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Logger;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Utility class for image related operations.
*/
@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
public final class ImageUtil {
private static final String TAG = "ImageUtil";
private ImageUtil() {
}
/**
* Converts a {@link Size} to an float array of vertexes.
*/
@NonNull
public static float[] sizeToVertexes(@NonNull Size size) {
return new float[]{0, 0, size.getWidth(), 0, size.getWidth(), size.getHeight(), 0,
size.getHeight()};
}
/**
* Returns the min value.
*/
public static float min(float value1, float value2, float value3, float value4) {
return Math.min(Math.min(value1, value2), Math.min(value3, value4));
}
/**
* Rotates aspect ratio based on rotation degrees.
*/
@NonNull
public static Rational getRotatedAspectRatio(
@IntRange(from = 0, to = 359) int rotationDegrees,
@NonNull Rational aspectRatio) {
if (rotationDegrees == 90 || rotationDegrees == 270) {
return inverseRational(aspectRatio);
}
return new Rational(aspectRatio.getNumerator(), aspectRatio.getDenominator());
}
/**
* Converts JPEG {@link ImageProxy} to JPEG byte array.
*/
@NonNull
public static byte[] jpegImageToJpegByteArray(@NonNull ImageProxy image) {
if (image.getFormat() != ImageFormat.JPEG) {
throw new IllegalArgumentException(
"Incorrect image format of the input image proxy: " + image.getFormat());
}
ImageProxy.PlaneProxy[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
byte[] data = new byte[buffer.capacity()];
buffer.rewind();
buffer.get(data);
return data;
}
/**
* Converts JPEG {@link ImageProxy} to JPEG byte array. The input JPEG image will be cropped
* by the specified crop rectangle and compressed by the specified quality value.
*/
@NonNull
public static byte[] jpegImageToJpegByteArray(@NonNull ImageProxy image,
@NonNull Rect cropRect, @IntRange(from = 1, to = 100) int jpegQuality)
throws CodecFailedException {
if (image.getFormat() != ImageFormat.JPEG) {
throw new IllegalArgumentException(
"Incorrect image format of the input image proxy: " + image.getFormat());
}
byte[] data = jpegImageToJpegByteArray(image);
data = cropJpegByteArray(data, cropRect, jpegQuality);
return data;
}
/**
* Converts YUV_420_888 {@link ImageProxy} to JPEG byte array. The input YUV_420_888 image
* will be cropped if a non-null crop rectangle is specified. The output JPEG byte array will
* be compressed by the specified quality value.
*/
@NonNull
public static byte[] yuvImageToJpegByteArray(@NonNull ImageProxy image,
@Nullable Rect cropRect, @IntRange(from = 1, to = 100) int jpegQuality)
throws CodecFailedException {
if (image.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException(
"Incorrect image format of the input image proxy: " + image.getFormat());
}
return ImageUtil.nv21ToJpeg(
ImageUtil.yuv_420_888toNv21(image),
image.getWidth(),
image.getHeight(),
cropRect,
jpegQuality);
}
/** {@link android.media.Image} to NV21 byte array. */
@NonNull
public static byte[] yuv_420_888toNv21(@NonNull ImageProxy image) {
ImageProxy.PlaneProxy yPlane = image.getPlanes()[0];
ImageProxy.PlaneProxy uPlane = image.getPlanes()[1];
ImageProxy.PlaneProxy vPlane = image.getPlanes()[2];
ByteBuffer yBuffer = yPlane.getBuffer();
ByteBuffer uBuffer = uPlane.getBuffer();
ByteBuffer vBuffer = vPlane.getBuffer();
yBuffer.rewind();
uBuffer.rewind();
vBuffer.rewind();
int ySize = yBuffer.remaining();
int position = 0;
// TODO(b/115743986): Pull these bytes from a pool instead of allocating for every image.
byte[] nv21 = new byte[ySize + (image.getWidth() * image.getHeight() / 2)];
// Add the full y buffer to the array. If rowStride > 1, some padding may be skipped.
for (int row = 0; row < image.getHeight(); row++) {
yBuffer.get(nv21, position, image.getWidth());
position += image.getWidth();
yBuffer.position(
Math.min(ySize, yBuffer.position() - image.getWidth() + yPlane.getRowStride()));
}
int chromaHeight = image.getHeight() / 2;
int chromaWidth = image.getWidth() / 2;
int vRowStride = vPlane.getRowStride();
int uRowStride = uPlane.getRowStride();
int vPixelStride = vPlane.getPixelStride();
int uPixelStride = uPlane.getPixelStride();
// Interleave the u and v frames, filling up the rest of the buffer. Use two line buffers to
// perform faster bulk gets from the byte buffers.
byte[] vLineBuffer = new byte[vRowStride];
byte[] uLineBuffer = new byte[uRowStride];
for (int row = 0; row < chromaHeight; row++) {
vBuffer.get(vLineBuffer, 0, Math.min(vRowStride, vBuffer.remaining()));
uBuffer.get(uLineBuffer, 0, Math.min(uRowStride, uBuffer.remaining()));
int vLineBufferPosition = 0;
int uLineBufferPosition = 0;
for (int col = 0; col < chromaWidth; col++) {
nv21[position++] = vLineBuffer[vLineBufferPosition];
nv21[position++] = uLineBuffer[uLineBufferPosition];
vLineBufferPosition += vPixelStride;
uLineBufferPosition += uPixelStride;
}
}
return nv21;
}
/** Crops JPEG byte array with given {@link android.graphics.Rect}. */
@NonNull
@SuppressWarnings("deprecation")
private static byte[] cropJpegByteArray(@NonNull byte[] data, @NonNull Rect cropRect,
@IntRange(from = 1, to = 100) int jpegQuality) throws CodecFailedException {
Bitmap bitmap;
try {
BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(data, 0, data.length,
false);
bitmap = decoder.decodeRegion(cropRect, new BitmapFactory.Options());
decoder.recycle();
} catch (IllegalArgumentException e) {
throw new CodecFailedException("Decode byte array failed with illegal argument." + e,
CodecFailedException.FailureType.DECODE_FAILED);
} catch (IOException e) {
throw new CodecFailedException("Decode byte array failed.",
CodecFailedException.FailureType.DECODE_FAILED);
}
if (bitmap == null) {
throw new CodecFailedException("Decode byte array failed.",
CodecFailedException.FailureType.DECODE_FAILED);
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
boolean success = bitmap.compress(Bitmap.CompressFormat.JPEG, jpegQuality, out);
if (!success) {
throw new CodecFailedException("Encode bitmap failed.",
CodecFailedException.FailureType.ENCODE_FAILED);
}
bitmap.recycle();
return out.toByteArray();
}
/** True if the given aspect ratio is meaningful. */
public static boolean isAspectRatioValid(@Nullable Rational aspectRatio) {
return aspectRatio != null && aspectRatio.floatValue() > 0 && !aspectRatio.isNaN();
}
/** True if the given aspect ratio is meaningful and has effect on the given size. */
public static boolean isAspectRatioValid(@NonNull Size sourceSize,
@Nullable Rational aspectRatio) {
return aspectRatio != null
&& aspectRatio.floatValue() > 0
&& isCropAspectRatioHasEffect(sourceSize, aspectRatio)
&& !aspectRatio.isNaN();
}
/**
* Calculates crop rect with the specified aspect ratio on the given size. Assuming the rect is
* at the center of the source.
*/
@Nullable
public static Rect computeCropRectFromAspectRatio(@NonNull Size sourceSize,
@NonNull Rational aspectRatio) {
if (!isAspectRatioValid(aspectRatio)) {
Logger.w(TAG, "Invalid view ratio.");
return null;
}
int sourceWidth = sourceSize.getWidth();
int sourceHeight = sourceSize.getHeight();
float srcRatio = sourceWidth / (float) sourceHeight;
int cropLeft = 0;
int cropTop = 0;
int outputWidth = sourceWidth;
int outputHeight = sourceHeight;
int numerator = aspectRatio.getNumerator();
int denominator = aspectRatio.getDenominator();
if (aspectRatio.floatValue() > srcRatio) {
outputHeight = Math.round((sourceWidth / (float) numerator) * denominator);
cropTop = (sourceHeight - outputHeight) / 2;
} else {
outputWidth = Math.round((sourceHeight / (float) denominator) * numerator);
cropLeft = (sourceWidth - outputWidth) / 2;
}
return new Rect(cropLeft, cropTop, cropLeft + outputWidth, cropTop + outputHeight);
}
/**
* Calculates crop rect based on the dispatch resolution and rotation degrees info.
*
* <p> The original crop rect is calculated based on camera sensor buffer. On some devices,
* the buffer is rotated before being passed to users, in which case the crop rect also
* needs additional transformations.
*
* <p> There are two most common scenarios: 1) exif rotation is 0, or 2) exif rotation
* equals output rotation. 1) means the HAL rotated the buffer based on target
* rotation. 2) means HAL no-oped on the rotation. Theoretically only 1) needs
* additional transformations, but this method is also generic enough to handle all possible
* HAL rotations.
*/
@NonNull
public static Rect computeCropRectFromDispatchInfo(@NonNull Rect surfaceCropRect,
int surfaceToOutputDegrees, @NonNull Size dispatchResolution,
int dispatchToOutputDegrees) {
// There are 3 coordinate systems: surface, dispatch and output. Surface is where
// the original crop rect is defined. We need to figure out what HAL
// has done to the buffer (the surface->dispatch mapping) and apply the same
// transformation to the crop rect.
// The surface->dispatch mapping is calculated by inverting a dispatch->surface mapping.
Matrix matrix = new Matrix();
// Apply the dispatch->surface rotation.
matrix.setRotate(dispatchToOutputDegrees - surfaceToOutputDegrees);
// Apply the dispatch->surface translation. The translation is calculated by
// compensating for the offset caused by the dispatch->surface rotation.
float[] vertexes = sizeToVertexes(dispatchResolution);
matrix.mapPoints(vertexes);
float left = min(vertexes[0], vertexes[2], vertexes[4], vertexes[6]);
float top = min(vertexes[1], vertexes[3], vertexes[5], vertexes[7]);
matrix.postTranslate(-left, -top);
// Inverting the dispatch->surface mapping to get the surface->dispatch mapping.
matrix.invert(matrix);
// Apply the surface->dispatch mapping to surface crop rect.
RectF dispatchCropRectF = new RectF();
matrix.mapRect(dispatchCropRectF, new RectF(surfaceCropRect));
dispatchCropRectF.sort();
Rect dispatchCropRect = new Rect();
dispatchCropRectF.round(dispatchCropRect);
return dispatchCropRect;
}
private static byte[] nv21ToJpeg(@NonNull byte[] nv21, int width, int height,
@Nullable Rect cropRect, @IntRange(from = 1, to = 100) int jpegQuality)
throws CodecFailedException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
boolean success =
yuv.compressToJpeg(cropRect == null ? new Rect(0, 0, width, height) : cropRect,
jpegQuality, out);
if (!success) {
throw new CodecFailedException("YuvImage failed to encode jpeg.",
CodecFailedException.FailureType.ENCODE_FAILED);
}
return out.toByteArray();
}
private static boolean isCropAspectRatioHasEffect(@NonNull Size sourceSize,
@NonNull Rational aspectRatio) {
int sourceWidth = sourceSize.getWidth();
int sourceHeight = sourceSize.getHeight();
int numerator = aspectRatio.getNumerator();
int denominator = aspectRatio.getDenominator();
return sourceHeight != Math.round((sourceWidth / (float) numerator) * denominator)
|| sourceWidth != Math.round((sourceHeight / (float) denominator) * numerator);
}
private static Rational inverseRational(@Nullable Rational rational) {
if (rational == null) {
return rational;
}
return new Rational(
/*numerator=*/ rational.getDenominator(),
/*denominator=*/ rational.getNumerator());
}
/**
* Checks whether the image's crop rectangle is the same as the source image size.
*/
public static boolean shouldCropImage(@NonNull ImageProxy image) {
return shouldCropImage(image.getWidth(), image.getHeight(), image.getCropRect().width(),
image.getCropRect().height());
}
/**
* Checks whether the image's crop rectangle is the same as the source image size.
*/
public static boolean shouldCropImage(int sourceWidth, int sourceHeight, int cropRectWidth,
int cropRectHeight) {
return sourceWidth != cropRectWidth || sourceHeight != cropRectHeight;
}
/** Exception for error during transcoding image. */
public static final class CodecFailedException extends Exception {
public enum FailureType {
ENCODE_FAILED,
DECODE_FAILED,
UNKNOWN
}
private FailureType mFailureType;
CodecFailedException(@NonNull String message) {
super(message);
mFailureType = FailureType.UNKNOWN;
}
CodecFailedException(@NonNull String message, @NonNull FailureType failureType) {
super(message);
mFailureType = failureType;
}
@NonNull
public FailureType getFailureType() {
return mFailureType;
}
}
}
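Two of these helpers matter most for what follows: computeCropRectFromAspectRatio and shouldCropImage. Here is a minimal sketch (not from the article) of how they fit together. ImageUtil is an internal, restricted API, so app code needs @SuppressLint("RestrictedApi"); the 4032x3024 frame size and 16:9 ratio are just assumptions for illustration.
import android.annotation.SuppressLint;
import android.graphics.Rect;
import android.util.Rational;
import android.util.Size;
import androidx.camera.core.internal.utils.ImageUtil;

final class ImageUtilSketch {
    /** Computes a centered 16:9 crop rect for an assumed 4032x3024 frame and checks whether it actually crops anything. */
    @SuppressLint("RestrictedApi")
    static boolean wouldCrop() {
        Size source = new Size(4032, 3024);     // assumed full frame
        Rational aspect = new Rational(16, 9);  // assumed target aspect ratio
        Rect cropRect = ImageUtil.computeCropRectFromAspectRatio(source, aspect);
        // true when the centered 16:9 rect is smaller than the source,
        // i.e. applying it would actually change the image
        return cropRect != null && ImageUtil.shouldCropImage(
                source.getWidth(), source.getHeight(), cropRect.width(), cropRect.height());
    }
}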
When ImageCapture is initialized, a method is provided to set the view port crop rect that is calculated at bind time. This rect is what allows the captured picture to be cropped later (for how the rect is supplied from app code, see the binding sketch after the snippet below).
/**
* Sets the view port crop rect calculated at the time of binding.
*
* @hide
*/
@RestrictTo(Scope.LIBRARY_GROUP)
public void setViewPortCropRect(@NonNull Rect viewPortCropRect) {
mViewPortCropRect = viewPortCropRect;
}
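setViewPortCropRect itself is restricted to the library group, so app code never calls it directly. The crop rect is normally supplied by binding Preview and ImageCapture together with a ViewPort; CameraX computes the rect at bind time and passes it down. A rough sketch (cameraProvider, preview, imageCapture and lifecycleOwner are assumed to already exist):
ViewPort viewPort = new ViewPort.Builder(new Rational(16, 9), preview.getTargetRotation()).build();
UseCaseGroup useCaseGroup = new UseCaseGroup.Builder()
        .addUseCase(preview)
        .addUseCase(imageCapture)
        .setViewPort(viewPort)
        .build();
cameraProvider.bindToLifecycle(lifecycleOwner, CameraSelector.DEFAULT_BACK_CAMERA, useCaseGroup);
With this binding in place, the ImageProxy delivered to the capture callback carries the crop rect that CameraX computed for the shared viewport.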
In takePicture, this rect is read back from the ImageProxy via getCropRect(), and the image is then cropped with it.
public void takePicture(@NonNull Executor executor,
final @NonNull OnImageCapturedCallback callback)
Implementation:
/**
* Take a photo
*/
@SuppressLint("RestrictedApi")
private void takenPictureInternal() {
if (mImageCapture != null) {
mImageCapture.takePicture(CameraXExecutors.ioExecutor(), new ImageCapture.OnImageCapturedCallback() {
@SuppressLint("UnsafeOptInUsageError")
@Override
public void onCaptureSuccess(@NonNull ImageProxy image) {
super.onCaptureSuccess(image);
try {
boolean shouldCropImage = ImageUtil.shouldCropImage(image);
int imageFormat = image.getFormat();
String path = SDP + "photo/" + mImageCapture.getTargetRotation() + "_" + TimeExtUtils.getCurr7() + ".jpg";
StringBuilder builder = new StringBuilder();
builder.append("拍照图片格式:").append(ImageUtilKt.getImageName(image.getFormat())).append("\n");
builder.append("拍照保存路径:").append(path).append("\n");
builder.append("拍照保存角度:").append(image.getImageInfo().getRotationDegrees()).append("\n");
builder.append("预览视图角度:").append(getBinding().previewView.getDisplay().getRotation()).append("\n");
builder.append("Activity视图角度:").append(ScreenUtils.getScreenRotation(CameraXFrameActivity.this)).append("\n");
builder.append("图片的宽高:").append(image.getWidth()).append("x").append(image.getHeight()).append("\n");
//
Rect cropRect = image.getCropRect();
builder.append("裁剪坐标 ")
.append(" 宽:").append(image.getCropRect().width())
.append(" 高:").append(image.getCropRect().height())
.append(" left:").append(cropRect.left)
.append(" top:").append(cropRect.top)
.append(" right:").append(cropRect.right)
.append(" bottom:").append(cropRect.bottom)
.append("\n");
LogUtils.i(builder.toString());
if (imageFormat == ImageFormat.YUV_420_888) {
byte[] jpeg = ImageUtil.yuvImageToJpegByteArray(image, shouldCropImage ? image.getCropRect() :
null, mImageCapture.getJpegQuality());
//
Bitmap bitmap = ImageUtils.getBitmap(jpeg, 0);
Bitmap rotate = ImageUtils.rotate(bitmap, image.getImageInfo().getRotationDegrees(), bitmap.getWidth() / 2f, bitmap.getHeight() / 2f);
boolean save = ImageUtils.save(rotate, path, Bitmap.CompressFormat.JPEG);
ThreadUtils.runOnUiThread(() -> {
T.info("图片保存:" + (save ? "成功!!!" : "失败!!!"));
});
// // Create a new blank bitmap
// Bitmap bgBitmap = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888);
// YuvToRgbConverter yuvToRgbConverter = new YuvToRgbConverter(mContext);
// yuvToRgbConverter.yuvToRgb(image.getImage(), bgBitmap);
// if (bgBitmap != null) {
// boolean fileByDeleteOldFile = FileUtils.createFileByDeleteOldFile(path);
// if (fileByDeleteOldFile) {
// ImageUtils.save(bgBitmap, path, Bitmap.CompressFormat.JPEG);
// LogUtils.i("保存路径:" + path);
// ThreadUtils.runOnUiThread(() -> {
// T.info("保存路径:" + path);
// });
// }
// }
} else if (imageFormat == ImageFormat.JPEG) {
byte[] jpeg;
if (!shouldCropImage) {
jpeg = ImageUtil.jpegImageToJpegByteArray(image);
} else {
jpeg = ImageUtil.jpegImageToJpegByteArray(image, image.getCropRect(), mImageCapture.getJpegQuality());
}
// ImageProxy.PlaneProxy[] planes = image.getPlanes();
// ByteBuffer buffer = planes[0].getBuffer();
// int size = buffer.remaining();
// byte[] jpeg = new byte[size];
// buffer.get(jpeg, 0, size);
//
Bitmap bitmap = ImageUtils.getBitmap(jpeg, 0);
Bitmap rotate = ImageUtils.rotate(bitmap, image.getImageInfo().getRotationDegrees(), bitmap.getWidth() / 2f, bitmap.getHeight() / 2f);
boolean save = ImageUtils.save(rotate, path, Bitmap.CompressFormat.JPEG);
ThreadUtils.runOnUiThread(() -> {
T.info("图片保存:" + (save ? "成功!!!" : "失败!!!"));
});
} else {
LogUtils.i("Unrecognized image format: " + image.getFormat());
}
// Close the image when finished with it
image.close();
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onError(@NonNull ImageCaptureException exception) {
super.onError(exception);
exception.printStackTrace();
}
});
// final ContentValues contentValues = new ContentValues();
// contentValues.put(MediaStore.MediaColumns.DISPLAY_NAME, CAPTURED_FILE_NAME
// + "_" + TimeExtUtils.getCurr7());
// contentValues.put(MediaStore.MediaColumns.MIME_TYPE, CAPTURED_FILE_NAME_END);
// //
// ImageCapture.OutputFileOptions outputFileOptions =
// new ImageCapture.OutputFileOptions.Builder(
// getContentResolver(),
// MediaStore.Images.Media.EXTERNAL_CONTENT_URI, contentValues)
// .build();
// mImageCapture.takePicture(outputFileOptions, CameraXExecutors.ioExecutor(),
// new ImageCapture.OnImageSavedCallback() {
// @Override
// public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
// StringBuilder builder = new StringBuilder();
// builder.append("图片保存路径:").append(outputFileResults.getSavedUri().getPath()).append("\n");
// LogUtils.i(builder);
// ThreadUtils.runOnUiThread(() -> {
// T.info(builder.toString());
// });
// }
//
// @Override
// public void onError(@NonNull ImageCaptureException exception) {
// exception.printStackTrace();
// }
// });
}
}
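The ImageUtils.getBitmap / rotate / save calls above come from a separate utility library. For reference, the same rotate-and-save step with plain Android APIs looks roughly like this (a sketch with minimal error handling; the quality value 100 is an assumption):
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

final class JpegSaveHelper {
    /** Decodes the (already cropped) JPEG bytes, rotates them by the capture rotation and writes the result to a file. */
    static boolean rotateAndSaveJpeg(byte[] jpeg, int rotationDegrees, File outFile) throws IOException {
        Bitmap bitmap = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
        Matrix matrix = new Matrix();
        matrix.postRotate(rotationDegrees, bitmap.getWidth() / 2f, bitmap.getHeight() / 2f);
        Bitmap rotated = Bitmap.createBitmap(
                bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
        try (FileOutputStream out = new FileOutputStream(outFile)) {
            return rotated.compress(Bitmap.CompressFormat.JPEG, 100, out);
        }
    }
}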
Log output:
┌────────────────────────────────────────────────────────────────────────────────────────────────────────────────
│ CameraX-camerax_io_0, com.jszy.baselib.camera.CameraXFrameActivity$3.onCaptureSuccess(CameraXFrameActivity.java:465)
├┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄
│ Captured image format: JPEG
│ Save path: /storage/emulated/0/Android/data/com.jszy.baselib/files/photo/0_202112140923441.jpg
│ Capture rotation degrees: 90
│ Preview view rotation: 0
│ Activity screen rotation: 0
│ Image size: 4032x2268
│ Crop rect width:1920 height:1080 left:0 top:0 right:1920 bottom:1080
└────────────────────────────────────────────────────────────────────────────────────────────────────────────────
The crop rect (1920x1080) no longer matches the full captured frame (4032x2268), so shouldCropImage returns true and the saved JPEG is cropped to the crop rect.
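Plugging the logged numbers into the shouldCropImage(int, int, int, int) overload from ImageUtil makes this explicit (a quick check, not part of the original code):
// 4032x2268 frame vs. 1920x1080 crop rect: both dimensions differ, so cropping is required.
boolean shouldCrop = ImageUtil.shouldCropImage(4032, 2268, 1920, 1080); // -> true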
Reference source code:
The image-saving code in ImageCapture.java:
public void takePicture(
final @NonNull OutputFileOptions outputFileOptions,
final @NonNull Executor executor,
final @NonNull OnImageSavedCallback imageSavedCallback) {
if (Looper.getMainLooper() != Looper.myLooper()) {
CameraXExecutors.mainThreadExecutor().execute(
() -> takePicture(outputFileOptions, executor, imageSavedCallback));
return;
}
/*
* We need to chain the following callbacks to save the image to disk:
*
* +-----------------------+
* | |
* |ImageCapture. |
* |OnImageCapturedCallback|
* | |
* +-----------+-----------+
* |
* |
* +-----------v-----------+ +----------------------+
* | | | |
* | ImageSaver. | | ImageCapture. |
* | OnImageSavedCallback +------> OnImageSavedCallback |
* | | | |
* +-----------------------+ +----------------------+
*/
// Convert the ImageSaver.OnImageSavedCallback to ImageCapture.OnImageSavedCallback
final ImageSaver.OnImageSavedCallback imageSavedCallbackWrapper =
new ImageSaver.OnImageSavedCallback() {
@Override
public void onImageSaved(@NonNull OutputFileResults outputFileResults) {
imageSavedCallback.onImageSaved(outputFileResults);
}
@Override
public void onError(@NonNull ImageSaver.SaveError error,
@NonNull String message,
@Nullable Throwable cause) {
@ImageCaptureError int imageCaptureError = ERROR_UNKNOWN;
switch (error) {
case FILE_IO_FAILED:
imageCaptureError = ERROR_FILE_IO;
break;
default:
// Keep the imageCaptureError as UNKNOWN_ERROR
break;
}
imageSavedCallback.onError(
new ImageCaptureException(imageCaptureError, message, cause));
}
};
int outputJpegQuality = getJpegQualityInternal();
// Wrap the ImageCapture.OnImageSavedCallback with an OnImageCapturedCallback so it can
// be put into the capture request queue
OnImageCapturedCallback imageCaptureCallbackWrapper =
new OnImageCapturedCallback() {
@Override
public void onCaptureSuccess(@NonNull ImageProxy image) {
mIoExecutor.execute(
new ImageSaver(
image,
outputFileOptions,
image.getImageInfo().getRotationDegrees(),
outputJpegQuality,
executor,
mSequentialIoExecutor,
imageSavedCallbackWrapper));
}
@Override
public void onError(@NonNull final ImageCaptureException exception) {
imageSavedCallback.onError(exception);
}
};
// If the final output image needs to be cropped, setting the JPEG quality as 100 when
// capturing the image. So that the image quality won't be lost when uncompressing and
// compressing the image again in the cropping process.
int rotationDegrees = getRelativeRotation(getCamera());
Size dispatchResolution = getAttachedSurfaceResolution();
// At this point, we can't know whether HAL will rotate the captured image or not. No
// matter HAL will rotate the image byte array or not, it won't affect whether the final
// image needs cropping or not. Therefore, we can still use the attached surface
// resolution and its relative rotation degrees against to the target rotation setting to
// calculate the possible crop rectangle and then use it to determine whether the final
// image will need cropping or not.
Rect cropRect = computeDispatchCropRect(getViewPortCropRect(), mCropAspectRatio,
rotationDegrees, dispatchResolution, rotationDegrees);
boolean shouldCropImage = ImageUtil.shouldCropImage(dispatchResolution.getWidth(),
dispatchResolution.getHeight(), cropRect.width(), cropRect.height());
int capturingJpegQuality = shouldCropImage ? 100 : outputJpegQuality;
// Always use the mainThreadExecutor for the initial callback so we don't need to double
// post to another thread
sendImageCaptureRequest(CameraXExecutors.mainThreadExecutor(),
imageCaptureCallbackWrapper, capturingJpegQuality);
}
The ImageProxy-to-JPEG conversion in ImageSaver.java:
@NonNull
private byte[] imageToJpegByteArray(@NonNull ImageProxy image, @IntRange(from = 1,
to = 100) int jpegQuality) throws CodecFailedException {
boolean shouldCropImage = ImageUtil.shouldCropImage(image);
int imageFormat = image.getFormat();
if (imageFormat == ImageFormat.JPEG) {
if (!shouldCropImage) {
// When cropping is unnecessary, the byte array doesn't need to be decoded and
// re-encoded again. Therefore, jpegQuality is unnecessary in this case.
return ImageUtil.jpegImageToJpegByteArray(image);
} else {
return ImageUtil.jpegImageToJpegByteArray(image, image.getCropRect(), jpegQuality);
}
} else if (imageFormat == ImageFormat.YUV_420_888) {
return ImageUtil.yuvImageToJpegByteArray(image, shouldCropImage ? image.getCropRect() :
null, jpegQuality);
} else {
Logger.w(TAG, "Unrecognized image format: " + imageFormat);
}
return null;
}