一、C++层面
1、camera_manager.h
camera/basic/src/main/cpp/camera_manager.h
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CAMERA_NATIVE_CAMERA_H
#define CAMERA_NATIVE_CAMERA_H
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadataTags.h>
#include <map>
#include <string>
#include <vector>
#include "image_reader.h"
// Lifecycle states reported by the ACameraCaptureSession state callbacks.
enum class CaptureSessionState : int32_t {
  READY = 0,  // session is ready
  ACTIVE,     // session is busy
  CLOSED,     // session is closed(by itself or a new session evicts)
  MAX_STATE   // sentinel value, not a real state
};
/**
 * Closed numeric range [min_, max_] with a helper that maps a percentage of
 * the range onto an absolute value. Used for exposure time and sensitivity.
 */
template <typename T>
class RangeValue {
 public:
  T min_, max_;
  /**
   * return absolute value from relative value
   * value: in percent (50 for 50%)
   */
  T value(int percent) const {  // const: does not modify the range
    return static_cast<T>(min_ + (max_ - min_) * percent / 100);
  }
  RangeValue() { min_ = max_ = static_cast<T>(0); }
  // A range is usable only when it is non-degenerate (min != max).
  bool Supported(void) const { return (min_ != max_); }
};
// Indices into NDKCamera::requests_: one CaptureRequestInfo per output stream.
enum PREVIEW_INDICES {
  PREVIEW_REQUEST_IDX = 0,  // repeating preview request
  JPG_CAPTURE_REQUEST_IDX,  // one-shot still (JPEG) capture request
  CAPTURE_REQUEST_COUNT,    // number of requests
};
// Everything needed to drive one capture request into one output window.
struct CaptureRequestInfo {
  ANativeWindow* outputNativeWindow_;        // destination surface
  ACaptureSessionOutput* sessionOutput_;     // session output wrapping the window
  ACameraOutputTarget* target_;              // request target wrapping the window
  ACaptureRequest* request_;                 // the capture request itself
  ACameraDevice_request_template template_;  // template used to build request_
  int sessionSequenceId_;                    // sequence id of the last submission
};
class CameraId;
/**
 * NDKCamera: thin C++ wrapper around the NDK camera2 API. It enumerates the
 * device's cameras, opens the active (preferably back-facing) one, owns the
 * capture session plus the preview/still requests, and exposes manual
 * exposure/sensitivity controls.
 */
class NDKCamera {
 private:
  ACameraManager* cameraMgr_;
  std::map<std::string, CameraId> cameras_;   // all enumerated cameras, keyed by id
  std::string activeCameraId_;                // id of the camera currently in use
  uint32_t cameraFacing_;
  uint32_t cameraOrientation_;
  std::vector<CaptureRequestInfo> requests_;  // indexed by PREVIEW_INDICES
  ACaptureSessionOutputContainer* outputContainer_;
  ACameraCaptureSession* captureSession_;
  CaptureSessionState captureSessionState_;
  // set up exposure control
  int64_t exposureTime_;
  RangeValue<int64_t> exposureRange_;
  int32_t sensitivity_;
  RangeValue<int32_t> sensitivityRange_;
  volatile bool valid_;  // set true at end of construction, false during teardown
  ACameraManager_AvailabilityCallbacks* GetManagerListener();
  ACameraDevice_stateCallbacks* GetDeviceListener();
  ACameraCaptureSession_stateCallbacks* GetSessionListener();
  ACameraCaptureSession_captureCallbacks* GetCaptureCallback();
 public:
  NDKCamera();
  ~NDKCamera();
  void EnumerateCamera(void);  // enumerate the camera devices on this system
  bool MatchCaptureSizeRequest(ANativeWindow* display, ImageFormat* view,
                               ImageFormat* capture);  // pick preview/capture resolutions
  void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow,
                     int32_t imageRotation);  // create the camera capture session
  bool GetSensorOrientation(int32_t* facing, int32_t* angle);
  void OnCameraStatusChanged(const char* id, bool available);
  void OnDeviceState(ACameraDevice* dev);
  void OnDeviceError(ACameraDevice* dev, int err);
  void OnSessionState(ACameraCaptureSession* ses, CaptureSessionState state);
  void OnCaptureSequenceEnd(ACameraCaptureSession* session, int sequenceId,
                            int64_t frameNumber);  // capture sequence finished
  void OnCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request,
                       ACameraCaptureFailure* failure);  // a capture failed
  void StartPreview(bool start);  // start/stop the repeating preview
  bool TakePhoto(void);           // take one still picture
  bool GetExposureRange(int64_t* min, int64_t* max, int64_t* curVal);
  bool GetSensitivityRange(int64_t* min, int64_t* max, int64_t* curVal);
  void UpdateCameraRequestParameter(int32_t code, int64_t val);  // update exposure/sensitivity
};
// helper classes to hold enumerated camera
// helper classes to hold enumerated camera
class CameraId {
 public:
  ACameraDevice* device_;  // opened device handle (nullptr until opened)
  std::string id_;         // system identifier of this camera
  acamera_metadata_enum_android_lens_facing_t facing_;
  bool available_;  // free to use ( no other apps are using
  bool owner_;      // we are the owner of the camera
  explicit CameraId(const char* id)  // construct from a system camera id
      : device_(nullptr),
        facing_(ACAMERA_LENS_FACING_FRONT),
        available_(false),
        owner_(false) {
    id_ = id;
  }
  // BUG FIX: the old body `{ CameraId(""); }` only built an unnamed temporary
  // and left every member of *this* uninitialized. Delegate to the real ctor.
  explicit CameraId(void) : CameraId("") {}
};
#endif // CAMERA_NATIVE_CAMERA_H
2、camera_manager.cpp
camera/basic/src/main/cpp/camera_manager.cpp
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "camera_manager.h"
#include <camera/NdkCameraManager.h>
#include <unistd.h>
#include <cinttypes>
#include <queue>
#include <utility>
#include "utils/camera_utils.h"
#include "utils/native_debug.h"
/**
 * Range of Camera Exposure Time:
 * Camera's capability range have a very long range which may be disturbing
 * on camera. For this sample purpose, clamp to a range showing visible
 * video on preview: 1000000ns ~ 250000000ns (matching the constants below)
 */
static const uint64_t kMinExposureTime = static_cast<uint64_t>(1000000);
static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000);
/**
 * NDKCamera constructor: create the ACameraManager, enumerate the cameras,
 * open the preferred (back-facing) one, register availability callbacks, and
 * read the sensor's exposure/sensitivity ranges to seed the manual controls.
 */
NDKCamera::NDKCamera()
    : cameraMgr_(nullptr),
      activeCameraId_(""),
      cameraFacing_(ACAMERA_LENS_FACING_BACK),
      cameraOrientation_(0),
      outputContainer_(nullptr),
      captureSessionState_(CaptureSessionState::MAX_STATE),
      exposureTime_(static_cast<int64_t>(0)) {
  valid_ = false;
  requests_.resize(CAPTURE_REQUEST_COUNT);
  memset(requests_.data(), 0, requests_.size() * sizeof(requests_[0]));
  cameras_.clear();
  cameraMgr_ = ACameraManager_create();  // instantiate the camera manager
  ASSERT(cameraMgr_, "Failed to create cameraManager");
  // Pick up a back-facing camera to preview
  EnumerateCamera();  // enumerate the camera devices
  ASSERT(activeCameraId_.size(), "Unknown ActiveCameraIdx");
  // Create back facing camera device
  CALL_MGR(openCamera(cameraMgr_, activeCameraId_.c_str(), GetDeviceListener(),
                      &cameras_[activeCameraId_].device_));  // open the camera
  CALL_MGR(registerAvailabilityCallback(cameraMgr_, GetManagerListener()));
  // Initialize camera controls(exposure time and sensitivity), pick
  // up value of 2% * range + min as starting value (just a number, no magic)
  ACameraMetadata* metadataObj;  // this camera's static characteristics
  CALL_MGR(getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(),
                                    &metadataObj));
  ACameraMetadata_const_entry val = {
      0,
  };
  camera_status_t status = ACameraMetadata_getConstEntry(
      metadataObj, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
  if (status == ACAMERA_OK) {
    // Clamp the reported exposure range to the sample's usable window.
    exposureRange_.min_ = val.data.i64[0];
    if (exposureRange_.min_ < kMinExposureTime) {
      exposureRange_.min_ = kMinExposureTime;
    }
    exposureRange_.max_ = val.data.i64[1];
    if (exposureRange_.max_ > kMaxExposureTime) {
      exposureRange_.max_ = kMaxExposureTime;
    }
    exposureTime_ = exposureRange_.value(2);
  } else {
    LOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
    exposureRange_.min_ = exposureRange_.max_ = 0l;
    exposureTime_ = 0l;
  }
  status = ACameraMetadata_getConstEntry(
      metadataObj, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);
  if (status == ACAMERA_OK) {
    sensitivityRange_.min_ = val.data.i32[0];
    sensitivityRange_.max_ = val.data.i32[1];
    sensitivity_ = sensitivityRange_.value(2);
  } else {
    LOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
    sensitivityRange_.min_ = sensitivityRange_.max_ = 0;
    sensitivity_ = 0;
  }
  // BUG FIX: the characteristics metadata was leaked. Release it like the
  // other call sites (EnumerateCamera / GetSensorOrientation) do; the values
  // read from `val` above have already been copied out.
  ACameraMetadata_free(metadataObj);
  valid_ = true;
}
/**
 * A helper class to assist image size comparison, by comparing the absolute
 * size regardless of the portrait or landscape mode.
 * Internally the size is normalized to landscape (w_ >= h_); portrait_
 * remembers the original orientation so org_width()/org_height() can undo it.
 */
class DisplayDimension {
 public:
  DisplayDimension(int32_t w, int32_t h) : w_(w), h_(h) {
    if (h > w) {
      // make it landscape, remembering the original orientation
      w_ = h;
      h_ = w;
      portrait_ = true;
    }
  }
  // Memberwise copy/assign/default-init are correct (Rule of Zero).
  DisplayDimension(void) = default;
  DisplayDimension(const DisplayDimension& other) = default;
  DisplayDimension& operator=(const DisplayDimension& other) = default;
  // True when both (landscape-normalized) sizes share the same aspect ratio.
  bool IsSameRatio(const DisplayDimension& other) const {
    return (w_ * other.h_ == h_ * other.w_);
  }
  // "At least as large in both dimensions".
  // BUG FIX: the original used bitwise '&'; use logical '&&'.
  bool operator>(const DisplayDimension& other) const {
    return (w_ >= other.w_ && h_ >= other.h_);
  }
  bool operator==(const DisplayDimension& other) const {
    return (w_ == other.w_ && h_ == other.h_ && portrait_ == other.portrait_);
  }
  // Componentwise difference, useful for size-delta computations.
  DisplayDimension operator-(const DisplayDimension& other) const {
    return DisplayDimension(w_ - other.w_, h_ - other.h_);
  }
  void Flip(void) { portrait_ = !portrait_; }
  bool IsPortrait(void) const { return portrait_; }
  int32_t width(void) const { return w_; }
  int32_t height(void) const { return h_; }
  int32_t org_width(void) const { return (portrait_ ? h_ : w_); }
  int32_t org_height(void) const { return (portrait_ ? w_ : h_); }
 private:
  int32_t w_ = 0, h_ = 0;
  bool portrait_ = false;
};
/**
 * Find a compatible camera mode:
 * 1) the same aspect ratio as the native display window, which should be a
 *    rotated version of the physical device
 * 2) the smallest resolution in the camera mode list (for the YUV preview)
 *    and the largest JPEG resolution (for still capture)
 * This is to minimize the later color space conversion workload.
 */
bool NDKCamera::MatchCaptureSizeRequest(ANativeWindow* display,
                                        ImageFormat* resView,
                                        ImageFormat* resCap) {
  DisplayDimension disp(ANativeWindow_getWidth(display),
                        ANativeWindow_getHeight(display));
  if (cameraOrientation_ == 90 || cameraOrientation_ == 270) {
    disp.Flip();  // sensor is rotated relative to the display
  }
  ACameraMetadata* metadata;
  CALL_MGR(
      getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(), &metadata));
  ACameraMetadata_const_entry entry;  // available stream configurations
  CALL_METADATA(getConstEntry(
      metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry));
  // format of the data: format, width, height, input?, type int32
  bool foundIt = false;
  DisplayDimension foundRes(4000, 4000);
  DisplayDimension maxJPG(0, 0);
  // FIX: entry.count is unsigned; use an unsigned index to avoid a
  // signed/unsigned comparison.
  for (uint32_t i = 0; i < entry.count; i += 4) {
    int32_t input = entry.data.i32[i + 3];
    int32_t format = entry.data.i32[i + 0];
    if (input) continue;  // skip input streams; we only produce output
    if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG) {
      DisplayDimension res(entry.data.i32[i + 1], entry.data.i32[i + 2]);
      if (!disp.IsSameRatio(res)) continue;
      if (format == AIMAGE_FORMAT_YUV_420_888 && foundRes > res) {
        foundIt = true;
        foundRes = res;  // keep the smallest matching YUV resolution
      } else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
        maxJPG = res;    // keep the largest matching JPEG resolution
      }
    }
  }
  // BUG FIX: the characteristics metadata was leaked. entry.data points into
  // it, so release only after the scan above has copied everything out.
  ACameraMetadata_free(metadata);
  if (foundIt) {
    resView->width = foundRes.org_width();
    resView->height = foundRes.org_height();
    resCap->width = maxJPG.org_width();
    resCap->height = maxJPG.org_height();
  } else {
    LOGW("Did not find any compatible camera resolution, taking 640x480");
    if (disp.IsPortrait()) {
      resView->width = 480;
      resView->height = 640;
    } else {
      resView->width = 640;
      resView->height = 480;
    }
    *resCap = *resView;
  }
  resView->format = AIMAGE_FORMAT_YUV_420_888;
  resCap->format = AIMAGE_FORMAT_JPEG;
  return foundIt;
}
/**
 * CreateSession(): wire the preview and JPEG windows into an output container,
 * build one capture request per window, create the capture session, and
 * pre-fill the preview request with manual exposure/sensitivity values.
 */
void NDKCamera::CreateSession(ANativeWindow* previewWindow,
                              ANativeWindow* jpgWindow, int32_t imageRotation) {
  // Create output from this app's ANativeWindow, and add into output container
  requests_[PREVIEW_REQUEST_IDX].outputNativeWindow_ = previewWindow;
  requests_[PREVIEW_REQUEST_IDX].template_ = TEMPLATE_PREVIEW;
  requests_[JPG_CAPTURE_REQUEST_IDX].outputNativeWindow_ = jpgWindow;
  requests_[JPG_CAPTURE_REQUEST_IDX].template_ = TEMPLATE_STILL_CAPTURE;
  CALL_CONTAINER(create(&outputContainer_));  // container for all session outputs
  for (auto& req : requests_) {
    // BUG FIX: this statement was missing its terminating ';'.
    ANativeWindow_acquire(req.outputNativeWindow_);
    CALL_OUTPUT(create(req.outputNativeWindow_, &req.sessionOutput_));
    CALL_CONTAINER(add(outputContainer_, req.sessionOutput_));
    CALL_TARGET(create(req.outputNativeWindow_, &req.target_));
    CALL_DEV(createCaptureRequest(cameras_[activeCameraId_].device_,
                                  req.template_, &req.request_));
    CALL_REQUEST(addTarget(req.request_, req.target_));
  }
  // Create a capture session for the given preview request
  captureSessionState_ = CaptureSessionState::READY;
  CALL_DEV(createCaptureSession(cameras_[activeCameraId_].device_,
                                outputContainer_, GetSessionListener(),
                                &captureSession_));
  // Tag still captures with the rotation the display/sensor combo requires.
  ACaptureRequest_setEntry_i32(requests_[JPG_CAPTURE_REQUEST_IDX].request_,
                               ACAMERA_JPEG_ORIENTATION, 1, &imageRotation);
  /*
   * Only preview request is in manual mode, JPG is always in Auto mode
   * JPG capture mode could also be switch into manual mode and control
   * the capture parameters, this sample leaves JPG capture to be auto mode
   * (auto control has better effect than author's manual control)
   */
  uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
  CALL_REQUEST(setEntry_u8(requests_[PREVIEW_REQUEST_IDX].request_,
                           ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff));
  CALL_REQUEST(setEntry_i32(requests_[PREVIEW_REQUEST_IDX].request_,
                            ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity_));
  CALL_REQUEST(setEntry_i64(requests_[PREVIEW_REQUEST_IDX].request_,
                            ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime_));
}
// Destructor: stop and close the session, release per-request resources,
// close all opened devices, and drop the camera manager (reverse of setup).
NDKCamera::~NDKCamera() {
  valid_ = false;
  // stop session if it is on:
  if (captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);  // stop the repeating preview
  }
  ACameraCaptureSession_close(captureSession_);
  for (auto& req : requests_) {
    CALL_REQUEST(removeTarget(req.request_, req.target_));  // detach target from request
    ACaptureRequest_free(req.request_);       // free the capture request
    ACameraOutputTarget_free(req.target_);    // free the output target
    CALL_CONTAINER(remove(outputContainer_, req.sessionOutput_));
    ACaptureSessionOutput_free(req.sessionOutput_);  // free the session output
    ANativeWindow_release(req.outputNativeWindow_);  // balance the acquire in CreateSession
  }
  requests_.resize(0);
  ACaptureSessionOutputContainer_free(outputContainer_);
  for (auto& cam : cameras_) {
    if (cam.second.device_) {
      CALL_DEV(close(cam.second.device_));  // close any opened camera device
    }
  }
  cameras_.clear();  // drop all enumerated camera records
  if (cameraMgr_) {
    CALL_MGR(unregisterAvailabilityCallback(cameraMgr_, GetManagerListener()));  // remove listener
    ACameraManager_delete(cameraMgr_);  // destroy the camera manager
    cameraMgr_ = nullptr;
  }
}
/**
 * EnumerateCamera()
 * Loop through cameras on the system, pick up
 * 1) back facing one if available
 * 2) otherwise pick the first one reported to us
 */
void NDKCamera::EnumerateCamera() {
  ACameraIdList* cameraIds = nullptr;
  CALL_MGR(getCameraIdList(cameraMgr_, &cameraIds));  // list all camera ids
  for (int i = 0; i < cameraIds->numCameras; ++i) {
    const char* id = cameraIds->cameraIds[i];
    ACameraMetadata* metadataObj;
    CALL_MGR(getCameraCharacteristics(cameraMgr_, id, &metadataObj));  // static camera info
    int32_t count = 0;
    const uint32_t* tags = nullptr;
    ACameraMetadata_getAllTags(metadataObj, &count, &tags);
    for (int tagIdx = 0; tagIdx < count; ++tagIdx) {
      if (ACAMERA_LENS_FACING == tags[tagIdx]) {
        ACameraMetadata_const_entry lensInfo = {
            0,
        };
        CALL_METADATA(getConstEntry(metadataObj, tags[tagIdx], &lensInfo));  // read lens facing
        CameraId cam(id);
        cam.facing_ = static_cast<acamera_metadata_enum_android_lens_facing_t>(
            lensInfo.data.u8[0]);
        cam.owner_ = false;
        cam.device_ = nullptr;
        cameras_[cam.id_] = cam;
        if (cam.facing_ == ACAMERA_LENS_FACING_BACK) {
          activeCameraId_ = cam.id_;  // prefer the back-facing camera
        }
        break;  // LENS_FACING found; stop scanning this camera's tags
      }
    }
    ACameraMetadata_free(metadataObj);  // release this camera's metadata
  }
  ASSERT(cameras_.size(), "No Camera Available on the device");
  if (activeCameraId_.length() == 0) {
    // if no back facing camera found, pick up the first one to use...
    activeCameraId_ = cameras_.begin()->second.id_;
  }
  ACameraManager_deleteCameraIdList(cameraIds);
}
/**
 * GetSensorOrientation()
 * Retrieve current sensor orientation regarding to the phone device
 * orientation
 * SensorOrientation is NOT settable.
 * @param facing optional out: lens facing (ACAMERA_LENS_FACING_*)
 * @param angle  optional out: sensor orientation angle in degrees
 * @return true on success, false when the camera manager is not available
 */
bool NDKCamera::GetSensorOrientation(int32_t* facing, int32_t* angle) {
  if (!cameraMgr_) {
    return false;
  }
  ACameraMetadata* metadataObj;
  ACameraMetadata_const_entry face, orientation;
  CALL_MGR(getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(),
                                    &metadataObj));
  CALL_METADATA(getConstEntry(metadataObj, ACAMERA_LENS_FACING, &face));
  cameraFacing_ = static_cast<int32_t>(face.data.u8[0]);
  CALL_METADATA(
      getConstEntry(metadataObj, ACAMERA_SENSOR_ORIENTATION, &orientation));
  LOGI("====Current SENSOR_ORIENTATION: %8d", orientation.data.i32[0]);
  ACameraMetadata_free(metadataObj);  // done with the metadata
  cameraOrientation_ = orientation.data.i32[0];
  if (facing) *facing = cameraFacing_;
  if (angle) *angle = cameraOrientation_;
  return true;
}
/**
 * StartPreview()
 * Toggle preview start/stop
 */
void NDKCamera::StartPreview(bool start) {
  if (start) {
    // Submit the preview request as a repeating request.
    CALL_SESSION(setRepeatingRequest(captureSession_, nullptr, 1,
                                     &requests_[PREVIEW_REQUEST_IDX].request_,
                                     nullptr));
  } else if (!start && captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);  // stop the repeating preview
  } else {
    // Asked to stop while the session is not ACTIVE: inconsistent state.
    ASSERT(false, "Conflict states(%s, %d)", (start ? "true" : "false"),
           static_cast<int>(captureSessionState_));
  }
}
/**
 * Capture one jpg photo into
 * /sdcard/DCIM/Camera
 * refer to WriteFile() for details
 */
bool NDKCamera::TakePhoto(void) {
  // Pause the repeating preview before submitting the one-shot capture.
  if (captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);
  }
  // Submit the still-capture request; remember its sequence id so the
  // capture callbacks can recognize it and resume the preview afterwards.
  CALL_SESSION(capture(captureSession_, GetCaptureCallback(), 1,
                       &requests_[JPG_CAPTURE_REQUEST_IDX].request_,
                       &requests_[JPG_CAPTURE_REQUEST_IDX].sessionSequenceId_));
  return true;
}
// Apply a user-requested exposure-time or sensitivity change to the preview
// request and resubmit it as the repeating request (with AE forced off).
// @param code ACAMERA_SENSOR_EXPOSURE_TIME or ACAMERA_SENSOR_SENSITIVITY
// @param val  new value (sensitivity is narrowed to int32_t — see below)
void NDKCamera::UpdateCameraRequestParameter(int32_t code, int64_t val) {
  ACaptureRequest* request = requests_[PREVIEW_REQUEST_IDX].request_;
  switch (code) {
    case ACAMERA_SENSOR_EXPOSURE_TIME:
      if (exposureRange_.Supported()) {
        exposureTime_ = val;
        CALL_REQUEST(setEntry_i64(request, ACAMERA_SENSOR_EXPOSURE_TIME, 1,
                                  &exposureTime_));  // update exposure time
      }
      break;
    case ACAMERA_SENSOR_SENSITIVITY:
      if (sensitivityRange_.Supported()) {
        // NOTE(review): int64_t val is narrowed into int32_t sensitivity_.
        sensitivity_ = val;
        CALL_REQUEST(setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1,
                                  &sensitivity_));  // update sensitivity
      }
      break;
    default:
      ASSERT(false, "==ERROR==: error code for CameraParameterChange: %d",
             code);
      return;
  }
  // Keep auto-exposure off so the manual values above take effect.
  uint8_t aeModeOff = ACAMERA_CONTROL_AE_MODE_OFF;
  CALL_REQUEST(setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeModeOff));
  CALL_SESSION(
      setRepeatingRequest(captureSession_, nullptr, 1, &request,
                          &requests_[PREVIEW_REQUEST_IDX].sessionSequenceId_));
}
/**
 * Retrieve Camera Exposure adjustable range.
 *
 * @param min Camera minimum exposure time in nanoseconds
 * @param max Camera maximum exposure time in nanoseconds
 * @param curVal receives the currently applied exposure time
 *
 * @return true  min/max/curVal are loaded with the camera's exposure values
 *         false camera has not initialized, no value available
 */
bool NDKCamera::GetExposureRange(int64_t* min, int64_t* max, int64_t* curVal) {
  bool rangeKnown = exposureRange_.Supported() && exposureTime_ != 0;
  bool outputsOk =
      (min != nullptr) && (max != nullptr) && (curVal != nullptr);
  if (!rangeKnown || !outputsOk) {
    return false;
  }
  *min = exposureRange_.min_;
  *max = exposureRange_.max_;
  *curVal = exposureTime_;
  return true;
}
/**
 * Retrieve Camera sensitivity range.
 *
 * @param min Camera minimum sensitivity
 * @param max Camera maximum sensitivity
 * @param curVal receives the currently applied sensitivity
 *
 * @return true  min/max/curVal are loaded with the camera's sensitivity values
 *         false camera has not initialized, no value available
 */
bool NDKCamera::GetSensitivityRange(int64_t* min, int64_t* max,
                                    int64_t* curVal) {
  bool rangeKnown = sensitivityRange_.Supported() && sensitivity_ != 0;
  bool outputsOk =
      (min != nullptr) && (max != nullptr) && (curVal != nullptr);
  if (!rangeKnown || !outputsOk) {
    return false;
  }
  *min = static_cast<int64_t>(sensitivityRange_.min_);
  *max = static_cast<int64_t>(sensitivityRange_.max_);
  *curVal = sensitivity_;
  return true;
}
3、camera_listeners.cpp
camera/basic/src/main/cpp/camera_listeners.cpp
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <camera/NdkCameraManager.h>
#include <cinttypes>
#include <queue>
#include <thread>
#include <utility>
#include "camera_manager.h"
#include "utils/camera_utils.h"
#include "utils/native_debug.h"
/*
 * Camera Manager Listener object
 */
// ACameraManager availability callback: a camera became available.
void OnCameraAvailable(void* ctx, const char* id) {
  reinterpret_cast<NDKCamera*>(ctx)->OnCameraStatusChanged(id, true);  // mark available
}
// ACameraManager availability callback: a camera became unavailable.
void OnCameraUnavailable(void* ctx, const char* id) {
  reinterpret_cast<NDKCamera*>(ctx)->OnCameraStatusChanged(id, false);  // mark unavailable
}
/**
 * OnCameraStatusChanged()
 * handles Callback from ACameraManager
 */
void NDKCamera::OnCameraStatusChanged(const char* id, bool available) {
  // Ignore callbacks that arrive while the object is not fully constructed
  // or is being torn down.
  if (!valid_) return;
  cameras_[std::string(id)].available_ = available;
}
/**
 * Construct a camera manager listener on the fly and return to caller
 *
 * NOTE: static storage means the struct is initialized once, with the `this`
 * of the first NDKCamera that calls here; fine for this single-camera sample.
 * @return ACameraManager_AvailabilityCallback
 */
ACameraManager_AvailabilityCallbacks* NDKCamera::GetManagerListener() {
  static ACameraManager_AvailabilityCallbacks cameraMgrListener = {
      .context = this,                               // instance handed back to the callbacks
      .onCameraAvailable = ::OnCameraAvailable,      // camera became available
      // BUG FIX: the trailing note here was raw text without '//',
      // which broke compilation.
      .onCameraUnavailable = ::OnCameraUnavailable,  // camera became unavailable
  };
  return &cameraMgrListener;
}
/*
 * CameraDevice callbacks
 */
// Device disconnected: forward to the owning NDKCamera instance.
void OnDeviceStateChanges(void* ctx, ACameraDevice* dev) {
  reinterpret_cast<NDKCamera*>(ctx)->OnDeviceState(dev);
}
// Device reported an error: forward to the owning NDKCamera instance.
void OnDeviceErrorChanges(void* ctx, ACameraDevice* dev, int err) {
  reinterpret_cast<NDKCamera*>(ctx)->OnDeviceError(dev, err);
}
// Build (once) and return the camera-device state listener wired to this
// instance. NOTE(review): static init captures the first caller's `this`.
ACameraDevice_stateCallbacks* NDKCamera::GetDeviceListener() {
  static ACameraDevice_stateCallbacks cameraDeviceListener = {
      .context = this,
      .onDisconnected = ::OnDeviceStateChanges,
      .onError = ::OnDeviceErrorChanges,
  };
  return &cameraDeviceListener;  // shared CameraDevice listener
}
/**
 * Handle Camera DeviceStateChanges msg, notify device is disconnected
 * simply close the camera
 */
void NDKCamera::OnDeviceState(ACameraDevice* dev) {  // device was disconnected
  std::string id(ACameraDevice_getId(dev));
  LOGW("device %s is disconnected", id.c_str());
  cameras_[id].available_ = false;
  ACameraDevice_close(cameras_[id].device_);  // close the camera device
  cameras_.erase(id);  // drop the record for this camera
}
/**
 * Handles Camera's deviceErrorChanges message, no action;
 * mainly debugging purpose.
 * Every recognized error code marks the camera as unavailable and not owned.
 */
void NDKCamera::OnDeviceError(ACameraDevice* dev, int err) {
  std::string id(ACameraDevice_getId(dev));
  LOGI("CameraDevice %s is in error %#x", id.c_str(), err);
  PrintCameraDeviceError(err);  // log a human-readable description
  CameraId& cam = cameras_[id];
  switch (err) {
    // All recognized errors get the same treatment: the camera is no longer
    // usable by (or owned by) this app.
    case ERROR_CAMERA_IN_USE:
    case ERROR_CAMERA_SERVICE:
    case ERROR_CAMERA_DEVICE:
    case ERROR_CAMERA_DISABLED:
    case ERROR_MAX_CAMERAS_IN_USE:
      cam.available_ = false;
      cam.owner_ = false;
      break;
    default:
      LOGI("Unknown Camera Device Error: %#x", err);
  }
}
// CaptureSession state callbacks
// Session closed: record the CLOSED state on the owning NDKCamera.
void OnSessionClosed(void* ctx, ACameraCaptureSession* ses) {
  LOGW("session %p closed", ses);
  reinterpret_cast<NDKCamera*>(ctx)->OnSessionState(
      ses, CaptureSessionState::CLOSED);
}
// Session ready (idle): record the READY state on the owning NDKCamera.
void OnSessionReady(void* ctx, ACameraCaptureSession* ses) {
  LOGW("session %p ready", ses);
  reinterpret_cast<NDKCamera*>(ctx)->OnSessionState(ses,
                                                    CaptureSessionState::READY);
}
// Session active (processing requests): record the ACTIVE state.
void OnSessionActive(void* ctx, ACameraCaptureSession* ses) {
  LOGW("session %p active", ses);
  reinterpret_cast<NDKCamera*>(ctx)->OnSessionState(
      ses, CaptureSessionState::ACTIVE);
}
// Build (once) and return the capture-session state listener wired to this
// instance. NOTE(review): static init captures the first caller's `this`.
ACameraCaptureSession_stateCallbacks* NDKCamera::GetSessionListener() {
  static ACameraCaptureSession_stateCallbacks sessionListener = {
      .context = this,
      .onClosed = ::OnSessionClosed,
      .onReady = ::OnSessionReady,
      .onActive = ::OnSessionActive,
  };
  return &sessionListener;  // shared CameraCaptureSession listener
}
/**
 * Handles capture session state changes.
 * Update into internal session state.
 */
void NDKCamera::OnSessionState(ACameraCaptureSession* ses,
                               CaptureSessionState state) {
  if (!ses || ses != captureSession_) {
    LOGW("CaptureSession is %s", (ses ? "NOT our session" : "NULL"));
    return;  // ignore null or stale sessions
  }
  ASSERT(state < CaptureSessionState::MAX_STATE, "Wrong state %d",
         static_cast<int>(state));
  captureSessionState_ = state;
}
// Capture callbacks, mostly information purpose
// Capture failed: handle off the camera callback thread via a detached thread.
// NOTE(review): the detached handler dereferences `request`/`failure`; their
// lifetime beyond this callback is not guaranteed by the code shown — verify.
void SessionCaptureCallback_OnFailed(void* context,
                                     ACameraCaptureSession* session,
                                     ACaptureRequest* request,
                                     ACameraCaptureFailure* failure) {
  std::thread captureFailedThread(&NDKCamera::OnCaptureFailed,
                                  static_cast<NDKCamera*>(context), session,
                                  request, failure);
  captureFailedThread.detach();  // fire-and-forget worker thread
}
// Capture sequence completed: notify the NDKCamera on a detached thread.
void SessionCaptureCallback_OnSequenceEnd(void* context,
                                          ACameraCaptureSession* session,
                                          int sequenceId, int64_t frameNumber) {
  std::thread sequenceThread(&NDKCamera::OnCaptureSequenceEnd,
                             static_cast<NDKCamera*>(context), session,
                             sequenceId, frameNumber);
  sequenceThread.detach();  // fire-and-forget worker thread
}
// Capture sequence aborted: reuse the sequence-end handler, passing -1 as
// the frame number to mark the abort.
void SessionCaptureCallback_OnSequenceAborted(void* context,
                                              ACameraCaptureSession* session,
                                              int sequenceId) {
  std::thread sequenceThread(&NDKCamera::OnCaptureSequenceEnd,
                             static_cast<NDKCamera*>(context), session,
                             sequenceId, static_cast<int64_t>(-1));
  sequenceThread.detach();  // fire-and-forget worker thread
}
// Build (once) the capture callbacks used for still (JPEG) captures.
// NOTE(review): static init captures the first caller's `this`.
ACameraCaptureSession_captureCallbacks* NDKCamera::GetCaptureCallback() {
  static ACameraCaptureSession_captureCallbacks captureListener{
      .context = this,
      .onCaptureStarted = nullptr,
      .onCaptureProgressed = nullptr,
      .onCaptureCompleted = nullptr,
      .onCaptureFailed = SessionCaptureCallback_OnFailed,  // capture failed
      .onCaptureSequenceCompleted = SessionCaptureCallback_OnSequenceEnd,  // sequence completed
      .onCaptureSequenceAborted = SessionCaptureCallback_OnSequenceAborted,  // sequence aborted
      .onCaptureBufferLost = nullptr,
  };
  return &captureListener;
}
/**
 * Process JPG capture SessionCaptureCallback_OnFailed event
 * If this is current JPG capture session, simply resume preview
 * @param session the capture session that failed
 * @param request the capture request that failed
 * @param failure for additional fail info.
 */
void NDKCamera::OnCaptureFailed(ACameraCaptureSession* session,
                                ACaptureRequest* request,
                                ACameraCaptureFailure* failure) {
  if (valid_ && request == requests_[JPG_CAPTURE_REQUEST_IDX].request_) {
    // NOTE(review): no ';' after ASSERT(...) — this relies on the macro in
    // utils/native_debug.h supplying its own terminator; confirm.
    ASSERT(failure->sequenceId ==
               requests_[JPG_CAPTURE_REQUEST_IDX].sessionSequenceId_,
           "Error jpg sequence id")
    StartPreview(true);  // resume the repeating preview request
  }
}
/**
 * Process event from JPEG capture
 * SessionCaptureCallback_OnSequenceEnd()
 * SessionCaptureCallback_OnSequenceAborted()
 *
 * If this is jpg capture, turn back on preview after a capture.
 */
void NDKCamera::OnCaptureSequenceEnd(ACameraCaptureSession* session,
                                     int sequenceId, int64_t frameNumber) {
  if (sequenceId != requests_[JPG_CAPTURE_REQUEST_IDX].sessionSequenceId_)
    return;  // not the still-capture sequence; nothing to do
  // resume preview
  CALL_SESSION(setRepeatingRequest(captureSession_, nullptr, 1,
                                   &requests_[PREVIEW_REQUEST_IDX].request_,
                                   nullptr));
}
4、camera_engine.h
camera/basic/src/main/cpp/camera_engine.h
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CAMERA_ENGINE_H__
#define __CAMERA_ENGINE_H__
#include <android/native_window.h>
#include <android_native_app_glue.h>
#include <functional>
#include <thread>
#include "camera_manager.h"
/**
 * basic CameraAppEngine: glue between the android_app (NativeActivity) event
 * loop, the JNI/UI layer, and the NDKCamera + ImageReader objects.
 */
class CameraEngine {
 public:
  explicit CameraEngine(android_app* app);
  ~CameraEngine();
  // Interfaces to android application framework
  struct android_app* AndroidApp(void) const;  // underlying android_app handle
  void OnAppInitWindow(void);    // native window has been created
  void DrawFrame(void);          // render one preview frame
  void OnAppConfigChange(void);  // app configuration changed
  void OnAppTermWindow(void);    // native window is going away
  // Native Window handlers
  int32_t GetSavedNativeWinWidth(void);   // saved window width
  int32_t GetSavedNativeWinHeight(void);  // saved window height
  int32_t GetSavedNativeWinFormat(void);  // saved window pixel format
  void SaveNativeWinRes(int32_t w, int32_t h, int32_t format);  // remember window geometry
  // UI handlers
  void RequestCameraPermission();             // ask the Java side for CAMERA permission
  void OnCameraPermission(jboolean granted);  // permission request result
  void EnableUI(void);                        // enable the UI controls
  void OnTakePhoto(void);                     // take a photo
  void OnCameraParameterChanged(int32_t code, int64_t val);  // exposure/sensitivity changed
  // Manage NDKCamera Object
  void CreateCamera(void);  // create the NDKCamera instance
  void DeleteCamera(void);  // destroy the NDKCamera instance
 private:
  void OnPhotoTaken(const char* fileName);  // a photo file has been written
  int GetDisplayRotation(void);             // query the current display rotation
  struct android_app* app_;
  ImageFormat savedNativeWinRes_;  // saved native window resolution/format
  bool cameraGranted_;             // camera permission granted?
  int rotation_;                   // display rotation
  volatile bool cameraReady_;      // camera ready for preview?
  NDKCamera* camera_;              // camera instance
  ImageReader* yuvReader_;         // YUV-format preview reader
  ImageReader* jpgReader_;         // JPEG-format still-capture reader
};
/**
* retrieve global singleton CameraEngine instance
* @return the only instance of CameraEngine in the app
*/
CameraEngine* GetAppEngine(void);//获取CameraEngine的单实例
#endif // __CAMERA_ENGINE_H__
5、camera_engine.cpp
camera/basic/src/main/cpp/camera_engine.cpp
/**
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Description
* Demonstrate NDK Camera interface added to android-24
*/
#include "camera_engine.h"
#include <cstdio>
#include "utils/native_debug.h"
/**
 * constructor and destructor for main application class
 * @param app native_app_glue environment
 * @return none
 */
CameraEngine::CameraEngine(android_app* app)  // constructor
    : app_(app),
      cameraGranted_(false),
      rotation_(0),
      cameraReady_(false),
      camera_(nullptr),
      yuvReader_(nullptr),
      jpgReader_(nullptr) {
  memset(&savedNativeWinRes_, 0, sizeof(savedNativeWinRes_));  // zero the saved window geometry
}
CameraEngine::~CameraEngine() {  // destructor
  cameraReady_ = false;
  DeleteCamera();  // tear down the camera and image readers
}
// Accessor for the android_app glue object owned by the framework.
struct android_app* CameraEngine::AndroidApp(void) const { return app_; }
/**
 * Create a camera object for onboard BACK_FACING camera
 */
void CameraEngine::CreateCamera(void) {
  // Camera needed to be requested at the run-time from Java SDK
  // if Not granted, do nothing.
  if (!cameraGranted_ || !app_->window) {
    LOGW("Camera Sample requires Full Camera access");
    return;
  }
  int32_t displayRotation = GetDisplayRotation();  // current display rotation
  rotation_ = displayRotation;
  camera_ = new NDKCamera();  // construct and open the camera
  ASSERT(camera_, "Failed to Create CameraObject");
  int32_t facing = 0, angle = 0, imageRotation = 0;
  if (camera_->GetSensorOrientation(&facing, &angle)) {
    // Compose sensor orientation with display rotation; the front camera
    // needs the mirrored (360 - x) correction.
    if (facing == ACAMERA_LENS_FACING_FRONT) {
      imageRotation = (angle + rotation_) % 360;
      imageRotation = (360 - imageRotation) % 360;
    } else {
      imageRotation = (angle - rotation_ + 360) % 360;
    }
  }
  LOGI("Phone Rotation: %d, Present Rotation Angle: %d", rotation_,
       imageRotation);
  ImageFormat view{0, 0, 0}, capture{0, 0, 0};
  camera_->MatchCaptureSizeRequest(app_->window, &view, &capture);  // pick resolutions
  ASSERT(view.width && view.height, "Could not find supportable resolution");
  // Request the necessary nativeWindow to OS
  bool portraitNativeWindow =
      (savedNativeWinRes_.width < savedNativeWinRes_.height);
  ANativeWindow_setBuffersGeometry(
      app_->window, portraitNativeWindow ? view.height : view.width,
      portraitNativeWindow ? view.width : view.height, WINDOW_FORMAT_RGBA_8888);
  yuvReader_ = new ImageReader(&view, AIMAGE_FORMAT_YUV_420_888);  // preview stream reader
  yuvReader_->SetPresentRotation(imageRotation);  // apply presentation rotation
  jpgReader_ = new ImageReader(&capture, AIMAGE_FORMAT_JPEG);  // still-capture reader
  jpgReader_->SetPresentRotation(imageRotation);  // apply presentation rotation
  jpgReader_->RegisterCallback(  // forwards the saved photo's file name
      this, [this](void* ctx, const char* str) -> void {
        reinterpret_cast<CameraEngine*>(ctx)->OnPhotoTaken(str);
      });
  // now we could create session
  camera_->CreateSession(yuvReader_->GetNativeWindow(),
                         jpgReader_->GetNativeWindow(), imageRotation);
}
/**
 * Release the camera device and both image readers.
 * Safe to call when some or all of them were never created.
 */
void CameraEngine::DeleteCamera(void) {
  cameraReady_ = false;
  // `delete` on a null pointer is a no-op, so no null checks are required.
  delete camera_;
  camera_ = nullptr;
  delete yuvReader_;
  yuvReader_ = nullptr;
  delete jpgReader_;
  jpgReader_ = nullptr;
}
/**
 * Initiate a camera run-time permission request to the Java side
 * implementation.
 * [ The request result will be passed back in function
 *   notifyCameraPermission() ]
 */
void CameraEngine::RequestCameraPermission() {
  if (!app_) return;
  JNIEnv* env;
  ANativeActivity* activity = app_->activity;
  activity->vm->GetEnv((void**)&env, JNI_VERSION_1_6);
  // Attach so this native thread may make JNI calls; detached below.
  activity->vm->AttachCurrentThread(&env, NULL);
  jobject activityObj = env->NewGlobalRef(activity->clazz);
  jclass clz = env->GetObjectClass(activityObj);
  // Invokes Java-side CameraActivity.RequestCamera().
  env->CallVoidMethod(activityObj,
                      env->GetMethodID(clz, "RequestCamera", "()V"));
  env->DeleteGlobalRef(activityObj);
  activity->vm->DetachCurrentThread();
}
/**
 * Process user's sensitivity and exposure value change.
 * All values are represented in int64_t even though exposure is just int32_t.
 * @param code ACAMERA_SENSOR_EXPOSURE_TIME or ACAMERA_SENSOR_SENSITIVITY
 * @param val corresponding value from user
 */
void CameraEngine::OnCameraParameterChanged(int32_t code, int64_t val) {
  // SeekBar JNI callbacks can arrive while the camera is being torn down
  // (OnAppTermWindow() deletes camera_ on another thread), so guard against
  // a missing camera instead of dereferencing a dangling/null pointer.
  if (!camera_) return;
  camera_->UpdateCameraRequestParameter(code, val);
}
/**
* The main function rendering a frame. In our case, it is yuv to RGBA8888
* converter
 * (image format converted from YUV to RGBA8888)
 */
void CameraEngine::DrawFrame(void) {
  // Nothing to do until the camera pipeline is up and producing frames.
  if (!cameraReady_ || !yuvReader_) return;
  AImage* image = yuvReader_->GetNextImage();
  if (!image) {
    return;  // no frame ready this iteration
  }
  ANativeWindow_acquire(app_->window);  // keep the window alive while its buffer is held
  ANativeWindow_Buffer buf;
  if (ANativeWindow_lock(app_->window, &buf, nullptr) < 0) {
    yuvReader_->DeleteImage(image);  // lock failed: the AImage must still be released
    return;
  }
  yuvReader_->DisplayImage(&buf, image);  // converts YUV->RGBA and deletes `image`
  ANativeWindow_unlockAndPost(app_->window);
  ANativeWindow_release(app_->window);
}
6、image_reader.h
camera/basic/src/main/cpp/image_reader.h
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CAMERA_IMAGE_READER_H
#define CAMERA_IMAGE_READER_H
#include <media/NdkImageReader.h>
#include <functional>
/*
 * ImageFormat:
 * A Data Structure to communicate resolution between camera and ImageReader.
 */
struct ImageFormat {
  int32_t width;   // in pixels
  int32_t height;  // in pixels
  int32_t format;  // AIMAGE_FORMATS value; the preview path in this demo
                   // uses YUV_420 (the capture path uses JPEG)
};
class ImageReader {
 public:
  /**
   * Ctor: creates an AImageReader for the given resolution/format and
   * installs the frame-available listener. Dtor releases the AImageReader.
   */
  explicit ImageReader(ImageFormat* res, enum AIMAGE_FORMATS format);
  ~ImageReader();
  /**
   * Report cached ANativeWindow, which was used to create camera's capture
   * session output.
   */
  ANativeWindow* GetNativeWindow(void);
  /**
   * Retrieve Image on the top of Reader's queue
   */
  AImage* GetNextImage(void);
  /**
   * Retrieve Image on the back of Reader's queue, dropping older images
   */
  AImage* GetLatestImage(void);
  /**
   * Delete Image
   * @param image {@link AImage} instance to be deleted
   */
  void DeleteImage(AImage* image);
  /**
   * AImageReader callback handler. Called by AImageReader when a frame is
   * captured.
   * (Internal function, not to be called by clients)
   */
  void ImageCallback(AImageReader* reader);
  /**
   * DisplayImage()
   * Present camera image to the given display buffer. Available image is
   * converted to the display buffer format. Supported display formats:
   *    WINDOW_FORMAT_RGBX_8888
   *    WINDOW_FORMAT_RGBA_8888
   * @param buf {@link ANativeWindow_Buffer} for image to display to.
   * @param image a {@link AImage} instance, source of image conversion.
   *            it will be deleted via {@link AImage_delete}
   * @return true on success, false on failure
   */
  bool DisplayImage(ANativeWindow_Buffer* buf, AImage* image);
  /**
   * Configure the rotation angle necessary to apply to the camera image when
   * presenting; all rotations should be accumulated:
   *    CameraSensorOrientation + Android Device Native Orientation +
   *    Human Rotation (rotated degree relative to the phone's native
   *    orientation)
   */
  void SetPresentRotation(int32_t angle);
  /**
   * Register a callback function for the client to be notified that a jpeg
   * has already been written out.
   * @param ctx is client context when callback is invoked
   * @param callback is the actual callback function
   */
  void RegisterCallback(void* ctx,
                        std::function<void(void* ctx, const char* fileName)>);

 private:
  int32_t presentRotation_;  // 0/90/180/270; dispatched in DisplayImage()
  AImageReader* reader_;     // owned; released in the destructor
  std::function<void(void* ctx, const char* fileName)> callback_;  // jpeg-written notification
  void* callbackCtx_;        // opaque client context handed back in callback_
  void PresentImage(ANativeWindow_Buffer* buf, AImage* image);     // no rotation
  void PresentImage90(ANativeWindow_Buffer* buf, AImage* image);
  void PresentImage180(ANativeWindow_Buffer* buf, AImage* image);
  void PresentImage270(ANativeWindow_Buffer* buf, AImage* image);
  void WriteFile(AImage* image);  // writes a captured JPEG to disk
};
#endif // CAMERA_IMAGE_READER_H
7、image_reader.cpp
camera/basic/src/main/cpp/image_reader.cpp
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "image_reader.h"
#include <dirent.h>
#include <cstdlib>
#include <ctime>
#include <functional>
#include <string>
#include <thread>
#include "utils/native_debug.h"
/*
 * For JPEG capture, captured files are saved under kDirName.
 * File names carry the capture timestamp (see WriteFile()), e.g.
 *    capture314-75930.jpg
 */
static const char *kDirName = "/sdcard/DCIM/Camera/";  // output directory
static const char *kFileName = "capture";              // file-name prefix
/**
 * MAX_BUF_COUNT:
 * Max buffers in this ImageReader.
 */
#define MAX_BUF_COUNT 4
/**
 * AImageReader "frame available" trampoline: forwards the C-style callback
 * into the owning ImageReader instance stored as the listener context, so
 * the class can do its housekeeping (e.g. skip frames when the queue is
 * nearly full).
 */
void OnImageCallback(void *ctx, AImageReader *reader) {
  auto *self = reinterpret_cast<ImageReader *>(ctx);
  self->ImageCallback(reader);
}
/**
 * Constructor: create the AImageReader and install the per-frame listener.
 * @param res requested image resolution
 * @param format AIMAGE_FORMAT_YUV_420_888 for preview or AIMAGE_FORMAT_JPEG
 *        for still capture
 */
ImageReader::ImageReader(ImageFormat *res, enum AIMAGE_FORMATS format)
    : presentRotation_(0), reader_(nullptr) {
  callback_ = nullptr;
  callbackCtx_ = nullptr;
  media_status_t status = AImageReader_new(res->width, res->height, format,
                                           MAX_BUF_COUNT, &reader_);
  ASSERT(reader_ && status == AMEDIA_OK, "Failed to create AImageReader");
  // NOTE(review): the listener struct is stack-local; this assumes
  // AImageReader_setImageListener copies it -- confirm against NDK docs.
  AImageReader_ImageListener listener{
      .context = this,
      .onImageAvailable = OnImageCallback,
  };
  AImageReader_setImageListener(reader_, &listener);
}
ImageReader::~ImageReader() {
  // reader_ is created in the constructor and must exist here.
  ASSERT(reader_, "NULL Pointer to %s", __FUNCTION__);
  AImageReader_delete(reader_);  // releases the reader and its buffers
}
/**
 * Register a client callback invoked after a jpeg file has been written out.
 * @param ctx  opaque client context passed back to the callback
 * @param func the callback; receives ctx and the written file's path
 */
void ImageReader::RegisterCallback(
    void *ctx, std::function<void(void *ctx, const char *fileName)> func) {
  callbackCtx_ = ctx;
  // `func` is a by-value sink parameter: move it instead of copying the
  // std::function (which may own a heap-allocated closure).
  callback_ = std::move(func);
}
/**
 * Per-frame handler: for JPEG frames, acquire the image and hand it to a
 * detached background thread which writes it to disk (WriteFile() takes
 * ownership and deletes the AImage). YUV preview frames are left in the
 * queue for DrawFrame()/GetNextImage().
 */
void ImageReader::ImageCallback(AImageReader *reader) {
  int32_t format;
  media_status_t status = AImageReader_getFormat(reader, &format);
  ASSERT(status == AMEDIA_OK, "Failed to get the media format");
  if (format == AIMAGE_FORMAT_JPEG) {
    AImage *image = nullptr;
    // Distinct name: the original declaration shadowed the outer `status`,
    // which hides the first result and trips -Wshadow.
    media_status_t acquireStatus =
        AImageReader_acquireNextImage(reader, &image);
    ASSERT(acquireStatus == AMEDIA_OK && image, "Image is not available");
    // Create a thread and write out the jpeg file without blocking the
    // camera callback.
    std::thread writeFileHandler(&ImageReader::WriteFile, this, image);
    writeFileHandler.detach();
  }
}
/**
 * Report the ANativeWindow backing this reader; used as the camera capture
 * session's output surface. Returns nullptr when the reader was never
 * created.
 */
ANativeWindow *ImageReader::GetNativeWindow(void) {
  if (!reader_) return nullptr;
  ANativeWindow *window = nullptr;
  media_status_t st = AImageReader_getWindow(reader_, &window);
  ASSERT(st == AMEDIA_OK, "Could not get ANativeWindow");
  return window;
}
/**
 * GetNextImage()
 * Retrieve the oldest image in the reader's queue (no frame skipping).
 * Recommended for batch/background processing.
 * @return the acquired AImage, or nullptr when none is available.
 */
AImage *ImageReader::GetNextImage(void) {
  AImage *image = nullptr;
  if (AImageReader_acquireNextImage(reader_, &image) != AMEDIA_OK) {
    return nullptr;
  }
  return image;
}
/**
 * GetLatestImage()
 * Retrieve the newest image in the reader's queue, discarding anything older
 * still queued in front of it. Recommended for real-time processing.
 * @return the acquired AImage, or nullptr when none is available.
 */
AImage *ImageReader::GetLatestImage(void) {
  AImage *image = nullptr;
  if (AImageReader_acquireLatestImage(reader_, &image) != AMEDIA_OK) {
    return nullptr;
  }
  return image;
}
/**
 * Release an image previously acquired from this reader.
 * @param image {@link AImage} instance to release; null is tolerated.
 */
void ImageReader::DeleteImage(AImage *image) {
  if (image != nullptr) {
    AImage_delete(image);
  }
}
/**
* Helper function for YUV_420 to RGB conversion. Courtesy of Tensorflow
* ImageClassifier Sample:
* https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/android/jni/yuv2rgb.cc
* The difference is that here we have to swap UV plane when calling it.
*/
#ifndef MAX
#define MAX(a, b) \
({ \
__typeof__(a) _a = (a); \
__typeof__(b) _b = (b); \
_a > _b ? _a : _b; \
})
#define MIN(a, b) \
({ \
__typeof__(a) _a = (a); \
__typeof__(b) _b = (b); \
_a < _b ? _a : _b; \
})
#endif
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
// ranges are normalized to eight bits.
static const int kMaxChannelValue = 262143;
/**
 * Integer-only YUV -> RGBA conversion (fixed point, 10 fractional bits).
 * Floating point equivalent (conversion kept in integers because some
 * Android devices lack hardware floating point):
 *    R = 1.164 * (Y-16) + 1.596 * (V-128)
 *    G = 1.164 * (Y-16) - 0.813 * (V-128) - 0.391 * (U-128)
 *    B = 1.164 * (Y-16) + 2.018 * (U-128)
 * Clamping now uses plain ternaries instead of the file's MIN()/MAX()
 * macros, which are GNU statement-expression extensions (non-standard C++).
 * @return pixel packed as little-endian RGBA with alpha forced to 0xff.
 */
static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
  nY -= 16;
  nU -= 128;
  nV -= 128;
  if (nY < 0) nY = 0;
  int nR = 1192 * nY + 1634 * nV;
  int nG = 1192 * nY - 833 * nV - 400 * nU;
  int nB = 1192 * nY + 2066 * nU;
  // Clamp to [0, kMaxChannelValue], then drop the 10 fraction bits.
  nR = (nR < 0 ? 0 : (nR > kMaxChannelValue ? kMaxChannelValue : nR)) >> 10;
  nG = (nG < 0 ? 0 : (nG > kMaxChannelValue ? kMaxChannelValue : nG)) >> 10;
  nB = (nB < 0 ? 0 : (nB > kMaxChannelValue ? kMaxChannelValue : nB)) >> 10;
  return 0xff000000 | ((nB & 0xff) << 16) | ((nG & 0xff) << 8) | (nR & 0xff);
}
/**
 * Convert the YUV image inside an AImage into an ANativeWindow_Buffer.
 * ANativeWindow_Buffer format is guaranteed to be
 *    WINDOW_FORMAT_RGBX_8888
 *    WINDOW_FORMAT_RGBA_8888
 * @param buf a {@link ANativeWindow_Buffer } instance, destination of
 *            image conversion
 * @param image a {@link AImage} instance, source of image conversion.
 *            it will be deleted via {@link AImage_delete}
 */
bool ImageReader::DisplayImage(ANativeWindow_Buffer *buf, AImage *image) {
  ASSERT(buf->format == WINDOW_FORMAT_RGBX_8888 ||
             buf->format == WINDOW_FORMAT_RGBA_8888,
         "Not supported buffer format");
  int32_t srcFormat = -1;
  AImage_getFormat(image, &srcFormat);
  ASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
  int32_t srcPlanes = 0;
  AImage_getNumberOfPlanes(image, &srcPlanes);
  ASSERT(srcPlanes == 3, "Is not 3 planes");
  // Dispatch on the accumulated presentation rotation configured via
  // SetPresentRotation().
  switch (presentRotation_) {
    case 0:
      PresentImage(buf, image);
      break;
    case 90:
      PresentImage90(buf, image);
      break;
    case 180:
      PresentImage180(buf, image);
      break;
    case 270:
      PresentImage270(buf, image);
      break;
    default:
      ASSERT(0, "NOT recognized display rotation: %d", presentRotation_);
  }
  AImage_delete(image);  // the source image is consumed in all cases
  return true;
}
/*
 * PresentImage()
 * Converting yuv to RGB
 * No rotation: (x,y) --> (x, y)
 * Refer to:
 * https://mathbits.com/MathBits/TISection/Geometry/Transformations2.htm
 */
void ImageReader::PresentImage(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &uPixel, &uLen);
  AImage_getPlaneData(image, 2, &vPixel, &vLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  // Clip to whichever is smaller: the display buffer or the image crop rect.
  int32_t height = MIN(buf->height, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->width, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    // Chroma is subsampled 2x vertically: one U/V row per two luma rows.
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      // 2x horizontal chroma subsampling: one U/V sample per two pixels.
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out += buf->stride;  // advance one display row (stride may exceed width)
  }
}
/*
 * PresentImage90()
 * Converting YUV to RGB
 * Rotate image anti-clockwise 90 degrees: (x, y) --> (-y, x)
 */
void ImageReader::PresentImage90(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &uPixel, &uLen);
  AImage_getPlaneData(image, 2, &vPixel, &vLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  // Source rows become display columns, so the clip limits are swapped
  // relative to the un-rotated case.
  int32_t height = MIN(buf->width, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->height, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  out += height - 1;  // start writing at the right-most output column
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      // [x, y] --> [-y, x]: walk down an output column per source row
      out[x * buf->stride] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out -= 1;  // move one output column to the left
  }
}
/*
 * PresentImage180()
 * Converting yuv to RGB
 * Rotate image 180 degrees: (x, y) --> (-x, -y)
 */
void ImageReader::PresentImage180(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &uPixel, &uLen);
  AImage_getPlaneData(image, 2, &vPixel, &vLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  int32_t height = MIN(buf->height, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->width, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  out += (height - 1) * buf->stride;  // start at the last output row
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      // Row reversal (out walks bottom-up) plus this horizontal mirror;
      // per the original author, mirroring compensates for the front camera.
      out[width - 1 - x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
      // out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out -= buf->stride;  // move one output row up
  }
}
/*
 * PresentImage270()
 * Converting image from YUV to RGB
 * Rotate image counter-clockwise 270 degrees: (x, y) --> (y, x)
 */
void ImageReader::PresentImage270(ANativeWindow_Buffer *buf, AImage *image) {
  AImageCropRect srcRect;
  AImage_getCropRect(image, &srcRect);
  int32_t yStride, uvStride;
  uint8_t *yPixel, *uPixel, *vPixel;
  int32_t yLen, uLen, vLen;
  AImage_getPlaneRowStride(image, 0, &yStride);
  AImage_getPlaneRowStride(image, 1, &uvStride);
  AImage_getPlaneData(image, 0, &yPixel, &yLen);
  AImage_getPlaneData(image, 1, &uPixel, &uLen);
  AImage_getPlaneData(image, 2, &vPixel, &vLen);
  int32_t uvPixelStride;
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);
  // Source rows become display columns; clip limits are swapped accordingly.
  int32_t height = MIN(buf->width, (srcRect.bottom - srcRect.top));
  int32_t width = MIN(buf->height, (srcRect.right - srcRect.left));
  uint32_t *out = static_cast<uint32_t *>(buf->bits);
  for (int32_t y = 0; y < height; y++) {
    const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
    int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
    const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
    const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
    for (int32_t x = 0; x < width; x++) {
      const int32_t uv_offset = (x >> 1) * uvPixelStride;
      // Each source row fills one output column, bottom-to-top.
      out[(width - 1 - x) * buf->stride] =
          YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
    }
    out += 1;  // move to the next output column
  }
}
/**
 * Record the accumulated rotation (0/90/180/270 degrees) to apply when a
 * frame is presented via DisplayImage().
 */
void ImageReader::SetPresentRotation(int32_t angle) { presentRotation_ = angle; }
/**
 * Write a captured JPEG frame out to the kDirName directory.
 * The file name embeds the capture timestamp:
 *    capture<month><day>-<hour><min><sec>.jpg
 * Takes ownership of `image` and always deletes it before returning.
 * @param image captured jpg image
 */
void ImageReader::WriteFile(AImage *image) {
  int planeCount = 0;
  media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
  ASSERT(status == AMEDIA_OK && planeCount == 1,
         "Error: getNumberOfPlanes() planeCount = %d", planeCount);
  uint8_t *data = nullptr;
  int len = 0;
  AImage_getPlaneData(image, 0, &data, &len);

  // Make sure the destination directory exists. kDirName is a compile-time
  // constant, so the shell command carries no untrusted input.
  DIR *dir = opendir(kDirName);
  if (dir) {
    closedir(dir);
  } else {
    std::string cmd = "mkdir -p ";
    cmd += kDirName;
    system(cmd.c_str());
  }

  struct timespec ts {
    0, 0
  };
  clock_gettime(CLOCK_REALTIME, &ts);
  struct tm localTime;
  localtime_r(&ts.tv_sec, &localTime);
  std::string fileName = kDirName;
  std::string dash("-");
  // tm_mon is 0-based (0 = January); add 1 so January is not labeled "0".
  fileName += kFileName + std::to_string(localTime.tm_mon + 1) +
              std::to_string(localTime.tm_mday) + dash +
              std::to_string(localTime.tm_hour) +
              std::to_string(localTime.tm_min) +
              std::to_string(localTime.tm_sec) + ".jpg";
  FILE *file = fopen(fileName.c_str(), "wb");
  if (file && data && len) {
    size_t written = fwrite(data, 1, len, file);
    fclose(file);
    // Only report success to the client when the whole payload made it to
    // disk (previously a short write still triggered the callback).
    if (written == static_cast<size_t>(len) && callback_) {
      callback_(callbackCtx_, fileName.c_str());
    }
  } else {
    if (file) fclose(file);
  }
  AImage_delete(image);
}
8、camera_ui.cpp
camera/basic/src/main/cpp/camera_ui.cpp
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <utils/native_debug.h>
#include "camera_engine.h"
/**
 * Retrieve the current display rotation from the Java side.
 *
 * @return current rotation angle in degrees
 */
int CameraEngine::GetDisplayRotation() {
  ASSERT(app_, "Application is not initialized");
  JNIEnv *env;
  ANativeActivity *activity = app_->activity;
  activity->vm->GetEnv((void **)&env, JNI_VERSION_1_6);
  // Attach so this native thread can make JNI calls; detached below.
  activity->vm->AttachCurrentThread(&env, NULL);
  jobject activityObj = env->NewGlobalRef(activity->clazz);
  jclass clz = env->GetObjectClass(activityObj);
  // Calls Java-side CameraActivity.getRotationDegree().
  jint newOrientation = env->CallIntMethod(
      activityObj, env->GetMethodID(clz, "getRotationDegree", "()I"));
  env->DeleteGlobalRef(activityObj);
  activity->vm->DetachCurrentThread();
  return newOrientation;
}
/**
 * Initialize UI on the Java side. The 2 seekBars' values are passed in an
 * array as tuples of ( min, max, curVal ):
 * 0: exposure min
 * 1: exposure max
 * 2: exposure val
 * 3: sensitivity min
 * 4: sensitivity max
 * 5: sensitivity val
 */
const int kInitDataLen = 6;
void CameraEngine::EnableUI(void) {
  JNIEnv *jni;
  app_->activity->vm->AttachCurrentThread(&jni, NULL);
  int64_t range[3];
  // Default class retrieval
  jclass clazz = jni->GetObjectClass(app_->activity->clazz);
  jmethodID methodID = jni->GetMethodID(clazz, "EnableUI", "([J)V");
  jlongArray initData = jni->NewLongArray(kInitDataLen);
  ASSERT(initData && methodID, "JavaUI interface Object failed(%p, %p)",
         methodID, initData);
  // A camera without a supported range reports all zeros to the Java UI.
  if (!camera_->GetExposureRange(&range[0], &range[1], &range[2])) {
    memset(range, 0, sizeof(int64_t) * 3);
  }
  jni->SetLongArrayRegion(initData, 0, 3, range);
  if (!camera_->GetSensitivityRange(&range[0], &range[1], &range[2])) {
    memset(range, 0, sizeof(int64_t) * 3);
  }
  jni->SetLongArrayRegion(initData, 3, 3, range);
  jni->CallVoidMethod(app_->activity->clazz, methodID, initData);
  app_->activity->vm->DetachCurrentThread();
}
/**
 * Handle a UI request to take a photo into /sdcard/DCIM/Camera.
 * Silently ignored when the camera has not been created yet.
 */
void CameraEngine::OnTakePhoto() {
  if (camera_ == nullptr) {
    return;
  }
  camera_->TakePhoto();
}
// Notify the Java UI that a photo was saved as `fileName`. Called from the
// jpeg writer thread, hence the Attach/Detach pair around the JNI calls.
void CameraEngine::OnPhotoTaken(const char *fileName) {
  JNIEnv *jni;
  app_->activity->vm->AttachCurrentThread(&jni, NULL);
  // Default class retrieval
  jclass clazz = jni->GetObjectClass(app_->activity->clazz);
  jmethodID methodID =
      jni->GetMethodID(clazz, "OnPhotoTaken", "(Ljava/lang/String;)V");
  jstring javaName = jni->NewStringUTF(fileName);
  // Calls Java-side CameraActivity.OnPhotoTaken(String).
  jni->CallVoidMethod(app_->activity->clazz, methodID, javaName);
  app_->activity->vm->DetachCurrentThread();
}
/**
 * Process user camera and disk writing permission.
 * Resume application initialization after user granted camera and disk usage.
 * If user denied permission, do nothing: no camera.
 *
 * @param granted user's authorization for camera and disk usage.
 * @return none
 */
void CameraEngine::OnCameraPermission(jboolean granted) {
  cameraGranted_ = (granted != JNI_FALSE);
  // TODO: Fail gracefully.
  ASSERT(cameraGranted_, "required app permissions were not granted");
  OnAppInitWindow();  // continue the deferred window initialization
}
/**
 * A couple of UI handlers ( from UI ):
 *    user camera and disk permission
 *    exposure and sensitivity SeekBars
 *    takePhoto button
 */
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_notifyCameraPermission(
    JNIEnv *env, jclass type, jboolean permission) {
  // Handle the result off the Java UI thread; the handler resumes app
  // initialization when permission was granted.
  std::thread permissionHandler(&CameraEngine::OnCameraPermission,
                                GetAppEngine(), permission);
  permissionHandler.detach();
}
// JNI entry: the takePhoto button was pressed. Capture runs on a detached
// worker thread so the Java UI thread is never blocked.
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_TakePhoto(JNIEnv *env,
                                                      jclass type) {
  std::thread handler(&CameraEngine::OnTakePhoto, GetAppEngine());
  handler.detach();
}
// JNI entry: exposure SeekBar moved; forward the percent value to the engine.
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_OnExposureChanged(
    JNIEnv *env, jobject instance, jlong exposurePercent) {
  GetAppEngine()->OnCameraParameterChanged(ACAMERA_SENSOR_EXPOSURE_TIME,
                                           exposurePercent);
}
// JNI entry: sensitivity SeekBar moved; forward the value to the engine.
extern "C" JNIEXPORT void JNICALL
Java_com_sample_camera_basic_CameraActivity_OnSensitivityChanged(
    JNIEnv *env, jobject instance, jlong sensitivity) {
  GetAppEngine()->OnCameraParameterChanged(ACAMERA_SENSOR_SENSITIVITY,
                                           sensitivity);
}
9、android_main.cpp
camera/basic/src/main/cpp/android_main.cpp
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "camera_engine.h"
#include "utils/native_debug.h"
/*
 * SampleEngine global object
 */
static CameraEngine* pEngineObj = nullptr;
// Accessor used by the JNI entry points; only valid after android_main()
// has assigned the engine (asserted below).
CameraEngine* GetAppEngine(void) {
  ASSERT(pEngineObj, "AppEngine has not initialized");
  return pEngineObj;
}
/**
 * NativeActivity command handler: creates/destroys the camera on
 * INIT_WINDOW/TERM_WINDOW and reacts to configuration changes; every other
 * event is ignored.
 */
static void ProcessAndroidCmd(struct android_app* app, int32_t cmd) {
  auto* engine = reinterpret_cast<CameraEngine*>(app->userData);
  if (cmd == APP_CMD_INIT_WINDOW) {
    if (engine->AndroidApp()->window != NULL) {
      // Remember the window's original geometry so TERM_WINDOW can restore it.
      engine->SaveNativeWinRes(ANativeWindow_getWidth(app->window),
                               ANativeWindow_getHeight(app->window),
                               ANativeWindow_getFormat(app->window));
      engine->OnAppInitWindow();
    }
  } else if (cmd == APP_CMD_TERM_WINDOW) {
    engine->OnAppTermWindow();
    ANativeWindow_setBuffersGeometry(
        app->window, engine->GetSavedNativeWinWidth(),
        engine->GetSavedNativeWinHeight(), engine->GetSavedNativeWinFormat());
  } else if (cmd == APP_CMD_CONFIG_CHANGED) {
    engine->OnAppConfigChange();
  }
  // APP_CMD_LOST_FOCUS and all other commands are intentionally ignored.
}
// Application entry point (NativeActivity glue): pumps looper events and
// renders one preview frame per loop iteration until destruction is requested.
extern "C" void android_main(struct android_app* state) {
  CameraEngine engine(state);
  pEngineObj = &engine;  // expose the stack-local engine to JNI entry points
  state->userData = reinterpret_cast<void*>(&engine);
  state->onAppCmd = ProcessAndroidCmd;
  // loop waiting for stuff to do.
  while (!state->destroyRequested) {
    struct android_poll_source* source = nullptr;
    // Timeout 0: never block, so DrawFrame() keeps pulling preview images.
    auto result = ALooper_pollOnce(0, NULL, nullptr, (void**)&source);
    ASSERT(result != ALOOPER_POLL_ERROR, "ALooper_pollOnce returned an error");
    if (source != NULL) {
      source->process(state, source);  // dispatches to ProcessAndroidCmd
    }
    pEngineObj->DrawFrame();
  }
  LOGI("CameraEngine thread destroy requested!");
  engine.DeleteCamera();  // release the camera before `engine` leaves scope
  pEngineObj = nullptr;
}
/**
 * Handle Android System APP_CMD_INIT_WINDOW message:
 * request camera permission from the Java side, then create the camera
 * object once permission has been granted.
 */
void CameraEngine::OnAppInitWindow(void) {
  if (!cameraGranted_) {
    // Not permitted to use camera yet, ask (again) and defer other events;
    // OnCameraPermission() re-enters this function once granted.
    RequestCameraPermission();
    return;
  }
  rotation_ = GetDisplayRotation();
  CreateCamera();
  ASSERT(camera_, "CameraCreation Failed");
  EnableUI();  // push exposure/sensitivity ranges to the Java seek bars
  // NativeActivity is ready to display, start pulling images.
  cameraReady_ = true;
  camera_->StartPreview(true);
}
/**
 * Handle APP_CMD_TERM_WINDOW: stop presenting frames and release the camera.
 */
void CameraEngine::OnAppTermWindow(void) {
  cameraReady_ = false;
  DeleteCamera();
}
/**
 * Handle APP_CMD_CONFIG_CHANGED: when the display rotation actually changed,
 * recycle the window/camera so the preview matches the new orientation.
 */
void CameraEngine::OnAppConfigChange(void) {
  const int newRotation = GetDisplayRotation();
  if (newRotation == rotation_) {
    return;  // orientation unchanged: nothing to do
  }
  OnAppTermWindow();
  rotation_ = newRotation;
  OnAppInitWindow();
}
/**
 * Retrieve saved native window width.
 * @return width of native window in pixels
 */
int32_t CameraEngine::GetSavedNativeWinWidth(void) {
  return savedNativeWinRes_.width;
}
/**
 * Retrieve saved native window height.
 * @return height of native window in pixels
 */
int32_t CameraEngine::GetSavedNativeWinHeight(void) {
  return savedNativeWinRes_.height;
}
/**
 * Retrieve saved native window format.
 * @return pixel format of native window
 */
int32_t CameraEngine::GetSavedNativeWinFormat(void) {
  return savedNativeWinRes_.format;
}
/**
 * Save the original NativeWindow resolution so APP_CMD_TERM_WINDOW can
 * restore the window's buffer geometry.
 * @param w width of native window in pixels
 * @param h height of native window in pixels
 * @param format pixel format of the native window
 */
void CameraEngine::SaveNativeWinRes(int32_t w, int32_t h, int32_t format) {
  savedNativeWinRes_.width = w;
  savedNativeWinRes_.height = h;
  savedNativeWinRes_.format = format;
}
10、camera_utils.cpp
camera/common/utils/camera_utils.cpp
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "camera_utils.h"
#include <camera/NdkCameraManager.h>
#include <media/NdkImage.h>
#include <cinttypes>
#include <string>
#include <typeinfo>
#include <utility>
#include <vector>
#include "utils/native_debug.h"
// Fallback string returned when a key is missing from a lookup table.
// The emitted text previously read "UNKNOW_TAG" (typo); the macro NAME is
// intentionally left as-is because other code in this file references it.
#define UKNOWN_TAG "UNKNOWN_TAG"
// Build a (value, "value") pair for the translation tables below.
#define MAKE_PAIR(val) std::make_pair(val, #val)
/**
 * Look up the human-readable name paired with `key` in `store`.
 * Logs a warning and falls back to UKNOWN_TAG when the key is absent.
 */
template <typename T>
const char* GetPairStr(T key, std::vector<std::pair<T, const char*>>& store) {
  for (const auto& entry : store) {
    if (entry.first == key) {
      return entry.second;
    }
  }
  LOGW("(%#08x) : UNKNOWN_TAG for %s", key, typeid(store[0].first).name());
  return UKNOWN_TAG;
}
/*
 * camera_status_t error translation: maps each NDK camera status code to its
 * symbolic name for logging (see GetErrorStr()).
 */
using ERROR_PAIR = std::pair<camera_status_t, const char*>;
static std::vector<ERROR_PAIR> errorInfo{
    MAKE_PAIR(ACAMERA_OK),
    MAKE_PAIR(ACAMERA_ERROR_UNKNOWN),
    MAKE_PAIR(ACAMERA_ERROR_INVALID_PARAMETER),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_DISCONNECTED),
    MAKE_PAIR(ACAMERA_ERROR_NOT_ENOUGH_MEMORY),
    MAKE_PAIR(ACAMERA_ERROR_METADATA_NOT_FOUND),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_DEVICE),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_SERVICE),
    MAKE_PAIR(ACAMERA_ERROR_SESSION_CLOSED),
    MAKE_PAIR(ACAMERA_ERROR_INVALID_OPERATION),
    MAKE_PAIR(ACAMERA_ERROR_STREAM_CONFIGURE_FAIL),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_IN_USE),
    MAKE_PAIR(ACAMERA_ERROR_MAX_CAMERA_IN_USE),
    MAKE_PAIR(ACAMERA_ERROR_CAMERA_DISABLED),
    MAKE_PAIR(ACAMERA_ERROR_PERMISSION_DENIED),
};
// Translate a camera_status_t error code into its symbolic name.
const char* GetErrorStr(camera_status_t err) {
  return GetPairStr<camera_status_t>(err, errorInfo);
}
/*
 * camera_metadata_tag_t translation. Useful to look at available tags
 * on the underneath platform
 */
// Large value->name table covering every ACAMERA_* metadata tag known to
// this NDK release, grouped the same way NdkCameraMetadataTags.h groups them.
using TAG_PAIR = std::pair<acamera_metadata_tag_t, const char*>;
static std::vector<TAG_PAIR> tagInfo{
// Color-correction section
MAKE_PAIR(ACAMERA_COLOR_CORRECTION_MODE),
MAKE_PAIR(ACAMERA_COLOR_CORRECTION_TRANSFORM),
MAKE_PAIR(ACAMERA_COLOR_CORRECTION_GAINS),
MAKE_PAIR(ACAMERA_COLOR_CORRECTION_ABERRATION_MODE),
MAKE_PAIR(ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES),
MAKE_PAIR(ACAMERA_COLOR_CORRECTION_END),
// 3A (AE/AF/AWB) control section
MAKE_PAIR(ACAMERA_CONTROL_AE_ANTIBANDING_MODE),
MAKE_PAIR(ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION),
MAKE_PAIR(ACAMERA_CONTROL_AE_LOCK),
MAKE_PAIR(ACAMERA_CONTROL_AE_MODE),
MAKE_PAIR(ACAMERA_CONTROL_AE_REGIONS),
MAKE_PAIR(ACAMERA_CONTROL_AE_TARGET_FPS_RANGE),
MAKE_PAIR(ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER),
MAKE_PAIR(ACAMERA_CONTROL_AF_MODE),
MAKE_PAIR(ACAMERA_CONTROL_AF_REGIONS),
MAKE_PAIR(ACAMERA_CONTROL_AF_TRIGGER),
MAKE_PAIR(ACAMERA_CONTROL_AWB_LOCK),
MAKE_PAIR(ACAMERA_CONTROL_AWB_MODE),
MAKE_PAIR(ACAMERA_CONTROL_AWB_REGIONS),
MAKE_PAIR(ACAMERA_CONTROL_CAPTURE_INTENT),
MAKE_PAIR(ACAMERA_CONTROL_EFFECT_MODE),
MAKE_PAIR(ACAMERA_CONTROL_MODE),
MAKE_PAIR(ACAMERA_CONTROL_SCENE_MODE),
MAKE_PAIR(ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE),
MAKE_PAIR(ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES),
MAKE_PAIR(ACAMERA_CONTROL_AE_AVAILABLE_MODES),
MAKE_PAIR(ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES),
MAKE_PAIR(ACAMERA_CONTROL_AE_COMPENSATION_RANGE),
MAKE_PAIR(ACAMERA_CONTROL_AE_COMPENSATION_STEP),
MAKE_PAIR(ACAMERA_CONTROL_AF_AVAILABLE_MODES),
MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_EFFECTS),
MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_SCENE_MODES),
MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES),
MAKE_PAIR(ACAMERA_CONTROL_AWB_AVAILABLE_MODES),
MAKE_PAIR(ACAMERA_CONTROL_MAX_REGIONS),
MAKE_PAIR(ACAMERA_CONTROL_AE_STATE),
MAKE_PAIR(ACAMERA_CONTROL_AF_STATE),
MAKE_PAIR(ACAMERA_CONTROL_AWB_STATE),
MAKE_PAIR(ACAMERA_CONTROL_AE_LOCK_AVAILABLE),
MAKE_PAIR(ACAMERA_CONTROL_AWB_LOCK_AVAILABLE),
MAKE_PAIR(ACAMERA_CONTROL_AVAILABLE_MODES),
MAKE_PAIR(ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE),
MAKE_PAIR(ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST),
MAKE_PAIR(ACAMERA_CONTROL_END),
// Edge-enhancement section
MAKE_PAIR(ACAMERA_EDGE_MODE),
MAKE_PAIR(ACAMERA_EDGE_AVAILABLE_EDGE_MODES),
MAKE_PAIR(ACAMERA_EDGE_END),
// Flash section
MAKE_PAIR(ACAMERA_FLASH_MODE),
MAKE_PAIR(ACAMERA_FLASH_STATE),
MAKE_PAIR(ACAMERA_FLASH_END),
MAKE_PAIR(ACAMERA_FLASH_INFO_AVAILABLE),
MAKE_PAIR(ACAMERA_FLASH_INFO_END),
// Hot-pixel correction section
MAKE_PAIR(ACAMERA_HOT_PIXEL_MODE),
MAKE_PAIR(ACAMERA_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES),
MAKE_PAIR(ACAMERA_HOT_PIXEL_END),
// JPEG output section
MAKE_PAIR(ACAMERA_JPEG_GPS_COORDINATES),
MAKE_PAIR(ACAMERA_JPEG_GPS_PROCESSING_METHOD),
MAKE_PAIR(ACAMERA_JPEG_GPS_TIMESTAMP),
MAKE_PAIR(ACAMERA_JPEG_ORIENTATION),
MAKE_PAIR(ACAMERA_JPEG_QUALITY),
MAKE_PAIR(ACAMERA_JPEG_THUMBNAIL_QUALITY),
MAKE_PAIR(ACAMERA_JPEG_THUMBNAIL_SIZE),
MAKE_PAIR(ACAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES),
MAKE_PAIR(ACAMERA_JPEG_END),
// Lens section
MAKE_PAIR(ACAMERA_LENS_APERTURE),
MAKE_PAIR(ACAMERA_LENS_FILTER_DENSITY),
MAKE_PAIR(ACAMERA_LENS_FOCAL_LENGTH),
MAKE_PAIR(ACAMERA_LENS_FOCUS_DISTANCE),
MAKE_PAIR(ACAMERA_LENS_OPTICAL_STABILIZATION_MODE),
MAKE_PAIR(ACAMERA_LENS_FACING),
MAKE_PAIR(ACAMERA_LENS_POSE_ROTATION),
MAKE_PAIR(ACAMERA_LENS_POSE_TRANSLATION),
MAKE_PAIR(ACAMERA_LENS_FOCUS_RANGE),
MAKE_PAIR(ACAMERA_LENS_STATE),
MAKE_PAIR(ACAMERA_LENS_INTRINSIC_CALIBRATION),
MAKE_PAIR(ACAMERA_LENS_RADIAL_DISTORTION),
MAKE_PAIR(ACAMERA_LENS_END),
MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_APERTURES),
MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES),
MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS),
MAKE_PAIR(ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION),
MAKE_PAIR(ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE),
MAKE_PAIR(ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE),
MAKE_PAIR(ACAMERA_LENS_INFO_SHADING_MAP_SIZE),
MAKE_PAIR(ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION),
MAKE_PAIR(ACAMERA_LENS_INFO_END),
// Noise-reduction section
MAKE_PAIR(ACAMERA_NOISE_REDUCTION_MODE),
MAKE_PAIR(ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES),
MAKE_PAIR(ACAMERA_NOISE_REDUCTION_END),
// Request section
MAKE_PAIR(ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS),
MAKE_PAIR(ACAMERA_REQUEST_PIPELINE_DEPTH),
MAKE_PAIR(ACAMERA_REQUEST_PIPELINE_MAX_DEPTH),
MAKE_PAIR(ACAMERA_REQUEST_PARTIAL_RESULT_COUNT),
MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_CAPABILITIES),
MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS),
MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS),
MAKE_PAIR(ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS),
MAKE_PAIR(ACAMERA_REQUEST_END),
// Scaler (crop / stream configuration) section
MAKE_PAIR(ACAMERA_SCALER_CROP_REGION),
MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM),
MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS),
MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS),
MAKE_PAIR(ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS),
MAKE_PAIR(ACAMERA_SCALER_CROPPING_TYPE),
MAKE_PAIR(ACAMERA_SCALER_END),
// Sensor section
MAKE_PAIR(ACAMERA_SENSOR_EXPOSURE_TIME),
MAKE_PAIR(ACAMERA_SENSOR_FRAME_DURATION),
MAKE_PAIR(ACAMERA_SENSOR_SENSITIVITY),
MAKE_PAIR(ACAMERA_SENSOR_REFERENCE_ILLUMINANT1),
MAKE_PAIR(ACAMERA_SENSOR_REFERENCE_ILLUMINANT2),
MAKE_PAIR(ACAMERA_SENSOR_CALIBRATION_TRANSFORM1),
MAKE_PAIR(ACAMERA_SENSOR_CALIBRATION_TRANSFORM2),
MAKE_PAIR(ACAMERA_SENSOR_COLOR_TRANSFORM1),
MAKE_PAIR(ACAMERA_SENSOR_COLOR_TRANSFORM2),
MAKE_PAIR(ACAMERA_SENSOR_FORWARD_MATRIX1),
MAKE_PAIR(ACAMERA_SENSOR_FORWARD_MATRIX2),
MAKE_PAIR(ACAMERA_SENSOR_BLACK_LEVEL_PATTERN),
MAKE_PAIR(ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY),
MAKE_PAIR(ACAMERA_SENSOR_ORIENTATION),
MAKE_PAIR(ACAMERA_SENSOR_TIMESTAMP),
MAKE_PAIR(ACAMERA_SENSOR_NEUTRAL_COLOR_POINT),
MAKE_PAIR(ACAMERA_SENSOR_NOISE_PROFILE),
MAKE_PAIR(ACAMERA_SENSOR_GREEN_SPLIT),
MAKE_PAIR(ACAMERA_SENSOR_TEST_PATTERN_DATA),
MAKE_PAIR(ACAMERA_SENSOR_TEST_PATTERN_MODE),
MAKE_PAIR(ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES),
MAKE_PAIR(ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW),
MAKE_PAIR(ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS),
MAKE_PAIR(ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL),
MAKE_PAIR(ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL),
MAKE_PAIR(ACAMERA_SENSOR_END),
MAKE_PAIR(ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE),
MAKE_PAIR(ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE),
MAKE_PAIR(ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT),
MAKE_PAIR(ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE),
MAKE_PAIR(ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION),
MAKE_PAIR(ACAMERA_SENSOR_INFO_PHYSICAL_SIZE),
MAKE_PAIR(ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE),
MAKE_PAIR(ACAMERA_SENSOR_INFO_WHITE_LEVEL),
MAKE_PAIR(ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE),
MAKE_PAIR(ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED),
MAKE_PAIR(ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE),
MAKE_PAIR(ACAMERA_SENSOR_INFO_END),
// Lens-shading section
MAKE_PAIR(ACAMERA_SHADING_MODE),
MAKE_PAIR(ACAMERA_SHADING_AVAILABLE_MODES),
MAKE_PAIR(ACAMERA_SHADING_END),
// Statistics (face detection, shading map, ...) section
MAKE_PAIR(ACAMERA_STATISTICS_FACE_DETECT_MODE),
MAKE_PAIR(ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE),
MAKE_PAIR(ACAMERA_STATISTICS_FACE_IDS),
MAKE_PAIR(ACAMERA_STATISTICS_FACE_LANDMARKS),
MAKE_PAIR(ACAMERA_STATISTICS_FACE_RECTANGLES),
MAKE_PAIR(ACAMERA_STATISTICS_FACE_SCORES),
MAKE_PAIR(ACAMERA_STATISTICS_LENS_SHADING_MAP),
MAKE_PAIR(ACAMERA_STATISTICS_SCENE_FLICKER),
MAKE_PAIR(ACAMERA_STATISTICS_HOT_PIXEL_MAP),
MAKE_PAIR(ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE),
MAKE_PAIR(ACAMERA_STATISTICS_END),
MAKE_PAIR(ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES),
MAKE_PAIR(ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT),
MAKE_PAIR(ACAMERA_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES),
MAKE_PAIR(ACAMERA_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES),
MAKE_PAIR(ACAMERA_STATISTICS_INFO_END),
// Tonemap section
MAKE_PAIR(ACAMERA_TONEMAP_CURVE_BLUE),
MAKE_PAIR(ACAMERA_TONEMAP_CURVE_GREEN),
MAKE_PAIR(ACAMERA_TONEMAP_CURVE_RED),
MAKE_PAIR(ACAMERA_TONEMAP_MODE),
MAKE_PAIR(ACAMERA_TONEMAP_MAX_CURVE_POINTS),
MAKE_PAIR(ACAMERA_TONEMAP_AVAILABLE_TONE_MAP_MODES),
MAKE_PAIR(ACAMERA_TONEMAP_GAMMA),
MAKE_PAIR(ACAMERA_TONEMAP_PRESET_CURVE),
MAKE_PAIR(ACAMERA_TONEMAP_END),
// Device info / black level / sync / depth sections
MAKE_PAIR(ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL),
MAKE_PAIR(ACAMERA_INFO_END),
MAKE_PAIR(ACAMERA_BLACK_LEVEL_LOCK),
MAKE_PAIR(ACAMERA_BLACK_LEVEL_END),
MAKE_PAIR(ACAMERA_SYNC_FRAME_NUMBER),
MAKE_PAIR(ACAMERA_SYNC_MAX_LATENCY),
MAKE_PAIR(ACAMERA_SYNC_END),
MAKE_PAIR(ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS),
MAKE_PAIR(ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS),
MAKE_PAIR(ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS),
MAKE_PAIR(ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE),
MAKE_PAIR(ACAMERA_DEPTH_END),
};
// Translate a metadata tag id into its symbolic name (UNKNOWN_TAG if absent).
const char* GetTagStr(acamera_metadata_tag_t tag) {
return GetPairStr<acamera_metadata_tag_t>(tag, tagInfo);
}
// Table mapping AIMAGE_FORMAT_* image-format codes to their names.
using FORMAT_PAIR = std::pair<int, const char*>;
static std::vector<FORMAT_PAIR> formatInfo{
MAKE_PAIR(AIMAGE_FORMAT_YUV_420_888),
MAKE_PAIR(AIMAGE_FORMAT_JPEG),
MAKE_PAIR(AIMAGE_FORMAT_RAW16),
MAKE_PAIR(AIMAGE_FORMAT_RAW_PRIVATE),
MAKE_PAIR(AIMAGE_FORMAT_RAW10),
MAKE_PAIR(AIMAGE_FORMAT_RAW12),
MAKE_PAIR(AIMAGE_FORMAT_DEPTH16),
MAKE_PAIR(AIMAGE_FORMAT_DEPTH_POINT_CLOUD),
MAKE_PAIR(AIMAGE_FORMAT_PRIVATE),
};
// Translate an AIMAGE_FORMAT_* value into its symbolic name for logging.
const char* GetFormatStr(int fmt) { return GetPairStr<int>(fmt, formatInfo); }
/*
 * Log every tag id in `pTags` (length `entries`) together with its symbolic
 * name, bracketed by start/end marker lines. Pure debugging aid.
 */
void PrintMetadataTags(int32_t entries, const uint32_t* pTags) {
  LOGI("MetadataTag (start):");
  for (int32_t tagIdx = 0; tagIdx < entries; ++tagIdx) {
    LOGI("(%#08x) : %s", pTags[tagIdx],
         GetTagStr(static_cast<acamera_metadata_tag_t>(pTags[tagIdx])));
  }
  LOGI("MetadataTag (end)");
}
/*
 * Log the ACAMERA_LENS_FACING metadata entry: its tag id, element type,
 * element count, and the first u8 value (the facing direction code).
 * Asserts that the entry passed in really is the LENS_FACING tag.
 */
void PrintLensFacing(ACameraMetadata_const_entry& lens) {
ASSERT(lens.tag == ACAMERA_LENS_FACING, "Wrong tag(%#x) of %s to %s",
lens.tag, GetTagStr((acamera_metadata_tag_t)lens.tag), __FUNCTION__);
LOGI("LensFacing: tag(%#x), type(%d), count(%d), val(%#x)", lens.tag,
lens.type, lens.count, lens.data.u8[0]);
}
/*
 * Stream_Configuration is in format of:
 *     format, width, height, input?
 * ACAMERA_TYPE_INT32 type
 *
 * Logs one line per (format, width, height, direction) tuple.
 */
void PrintStreamConfigurations(ACameraMetadata_const_entry& val) {
#define MODE_LABEL "ModeInfo:"
  const char* tagName = GetTagStr(static_cast<acamera_metadata_tag_t>(val.tag));
  // Entries come in groups of 4 int32 values: (format, width, height, isInput).
  ASSERT(!(val.count & 0x3),
         "STREAM_CONFIGURATION (%d) should be a multiple of 4", val.count);
  ASSERT(val.type == ACAMERA_TYPE_INT32,
         "STREAM_CONFIGURATION TYPE(%d) is not ACAMERA_TYPE_INT32(1)",
         val.type);
  LOGI("%s -- %s:", tagName, MODE_LABEL);
  for (uint32_t i = 0; i < val.count; i += 4) {
    LOGI("%s: %08d x %08d %s", GetFormatStr(val.data.i32[i]),
         val.data.i32[i + 1], val.data.i32[i + 2],
         val.data.i32[i + 3] ? "INPUT" : "OUTPUT");
  }
#undef MODE_LABEL
}
/*
 * Log one metadata entry prefixed with `printLabel`, one line per element,
 * formatted according to the entry's element type. Stream-configuration
 * entries get their own structured dump via PrintStreamConfigurations().
 */
void PrintTagVal(const char* printLabel, ACameraMetadata_const_entry& val) {
  if (val.tag == ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS) {
    PrintStreamConfigurations(val);
    return;
  }
  const char* name = GetTagStr(static_cast<acamera_metadata_tag_t>(val.tag));
  for (uint32_t i = 0; i < val.count; ++i) {
    switch (val.type) {
      case ACAMERA_TYPE_INT32:
        LOGI("%s %s: %08d", printLabel, name, val.data.i32[i]);
        break;
      case ACAMERA_TYPE_BYTE:
        LOGI("%s %s: %#02x", printLabel, name, val.data.u8[i]);
        break;
      case ACAMERA_TYPE_INT64:
        // i64 elements are signed; PRId64 replaces the old PRIu64, which
        // misprinted negative values.
        LOGI("%s %s: %" PRId64, printLabel, name, val.data.i64[i]);
        break;
      case ACAMERA_TYPE_FLOAT:
        LOGI("%s %s: %f", printLabel, name, val.data.f[i]);
        break;
      case ACAMERA_TYPE_DOUBLE:
        // Fix: log the double element itself; the old code printed the raw
        // i64 member of the union as hex, producing garbage for doubles.
        LOGI("%s %s: %f", printLabel, name, val.data.d[i]);
        break;
      case ACAMERA_TYPE_RATIONAL:
        LOGI("%s %s: %08x, %08x", printLabel, name, val.data.r[i].numerator,
             val.data.r[i].denominator);
        break;
      default:
        ASSERT(false, "Unknown tag value type: %d", val.type);
    }
  }
}
/*
 * PrintCameras():
 *   Enumerate every camera known to the manager and dump all of its
 *   metadata tags (plus the lens-facing entry). Pure debugging aid.
 */
void PrintCameras(ACameraManager* cmrMgr) {
  if (!cmrMgr) return;
  ACameraIdList* cameraIds = nullptr;
  camera_status_t status = ACameraManager_getCameraIdList(cmrMgr, &cameraIds);
  LOGI("camera Status = %d, %s", status, GetErrorStr(status));
  // Fix: bail out instead of dereferencing a null/invalid list when the
  // enumeration call fails.
  if (status != ACAMERA_OK || cameraIds == nullptr) return;
  for (int i = 0; i < cameraIds->numCameras; ++i) {
    const char* id = cameraIds->cameraIds[i];
    LOGI("=====cameraId = %d, cameraName = %s=====", i, id);
    ACameraMetadata* metadataObj;
    CALL_MGR(getCameraCharacteristics(cmrMgr, id, &metadataObj));
    int32_t count = 0;
    const uint32_t* tags = nullptr;
    ACameraMetadata_getAllTags(metadataObj, &count, &tags);
    for (int tagIdx = 0; tagIdx < count; ++tagIdx) {
      ACameraMetadata_const_entry val = {
          0,
      };
      // Renamed from `status` to stop shadowing the outer variable.
      camera_status_t entryStatus =
          ACameraMetadata_getConstEntry(metadataObj, tags[tagIdx], &val);
      if (entryStatus != ACAMERA_OK) {
        // Not every tag in the table is supported by every device.
        LOGW("Unsupported Tag: %s",
             GetTagStr(static_cast<acamera_metadata_tag_t>(tags[tagIdx])));
        continue;
      }
      PrintTagVal("Camera Tag:", val);
      if (ACAMERA_LENS_FACING == tags[tagIdx]) {
        PrintLensFacing(val);
      }
    }
    ACameraMetadata_free(metadataObj);
  }
  ACameraManager_deleteCameraIdList(cameraIds);
}
/*
 * Dump every metadata tag attached to a capture request, one line per
 * element, formatted according to the entry's element type. Debug aid.
 */
void PrintRequestMetadata(ACaptureRequest* req) {
  if (!req) return;
  int32_t count;
  const uint32_t* tags;
  CALL_REQUEST(getAllTags(req, &count, &tags));
  for (int32_t idx = 0; idx < count; ++idx) {
    ACameraMetadata_const_entry val;
    CALL_REQUEST(getConstEntry(req, tags[idx], &val));
    const char* name =
        GetTagStr(static_cast<acamera_metadata_tag_t>(tags[idx]));
    for (uint32_t i = 0; i < val.count; ++i) {
      switch (val.type) {
        case ACAMERA_TYPE_INT32:
          LOGI("Capture Tag %s: %08d", name, val.data.i32[i]);
          break;
        case ACAMERA_TYPE_BYTE:
          LOGI("Capture Tag %s: %#08x", name, val.data.u8[i]);
          break;
        case ACAMERA_TYPE_INT64:
          // i64 elements are signed; PRId64 replaces the old PRIu64, which
          // misprinted negative values.
          LOGI("Capture Tag %s: %" PRId64, name, val.data.i64[i]);
          break;
        case ACAMERA_TYPE_FLOAT:
          LOGI("Capture Tag %s: %f", name, val.data.f[i]);
          break;
        case ACAMERA_TYPE_DOUBLE:
          // Fix: log the double element itself; the old code printed the raw
          // i64 member of the union as hex, producing garbage for doubles.
          LOGI("Capture Tag %s: %f", name, val.data.d[i]);
          break;
        case ACAMERA_TYPE_RATIONAL:
          LOGI("Capture Tag %s: %08x, %08x", name, val.data.r[i].numerator,
               val.data.r[i].denominator);
          break;
        default:
          ASSERT(false, "Unknown tag value type: %d", val.type);
      }
    }
  }
}
/*
 * CameraDevice error state translation, used in
 * ACameraDevice_ErrorStateCallback
 */
// Table mapping the ERROR_CAMERA_* codes delivered to the device error
// callback to their symbolic names.
using DEV_ERROR_PAIR = std::pair<int, const char*>;
static std::vector<DEV_ERROR_PAIR> devErrors{
MAKE_PAIR(ERROR_CAMERA_IN_USE), MAKE_PAIR(ERROR_MAX_CAMERAS_IN_USE),
MAKE_PAIR(ERROR_CAMERA_DISABLED), MAKE_PAIR(ERROR_CAMERA_DEVICE),
MAKE_PAIR(ERROR_CAMERA_SERVICE),
};
// Translate a device-error callback code into its symbolic name.
const char* GetCameraDeviceErrorStr(int err) {
return GetPairStr<int>(err, devErrors);
}
// Log a device-error callback code together with its symbolic name.
void PrintCameraDeviceError(int err) {
LOGI("CameraDeviceError(%#x): %s", err, GetCameraDeviceErrorStr(err));
}
11、camera_utils.h
camera/common/utils/camera_utils.h
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Guard renamed: identifiers with double underscores (__CAMERA_...) are
// reserved for the implementation by the C++ standard.
#ifndef CAMERA_COMMON_UTILS_CAMERA_UTILS_H
#define CAMERA_COMMON_UTILS_CAMERA_UTILS_H
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraManager.h>
/*
 * A set of macros to call into Camera APIs. The API is grouped with a few
 * objects, with object name as the prefix of function names.
 *
 * Each call asserts (fatally, via ASSERT from native_debug.h) that the
 * camera call returned ACAMERA_OK. Wrapped in do { } while (0) so the
 * macros expand to a single statement and stay safe inside if/else.
 */
#define CALL_CAMERA(func)                                              \
  do {                                                                 \
    camera_status_t status = func;                                     \
    ASSERT(status == ACAMERA_OK, "%s call failed with code: %#x, %s",  \
           __FUNCTION__, status, GetErrorStr(status));                 \
  } while (0)
#define CALL_MGR(func) CALL_CAMERA(ACameraManager_##func)
#define CALL_DEV(func) CALL_CAMERA(ACameraDevice_##func)
#define CALL_METADATA(func) CALL_CAMERA(ACameraMetadata_##func)
#define CALL_CONTAINER(func) CALL_CAMERA(ACaptureSessionOutputContainer_##func)
#define CALL_OUTPUT(func) CALL_CAMERA(ACaptureSessionOutput_##func)
#define CALL_TARGET(func) CALL_CAMERA(ACameraOutputTarget_##func)
#define CALL_REQUEST(func) CALL_CAMERA(ACaptureRequest_##func)
#define CALL_SESSION(func) CALL_CAMERA(ACameraCaptureSession_##func)
/*
 * A few debugging functions for error code strings etc
 */
const char* GetErrorStr(camera_status_t err);
const char* GetTagStr(acamera_metadata_tag_t tag);
// Declared here so other translation units can use it too; it was already
// defined with external linkage in camera_utils.cpp.
const char* GetFormatStr(int fmt);
void PrintMetadataTags(int32_t entries, const uint32_t* pTags);
void PrintLensFacing(ACameraMetadata_const_entry& lensData);
void PrintCameras(ACameraManager* cameraMgr);
void PrintCameraDeviceError(int err);
void PrintRequestMetadata(ACaptureRequest* req);
#endif  // CAMERA_COMMON_UTILS_CAMERA_UTILS_H
12、CMakeLists.txt
camera/basic/src/main/cpp/CMakeLists.txt
#
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
cmake_minimum_required(VERSION 3.22.1)
set(CMAKE_VERBOSE_MAKEFILE on)
# Shared helper sources (camera_utils, native_debug) live outside this module.
set(COMMON_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../../../common)
# build native_app_glue as a static lib
include_directories(${ANDROID_NDK}/sources/android/native_app_glue
${COMMON_SOURCE_DIR})
add_library(app_glue STATIC
${ANDROID_NDK}/sources/android/native_app_glue/android_native_app_glue.c)
# now build app's shared lib
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
# Export ANativeActivity_onCreate(),
# Refer to: https://github.com/android-ndk/ndk/issues/381.
set(CMAKE_SHARED_LINKER_FLAGS
"${CMAKE_SHARED_LINKER_FLAGS} -u ANativeActivity_onCreate")
add_library(ndk_camera SHARED
${CMAKE_CURRENT_SOURCE_DIR}/android_main.cpp
${CMAKE_CURRENT_SOURCE_DIR}/camera_engine.cpp
${CMAKE_CURRENT_SOURCE_DIR}/camera_manager.cpp
${CMAKE_CURRENT_SOURCE_DIR}/camera_listeners.cpp
${CMAKE_CURRENT_SOURCE_DIR}/image_reader.cpp
${CMAKE_CURRENT_SOURCE_DIR}/camera_ui.cpp
${COMMON_SOURCE_DIR}/utils/camera_utils.cpp)
# Add library dependencies (key point): camera2ndk and mediandk are the NDK
# shared libraries that expose the Camera2 and media (AImageReader) C APIs.
target_link_libraries(ndk_camera
android
log
m
app_glue
camera2ndk
mediandk)
13、native_debug.h
camera/common/utils/native_debug.h
// Fix: this header had no include guard, so a double include would
// redefine the macros below (an error under -Werror).
#ifndef CAMERA_COMMON_UTILS_NATIVE_DEBUG_H
#define CAMERA_COMMON_UTILS_NATIVE_DEBUG_H
#include <android/log.h>
#define LOG_TAG "CAMERA-SAMPLE"
// Thin wrappers over the Android log API at INFO/WARN/ERROR priority.
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// Fatal assertion: aborts via __android_log_assert when `cond` is false.
// Wrapped in do { } while (0) so the macro is a single statement; the old
// bare `if` silently paired with a following `else` (dangling-else hazard).
#define ASSERT(cond, fmt, ...)                                  \
  do {                                                          \
    if (!(cond)) {                                              \
      __android_log_assert(#cond, LOG_TAG, fmt, ##__VA_ARGS__); \
    }                                                           \
  } while (0)
#endif  // CAMERA_COMMON_UTILS_NATIVE_DEBUG_H
二、JAVA层面
1、CameraActivity.java
camera/basic/src/main/java/com/sample/camera/basic/CameraActivity.java
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample.camera.basic;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.NativeActivity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import android.os.Bundle;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.ImageButton;
import android.widget.PopupWindow;
import android.widget.RelativeLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import static android.hardware.camera2.CameraMetadata.LENS_FACING_BACK;
import java.util.Arrays;
/**
 * Couples a SeekBar with a floating TextView label and maps the bar's
 * integer progress range onto an absolute [min, max] long range (used for
 * exposure time and sensitivity). A degenerate range (min == max) disables
 * the bar entirely.
 */
class CameraSeekBar {
    int _progress;                 // current bar position, in SeekBar units
    long _min, _max, _absVal;      // absolute range and current absolute value
    SeekBar _seekBar;
    TextView _sliderPrompt;        // label that floats above the thumb

    CameraSeekBar() {
        _progress = 0;
        _min = _max = _absVal = 0;
    }

    CameraSeekBar(SeekBar seekBar, TextView textView, long min, long max, long val) {
        _seekBar = seekBar;
        _sliderPrompt = textView;
        _min = min;
        _max = max;
        _absVal = val;
        if (!isSupported()) {
            // Nothing to slide over: park the thumb and grey the bar out.
            _progress = 0;
            seekBar.setEnabled(false);
        } else {
            _progress = (int) ((_absVal - _min) * _seekBar.getMax() / (_max - _min));
            seekBar.setProgress(_progress);
            updateProgress(_progress);
        }
    }

    /** @return true when the absolute range is non-degenerate. */
    public boolean isSupported() {
        return (_min != _max);
    }

    /**
     * Accept a new bar position, recompute the absolute value, and move the
     * text label so it tracks the thumb.
     */
    public void updateProgress(int progress) {
        if (!isSupported())
            return;
        _progress = progress;
        _absVal = (progress * ( _max - _min )) / _seekBar.getMax() + _min;
        int labelOffset = (progress * (_seekBar.getWidth() - 2 * _seekBar.getThumbOffset())) / _seekBar.getMax();
        _sliderPrompt.setText("" + _absVal);
        _sliderPrompt.setX(_seekBar.getX() + labelOffset + _seekBar.getThumbOffset() / 2);
    }

    /** @return current position in SeekBar units. */
    public int getProgress() {
        return _progress;
    }

    /** Move the bar to the position corresponding to absolute value {@code val}. */
    public void updateAbsProgress(long val) {
        if (!isSupported())
            return;
        int progress = (int)((val - _min) * _seekBar.getMax() / (_max - _min));
        updateProgress(progress);
    }

    /** @return current absolute value within [min, max]. */
    public long getAbsProgress() {
        return _absVal;
    }
}
/**
 * NativeActivity subclass hosting the NDK camera sample. The native side
 * drives the camera; this class handles the CAMERA runtime permission,
 * immersive-mode UI, and a PopupWindow with the capture button and the
 * exposure/sensitivity sliders. Methods RequestCamera/EnableUI/OnPhotoTaken
 * are invoked from native code via JNI.
 */
public class CameraActivity extends NativeActivity
implements ActivityCompat.OnRequestPermissionsResultCallback {
// Reference to this activity, read from UI runnables created below.
volatile CameraActivity _savedInstance;
PopupWindow _popupWindow;
ImageButton _takePhoto;
CameraSeekBar _exposure, _sensitivity;
// (min, max, cur) tuples for exposure then sensitivity; see EnableUI().
long[] _initParams;
private final String DBG_TAG = "NDK-CAMERA-BASIC";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(DBG_TAG, "OnCreate()");
// new initialization here... request for permission
_savedInstance = this;
setImmersiveSticky();
View decorView = getWindow().getDecorView();
// Re-apply immersive mode whenever the system UI becomes visible again.
decorView.setOnSystemUiVisibilityChangeListener
(new View.OnSystemUiVisibilityChangeListener() {
@Override
public void onSystemUiVisibilityChange(int visibility) {
setImmersiveSticky();
}
});
}
/**
 * Returns false when any back-facing camera reports LEGACY hardware level
 * (this sample needs a full camera2 device) or when camera access fails.
 * NOTE(review): CameraCharacteristics.get() may return null; the int
 * unboxing below would then throw NullPointerException, which is not
 * caught here — confirm whether that can occur on target devices.
 */
private boolean isCamera2Device() {
CameraManager camMgr = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
boolean camera2Dev = true;
try {
String[] cameraIds = camMgr.getCameraIdList();
if (cameraIds.length != 0 ) {
for (String id : cameraIds) {
CameraCharacteristics characteristics = camMgr.getCameraCharacteristics(id);
int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY &&
facing == LENS_FACING_BACK) {
camera2Dev = false;
}
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
camera2Dev = false;
}
return camera2Dev;
}
// get current rotation method
// Returns display rotation in degrees (0/90/180/270); consumed natively.
int getRotationDegree() {
return 90 * ((WindowManager)(getSystemService(WINDOW_SERVICE)))
.getDefaultDisplay()
.getRotation();
}
@Override
protected void onResume() {
super.onResume();
setImmersiveSticky();
}
// Hide status/navigation bars with sticky-immersive flags (pre-API-30 API).
void setImmersiveSticky() {
View decorView = getWindow().getDecorView();
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
}
@Override
protected void onPause() {
// Dismiss the floating controls; showing a PopupWindow over a paused
// activity's window would leak/crash.
if (_popupWindow != null && _popupWindow.isShowing()) {
_popupWindow.dismiss();
_popupWindow = null;
}
super.onPause();
}
@Override
protected void onDestroy() {
super.onDestroy();
}
private static final int PERMISSION_REQUEST_CODE_CAMERA = 1;
/**
 * Called from native code: verify this is a camera2-capable device, then
 * either request the CAMERA permission or report the already-granted state
 * back to native code via notifyCameraPermission().
 */
public void RequestCamera() {
if(!isCamera2Device()) {
Log.e(DBG_TAG, "Found legacy camera Device, this sample needs camera2 device");
return;
}
if (ActivityCompat.checkSelfPermission(
this,
Manifest.permission.CAMERA
) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(
this,
new String[]{Manifest.permission.CAMERA},
PERMISSION_REQUEST_CODE_CAMERA
);
return;
}
notifyCameraPermission(true);
}
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode != PERMISSION_REQUEST_CODE_CAMERA) {
// The permissions request isn't ours.
super.onRequestPermissionsResult(requestCode,
permissions,
grantResults);
return;
}
if (permissions.length == 0) {
// https://developer.android.com/reference/androidx/core/app/ActivityCompat.OnRequestPermissionsResultCallback#onRequestPermissionsResult(int,java.lang.String[],int[])
//
// Note: It is possible that the permissions request interaction with the user is
// interrupted. In this case you will receive empty permissions and results arrays which
// should be treated as a cancellation.
//
// The docs aren't clear about *why* it might be canceled, so it's not clear what we
// should do here other than restart the request.
RequestCamera();
return;
}
// Native side proceeds only when every requested permission was granted.
boolean granted = Arrays.stream(grantResults)
.allMatch(element -> element == PackageManager.PERMISSION_GRANTED);
if (!granted) {
logDeniedPermissions(permissions, grantResults);
}
notifyCameraPermission(granted);
}
// Log each denied permission; arrays must be parallel (same length).
private void logDeniedPermissions(
@NonNull String[] requestedPermissions,
@NonNull int[] grantResults
) {
if (requestedPermissions.length != grantResults.length) {
throw new IllegalArgumentException(
String.format(
"requestedPermissions.length (%d) != grantResults.length (%d)",
requestedPermissions.length,
grantResults.length
)
);
}
for (int i = 0; i < requestedPermissions.length; i++) {
if (grantResults[i] != PackageManager.PERMISSION_GRANTED) {
Log.i(DBG_TAG, requestedPermissions[i] + " DENIED");
}
}
}
/**
 * params[] exposure and sensitivity init values in (min, max, curVa) tuple
 * 0: exposure min
 * 1: exposure max
 * 2: exposure val
 * 3: sensitivity min
 * 4: sensitivity max
 * 5: sensitivity val
 *
 * Called from native code once the camera is open; builds the PopupWindow
 * with the capture button and the two sliders on the UI thread.
 */
@SuppressLint("InflateParams")
public void EnableUI(final long[] params)
{
// make our own copy
_initParams = new long[params.length];
System.arraycopy(params, 0, _initParams, 0, params.length);
runOnUiThread(new Runnable() {
@Override
public void run() {
try {
if (_popupWindow != null) {
_popupWindow.dismiss();
}
LayoutInflater layoutInflater
= (LayoutInflater) getBaseContext()
.getSystemService(LAYOUT_INFLATER_SERVICE);
View popupView = layoutInflater.inflate(R.layout.widgets, null);
_popupWindow = new PopupWindow(
popupView,
WindowManager.LayoutParams.MATCH_PARENT,
WindowManager.LayoutParams.WRAP_CONTENT);
RelativeLayout mainLayout = new RelativeLayout(_savedInstance);
// NOTE(review): this local `params` shadows the method parameter
// `params` above — confirm intentional before renaming either.
ViewGroup.MarginLayoutParams params = new ViewGroup.MarginLayoutParams(
-1, -1);
params.setMargins(0, 0, 0, 0);
_savedInstance.setContentView(mainLayout, params);
// Show our UI over NativeActivity window
_popupWindow.showAtLocation(mainLayout, Gravity.BOTTOM | Gravity.START, 0, 0);
_popupWindow.update();
_takePhoto = (ImageButton) popupView.findViewById(R.id.takePhoto);
_takePhoto.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
TakePhoto();
}
});
_takePhoto.setEnabled(true);
(popupView.findViewById(R.id.exposureLabel)).setEnabled(true);
(popupView.findViewById(R.id.sensitivityLabel)).setEnabled(true);
// Exposure slider: _initParams[0..2] = (min, max, cur).
SeekBar seekBar = (SeekBar) popupView.findViewById(R.id.exposure_seekbar);
_exposure = new CameraSeekBar(seekBar,
(TextView) popupView.findViewById(R.id.exposureVal),
_initParams[0], _initParams[1], _initParams[2]);
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
_exposure.updateProgress(progress);
// Forward the absolute value to the native capture request.
OnExposureChanged(_exposure.getAbsProgress());
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
// Sensitivity (ISO) slider: _initParams[3..5] = (min, max, cur).
seekBar = ((SeekBar) popupView.findViewById(R.id.sensitivity_seekbar));
_sensitivity = new CameraSeekBar(seekBar,
(TextView) popupView.findViewById(R.id.sensitivityVal),
_initParams[3], _initParams[4], _initParams[5]);
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
_sensitivity.updateProgress(progress);
// Forward the absolute value to the native capture request.
OnSensitivityChanged(_sensitivity.getAbsProgress());
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
} catch (WindowManager.BadTokenException e) {
// UI error out, ignore and continue
Log.e(DBG_TAG, "UI Exception Happened: " + e.getMessage());
}
}});
}
/**
Called from Native side to notify that a photo is taken
*/
public void OnPhotoTaken(String fileName) {
final String name = fileName;
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(),
"Photo saved to " + name, Toast.LENGTH_SHORT).show();
}
});
}
// JNI entry points implemented in libndk_camera.so.
native static void notifyCameraPermission(boolean granted);
native static void TakePhoto();
native void OnExposureChanged(long exposure);
native void OnSensitivityChanged(long sensitivity);
static {
System.loadLibrary("ndk_camera");
}
}
三、清单文件AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    android:versionCode="1"
    android:versionName="1.0">
    <uses-feature android:name="android.hardware.camera" />
    <uses-permission android:name="android.permission.CAMERA" />
    <application
        android:allowBackup="false"
        android:fullBackupContent="false"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:hasCode="true">
        <!-- screenOrientation and configChanges are <activity> attributes
             (they were previously placed on <application>, where they are
             invalid and ignored). -->
        <activity android:name="com.sample.camera.basic.CameraActivity"
            android:label="@string/app_name"
            android:screenOrientation="sensorLandscape"
            android:configChanges="keyboardHidden|orientation|screenSize"
            android:exported="true">
            <!-- Key point: tells NativeActivity which native library to
                 load (libndk_camera.so). -->
            <meta-data android:name="android.app.lib_name"
                android:value="ndk_camera" />
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>