先贴出修改过的代码:
从前面几篇文章可以知道,CameraHal 是在上层 open camera 时被构造并初始化的,现在先分析一下 CameraHal 构造函数中初始化的内容:
// CameraHal constructor: wires up all collaborating objects for one camera
// (display adapter, app-message notifier, memory manager, buffer providers,
// the concrete camera adapter) and starts the command thread.
// cameraId indexes the global gCamInfos[] table filled in at HAL load time.
CameraHal::CameraHal(int cameraId)
          :commandThreadCommandQ("commandCmdQ")
{
    LOG_FUNCTION_NAME
    {
        // Trace verbosity is tunable at runtime through a system property.
        char trace_level[PROPERTY_VALUE_MAX];
        int level;
        property_get(CAMERAHAL_TRACE_LEVEL_PROPERTY_KEY, trace_level, "0");
        sscanf(trace_level, "%d", &level);
        setTracerLevel(level);
    }
    mCamId = cameraId;
    mCamFd = -1;            // device not opened yet; set by cameraCreate()
    mCommandRunning = -1;
    mCameraStatus = 0;
    // FIX: start from a known value so the buffer providers created below
    // never receive an uninitialized pointer — the CAMERA_MEM_PMEM branch
    // skips the assignment entirely when access() fails.
    mCamMemManager = NULL;
    mDisplayAdapter = new DisplayAdapter();   // spawns DisplayThread
    mEventNotifier = new AppMsgNotifier();    // spawns AppMsg + EncProcess threads
#if (CONFIG_CAMERA_MEM == CAMERA_MEM_ION)
    mCamMemManager = new IonMemManager();
    LOG1("%s(%d): Camera Hal memory is alloced from ION device",__FUNCTION__,__LINE__);
#elif(CONFIG_CAMERA_MEM == CAMERA_MEM_IONDMA)
    // UVC cameras and SOC/CIF sensors have no per-camera IOMMU setting in the
    // hardware info, so fall back to the build-time IOMMU_ENABLED switch.
    if((strcmp(gCamInfos[cameraId].driver,"uvcvideo") == 0) //uvc camera
        || (gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.type == CamSys_Phy_end)// soc cif
        ) {
        gCamInfos[cameraId].pcam_total_info->mIsIommuEnabled = (IOMMU_ENABLED == 1)? true:false;
    }
    mCamMemManager = new IonDmaMemManager(gCamInfos[cameraId].pcam_total_info->mIsIommuEnabled);
    LOG1("%s(%d): Camera Hal memory is alloced from ION device",__FUNCTION__,__LINE__);
#elif(CONFIG_CAMERA_MEM == CAMERA_MEM_PMEM)
    // FIX: access(2) takes R_OK/W_OK/X_OK mode bits, not open(2) flags.
    // The original passed O_RDWR (== 2 == W_OK), which only checked writability.
    if(access(CAMERA_PMEM_NAME, R_OK | W_OK) < 0) {
        // FIX: message said "ION" in the PMEM branch.
        LOGE("%s(%d): %s isn't registered, CameraHal_Mem current configuration doesn't support PMEM memory!!!",
            __FUNCTION__,__LINE__,CAMERA_PMEM_NAME);
    } else {
        mCamMemManager = new PmemManager((char*)CAMERA_PMEM_NAME);
        LOG1("%s(%d): Camera Hal memory is alloced from %s device",__FUNCTION__,__LINE__,CAMERA_PMEM_NAME);
    }
#endif
    usleep(1000);   // presumably lets the just-started threads settle — TODO confirm
    // All providers share the single memory manager chosen above.
    mPreviewBuf = new PreviewBufferProvider(mCamMemManager);
    mVideoBuf = new BufferProvider(mCamMemManager);
    mRawBuf = new BufferProvider(mCamMemManager);
    mJpegBuf = new BufferProvider(mCamMemManager);
    usleep(1000);
    // Pick the concrete adapter: a property can force a fake camera (for
    // bring-up/testing); otherwise select by driver name and sensor PHY type.
    char value[PROPERTY_VALUE_MAX];
    property_get(/*CAMERAHAL_TYPE_PROPERTY_KEY*/"sys.cam_hal.type", value, "none");
    if (!strcmp(value, "fakecamera")) {
        LOGD("it is a fake camera!");
        mCameraAdapter = new CameraFakeAdapter(cameraId);
    } else {
        if((strcmp(gCamInfos[cameraId].driver,"uvcvideo") == 0)) {
            LOGD("it is a uvc camera!");
            mCameraAdapter = new CameraUSBAdapter(cameraId);
        }
        else if(gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.type == CamSys_Phy_Cif){
            LOGD("it is a isp soc camera");
            // Raw10 CIF sensors need the SOC-specific ISP adapter.
            if(gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.info.cif.fmt == CamSys_Fmt_Raw_10b)
                mCameraAdapter = new CameraIspSOCAdapter(cameraId);
            else
                mCameraAdapter = new CameraIspAdapter(cameraId);
        }
        else if(gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.type == CamSys_Phy_Mipi){
            LOGD("it is a isp camera");
            mCameraAdapter = new CameraIspAdapter(cameraId);
        }
        else{
            LOGD("it is a soc camera!");
            mCameraAdapter = new CameraSOCAdapter(cameraId);
        }
    }
    //initialize
    {
        // CTS verifier requires the full field of view.
        char *call_process = getCallingProcess();
        if(strstr(call_process,"com.android.cts.verifier")) {
            mCameraAdapter->setImageAllFov(true);
        } else {
            mCameraAdapter->setImageAllFov(false);
        }
    }
    mCameraAdapter->initialize();   // opens the device node, queries defaults
    updateParameters(mParameters);
    // Cross-wire the adapter with the display/notifier/buffer providers.
    mCameraAdapter->setPreviewBufProvider(mPreviewBuf);
    mCameraAdapter->setDisplayAdapterRef(*mDisplayAdapter);
    mCameraAdapter->setEventNotifierRef(*mEventNotifier);
    mDisplayAdapter->setFrameProvider(mCameraAdapter);
    mEventNotifier->setPictureRawBufProvider(mRawBuf);
    mEventNotifier->setPictureJpegBufProvider(mJpegBuf);
    mEventNotifier->setVideoBufProvider(mVideoBuf);
    mEventNotifier->setFrameProvider(mCameraAdapter);
    //command thread
    mCommandThread = new CommandThread(this);
    mCommandThread->run("CameraCmdThread", ANDROID_PRIORITY_URGENT_DISPLAY);
    // FIX: both flags were previously uninitialized on several preprocessor
    // paths (e.g. flip was never set when ..._MDATACB_ALL was defined, and
    // neither flag was fully set when the outer macro was off), so garbage
    // reached setDatacbFrontMirrorFlipState(). Default both to false.
    bool dataCbFrontMirror = false;
    bool dataCbFrontFlip = false;
#if CONFIG_CAMERA_FRONT_MIRROR_MDATACB
    if (gCamInfos[cameraId].facing_info.facing == CAMERA_FACING_FRONT) {
#if CONFIG_CAMERA_FRONT_MIRROR_MDATACB_ALL
        dataCbFrontMirror = true;
#else
        // Mirror/flip the preview-data callback only for whitelisted apps.
        const char* cameraCallProcess = getCallingProcess();
        if (strstr(CONFIG_CAMERA_FRONT_MIRROR_MDATACB_APK,cameraCallProcess)) {
            dataCbFrontMirror = true;
        } else {
            dataCbFrontMirror = false;
        }
        if (strstr(CONFIG_CAMERA_FRONT_FLIP_MDATACB_APK,cameraCallProcess)) {
            dataCbFrontFlip = true;
        } else {
            dataCbFrontFlip = false;
        }
#endif
    } else {
        dataCbFrontMirror = false;
        dataCbFrontFlip = false;
    }
#else
    dataCbFrontMirror = false;
#endif
    mEventNotifier->setDatacbFrontMirrorFlipState(dataCbFrontMirror,dataCbFrontFlip);
    LOG_FUNCTION_NAME_EXIT
}
1.mDisplayAdapter = new DisplayAdapter(); 这里会启动一个线程,跟着进去看一下构造函数:
mDisplayThread = new DisplayThread(this);
mDisplayThread->run("DisplayThread",ANDROID_PRIORITY_DISPLAY);
线程启动后会处于loop状态,等待其他线程发消息:
void DisplayAdapter::displayThread()
{
int err,stride,i,queue_cnt;
int dequeue_buf_index,queue_buf_index,queue_display_index;
buffer_handle_t *hnd = NULL;
NATIVE_HANDLE_TYPE *phnd;
GraphicBufferMapper& mapper = GraphicBufferMapper::get();
Message msg;
void *y_uv[3];
int frame_used_flag = -1;
Rect bounds;
LOG_FUNCTION_NAME
while (mDisplayRuning != STA_DISPLAY_STOP) {
display_receive_cmd:
if (displayThreadCommandQ.isEmpty() == false ) {
displayThreadCommandQ.get(&msg);
这个线程主要用于显示。显示窗口是在上层调用 setPreviewTarget 的时候传下来的:上层会把一个 IGraphicBufferProducer 类型的显示窗口 buffer 生产者传给 HAL。
2.mEventNotifier = new AppMsgNotifier();
对应的构造函数里面会启动两个线程:
//create thread
mCameraAppMsgThread = new CameraAppMsgThread(this);
mCameraAppMsgThread->run("AppMsgThread",ANDROID_PRIORITY_DISPLAY);
mEncProcessThread = new EncProcessThread(this);
mEncProcessThread->run("EncProcessThread",ANDROID_PRIORITY_NORMAL);
第一个线程会比较重要,用来接收preview线程发过来的消息,通知这边向上回调数据:
void AppMsgNotifier::eventThread()
{
bool loop = true;
Message msg;
int index,err = 0;
FramInfo_s *frame = NULL;
int frame_used_flag = -1;
LOG_FUNCTION_NAME
while (loop) {
memset(&msg,0,sizeof(msg));
eventThreadCommandQ.get(&msg);
switch (msg.command)
{
case CameraAppMsgThread::CMD_EVENT_PREVIEW_DATA_CB:
frame = (FramInfo_s*)msg.arg2;
processPreviewDataCb(frame);
//return frame
frame_used_flag = (int)msg.arg3;
mFrameProvider->returnFrame(frame->frame_index,frame_used_flag);
break;
....
....
....
至于 CameraHAL 层的消息发送机制,其实比较简单:它是基于 Linux pipe 实现的,只能在两个端点之间通信,并且是单向的通道。
第二个线程用于处理 picture:当我们按下拍照按钮时就会发送 cmd 到这个线程,这里会进行 jpeg 编码相关操作,并保存到本地:
void AppMsgNotifier::encProcessThread()
{
bool loop = true;
Message msg;
int err = 0;
int frame_used_flag = -1;
LOG_FUNCTION_NAME
while (loop) {
memset(&msg,0,sizeof(msg));
encProcessThreadCommandQ.get(&msg);
3.接下来就是申请一些buffer的初始化了
4.选择 camera 类型的过程,这里涉及到了 UVC camera。根据不同的选择,mCameraAdapter 会被实例化成不同的 cameraAdapter,其中的接口实现也就不同。之后开始对 camera 初始化:
int CameraAdapter::initialize()
{
int ret = -1;
//create focus thread
LOG_FUNCTION_NAME
if((ret = cameraCreate(mCamId)) < 0)
return ret;
initDefaultParameters(mCamId);
LOG_FUNCTION_NAME_EXIT
return ret;
}
第一个是create一个camera,这里会跟驱动进行交互,有实际的open动作,并得到fd保存起来,后面ioctl就会通过这个fd与驱动进行通信。
先看create:
//talk to driver
//open camera
int CameraAdapter::cameraCreate(int cameraId)
{