Camera Open Call Flow (API1 + Qualcomm mm-camera)

Util.java

1. /packages/apps/Camera/src/com/android/camera/Util.java

public static CameraManager.CameraProxy openCamera(Activity activity, int cameraId)
        throws CameraHardwareException, CameraDisabledException {
    throwIfCameraDisabled(activity);
    try {
        return CameraHolder.instance().open(cameraId);
    } catch (CameraHardwareException e) {
        // In eng build, we throw the exception so that test tool
        // can detect it and report it
        if ("eng".equals(Build.TYPE)) {
            throw new RuntimeException("openCamera failed", e);
        } else {
            throw e;
        }
    }
}

...... Tracing further down the call chain, we arrive at the following code:

CameraProxy cameraOpen(int cameraId) {
    // Cannot open camera in mCameraHandler, otherwise all camera events
    // will be routed to mCameraHandler looper, which in turn will call
    // event handler like Camera.onFaceDetection, which in turn will modify
    // UI and cause exception like this:
    // CalledFromWrongThreadException: Only the original thread that created
    // a view hierarchy can touch its views.
    mCamera = android.hardware.Camera.open(cameraId); // returns an android.hardware.Camera instance, kept in the member variable mCamera
    if (mCamera != null) {
        mCameraProxy = new CameraProxy();
        return mCameraProxy;
    } else {
        return null;
    }
}

The last call in this chain is android.hardware.Camera.open, invoked with the cameraId. android.hardware.Camera is the framework class that forwards the open request down toward the native camera stack.

Camera.java

2. frameworks/base/core/java/android/hardware/Camera.java

public static Camera open(int cameraId) {
    return new Camera(cameraId); // construct a Camera object for the given cameraId
}
......
Camera(int cameraId) { // Camera constructor
    mShutterCallback = null;
    mRawImageCallback = null;
    mJpegCallback = null;
    mPreviewCallback = null;
    mPostviewCallback = null;
    mZoomListener = null;
    /* ### QC ADD-ONS: START */
    mCameraDataCallback = null;
    mCameraMetaDataCallback = null;
    /* ### QC ADD-ONS: END */

    Looper looper;
    if ((looper = Looper.myLooper()) != null) {
        mEventHandler = new EventHandler(this, looper);
    } else if ((looper = Looper.getMainLooper()) != null) {
        mEventHandler = new EventHandler(this, looper);
    } else {
        mEventHandler = null;
    }

    String packageName = ActivityThread.currentPackageName(); // package name of the calling process
    // call the native method native_setup, passing a weak reference to this Camera object,
    // the cameraId and the package name
    native_setup(new WeakReference<Camera>(this), cameraId, packageName);
}

android_hardware_Camera.cpp  

3. Tracing native_setup: frameworks/base/core/jni/android_hardware_Camera.cpp

      Note: in each JNINativeMethod entry below, the second field is the JNI type signature. L<class>; denotes an object reference, I denotes jint, Z denotes jboolean, and the trailing V denotes a void return. So "(Ljava/lang/Object;ILjava/lang/String;)V" reads as: parameters (Object, int, String), return type void.

We can find the function below because AndroidRuntime::registerNativeMethods(...) registers the mapping from the Java method native_setup to the native function android_hardware_Camera_native_setup.
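
To make the signature notation concrete, here is a minimal, hedged JNI sketch with hypothetical function names (it mirrors the native_setup entry but is not the framework code), showing how a signature string ties a Java-visible method name to a native function, and how such a table is registered:

#include <jni.h>

// Hypothetical native function whose C++ parameter list matches the signature string
// "(Ljava/lang/Object;ILjava/lang/String;)V":
//   Ljava/lang/Object;  -> jobject (weak_this)
//   I                   -> jint (cameraId)
//   Ljava/lang/String;  -> jstring (packageName)
//   trailing V          -> void return
// env and thiz are the implicit parameters every instance native method receives.
static void example_native_setup(JNIEnv* env, jobject thiz,
                                 jobject weak_this, jint cameraId, jstring packageName) {
    (void)env; (void)thiz; (void)weak_this; (void)cameraId; (void)packageName;
}

static const JNINativeMethod kMethods[] = {
    { "native_setup", "(Ljava/lang/Object;ILjava/lang/String;)V",
      (void*)example_native_setup },
};

// In the framework this happens via AndroidRuntime::registerNativeMethods; the plain
// JNI equivalent is RegisterNatives on the target class:
static jint registerExampleMethods(JNIEnv* env) {
    jclass clazz = env->FindClass("android/hardware/Camera");
    if (clazz == NULL) return JNI_ERR;
    return env->RegisterNatives(clazz, kMethods,
                                sizeof(kMethods) / sizeof(kMethods[0]));
}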

static JNINativeMethod camMethods[] = {
  { "getNumberOfCameras",
    "()I",
    (void *)android_hardware_Camera_getNumberOfCameras },
  { "_getCameraInfo",
    "(ILandroid/hardware/Camera$CameraInfo;)V",
    (void*)android_hardware_Camera_getCameraInfo },
  { "native_setup",
    "(Ljava/lang/Object;ILjava/lang/String;)V",
    (void*)android_hardware_Camera_native_setup },
  { "native_release",
    "()V",
    (void*)android_hardware_Camera_release },
  { "setPreviewDisplay",
    "(Landroid/view/Surface;)V",
    (void *)android_hardware_Camera_setPreviewDisplay },
  { "setPreviewTexture",
    "(Landroid/graphics/SurfaceTexture;)V",
    (void *)android_hardware_Camera_setPreviewTexture },
  { "startPreview",
    "()V",
    (void *)android_hardware_Camera_startPreview },
  { "_stopPreview",
    "()V",
    (void *)android_hardware_Camera_stopPreview },
  { "previewEnabled", "()Z", (void *)android_hardware_Camera_previewEnabled },
  { "setHasPreviewCallback",
    "(ZZ)V",
    (void *)android_hardware_Camera_setHasPreviewCallback },
  { "_addCallbackBuffer",
    "([BI)V",
    (void *)android_hardware_Camera_addCallbackBuffer },
  { "native_autoFocus",
    "()V",
    (void *)android_hardware_Camera_autoFocus },
  { "native_cancelAutoFocus",
    "()V",
    (void *)android_hardware_Camera_cancelAutoFocus },
  { "native_takePicture",
    "(I)V",
    (void *)android_hardware_Camera_takePicture },
  { "native_setHistogramMode",
    "(Z)V",
    (void *)android_hardware_Camera_setHistogramMode },
  { "native_setMetadataCb",
    "(Z)V",
    (void *)android_hardware_Camera_setMetadataCb },
  { "native_sendHistogramData",
    "()V",
    (void *)android_hardware_Camera_sendHistogramData },
  { "native_setLongshot",
    "(Z)V",
    (void *)android_hardware_Camera_setLongshot },
  { "native_setParameters",
    "(Ljava/lang/String;)V",
    (void *)android_hardware_Camera_setParameters },
  { "native_getParameters",
    "()Ljava/lang/String;",
    (void *)android_hardware_Camera_getParameters },
  { "reconnect",
    "()V",
    (void*)android_hardware_Camera_reconnect },
  { "lock",
    "()V",
    (void*)android_hardware_Camera_lock },
  { "unlock",
    "()V",
    (void*)android_hardware_Camera_unlock },
  { "startSmoothZoom",
    "(I)V",
    (void *)android_hardware_Camera_startSmoothZoom },
  { "stopSmoothZoom",
    "()V",
    (void *)android_hardware_Camera_stopSmoothZoom },
  { "setDisplayOrientation",
    "(I)V",
    (void *)android_hardware_Camera_setDisplayOrientation },
  { "_enableShutterSound",
    "(Z)Z",
    (void *)android_hardware_Camera_enableShutterSound },
  { "_startFaceDetection",
    "(I)V",
    (void *)android_hardware_Camera_startFaceDetection },
  { "_stopFaceDetection",
    "()V",
    (void *)android_hardware_Camera_stopFaceDetection},
  { "enableFocusMoveCallback",
    "(I)V",
    (void *)android_hardware_Camera_enableFocusMoveCallback},
};
......
return AndroidRuntime::registerNativeMethods(env, "android/hardware/Camera",
                                             camMethods, NELEM(camMethods));

The native_setup method therefore resolves to android_hardware_Camera_native_setup:

// connect to camera service
static void android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
    jobject weak_this, jint cameraId, jstring clientPackageName)
{
    // Convert jstring to String16
    const char16_t *rawClientName = env->GetStringChars(clientPackageName, NULL);
    jsize rawClientNameLen = env->GetStringLength(clientPackageName);
    String16 clientName(rawClientName, rawClientNameLen);
    env->ReleaseStringChars(clientPackageName, rawClientName);

    sp<Camera> camera = Camera::connect(cameraId, clientName,
            Camera::USE_CALLING_UID);

    if (camera == NULL) {
        jniThrowRuntimeException(env, "Fail to connect to camera service");
        return;
    }

    // make sure camera hardware is alive
    if (camera->getStatus() != NO_ERROR) {
        jniThrowRuntimeException(env, "Camera initialization failed");
        return;
    }

    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        jniThrowRuntimeException(env, "Can't find android/hardware/Camera");
        return;
    }

    // We use a weak reference so the Camera object can be garbage collected.
    // The reference is only used as a proxy for callbacks.
    sp<JNICameraContext> context = new JNICameraContext(env, weak_this, clazz, camera);
    context->incStrong((void*)android_hardware_Camera_native_setup);
    camera->setListener(context);

    // save context in opaque field
    env->SetIntField(thiz, fields.context, (int)context.get());
}

Tracing Camera::connect

    libandroid_runtime.so has frameworks/av/include on its include path, so the Camera.h used here is frameworks/av/include/camera/Camera.h:

/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef ANDROID_HARDWARE_CAMERA_H
#define ANDROID_HARDWARE_CAMERA_H
......
namespace android {
class Surface;
class String8;
class String16;
// ref-counted object for callbacks
class CameraListener: virtual public RefBase
{
public:
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
    virtual void postData(int32_t msgType, const sp<IMemory>& dataPtr,
                          camera_frame_metadata_t *metadata) = 0;
    virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) = 0;
};
class Camera;
template <>
struct CameraTraits<Camera>
{
    typedef CameraListener        TCamListener;
    typedef ICamera               TCamUser;
    typedef ICameraClient         TCamCallbacks;
};
class Camera :
    public CameraBase<Camera>,
    public BnCameraClient
{
public:
    enum {
        USE_CALLING_UID = ICameraService::USE_CALLING_UID
    };
            // construct a camera client from an existing remote
    static  sp<Camera>  create(const sp<ICamera>& camera);
    static  sp<Camera>  connect(int cameraId,
                                const String16& clientPackageName,
                                int clientUid);
            virtual     ~Camera();
            status_t    reconnect();
            status_t    lock();
            status_t    unlock();
            // pass the buffered IGraphicBufferProducer to the camera service
            status_t    setPreviewTexture(const sp<IGraphicBufferProducer>& bufferProducer);
            // start preview mode, must call setPreviewDisplay first
            status_t    startPreview();
            // stop preview mode
            void        stopPreview();
            // get preview state
            bool        previewEnabled();
            // start recording mode, must call setPreviewDisplay first
            status_t    startRecording();
            // stop recording mode
            void        stopRecording();
            // get recording state
            bool        recordingEnabled();
            // release a recording frame
            void        releaseRecordingFrame(const sp<IMemory>& mem);
            // autoFocus - status returned from callback
            status_t    autoFocus();
            // cancel auto focus
            status_t    cancelAutoFocus();
            // take a picture - picture returned from callback
            status_t    takePicture(int msgType);
            // set preview/capture parameters - key/value pairs
            status_t    setParameters(const String8& params);
            // get preview/capture parameters - key/value pairs
            String8     getParameters() const;
            // send command to camera driver
            status_t    sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
            // tell camera hal to store meta data or real YUV in video buffers.
            status_t    storeMetaDataInBuffers(bool enabled);
            void        setListener(const sp<CameraListener>& listener);
            void        setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
            void        setPreviewCallbackFlags(int preview_callback_flag);
            sp<ICameraRecordingProxy> getRecordingProxy();
    // ICameraClient interface
    virtual void        notifyCallback(int32_t msgType, int32_t ext, int32_t ext2);
    virtual void        dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
                                     camera_frame_metadata_t *metadata);
    virtual void        dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
    class RecordingProxy : public BnCameraRecordingProxy
    {
    public:
        RecordingProxy(const sp<Camera>& camera);
        // ICameraRecordingProxy interface
        virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
        virtual void stopRecording();
        virtual void releaseRecordingFrame(const sp<IMemory>& mem);
    private:
        sp<Camera>         mCamera;
    };
protected:
                        Camera(int cameraId);
                        Camera(const Camera&);
                        Camera& operator=(const Camera);
    sp<ICameraRecordingProxyListener>  mRecordingProxyListener;
    friend class        CameraBase;
};
}; // namespace android
#endif // ANDROID_HARDWARE_CAMERA_H

Camera::connect connects to the camera service; the Camera object is the client side. Next we look at how the Camera member functions are implemented; this file is worth going through in detail:

File: frameworks/av/camera/Camera.cpp (the camera client side, built into libcamera_client.so)

/*
**
** Copyright (C) 2008, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "Camera"
......
namespace android {
Camera::Camera(int cameraId)
    : CameraBase(cameraId)
{
}
// construct a camera client from an existing camera remote
sp<Camera> Camera::create(const sp<ICamera>& camera)
{
     ALOGV("create");
     if (camera == 0) {
         ALOGE("camera remote is a NULL pointer");
         return 0;
     }
    sp<Camera> c = new Camera(-1);
    if (camera->connect(c) == NO_ERROR) {
        c->mStatus = NO_ERROR;
        c->mCamera = camera;
        camera->asBinder()->linkToDeath(c);
        return c;
    }
    return 0;
}
Camera::~Camera()
{
    // We don't need to call disconnect() here because if the CameraService
    // thinks we are the owner of the hardware, it will hold a (strong)
    // reference to us, and we can't possibly be here. We also don't want to
    // call disconnect() here if we are in the same process as mediaserver,
    // because we may be invoked by CameraService::Client::connect() and will
    // deadlock if we call any method of ICamera here.
}
sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
        int clientUid)
{
    return CameraBaseT::connect(cameraId, clientPackageName, clientUid); // delegate to CameraBase<Camera>::connect
}
status_t Camera::reconnect()
{
    ALOGV("reconnect");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->connect(this);
}
status_t Camera::lock()
{
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->lock();
}
status_t Camera::unlock()
{
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->unlock();
}
// pass the buffered IGraphicBufferProducer to the camera service
status_t Camera::setPreviewTexture(const sp<IGraphicBufferProducer>& bufferProducer)
{
    ALOGV("setPreviewTexture(%p)", bufferProducer.get());
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    ALOGD_IF(bufferProducer == 0, "app passed NULL surface");
    return c->setPreviewTexture(bufferProducer);
}
// start preview mode
status_t Camera::startPreview()
{
    ALOGV("startPreview");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->startPreview();
}
status_t Camera::storeMetaDataInBuffers(bool enabled)
{
    ALOGV("storeMetaDataInBuffers: %s",
            enabled? "true": "false");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->storeMetaDataInBuffers(enabled);
}
// start recording mode, must call setPreviewDisplay first
status_t Camera::startRecording()
{
    ALOGV("startRecording");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->startRecording();
}
// stop preview mode
void Camera::stopPreview()
{
    ALOGV("stopPreview");
    sp <ICamera> c = mCamera;
    if (c == 0) return;
    c->stopPreview();
}
// stop recording mode
void Camera::stopRecording()
{
    ALOGV("stopRecording");
    {
        Mutex::Autolock _l(mLock);
        mRecordingProxyListener.clear();
    }
    sp <ICamera> c = mCamera;
    if (c == 0) return;
    c->stopRecording();
}
// release a recording frame
void Camera::releaseRecordingFrame(const sp<IMemory>& mem)
{
    ALOGV("releaseRecordingFrame");
    sp <ICamera> c = mCamera;
    if (c == 0) return;
    c->releaseRecordingFrame(mem);
}
// get preview state
bool Camera::previewEnabled()
{
    ALOGV("previewEnabled");
    sp <ICamera> c = mCamera;
    if (c == 0) return false;
    return c->previewEnabled();
}
// get recording state
bool Camera::recordingEnabled()
{
    ALOGV("recordingEnabled");
    sp <ICamera> c = mCamera;
    if (c == 0) return false;
    return c->recordingEnabled();
}
status_t Camera::autoFocus()
{
    ALOGV("autoFocus");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->autoFocus();
}
status_t Camera::cancelAutoFocus()
{
    ALOGV("cancelAutoFocus");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->cancelAutoFocus();
}
// take a picture
status_t Camera::takePicture(int msgType)
{
    ALOGV("takePicture: 0x%x", msgType);
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->takePicture(msgType);
}
// set preview/capture parameters - key/value pairs
status_t Camera::setParameters(const String8& params)
{
    ALOGV("setParameters");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->setParameters(params);
}
// get preview/capture parameters - key/value pairs
String8 Camera::getParameters() const
{
    ALOGV("getParameters");
    String8 params;
    sp <ICamera> c = mCamera;
    if (c != 0) params = mCamera->getParameters();
    return params;
}
// send command to camera driver
status_t Camera::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
{
    ALOGV("sendCommand");
    sp <ICamera> c = mCamera;
    if (c == 0) return NO_INIT;
    return c->sendCommand(cmd, arg1, arg2);
}
void Camera::setListener(const sp<CameraListener>& listener)
{
    Mutex::Autolock _l(mLock);
    mListener = listener;
}
void Camera::setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener)
{
    Mutex::Autolock _l(mLock);
    mRecordingProxyListener = listener;
}
void Camera::setPreviewCallbackFlags(int flag)
{
    ALOGV("setPreviewCallbackFlags");
    sp <ICamera> c = mCamera;
    if (c == 0) return;
    mCamera->setPreviewCallbackFlag(flag);
}
// callback from camera service
void Camera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
{
    return CameraBaseT::notifyCallback(msgType, ext1, ext2);
}
// callback from camera service when frame or image is ready
void Camera::dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
                          camera_frame_metadata_t *metadata)
{
    sp<CameraListener> listener;
    {
        Mutex::Autolock _l(mLock);
        listener = mListener;
    }
    if (listener != NULL) {
        listener->postData(msgType, dataPtr, metadata);
    }
}
// callback from camera service when timestamped frame is ready
void Camera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
{
    // If recording proxy listener is registered, forward the frame and return.
    // The other listener (mListener) is ignored because the receiver needs to
    // call releaseRecordingFrame.
    sp<ICameraRecordingProxyListener> proxylistener;
    {
        Mutex::Autolock _l(mLock);
        proxylistener = mRecordingProxyListener;
    }
    if (proxylistener != NULL) {
        proxylistener->dataCallbackTimestamp(timestamp, msgType, dataPtr);
        return;
    }
    sp<CameraListener> listener;
    {
        Mutex::Autolock _l(mLock);
        listener = mListener;
    }
    if (listener != NULL) {
        listener->postDataTimestamp(timestamp, msgType, dataPtr);
    } else {
        ALOGW("No listener was set. Drop a recording frame.");
        releaseRecordingFrame(dataPtr);
    }
}
sp<ICameraRecordingProxy> Camera::getRecordingProxy() {
    ALOGV("getProxy");
    return new RecordingProxy(this);
}
status_t Camera::RecordingProxy::startRecording(const sp<ICameraRecordingProxyListener>& listener)
{
    ALOGV("RecordingProxy::startRecording");
    mCamera->setRecordingProxyListener(listener);
    mCamera->reconnect();
    return mCamera->startRecording();
}
void Camera::RecordingProxy::stopRecording()
{
    ALOGV("RecordingProxy::stopRecording");
    mCamera->stopRecording();
}
void Camera::RecordingProxy::releaseRecordingFrame(const sp<IMemory>& mem)
{
    ALOGV("RecordingProxy::releaseRecordingFrame");
    mCamera->releaseRecordingFrame(mem);
}
Camera::RecordingProxy::RecordingProxy(const sp<Camera>& camera)
{
    mCamera = camera;
}
}; // namespace android

CameraBaseT::connect(cameraId, clientPackageName, clientUid);

Now let's look at the definition and implementation of the CameraBase class: frameworks/av/camera/CameraBase.cpp

/*
**
** Copyright (C) 2013, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "CameraBase"
......
namespace android {
namespace {
    sp<ICameraService>        gCameraService;
    const int                 kCameraServicePollDelay = 500000; // 0.5s
    const char*               kCameraServiceName      = "media.camera";
    Mutex                     gLock;
    class DeathNotifier : public IBinder::DeathRecipient
    {
    public:
        DeathNotifier() {
        }
        virtual void binderDied(const wp<IBinder>& who) {
            ALOGV("binderDied");
            Mutex::Autolock _l(gLock);
            gCameraService.clear();
            ALOGW("Camera service died!");
        }
    };
    sp<DeathNotifier>         gDeathNotifier;
}; // namespace anonymous
///
// CameraBase definition
///
// establish binder interface to camera service
// i.e. obtain the binder interface to the camera service
template <typename TCam, typename TCamTraits>
const sp<ICameraService>& CameraBase<TCam, TCamTraits>::getCameraService()
{
    Mutex::Autolock _l(gLock);
    if (gCameraService.get() == 0) {
        sp<IServiceManager> sm = defaultServiceManager();
/**
 * sm is a BpServiceManager; its mRemote member points to a BpBinder with
 * handle = 0, and handle 0 is the ServiceManager.
 */
        sp<IBinder> binder;
        do {
            binder = sm->getService(String16(kCameraServiceName)); // look up the binder object registered under the camera service name
            if (binder != 0) {
                break;
            }
            ALOGW("CameraService not published, waiting...");
            usleep(kCameraServicePollDelay);
        } while(true);
        if (gDeathNotifier == NULL) {
            gDeathNotifier = new DeathNotifier();
        }
        binder->linkToDeath(gDeathNotifier);
        gCameraService = interface_cast<ICameraService>(binder); // wrap the binder in an ICameraService proxy (frameworks/av/include/camera/ICameraService.h)
    }
    ALOGE_IF(gCameraService == 0, "no CameraService!?");
    return gCameraService;
}
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                         const String16& clientPackageName,
                                               int clientUid)
{
    // the template is instantiated with Camera at the call site above, so TCam here is Camera
    ALOGV("%s: connect", __FUNCTION__);
    // hence c below is a Camera object
    sp<TCam> c = new TCam(cameraId);
    sp<TCamCallbacks> cl = c;
    // getCameraService() returns a BpCameraService whose mRemote member has been set to a BpBinder;
    // this is done inside getCameraService() via defaultServiceManager() and interface_cast().
    const sp<ICameraService>& cs = getCameraService();
    if (cs != 0) { // mCamera is declared in CameraBase
/**
 * cs->connect() returns a BpCamera object, so c->mCamera ends up holding a BpCamera.
 * BpCamera also has an mRemote member pointing to a BpBinder (created in BpCameraService::connect
 * below from the strong binder read out of the reply, i.e. the proxy for the server-side object),
 * which means this is where we receive the service's response.
 */
        c->mCamera = cs->connect(cl, cameraId, clientPackageName, clientUid); // calls ICameraService::connect
    }
    if (c->mCamera != 0) {
        c->mCamera->asBinder()->linkToDeath(c);
        c->mStatus = NO_ERROR;
    } else {
        c.clear();
    }
    return c;
}
/**
 * Recap of getCameraService(): first obtain the cross-process IServiceManager, then look up the
 * camera service (registered by mediaserver at startup). Most importantly, the returned binder
 * object is run through interface_cast to produce a BpCameraService, and the binder object is
 * stored in BpCameraService's mRemote member. interface_cast is implemented in
 * frameworks/native/include/binder/IInterface.h (a simplified sketch is given after this listing).
 */

template <typename TCam, typename TCamTraits>
void CameraBase<TCam, TCamTraits>::disconnect()
{
    ALOGV("%s: disconnect", __FUNCTION__);
    if (mCamera != 0) {
        mCamera->disconnect();
        mCamera->asBinder()->unlinkToDeath(this);
        mCamera = 0;
    }
    ALOGV("%s: disconnect (done)", __FUNCTION__);
}
template <typename TCam, typename TCamTraits>
CameraBase<TCam, TCamTraits>::CameraBase(int cameraId) :
    mStatus(UNKNOWN_ERROR),
    mCameraId(cameraId)
{
}
template <typename TCam, typename TCamTraits>
CameraBase<TCam, TCamTraits>::~CameraBase()
{
}
template <typename TCam, typename TCamTraits>
sp<typename TCamTraits::TCamUser> CameraBase<TCam, TCamTraits>::remote()
{
    return mCamera;
}
template <typename TCam, typename TCamTraits>
status_t CameraBase<TCam, TCamTraits>::getStatus()
{
    return mStatus;
}
template <typename TCam, typename TCamTraits>
void CameraBase<TCam, TCamTraits>::binderDied(const wp<IBinder>& who) {
    ALOGW("mediaserver's remote binder Camera object died");
    notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED, /*ext2*/0);
}
template <typename TCam, typename TCamTraits>
void CameraBase<TCam, TCamTraits>::setListener(const sp<TCamListener>& listener)
{
    Mutex::Autolock _l(mLock);
    mListener = listener;
}
// callback from camera service
template <typename TCam, typename TCamTraits>
void CameraBase<TCam, TCamTraits>::notifyCallback(int32_t msgType,
                                                  int32_t ext1,
                                                  int32_t ext2)
{
    sp<TCamListener> listener;
    {
        Mutex::Autolock _l(mLock);
        listener = mListener;
    }
    if (listener != NULL) {
        listener->notify(msgType, ext1, ext2);
    }
}
template <typename TCam, typename TCamTraits>
int CameraBase<TCam, TCamTraits>::getNumberOfCameras() {
    const sp<ICameraService> cs = getCameraService();
    if (!cs.get()) {
        // as required by the public Java APIs
        return 0;
    }
    return cs->getNumberOfCameras();
}
// this can be in BaseCamera but it should be an instance method
template <typename TCam, typename TCamTraits>
status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
                               struct CameraInfo* cameraInfo) {
    const sp<ICameraService>& cs = getCameraService();
    if (cs == 0) return UNKNOWN_ERROR;
    return cs->getCameraInfo(cameraId, cameraInfo);
}
template <typename TCam, typename TCamTraits>
status_t CameraBase<TCam, TCamTraits>::addServiceListener(
                            const sp<ICameraServiceListener>& listener) {
    const sp<ICameraService>& cs = getCameraService();
    if (cs == 0) return UNKNOWN_ERROR;
    return cs->addListener(listener);
}
template <typename TCam, typename TCamTraits>
status_t CameraBase<TCam, TCamTraits>::removeServiceListener(
                            const sp<ICameraServiceListener>& listener) {
    const sp<ICameraService>& cs = getCameraService();
    if (cs == 0) return UNKNOWN_ERROR;
    return cs->removeListener(listener);
}
template class CameraBase<ProCamera>;
template class CameraBase<Camera>;
} // namespace android
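
The interface_cast used in getCameraService() above (and again in BpCameraService::connect below) is implemented in frameworks/native/include/binder/IInterface.h. The following is a simplified sketch of the template together with roughly what the IMPLEMENT_META_INTERFACE macro expands to for ICameraService; it is a reading aid, not a verbatim quote:

// Simplified sketch based on frameworks/native/include/binder/IInterface.h.
template<typename INTERFACE>
inline sp<INTERFACE> interface_cast(const sp<IBinder>& obj)
{
    return INTERFACE::asInterface(obj);
}

// Approximate expansion of IMPLEMENT_META_INTERFACE(CameraService, "android.hardware.ICameraService"):
sp<ICameraService> ICameraService::asInterface(const sp<IBinder>& obj)
{
    sp<ICameraService> intr;
    if (obj != NULL) {
        // Same process: the IBinder is really the BnCameraService itself, use it directly.
        intr = static_cast<ICameraService*>(
            obj->queryLocalInterface(ICameraService::descriptor).get());
        if (intr == NULL) {
            // Different process: wrap the BpBinder(handle) in a new proxy object.
            intr = new BpCameraService(obj);
        }
    }
    return intr;
}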

File: frameworks/av/camera/ICameraService.cpp (the client-side proxy BpCameraService)

// connect to camera service
virtual sp<ICamera> connect(const sp<ICameraClient>& cameraClient, int cameraId,
                            const String16 &clientPackageName, int clientUid)
{
    Parcel data, reply;
    data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
    data.writeStrongBinder(cameraClient->asBinder());
    data.writeInt32(cameraId);
    data.writeString16(clientPackageName);
    data.writeInt32(clientUid);
    /**
     * remote() returns the mRemote member, which points to a BpBinder object,
     * so remote()->transact() is really BpBinder::transact()
     * (an excerpt of BpBinder::transact follows after this function).
     */
    remote()->transact(BnCameraService::CONNECT, data, &reply);
    return interface_cast<ICamera>(reply.readStrongBinder());
}
/**
 * interface_cast is used once more here: readStrongBinder() turns the strong binder found in the
 * reply (the server-side CameraClient object) into a local BpBinder proxy, and interface_cast
 * constructs a new BpCamera around it. Inside the BpCamera constructor (via its base class
 * BpRefBase) that BpBinder is stored in the mRemote member. CameraClient on the service side
 * derives, through CameraService::Client and BnCamera, from BBinder, so BpCamera's mRemote now
 * points at the proxy for the CameraClient returned by the server. This establishes the
 * one-to-one BpCamera/BnCamera pairing that later calls such as startPreview use for binder
 * communication.
 */
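
Inside connect() above, remote() is a BpBinder, so remote()->transact(...) first lands in BpBinder::transact, which forwards the transaction to IPCThreadState and, through it, to the binder driver. For reference, an excerpt from frameworks/native/libs/binder/BpBinder.cpp (minor details may differ between releases):

status_t BpBinder::transact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    // Once a binder has died, it will never come back to life.
    if (mAlive) {
        status_t status = IPCThreadState::self()->transact(
            mHandle, code, data, reply, flags);
        if (status == DEAD_OBJECT) mAlive = 0;
        return status;
    }

    return DEAD_OBJECT;
}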

From BpBinder::transact, the binder mechanism carries the transaction into the service process, where it eventually reaches the following code:

File: frameworks/native/libs/binder/Binder.cpp

status_t BBinder::transact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    data.setDataPosition(0);
    status_t err = NO_ERROR;
    switch (code) {
        case PING_TRANSACTION:
            reply->writeInt32(pingBinder());
            break;
        default:
            err = onTransact(code, data, reply, flags); // CameraService overrides onTransact, so this dispatches to CameraService::onTransact()
            break;
    }
    if (reply != NULL) {
        reply->setDataPosition(0);
    }
    return err;
}

Next, the implementation of CameraService::onTransact():

File: frameworks/av/services/camera/libcameraservice/CameraService.cpp

status_t CameraService::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
    // Permission checks
    switch (code) {
        case BnCameraService::CONNECT:
        case BnCameraService::CONNECT_PRO: {
            const int pid = getCallingPid();
            const int self_pid = getpid();
            if (pid != self_pid) {
                // we're called from a different process, do the real check
                if (!checkCallingPermission(
                        String16("android.permission.CAMERA"))) {
                    const int uid = getCallingUid();
                    ALOGE("Permission Denial: "
                         "can't use the camera pid=%d, uid=%d", pid, uid);
                    return PERMISSION_DENIED;
                }
            }
        } break;
    }
    return BnCameraService::onTransact(code, data, reply, flags);
}

Next, the implementation of BnCameraService::onTransact:

File: frameworks/av/camera/ICameraService.cpp

status_t BnCameraService::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case GET_NUMBER_OF_CAMERAS: {
            CHECK_INTERFACE(ICameraService, data, reply);
            reply->writeInt32(getNumberOfCameras());
            return NO_ERROR;
        } break;
        case GET_CAMERA_INFO: {
            CHECK_INTERFACE(ICameraService, data, reply);
            CameraInfo cameraInfo;
            memset(&cameraInfo, 0, sizeof(cameraInfo));
            status_t result = getCameraInfo(data.readInt32(), &cameraInfo);
            reply->writeInt32(cameraInfo.facing);
            reply->writeInt32(cameraInfo.orientation);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case CONNECT: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<ICameraClient> cameraClient =
                    interface_cast<ICameraClient>(data.readStrongBinder());
            int32_t cameraId = data.readInt32();
            const String16 clientName = data.readString16();
            int32_t clientUid = data.readInt32();
            // i.e. the connect() implemented by CameraService (section 4 below)
            sp<ICamera> camera = connect(cameraClient, cameraId,
                    clientName, clientUid);
            reply->writeStrongBinder(camera->asBinder());
            return NO_ERROR;
        } break;
        case CONNECT_PRO: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<IProCameraCallbacks> cameraClient = interface_cast<IProCameraCallbacks>(data.readStrongBinder());
            int32_t cameraId = data.readInt32();
            const String16 clientName = data.readString16();
            int32_t clientUid = data.readInt32();
            sp<IProCameraUser> camera = connect(cameraClient, cameraId,
                                                clientName, clientUid);
            reply->writeStrongBinder(camera->asBinder());
            return NO_ERROR;
        } break;
        case ADD_LISTENER: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<ICameraServiceListener> listener =
                interface_cast<ICameraServiceListener>(data.readStrongBinder());
            reply->writeInt32(addListener(listener));
            return NO_ERROR;
        } break;
        case REMOVE_LISTENER: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<ICameraServiceListener> listener =
                interface_cast<ICameraServiceListener>(data.readStrongBinder());
            reply->writeInt32(removeListener(listener));
            return NO_ERROR;
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}

libcameraservice.so

4. The camera service side (libcameraservice.so)

File: frameworks/av/services/camera/libcameraservice/CameraService.cpp (reached through the binder dispatch above; the full binder path is complex and only partially traced here)

sp<ICamera> CameraService::connect(
        const sp<ICameraClient>& cameraClient,
        int cameraId,
        const String16& clientPackageName,
        int clientUid) {
    String8 clientName8(clientPackageName);
    int callingPid = getCallingPid();
    LOG1("CameraService::connect E (pid %d \"%s\", id %d)", callingPid,
            clientName8.string(), cameraId);
    if (!validateConnect(cameraId, /*inout*/clientUid)) {
        return NULL;
    }
    sp<Client> client;
    {
        Mutex::Autolock lock(mServiceLock);
        if (!canConnectUnsafe(cameraId, clientPackageName,
                              cameraClient->asBinder(),
                              /*out*/client)) {
            return NULL;
        } else if (client.get() != NULL) {
            return client;
        }
        int facing = -1;
        int deviceVersion = getDeviceVersion(cameraId, &facing);
        // If there are other non-exclusive users of the camera,
        //  this will tear them down before we can reuse the camera
        if (isValidCameraId(cameraId)) {
            // transition from PRESENT -> NOT_AVAILABLE
            updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE,
                         cameraId);
        }
        switch(deviceVersion) {
          case CAMERA_DEVICE_API_VERSION_1_0:
            client = new CameraClient(this, cameraClient,
                    clientPackageName, cameraId,
                    facing, callingPid, clientUid, getpid());
            break;
          case CAMERA_DEVICE_API_VERSION_2_0:
          case CAMERA_DEVICE_API_VERSION_2_1:
          case CAMERA_DEVICE_API_VERSION_3_0:
            client = new Camera2Client(this, cameraClient,
                    clientPackageName, cameraId,
                    facing, callingPid, clientUid, getpid(),
                    deviceVersion);
            break;
          case -1:
            ALOGE("Invalid camera id %d", cameraId);
            return NULL;
          default:
            ALOGE("Unknown camera device HAL version: %d", deviceVersion);
            return NULL;
        }
        if (!connectFinishUnsafe(client, client->asBinder())) {
            // this is probably not recoverable.. maybe the client can try again
            // OK: we can only get here if we were originally in PRESENT state
            updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId);
            return NULL;
        }
        mClient[cameraId] = client;
        LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId,
             getpid());
    }
    // important: release the mutex here so the client can call back
    // into the service from its destructor (can be at the end of the call)
    return client;
}

Next, the implementation of connectFinishUnsafe:

bool CameraService::connectFinishUnsafe(const sp<BasicClient>& client,
                                        const sp<IBinder>& clientBinder) {
    if (client->initialize(mModule) != OK) {
        return false;
    }
    clientBinder->linkToDeath(this);
    return true;
}

Here client is a CameraClient (the HAL-version-1.0 path); its initialize() is defined as follows:

File: frameworks/av/services/camera/libcameraservice/CameraClient.cpp

status_t CameraClient::initialize(camera_module_t *module) {
    int callingPid = getCallingPid();
    status_t res;
    LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);
    // Verify ops permissions
    res = startCameraOps();
    if (res != OK) {
        return res;
    }
    char camera_device_name[10];
    snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
    mHardware = new CameraHardwareInterface(camera_device_name);
    res = mHardware->initialize(&module->common); // 1. initialize the HAL device
    if (res != OK) {
        ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        mHardware.clear();
        return NO_INIT;
    }
    mHardware->setCallbacks(notifyCallback, // 2. register notify/data callbacks
            dataCallback,
            dataCallbackTimestamp,
            (void *)mCameraId);
    // Enable zoom, error, focus, and metadata messages by default
    enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS | // 3. enable default message types
                  CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);
    LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
    return OK;
}

It calls CameraHardwareInterface::initialize; let's look at that implementation:

File: frameworks/av/services/camera/libcameraservice/CameraHardwareInterface.h

status_t initialize(hw_module_t *module)
{
    ALOGI("Opening camera %s", mName.string());
    int rc = module->methods->open(module, mName.string(),
                                   (hw_device_t **)&mDevice); /* note the mDevice argument: the main purpose of this
                                                                 call is to obtain mDevice, the HAL device handle (see
                                                                 the sketch after this function for the generic
                                                                 libhardware open pattern) */
    if (rc != OK) {
        ALOGE("Could not open camera %s: %d", mName.string(), rc);
        return rc;
    }
    initHalPreviewWindow();
    return rc;
}
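
Before diving into the HAL, here is a minimal, hedged sketch of the generic libhardware pattern behind module->methods->open(...) above. In the real flow, hw_get_module() runs once in CameraService (onFirstRef) and the resulting camera_module_t is what reaches initialize() as module; the helper below (hypothetical name) simply condenses the two steps for illustration.

#include <hardware/hardware.h>
#include <hardware/camera.h>

static camera_device_t* open_camera_hal(const char* camera_id /* e.g. "0" */)
{
    const hw_module_t* module = NULL;
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &module) != 0) {
        return NULL;  // camera.<board>.so could not be found or loaded
    }
    hw_device_t* device = NULL;
    if (module->methods->open(module, camera_id, &device) != 0) {
        return NULL;  // on this platform the open hook is QCamera2Factory::camera_device_open
    }
    // hw_device_t is the first member of camera_device_t (the "common" field),
    // so the returned pointer can be reinterpreted as the full camera device struct.
    return reinterpret_cast<camera_device_t*>(device);
}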

5. The HAL-layer open call

First, the definition of the hw_module_t struct in hardware/libhardware/include/hardware/hardware.h:

struct hw_module_t;
struct hw_module_methods_t;
struct hw_device_t;
/**
 * Every hardware module must have a data structure named HAL_MODULE_INFO_SYM
 * and the fields of this data structure must begin with hw_module_t
 * followed by module specific information.
 */
typedef struct hw_module_t {
    /** tag must be initialized to HARDWARE_MODULE_TAG */
    uint32_t tag;
    /**
     * The API version of the implemented module. The module owner is
     * responsible for updating the version when a module interface has
     * changed.
     *
     * The derived modules such as gralloc and audio own and manage this field.
     * The module user must interpret the version field to decide whether or
     * not to inter-operate with the supplied module implementation.
     * For example, SurfaceFlinger is responsible for making sure that
     * it knows how to manage different versions of the gralloc-module API,
     * and AudioFlinger must know how to do the same for audio-module API.
     *
     * The module API version should include a major and a minor component.
     * For example, version 1.0 could be represented as 0x0100. This format
     * implies that versions 0x0100-0x01ff are all API-compatible.
     *
     * In the future, libhardware will expose a hw_get_module_version()
     * (or equivalent) function that will take minimum/maximum supported
     * versions as arguments and would be able to reject modules with
     * versions outside of the supplied range.
     */
    uint16_t module_api_version;
#define version_major module_api_version
    /**
     * version_major/version_minor defines are supplied here for temporary
     * source code compatibility. They will be removed in the next version.
     * ALL clients must convert to the new version format.
     */
    /**
     * The API version of the HAL module interface. This is meant to
     * version the hw_module_t, hw_module_methods_t, and hw_device_t
     * structures and definitions.
     *
     * The HAL interface owns this field. Module users/implementations
     * must NOT rely on this value for version information.
     *
     * Presently, 0 is the only valid value.
     */
    uint16_t hal_api_version;
#define version_minor hal_api_version
    /** Identifier of module */
    const char *id;
    /** Name of this module */
    const char *name;
    /** Author/owner/implementor of the module */
    const char *author;
    /** Modules methods */
    struct hw_module_methods_t* methods;
    /** module's dso */
    void* dso;
    /** padding to 128 bytes, reserved for future use */
    uint32_t reserved[32-7];
} hw_module_t;
Now the camera module's own structures:

File: hardware/qcom/camera/QCamera2/HAL/QCamera2Hal.cpp

#include "QCamera2Factory.h"

static hw_module_t camera_common = {
    tag: HARDWARE_MODULE_TAG,
    module_api_version: CAMERA_MODULE_API_VERSION_1_0,
    hal_api_version: HARDWARE_HAL_API_VERSION,
    id: CAMERA_HARDWARE_MODULE_ID,
    name: "QCamera Module",
    author: "Qualcomm Innovation Center Inc",
    methods: &qcamera::QCamera2Factory::mModuleMethods,
    dso: NULL,
    reserved:  {0},
};
camera_module_t HAL_MODULE_INFO_SYM = {
    common: camera_common,
    get_number_of_cameras: qcamera::QCamera2Factory::get_number_of_cameras,
    get_camera_info: qcamera::QCamera2Factory::get_camera_info,
#ifndef USE_JB_MR1
    set_callbacks: NULL,
#endif
#ifdef USE_VENDOR_CAMERA_EXT
    get_vendor_tag_ops: NULL,
    reserved:  {0}
#endif
};

The definition of qcamera::QCamera2Factory::mModuleMethods, in hardware/qcom/camera/QCamera2/HAL/QCamera2Factory.cpp:

/*===========================================================================
 * FUNCTION   : camera_device_open
 *
 * DESCRIPTION: static function to open a camera device by its ID
 *
 * PARAMETERS :
 *   @camera_id : camera ID
 *   @hw_device : ptr to struct storing camera hardware device info
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera2Factory::camera_device_open(
    const struct hw_module_t *module, const char *id,
    struct hw_device_t **hw_device)
{
    if (module != &HAL_MODULE_INFO_SYM.common) {
        ALOGE("Invalid module. Trying to open %p, expect %p",
            module, &HAL_MODULE_INFO_SYM.common);
        return INVALID_OPERATION;
    }
    if (!id) {
        ALOGE("Invalid camera id");
        return BAD_VALUE;
    }
    return gQCamera2Factory.cameraDeviceOpen(atoi(id), hw_device); // forward to QCamera2Factory::cameraDeviceOpen
}
struct hw_module_methods_t QCamera2Factory::mModuleMethods = {
    open: QCamera2Factory::camera_device_open,
};
}; // namespace qcamera

Next, the implementation of QCamera2Factory::cameraDeviceOpen, in the same file (hardware/qcom/camera/QCamera2/HAL/QCamera2Factory.cpp):

/*===========================================================================
 * FUNCTION   : cameraDeviceOpen
 *
 * DESCRIPTION: open a camera device with its ID
 *
 * PARAMETERS :
 *   @camera_id : camera ID
 *   @hw_device : ptr to struct storing camera hardware device info
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera2Factory::cameraDeviceOpen(int camera_id,
                    struct hw_device_t **hw_device)
{
    int rc = NO_ERROR;
    if (camera_id < 0 || camera_id >= mNumOfCameras)
        return BAD_VALUE;
    QCamera2HardwareInterface *hw = new QCamera2HardwareInterface(camera_id);
    if (!hw) {
        ALOGE("Allocation of hardware interface failed");
        return NO_MEMORY;
    }
    rc = hw->openCamera(hw_device); // call QCamera2HardwareInterface::openCamera
    if (rc != NO_ERROR) {
        delete hw;
    }
    return rc;
}

Next, the implementation of QCamera2HardwareInterface::openCamera(hw_device_t **), in hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp:

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = NO_ERROR;
    if (mCameraOpened || gcam_ispreviewing) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }
    ALOGE("[KPI Perf] %s: E PROFILE_OPEN_CAMERA camera id %d", __func__, mCameraId);
    pthread_mutex_lock(&g_camlock);
    rc = openCamera();
    if (rc == NO_ERROR) {
        *hw_device = &mCameraDevice.common;
#ifdef IS_SCANNER_CAMERA
        mCamDevStore[mCameraId] = &mCameraDevice;
        if ((mCameraId != CAMERA_SCANNER_ID) && !msingle_scanner) {
#endif
        // just cmos camera come here, close scanner
        if (mCamDevStore[CAMERA_SCANNER_ID] != NULL) {
            ALOGD("cmos camera come here, close scanner");
            QCamera2HardwareInterface *hw =
                reinterpret_cast<QCamera2HardwareInterface *>(
                    reinterpret_cast<camera_device_t *>(mCamDevStore[CAMERA_SCANNER_ID])->priv);
            hw->closeCamera();
        }
        if (m_thermalAdapter.init(this) != 0) {
            ALOGE("Init thermal adapter failed");
        }
#ifdef IS_SCANNER_CAMERA
        }
#endif
    }
    else
        *hw_device = NULL;
    pthread_mutex_unlock(&g_camlock);
    return rc;
}

Next, the parameterless openCamera(), in the same file (hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp):

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera2HardwareInterface::openCamera()
{
    int32_t l_curr_width = 0;
    int32_t l_curr_height = 0;
    m_max_pic_width = 0;
    m_max_pic_height = 0;
    char value[PROPERTY_VALUE_MAX];
    int enable_4k2k;
    int i;
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId); // next we trace camera_open (mm-camera-interface)
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }
    if (NULL == gCamCapability[mCameraId])
        initCapabilities(mCameraId,mCameraHandle);
    mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
                                              camEvtHandle,
                                              (void *) this);
#ifdef IS_SCANNER_CAMERA
msingle_scanner = mCameraHandle->ops->scanner_single();
    if((mCameraId == CAMERA_SCANNER_ID) || msingle_scanner){
gCamCapability[mCameraId]->supported_focus_modes_cnt = 0;
gCamCapability[mCameraId]->supported_focus_modes_cnt = 0;
gCamCapability[mCameraId]->supported_flash_modes_cnt = 0;
gCamCapability[mCameraId]->qcom_supported_feature_mask = 0;
}
#endif
    /* get max pic size for jpeg work buf calculation*/
    for(i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt - 1; i++)
    {
      l_curr_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
      l_curr_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
      if ((l_curr_width * l_curr_height) >
        (m_max_pic_width * m_max_pic_height)) {
        m_max_pic_width = l_curr_width;
        m_max_pic_height = l_curr_height;
      }
    }
    //reset the preview and video sizes tables in case they were changed earlier
    copyList(savedSizes[mCameraId].all_preview_sizes, gCamCapability[mCameraId]->preview_sizes_tbl,
             savedSizes[mCameraId].all_preview_sizes_cnt);
    gCamCapability[mCameraId]->preview_sizes_tbl_cnt = savedSizes[mCameraId].all_preview_sizes_cnt;
    copyList(savedSizes[mCameraId].all_video_sizes, gCamCapability[mCameraId]->video_sizes_tbl,
             savedSizes[mCameraId].all_video_sizes_cnt);
    gCamCapability[mCameraId]->video_sizes_tbl_cnt = savedSizes[mCameraId].all_video_sizes_cnt;
    //check if video size 4k x 2k support is enabled
    property_get("sys.camera.4k2k.enable", value, "0");
    enable_4k2k = atoi(value) > 0 ? 1 : 0;
    ALOGD("%s: enable_4k2k is %d", __func__, enable_4k2k);
    if (!enable_4k2k) {
       //if the 4kx2k size exists in the supported preview size or
       //supported video size remove it
       bool found;
       cam_dimension_t true_size_4k_2k;
       cam_dimension_t size_4k_2k;
       true_size_4k_2k.width = 4096;
       true_size_4k_2k.height = 2160;
       size_4k_2k.width = 3840;
       size_4k_2k.height = 2160;
       found = removeSizeFromList(gCamCapability[mCameraId]->preview_sizes_tbl,
                                  gCamCapability[mCameraId]->preview_sizes_tbl_cnt,
                                  true_size_4k_2k);
       if (found) {
          gCamCapability[mCameraId]->preview_sizes_tbl_cnt--;
       }
       found = removeSizeFromList(gCamCapability[mCameraId]->preview_sizes_tbl,
                                  gCamCapability[mCameraId]->preview_sizes_tbl_cnt,
                                  size_4k_2k);
       if (found) {
          gCamCapability[mCameraId]->preview_sizes_tbl_cnt--;
       }
       found = removeSizeFromList(gCamCapability[mCameraId]->video_sizes_tbl,
                                  gCamCapability[mCameraId]->video_sizes_tbl_cnt,
                                  true_size_4k_2k);
       if (found) {
          gCamCapability[mCameraId]->video_sizes_tbl_cnt--;
       }
       found = removeSizeFromList(gCamCapability[mCameraId]->video_sizes_tbl,
                                  gCamCapability[mCameraId]->video_sizes_tbl_cnt,
                                  size_4k_2k);
       if (found) {
          gCamCapability[mCameraId]->video_sizes_tbl_cnt--;
       }
    }
#ifdef IS_SCANNER_CAMERA
if((mCameraId != CAMERA_SCANNER_ID) && !msingle_scanner){
#endif
   int32_t rc = m_postprocessor.init(jpegEvtHandle, this);
   if (rc != 0) {
       ALOGE("Init Postprocessor failed");
       mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
       mCameraHandle = NULL;
       return UNKNOWN_ERROR;
   }
   // update padding info from jpeg
   cam_padding_info_t padding_info;
   m_postprocessor.getJpegPaddingReq(padding_info);
   if (gCamCapability[mCameraId]->padding_info.width_padding < padding_info.width_padding) {
       gCamCapability[mCameraId]->padding_info.width_padding = padding_info.width_padding;
   }
   if (gCamCapability[mCameraId]->padding_info.height_padding < padding_info.height_padding) {
       gCamCapability[mCameraId]->padding_info.height_padding = padding_info.height_padding;
   }
   if (gCamCapability[mCameraId]->padding_info.plane_padding < padding_info.plane_padding) {
       gCamCapability[mCameraId]->padding_info.plane_padding = padding_info.plane_padding;
   }
#ifdef IS_SCANNER_CAMERA
}
#endif
    mParameters.init(gCamCapability[mCameraId], mCameraHandle, this, this);
#ifdef IS_SCANNER_CAMERA
if((mCameraId != CAMERA_SCANNER_ID) && !msingle_scanner){
#endif
   int32_t rc = m_thermalAdapter.init(this);
   if (rc != 0) {
       ALOGE("Init thermal adapter failed");
   }
#ifdef IS_SCANNER_CAMERA
}
#endif
    mCameraOpened = true;
    return NO_ERROR;
}

Next, the implementation of camera_open, in hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c:

/*===========================================================================
 * FUNCTION   : camera_open
 *
 * DESCRIPTION: open a camera by camera index
 *
 * PARAMETERS :
 *   @camera_idx : camera index. should within range of 0 to num_of_cameras
 *
 * RETURN     : ptr to a virtual table containing camera handle and operation table.
 *              NULL if failed.
 *==========================================================================*/
mm_camera_vtbl_t * camera_open(uint8_t camera_idx)
{
    int32_t rc = 0;
    mm_camera_obj_t* cam_obj = NULL;
    CDBG("%s: E camera_idx = %d\n", __func__, camera_idx);
    if (camera_idx >= g_cam_ctrl.num_cam) {
        CDBG_ERROR("%s: Invalid camera_idx (%d)", __func__, camera_idx);
        return NULL;
    }
    pthread_mutex_lock(&g_intf_lock);
    /* opened already */
    if(NULL != g_cam_ctrl.cam_obj[camera_idx]) {
        /* Add reference */
        g_cam_ctrl.cam_obj[camera_idx]->ref_count++;
        pthread_mutex_unlock(&g_intf_lock);
        CDBG("%s:  opened alreadyn", __func__);
        return &g_cam_ctrl.cam_obj[camera_idx]->vtbl;
    }
    cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
    if(NULL == cam_obj) {
        pthread_mutex_unlock(&g_intf_lock);
        CDBG("%s:  no mem", __func__);
        return NULL;
    }
    /* initialize camera obj */
    memset(cam_obj, 0, sizeof(mm_camera_obj_t));
    cam_obj->ref_count++;
    cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
    cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
    cam_obj->vtbl.ops = &mm_camera_ops;
    pthread_mutex_init(&cam_obj->cam_lock, NULL);
    rc = mm_camera_open(cam_obj); // call mm_camera_open
    if(rc != 0) {
        CDBG_ERROR("%s: mm_camera_open err = %d", __func__, rc);
        pthread_mutex_destroy(&cam_obj->cam_lock);
        g_cam_ctrl.cam_obj[camera_idx] = NULL;
        free(cam_obj);
        cam_obj = NULL;
        pthread_mutex_unlock(&g_intf_lock);
        return NULL;
    }else{
        CDBG("%s: Open succeded\n", __func__);
        g_cam_ctrl.cam_obj[camera_idx] = cam_obj;
        pthread_mutex_unlock(&g_intf_lock);
        return &cam_obj->vtbl;
    }
}

Next, the implementation of mm_camera_open, in hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c:

/*===========================================================================
 * FUNCTION   : mm_camera_open
 *
 * DESCRIPTION: open a camera
 *
 * PARAMETERS :
 *   @my_obj   : ptr to a camera object
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              -1 -- failure
 *==========================================================================*/
int32_t mm_camera_open(mm_camera_obj_t *my_obj)
{
    char dev_name[MM_CAMERA_DEV_NAME_LEN];
    int32_t rc = 0;
    int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
    uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
    unsigned int cam_idx = 0;
    CDBG("%s:  begin\n", __func__);
    snprintf(dev_name, sizeof(dev_name), "/dev/%s",
             mm_camera_util_get_dev_name(my_obj->my_hdl));
    sscanf(dev_name, "/dev/video%u", &cam_idx);
    CDBG_ERROR("%s: dev name = %s, cam_idx = %d", __func__, dev_name, cam_idx);
    do{
        n_try--;
        my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK); // the actual open(2) of the device node
        CDBG("%s:  ctrl_fd = %d, errno == %d", __func__, my_obj->ctrl_fd, errno);
        if((my_obj->ctrl_fd > 0) || (errno != EIO) || (n_try <= 0 )) {
            CDBG_ERROR("%s:  opened, break out while loop", __func__);
            break;
        }
        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
             __func__, sleep_msec);
        usleep(sleep_msec * 1000);
    }while (n_try > 0);
    if (my_obj->ctrl_fd <= 0) {
        CDBG_ERROR("%s: cannot open control fd of '%s' (%s)\n",
                 __func__, dev_name, strerror(errno));
        rc = -1;
        goto on_error;
    }
    /* open domain socket (see the sketch after this function) */
    n_try = MM_CAMERA_DEV_OPEN_TRIES;
    do {
        n_try--;
        my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
        CDBG("%s:  ds_fd = %d, errno = %d", __func__, my_obj->ds_fd, errno);
        if((my_obj->ds_fd > 0) || (n_try <= 0 )) {
            CDBG("%s:  opened, break out while loop", __func__);
            break;
        }
        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
             __func__, sleep_msec);
        usleep(sleep_msec * 1000);
    } while (n_try > 0);
    if (my_obj->ds_fd <= 0) {
        CDBG_ERROR("%s: cannot open domain socket fd of '%s'(%s)\n",
                 __func__, dev_name, strerror(errno));
        rc = -1;
        goto on_error;
    }
    pthread_mutex_init(&my_obj->msg_lock, NULL);
    pthread_mutex_init(&my_obj->cb_lock, NULL);
    pthread_mutex_init(&my_obj->evt_lock, NULL);
    pthread_cond_init(&my_obj->evt_cond, NULL);
    CDBG("%s : Launch evt Thread in Cam Open",__func__);
    mm_camera_cmd_thread_launch(&my_obj->evt_thread,
                                mm_camera_dispatch_app_event,
                                (void *)my_obj);
    /* launch event poll thread
     * we will add evt fd into event poll thread upon user first register for evt */
    CDBG("%s : Launch evt Poll Thread in Cam Open", __func__);
    mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
                                 MM_CAMERA_POLL_TYPE_EVT);
    mm_camera_evt_sub(my_obj, TRUE);
    CDBG("%s:  end (rc = %d)\n", __func__, rc);
    /* we do not need to unlock cam_lock here before return
     * because for open, it's done within intf_lock */
    return rc;
on_error:
    if (my_obj->ctrl_fd > 0) {
        close(my_obj->ctrl_fd);
        my_obj->ctrl_fd = 0;
    }
    if (my_obj->ds_fd > 0) {
        mm_camera_socket_close(my_obj->ds_fd);
       my_obj->ds_fd = 0;
    }
    /* we do not need to unlock cam_lock here before return
     * because for open, it's done within intf_lock */
    return rc;
}
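
Besides the V4L2 control fd, mm_camera_open also opens a "domain socket" toward the user-space mm-camera daemon (the ds_fd above). Below is a hedged sketch of what mm_camera_socket_create is expected to do for MM_CAMERA_SOCK_TYPE_UDP; the socket path is an assumption and should be checked against mm_camera_sock.c on the actual BSP:

#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>

static int sketch_camera_socket_create(int cam_idx) {
    // AF_UNIX datagram socket connected to the per-camera node served by the mm-camera daemon.
    int fd = socket(AF_UNIX, SOCK_DGRAM, 0);
    if (fd < 0) {
        return -1;
    }
    struct sockaddr_un addr;
    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    snprintf(addr.sun_path, sizeof(addr.sun_path),
             "/data/misc/camera/cam_socket%d", cam_idx);   // assumed path
    if (connect(fd, (struct sockaddr *)&addr, sizeof(addr)) != 0) {
        close(fd);
        return -1;
    }
    return fd;  // stored in my_obj->ds_fd by the caller
}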

The file descriptor returned by open() is kept in my_obj->ctrl_fd. The node opened is /dev/video0 for the back camera and /dev/video2 for the front camera; this mapping simply follows the order in which the kernel registered the video devices at boot. The sketch below shows one way to check the mapping on a device.
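
A small, hedged standalone sketch (not part of the camera stack) that queries each /dev/videoN with the standard V4L2 VIDIOC_QUERYCAP ioctl to see which driver owns which node:

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main() {
    for (int i = 0; i < 8; ++i) {
        char dev[32];
        snprintf(dev, sizeof(dev), "/dev/video%d", i);
        int fd = open(dev, O_RDWR | O_NONBLOCK);
        if (fd < 0)
            continue;  // node does not exist or is not accessible
        struct v4l2_capability cap;
        memset(&cap, 0, sizeof(cap));
        if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
            printf("%s: driver=%s card=%s\n", dev,
                   (const char *)cap.driver, (const char *)cap.card);
        close(fd);
    }
    return 0;
}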

6. The kernel driver, finally reached: kernel/drivers/media/platform/msm/camera_v2/msm.c

static int msm_open(struct file *filep)
{
    int rc;
    unsigned long flags;
    struct msm_video_device *pvdev = video_drvdata(filep);
    BUG_ON(!pvdev);

    /* !!! only ONE open is allowed !!! */
    if (atomic_read(&pvdev->opened))
        return -EBUSY;

    atomic_set(&pvdev->opened, 1);

    spin_lock_irqsave(&msm_pid_lock, flags);
    msm_pid = get_pid(task_pid(current));
    spin_unlock_irqrestore(&msm_pid_lock, flags);

    /* create event queue */
    rc = v4l2_fh_open(filep);
    if (rc < 0)
        return rc;

    spin_lock_irqsave(&msm_eventq_lock, flags);
    msm_eventq = filep->private_data;
    spin_unlock_irqrestore(&msm_eventq_lock, flags);

    return rc;
}

In the kernel, msm_open calls v4l2_fh_open, which allocates the per-open v4l2_fh structure (the event queue referenced above) and stores it in filep->private_data, completing the camera open path.

    
