When implementing the camera preview feature, an app calls the Android API method startPreview(), declared in Camera.java:
public native final void startPreview();
This is a native method, so the call goes directly to the JNI layer; the corresponding function is defined in android_hardware_Camera.cpp:
static void android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz)
{
ALOGV("startPreview");
sp<Camera> camera = get_native_camera(env, thiz, NULL);
if (camera == 0) return;
if (camera->startPreview() != NO_ERROR) {
jniThrowRuntimeException(env, "startPreview failed");
return;
}
}
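For reference, the Java method is bound to this JNI function through the native-method table in android_hardware_Camera.cpp. Below is a simplified sketch of that registration; the real table (camMethods in AOSP) contains many more entries and additional field-ID lookups:
// Simplified sketch: how Java's startPreview() is mapped to the JNI function above.
static JNINativeMethod camMethods[] = {
    { "startPreview", "()V", (void *)android_hardware_Camera_startPreview },
    // ... other entries omitted ...
};

int register_android_hardware_Camera(JNIEnv *env)
{
    // ... field ID lookups omitted ...
    return AndroidRuntime::registerNativeMethods(env, "android/hardware/Camera",
                                                 camMethods, NELEM(camMethods));
}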
sp<Camera> get_native_camera(JNIEnv *env, jobject thiz, JNICameraContext** pContext)
{
sp<Camera> camera;
Mutex::Autolock _l(sLock);
JNICameraContext* context = reinterpret_cast<JNICameraContext*>(env->GetLongField(thiz, fields.context));
if (context != NULL) {
camera = context->getCamera();
}
ALOGV("get_native_camera: context=%p, camera=%p", context, camera.get());
if (camera == 0) {
jniThrowRuntimeException(env,
"Camera is being used after Camera.release() was called");
}
if (pContext != NULL) *pContext = context;
return camera;
}
sp<Camera> getCamera() { Mutex::Autolock _l(mLock); return mCamera; }
As the trace above shows, the JNI-level startPreview() calls startPreview() on the C++ Camera class, defined in Camera.cpp:
// start preview mode
status_t Camera::startPreview()
{
ALOGV("startPreview");
sp <ICamera> c = mCamera;
if (c == 0) return NO_INIT;
return c->startPreview();
}
The next step is obviously a call to startPreview() on the ICamera interface, but where does the ICamera object mCamera come from?
First, look at where mCamera is declared, in CameraBase.h:
sp<TCamUser> mCamera;
Tracing further, mCamera is an sp<TCamUser> declared in the CameraBase class; for the Camera specialization, TCamUser is defined in Camera.h:
template <>
struct CameraTraits<Camera>
{
typedef CameraListener TCamListener;
typedef ICamera TCamUser;
typedef ICameraClient TCamCallbacks;
typedef status_t (ICameraService::*TCamConnectService)(const sp<ICameraClient>&,
int, const String16&, int,
/*out*/
sp<ICamera>&);
static TCamConnectService fnConnectService;
};
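To make the connection explicit: Camera derives from CameraBase<Camera>, and CameraBase pulls TCamUser out of the traits above, which is why mCamera ends up as an sp<ICamera>. A heavily trimmed sketch of CameraBase.h (not the full header):
// Sketch: CameraBase picks up TCamUser from the traits; for Camera that is ICamera.
template <typename TCam, typename TCamTraits = CameraTraits<TCam> >
class CameraBase : public IBinder::DeathRecipient
{
public:
    typedef typename TCamTraits::TCamListener   TCamListener;
    typedef typename TCamTraits::TCamUser       TCamUser;
    typedef typename TCamTraits::TCamCallbacks  TCamCallbacks;
    // ... connect(), disconnect(), etc. omitted ...
protected:
    sp<TCamUser>    mCamera;   // for Camera this is sp<ICamera>
    status_t        mStatus;
    // ... remaining members omitted ...
};

// And Camera itself is declared roughly as:
class Camera : public CameraBase<Camera>, public BnCameraClient { /* ... */ };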
The code above tells us that mCamera is of type ICamera. So where is it instantiated? Let's trace the code in reverse (note: backwards, from callee to caller). To start, Camera::create() assigns mCamera, as follows:
// construct a camera client from an existing camera remote
sp<Camera> Camera::create(const sp<ICamera>& camera)
{
ALOGV("create");
if (camera == 0) {
ALOGE("camera remote is a NULL pointer");
return 0;
}
sp<Camera> c = new Camera(-1);
if (camera->connect(c) == NO_ERROR) {
c->mStatus = NO_ERROR;
c->mCamera = camera;
camera->asBinder()->linkToDeath(c);
return c;
}
return 0;
}
status_t CameraSource::isCameraAvailable(
// ... omitted ...
if (camera == 0) {
mCamera = Camera::connect(cameraId, clientName, clientUid);
if (mCamera == 0) return -EBUSY;
mCameraFlags &= ~FLAGS_HOT_CAMERA;
} else {
// We get the proxy from Camera, not ICamera. We need to get the proxy
// to the remote Camera owned by the application. Here mCamera is a
// local Camera object created by us. We cannot use the proxy from
// mCamera here.
mCamera = Camera::create(camera);
if (mCamera == 0) return -EBUSY;
// ... omitted ...
}
mCamera->lock();
return OK;
}
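The other branch above, mCamera = Camera::connect(...), also assigns mCamera, inside the CameraBase<Camera>::connect() template: it asks CameraService for an ICamera through the fnConnectService member-function pointer declared in the traits. A simplified sketch of that path (exact details vary between Android versions):
// Sketch of CameraBase<TCam, TCamTraits>::connect() in CameraBase.cpp:
// it fills c->mCamera with the ICamera returned by CameraService.
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
        const String16& clientPackageName, int clientUid)
{
    sp<TCam> c = new TCam(cameraId);
    sp<TCamCallbacks> cl = c;
    const sp<ICameraService>& cs = getCameraService();
    status_t status = NO_ERROR;
    if (cs != 0) {
        // fnConnectService points at ICameraService::connect for the Camera specialization
        TCamConnectService fnConnectService = TCamTraits::fnConnectService;
        status = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName,
                                               clientUid, /*out*/ c->mCamera);
    }
    if (status == OK && c->mCamera != 0) {
        c->mCamera->asBinder()->linkToDeath(c);
        c->mStatus = NO_ERROR;
    } else {
        c.clear();
    }
    return c;
}
Our focus in this chapter, however, is the Camera::create() path taken when the application hands its own camera to the recorder.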
status_t CameraSource::initWithCameraAccess(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
const String16& clientName,
uid_t clientUid,
Size videoSize,
int32_t frameRate,
bool storeMetaDataInVideoBuffers) {
// ... omitted ...
if ((err = isCameraAvailable(camera, proxy, cameraId,
clientName, clientUid)) != OK) {
ALOGE("Camera connection could not be established.");
return err;
}
// ... omitted ...
return OK;
}
status_t CameraSource::init(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
const String16& clientName,
uid_t clientUid,
Size videoSize,
int32_t frameRate,
bool storeMetaDataInVideoBuffers) {
ALOGV("init");
status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
videoSize, frameRate,
storeMetaDataInVideoBuffers);
IPCThreadState::self()->restoreCallingIdentity(token);
return err;
}
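One link in this reverse trace is implicit: init() is not called by CreateFromCamera() directly but from the CameraSource constructor, which CreateFromCamera() (shown next) news up. A trimmed sketch of the constructor, kept only to show that call:
// Trimmed sketch of the CameraSource constructor in CameraSource.cpp: it forwards
// its arguments to init(); the result is what initCheck() later reports.
CameraSource::CameraSource(
        const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId, const String16& clientName, uid_t clientUid,
        Size videoSize, int32_t frameRate,
        const sp<IGraphicBufferProducer>& surface,
        bool storeMetaDataInVideoBuffers)
    /* member initializers omitted */ {
    mInitCheck = init(camera, proxy, cameraId, clientName, clientUid,
                      videoSize, frameRate, storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}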
// static
CameraSource *CameraSource::CreateFromCamera(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
const String16& clientName,
uid_t clientUid,
Size videoSize,
int32_t frameRate,
const sp<IGraphicBufferProducer>& surface,
bool storeMetaDataInVideoBuffers) {
CameraSource *source = new CameraSource(camera, proxy, cameraId,
clientName, clientUid, videoSize, frameRate, surface,
storeMetaDataInVideoBuffers);
return source;
}
status_t StagefrightRecorder::setupCameraSource(
sp<CameraSource> *cameraSource) {
// ... omitted ...
if (mCaptureTimeLapse) {
// ... omitted ...
} else {
*cameraSource = CameraSource::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
videoSize, mFrameRate,
mPreviewSurface, encoderSupportsCameraSourceMetaDataMode);
}
mCamera.clear();
mCameraProxy.clear();
if (*cameraSource == NULL) {
return UNKNOWN_ERROR;
}
if ((*cameraSource)->initCheck() != OK) {
(*cameraSource).clear();
*cameraSource = NULL;
return NO_INIT;
}
// ... omitted ...
return OK;
}
status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera,
const sp<ICameraRecordingProxy> &proxy) {
ALOGV("setCamera");
if (camera == 0) {
ALOGE("camera is NULL");
return BAD_VALUE;
}
if (proxy == 0) {
ALOGE("camera proxy is NULL");
return BAD_VALUE;
}
mCamera = camera;
mCameraProxy = proxy;
return OK;
}
status_t MediaRecorderClient::setCamera(const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy)
{
ALOGV("setCamera");
Mutex::Autolock lock(mLock);
if (mRecorder == NULL) {
ALOGE("recorder is not initialized");
return NO_INIT;
}
return mRecorder->setCamera(camera, proxy);
}
MediaRecorderClient is a subclass of BnMediaRecorder, and its setCamera() is invoked from BnMediaRecorder::onTransact(), as follows:
status_t BnMediaRecorder::onTransact(
uint32_t code, const Parcel& data,
Parcel* reply, uint32_t flags)
{
switch (code) {
// ... omitted ...
case SET_CAMERA: {
ALOGV("SET_CAMERA");
CHECK_INTERFACE(IMediaRecorder, data, reply);
sp<ICamera> camera = interface_cast<ICamera>(data.readStrongBinder());
sp<ICameraRecordingProxy> proxy =
interface_cast<ICameraRecordingProxy>(data.readStrongBinder());
reply->writeInt32(setCamera(camera, proxy));
return NO_ERROR;
} break;
// ... omitted ...
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
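The ICamera binder that onTransact() reads out of the parcel was written on the proxy side by BpMediaRecorder::setCamera() in IMediaRecorder.cpp. A trimmed sketch of that method:
// Sketch of the proxy (BpMediaRecorder) side of SET_CAMERA: it flattens the ICamera
// and the recording proxy into the parcel that onTransact() above unpacks.
status_t setCamera(const sp<ICamera>& camera,
                   const sp<ICameraRecordingProxy>& proxy)
{
    Parcel data, reply;
    data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
    data.writeStrongBinder(camera->asBinder());
    data.writeStrongBinder(proxy->asBinder());
    remote()->transact(SET_CAMERA, data, &reply);
    return reply.readInt32();
}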
We already introduced interface_cast() in an earlier chapter. Here interface_cast<ICamera>() returns the existing ICamera implementation associated with the binder if there is one; otherwise it instantiates a BpCamera and returns it. Note that the BpCamera is constructed with an IBinder object injected into it.
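For readers who want the mechanics spelled out, interface_cast<ICamera>() is a thin wrapper over ICamera::asInterface(), which the IMPLEMENT_META_INTERFACE macro expands to roughly the following (a sketch of the expansion, not literal source):
// interface_cast<> as defined in IInterface.h ...
template<typename INTERFACE>
inline sp<INTERFACE> interface_cast(const sp<IBinder>& obj)
{
    return INTERFACE::asInterface(obj);
}

// ... and what the macro-generated ICamera::asInterface() does: reuse a local
// implementation if the binder lives in this process, otherwise wrap the remote
// IBinder in a new BpCamera.
sp<ICamera> ICamera::asInterface(const sp<IBinder>& obj)
{
    sp<ICamera> intr;
    if (obj != NULL) {
        intr = static_cast<ICamera*>(
                obj->queryLocalInterface(ICamera::descriptor).get());
        if (intr == NULL) {
            intr = new BpCamera(obj);   // the IBinder is injected here
        }
    }
    return intr;
}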
At this point, the mCamera object used by startPreview() in Camera.cpp has been traced back to its source: it is exactly the BpCamera instance created above.
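So when Camera::startPreview() calls c->startPreview(), the call lands in BpCamera, which simply turns it into a START_PREVIEW binder transaction toward the service side. A sketch of that method from ICamera.cpp:
// Sketch of BpCamera::startPreview(): the local call becomes a binder transaction
// against the remote ICamera implementation held by the camera service.
status_t startPreview()
{
    ALOGV("startPreview");
    Parcel data, reply;
    data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
    remote()->transact(START_PREVIEW, data, &reply);
    return reply.readInt32();
}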
But where is the injected IBinder itself instantiated? We leave that question for a later chapter.