您的位置:首页 > 运维架构 > 网站架构

Android Camera 系统架构源码分析(2)---->Camera的startPreview和setPreviewCallback

2015-10-26 16:22 846 查看
Camera startPreview 流程

上接第一篇,这个过程的主要任务是,如何读取数据的,读取的数据是什么格式,最好能知道是如何去预览的

上层APP调用的是Camera.java的startPreview();,下面列出startPreview的调用流程

//Camera.java
publicnativefinalvoidstartPreview();
//android_hardware_Camera.cpp
staticvoidandroid_hardware_Camera_startPreview(JNIEnv*env,jobjectthiz)
{
//这里的camera变量是,Camera.cpp类
camera->startPreview();
}
//Camera.cpp
status_tCamera::startPreview()
{
//此处的mCamera在CameraService.cpp的connect函数里被设置
//也就是CameraClient类
sp<ICamera>c=mCamera;
returnc->startPreview();
}


CameraClient.cpp里的startPreview函数

status_tCameraClient::startPreview(){
returnstartCameraMode(CAMERA_PREVIEW_MODE);
}
status_tCameraClient::startCameraMode(camera_modemode){
switch(mode){
caseCAMERA_PREVIEW_MODE:
if(mSurface==0&&mPreviewWindow==0){
LOG1("mSurfaceisnotsetyet.");
//stillabletostartpreviewinthiscase.
}
returnstartPreviewMode();
caseCAMERA_RECORDING_MODE:
//...
}
}
//CameraClient.cpp
status_tCameraClient::startPreviewMode(){
mHardware->previewEnabled();
mHardware->setPreviewWindow(mPreviewWindow);
result=mHardware->startPreview();
}
//CameraHardwareInterface.h
status_tstartPreview(){
//mDevice是Cam1DeviceFactory.cppcreateCam1Device()返回的对象
//既是DefaultCam1Device类
returnmDevice->ops->start_preview(mDevice);
}


上面的mDevice->ops->start_preview()函数对应的是DefaultCam1Device的父类Cam1Device类的操作函数。

Cam1DeviceBase::startPreview()
{
//(1)此函数在其子类DefaultCam1Device中实现
//仅是初始化了CameraAdapter
onStartPreview();
//(2)初始化DisplayClient,重要,稍后研究
enableDisplayClient();
//(3)
mpCamClient->startPreview();
//(4)我们通过(3)和(4)开始研究,再返回去看(1)(2)
mpCamAdapter->startPreview();
enableMsgType(CAMERA_MSG_PREVIEW_METADATA);
//
mIsPreviewEnabled=true;
}


mpCamClient->startPreview()

CamClient::startPreview()
{
mpPreviewClient->startPreview();
}
//PreviewClient.cpp
PreviewClient::startPreview()
{
//获得预览参数,这里参数为800*480,yuv420sp
ms8PrvTgtFmt=mpParamsMgr->getPreviewFormat();
mpParamsMgr->getPreviewSize(&mi4PrvWidth,&mi4PrvHeight);
//初始化预览Buf
initBuffers();
//
returnonStateChanged();
}
PreviewClient::initBuffers()
{
//预览数据的Buf
mpImgBufMgr=ImgBufManager::alloc(ms8PrvTgtFmt,mi4PrvWidth,
mi4PrvHeight,eMAX_PREVIEW_BUFFER_NUM,
"PreviewClientCb",mpCamMsgCbInfo->mRequestMemory,
0,0);
//预览数据的处理类,这里只是保留了一个处理接口
//里面并没有什么东西,可自行填充
mpExtImgProc=ExtImgProc::createInstance();
mpExtImgProc->init();
}
PreviewClient::onStateChanged()
{//发送了一个eID_WAKEUP的消息
postCommand(Command(Command::eID_WAKEUP));
}
//接收eID_WAKEUP消息
PreviewClient::threadLoop()
{
Commandcmd;
if(getCommand(cmd))
{
switch(cmd.eId)
{
caseCommand::eID_WAKEUP:
caseCommand::eID_PREVIEW_FRAME:
caseCommand::eID_POSTVIEW_FRAME:
onClientThreadLoop(cmd);
break;
//
caseCommand::eID_EXIT:
//...
}
}
//开始处理数据Buf
PreviewClient::onClientThreadLoop(Commandconst&rCmd)
{
//(1)Getreferencestopool/queuebeforestarting,sothatnothingwillbefreeduringoperations.
sp<ImgBufManager>pBufMgr=NULL;
sp<IImgBufQueue>pBufQueue=NULL;
{
Mutex::Autolock_l(mModuleMtx);
//
pBufMgr=mpImgBufMgr;
pBufQueue=mpImgBufQueue;
if(pBufMgr==0||pBufQueue==0||!isEnabledState())
}
//(2)stop&clearallbufferssothatwewon'tdequeanyundefinedbuffer.
pBufQueue->stopProcessor();
//(3)PrepareallTODObuffers.准备buf
if(!prepareAllTodoBuffers(pBufQueue,pBufMgr))
//(4)Start这个函数只是发出了一个广播,通知等待者
if(!pBufQueue->startProcessor())
//(5)Dountilallwantedmessagesaredisabled.
while(1)
{
//(.1)阻塞等待通知,并开始处理buf
waitAndHandleReturnBuffers(pBufQueue);
//(.2)breakifdisabled.
//addisProcessorRunningtomakesuretheformerpauseProcessor
//issucessfullyprocessed.
if(!isEnabledState()||!pBufQueue->isProcessorRunning())
{
MY_LOGI("Previewclientdisabled");
break;
}
//(.3)re-prepareallTODObuffers,ifpossible,
//sincesomeDONE/CANCELbuffersreturn.把Buf放回队列里
prepareAllTodoBuffers(pBufQueue,pBufMgr);
}
//(6)stop.
pBufQueue->pauseProcessor();
pBufQueue->flushProcessor();//clear"TODO"
pBufQueue->stopProcessor();//clear"DONE"
//
//(7)Cancelallun-returnedbuffers.
cancelAllUnreturnBuffers();
}
PreviewClient::waitAndHandleReturnBuffers(sp<IImgBufQueue>const&rpBufQueue)
{
Vector<ImgBufQueNode>vQueNode;
//(1)dequebuffersfromprocessor.阻塞等待通知
rpBufQueue->dequeProcessor(vQueNode);
//(2)handlebuffersdequedfromprocessor.
ret=handleReturnBuffers(vQueNode);
}


我们找到了哪里开始处理数据,那问题来了,是如何处理数据,数据又是如何被显示的,而这些数据又是从哪里来的

先来看数据是如何处理的

PreviewClient::handleReturnBuffers(Vector<ImgBufQueNode>const&rvQueNode)
{
//(1)determinetheindexofthelatestDONEbufferforcallback.
int32_tidxToCallback=0;
for(idxToCallback=rvQueNode.size()-1;idxToCallback>=0;idxToCallback--)
{
if(rvQueNode[idxToCallback].isDONE())
break;
}
//ShowTimeduration.
if(0<=idxToCallback)
{
nsecs_tconst_timestamp1=rvQueNode[idxToCallback].getImgBuf()->getTimestamp();
mProfile_buffer_timestamp.pulse(_timestamp1);
nsecs_tconst_msDuration_buffer_timestamp=::ns2ms(mProfile_buffer_timestamp.getDuration());
mProfile_buffer_timestamp.reset(_timestamp1);
//
mProfile_dequeProcessor.pulse();
nsecs_tconst_msDuration_dequeProcessor=::ns2ms(mProfile_dequeProcessor.getDuration());
mProfile_dequeProcessor.reset();
}
//
//(2)RemovefromListandpeformcallback,onebyone.
int32_tconstqueSize=rvQueNode.size();
for(int32_ti=0;i<queSize;i++)
{
ImgBufQueNodeconst&rQueNode=rvQueNode[i];
sp<IImgBuf>const&rpQueImgBuf=rQueNode.getImgBuf();//ImgBufinQueue.
sp<ICameraImgBuf>pListImgBuf=NULL;
ImgBufNodeconstListNode=*mImgBufList.begin();//NodeinList.
pListImgBuf=ListNode.getImgBuf();//ImgBufinList.
//(.4)Performcallback.
if(i==idxToCallback){
//
if(mpExtImgProc!=NULL)
{
if(mpExtImgProc->getImgMask()&ExtImgProc::BufType_PreviewCB)
{
IExtImgProc::ImgInfoimg;
//
img.bufType=ExtImgProc::BufType_PreviewCB;
img.format=rpQueImgBuf->getImgFormat();
img.width=rpQueImgBuf->getImgWidth();
img.height=rpQueImgBuf->getImgHeight();
img.stride[0]=rpQueImgBuf->getImgWidthStride(0);
img.stride[1]=rpQueImgBuf->getImgWidthStride(1);
img.stride[2]=rpQueImgBuf->getImgWidthStride(2);
img.virtAddr=(MUINT32)(rpQueImgBuf->getVirAddr());
img.bufSize=rpQueImgBuf->getBufSize();
//预留的处理函数,现在这里是空
mpExtImgProc->doImgProc(img);
}
}
//对数据进行处理
performPreviewCallback(pListImgBuf,rQueNode.getCookieDE());
}
}
}
PreviewClient::performPreviewCallback(sp<ICameraImgBuf>const&pCameraImgBuf,int32_tconstmsgType)
{
if(pCameraImgBuf!=0)
{
//[2]Callback
sp<CamMsgCbInfo>pCamMsgCbInfo;
{
pCamMsgCbInfo=mpCamMsgCbInfo;
}
//调用处理函数
//这个mDataCb回调函数藏得很深
//在CameraClient.cpp的initialize()函数里
//mHardware->setCallbacks(notifyCallback,dataCallback,dataCallbackTimestamp,(void*)mCameraId);
//这句话其中的dataCallback,设置了mDataCb函数
pCamMsgCbInfo->mDataCb(
0!=msgType?msgType:(int32_t)CAMERA_MSG_PREVIEW_FRAME,
pCameraImgBuf->get_camera_memory(),
pCameraImgBuf->getBufIndex(),
NULL,
pCamMsgCbInfo->mCbCookie
);
}
}


最后调用了mDataCb()回调函数。如果在APP层用了setPreviewCallback(),则会在此时调用回调函数,并把数据传回去给APP。注意,这里的msgType被设置成了CAMERA_MSG_PREVIEW_FRAME。

Cam1DeviceBase 的 setCallbacks() 设置了很多回调函数,这些回调函数应该是挺有用的,有空可以了解一下。它和 startPreview() 类似,是提供给 Frameworks 层调用的,在 Frameworks 的 CameraClient::initialize() 中被调用。

//设置Camera的各种回调函数
Cam1DeviceBase::setCallbacks(
camera_notify_callbacknotify_cb,
camera_data_callbackdata_cb,
camera_data_timestamp_callbackdata_cb_timestamp,
camera_request_memoryget_memory,
void*user
)
{
mpCamMsgCbInfo->mCbCookie=user;
mpCamMsgCbInfo->mNotifyCb=notify_cb;
mpCamMsgCbInfo->mDataCb=data_cb;
mpCamMsgCbInfo->mDataCbTimestamp=data_cb_timestamp;
mpCamMsgCbInfo->mRequestMemory=get_memory;
//
if(mpCamClient!=0)
{
mpCamClient->setCallbacks(mpCamMsgCbInfo);
}
//
if(mpCamAdapter!=0)
{
mpCamAdapter->setCallbacks(mpCamMsgCbInfo);
}
}


Frameworks:

status_tCameraClient::initialize(camera_module_t*module){
mHardware=newCameraHardwareInterface(camera_device_name);
res=mHardware->initialize(&module->common);
//Cam1DeviceBase的setCallbacks()
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
(void*)mCameraId);
}


dataCallback 是个回调函数,也定义在 CameraClient 中。此函数接收多种 msgType,这表示数据回调中可以有多种数据类型:有 RAW 的、有 COMPRESSED 的,等等。

voidCameraClient::dataCallback(int32_tmsgType,
constsp<IMemory>&dataPtr,camera_frame_metadata_t*metadata,void*user){
switch(msgType&~CAMERA_MSG_PREVIEW_METADATA){
//!++
#if1//defined(MTK_CAMERA_BSP_SUPPORT)
caseMTK_CAMERA_MSG_EXT_DATA:
client->handleMtkExtData(dataPtr,metadata);
break;
#endif
//!--
caseCAMERA_MSG_PREVIEW_FRAME:
client->handlePreviewData(msgType,dataPtr,metadata);
break;
caseCAMERA_MSG_POSTVIEW_FRAME:
client->handlePostview(dataPtr);
break;
caseCAMERA_MSG_RAW_IMAGE:
client->handleRawPicture(dataPtr);
break;
caseCAMERA_MSG_COMPRESSED_IMAGE:
client->handleCompressedPicture(dataPtr);
break;
default:
client->handleGenericData(msgType,dataPtr,metadata);
break;
}
}


我们的msgType是CAMERA_MSG_PREVIEW_FRAME。调用了handlePreviewData函数

voidCameraClient::handlePreviewData(int32_tmsgType,
constsp<IMemory>&mem,
camera_frame_metadata_t*metadata){
//获取mem
sp<IMemoryHeap>heap=mem->getMemory(&offset,&size);
//iscallbackenabled?判断
if(!(flags&CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)){
//Iftheenablebitisoff,thecopy-outandone-shotbitsareignored
LOG2("framecallbackisdisabled");
return;
}
//holdastrongpointertotheclient
sp<ICameraClient>c=mRemoteCallback;
//clearcallbackflagsifnoclientorone-shotmode判断
if(c==0||(mPreviewCallbackFlag&CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)){
LOG2("Disablepreviewcallback");
}
//有两种方式把mem发送出去,一种是copy一种是直接把地址发送出去
if(c!=0){
//Isthereceivedframecopiedoutornot?
if(flags&CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK){
LOG2("frameiscopied");
copyFrameAndPostCopiedFrame(msgType,c,heap,offset,size,metadata);
}else{
LOG2("frameisforwarded");
c->dataCallback(msgType,mem,metadata);
}
}
}


最后是通过 sp&lt;ICameraClient&gt; c = mRemoteCallback; 即 mRemoteCallback->dataCallback 发送出去的。mRemoteCallback 是 ICameraClient 类型,它和 CameraClient 两者都有 dataCallback,两者又是什么关系?如果有关系,这里岂不是会死循环?搜一下 mRemoteCallback,以为是在 CameraService::connect() 里被初始化的,于是在那里找,找了大半天也没找到在哪里被初始化。被坑了半天,原来是在 CameraClient 的构造函数里。

我们返回到Camera.cpp的connect函数里。发现,还是挺好玩的。再从Camera.cpp的connect()细跟一下

sp<Camera>Camera::connect(intcameraId,constString16&clientPackageName,
intclientUid)
{
returnCameraBaseT::connect(cameraId,clientPackageName,clientUid);
}
//CameraBaseT在CameraBase被定义成typedefCameraBase<TCam>CameraBaseT;
//CameraBase在Camera被初始化为CameraBase<Camera>,所以上面就相应于调用了CameraBase<Camera>::connect()
template<typenameTCam,typenameTCamTraits>
sp<TCam>CameraBase<TCam,TCamTraits>::connect(intcameraId,
constString16&clientPackageName,intclientUid)
{
//把所有的TCam替换成Camera,就是刚才Camera.cpp里的camera在这里被构造
sp<TCam>c=newTCam(cameraId);
//TCamCallbacks在Camera.h里被定义为ICameraClient,被赋值为Camera。Camera继承ICameraClient
sp<TCamCallbacks>cl=c;
constsp<ICameraService>&cs=getCameraService();
//fnConnectService在Camera被初始化为ICameraService::connect()
TCamConnectServicefnConnectService=TCamTraits::fnConnectService;
//下面调用了CameraService::connect()c和cl是同一个值,做为两个不同的参数传进了CameraService::connect()
status=(cs.get()->*fnConnectService)(cl,cameraId,clientPackageName,clientUid,/*out*/c->mCamera);
}
//CameraService::connect()构造了一个CameraClient,又是一个CameraClient,
//但是和上面的ICameraClient没有半毛钱关系,只是名字相似,非常容易混淆。
status_tCameraService::connect(constsp<ICameraClient>&cameraClient,intcameraId,
constString16&clientPackageName,intclientUid,/*out*/sp<ICamera>&device){
//CameraService::connec()就做了两件事情,初始化Camera里的mCamera和把Camera传给CameraClient
//CameraClient的构造函数会传到他的父类构造函数Client()里,Client却是在CameraService。Camera类就是mRemoteCallback
client=newCameraClient(this,cameraClient,clientPackageName,cameraId,
facing,callingPid,clientUid,getpid());
device=client;
returnOK;}


所以,上面的c->dataCallback。调用的是Camera的dataCallback()

//callbackfromcameraservicewhenframeorimageisready
voidCamera::dataCallback(int32_tmsgType,constsp<IMemory>&dataPtr,
camera_frame_metadata_t*metadata)
{
sp<CameraListener>listener;
listener=mListener;
listener->postData(msgType,dataPtr,metadata);
}


这里的 mListener 是在 android_hardware_Camera_native_setup() 里被设置的,类型是 MtkJNICameraContext。所以 listener->postData() 调用的是 MtkJNICameraContext 的 postData()。

voidMtkJNICameraContext::postData(int32_tmsgType,constsp<IMemory>&dataPtr,
camera_frame_metadata_t*metadata)
{
JNICameraContext::postData(msgType,dataPtr,metadata);
}
voidJNICameraContext::postData(int32_tmsgType,constsp<IMemory>&dataPtr,
camera_frame_metadata_t*metadata)
{
switch(dataMsgType){
caseCAMERA_MSG_VIDEO_FRAME:
//...
caseCAMERA_MSG_RAW_IMAGE:
//Thereisnodata.
case0:
break;
default:
ALOGV("dataCallback(%d,%p)",dataMsgType,dataPtr.get());
copyAndPost(env,dataPtr,dataMsgType);
break;
}
//postframemetadatatoJava
if(metadata&&(msgType&CAMERA_MSG_PREVIEW_METADATA)){
postMetadata(env,CAMERA_MSG_PREVIEW_METADATA,metadata);
}
}
voidJNICameraContext::copyAndPost(JNIEnv*env,constsp<IMemory>&dataPtr,intmsgType)
{
jbyteArrayobj=NULL;
//allocateJavabytearrayandcopydata
if(dataPtr!=NULL){
sp<IMemoryHeap>heap=dataPtr->getMemory(&offset,&size);
uint8_t*heapBase=(uint8_t*)heap->base();
constjbyte*data=reinterpret_cast<constjbyte*>(heapBase+offset);
if(msgType==CAMERA_MSG_RAW_IMAGE){
obj=getCallbackBuffer(env,&mRawImageCallbackBuffers,size);
}elseif(msgType==CAMERA_MSG_PREVIEW_FRAME&&mManualBufferMode){
//再次构造一个Buffer,并发送出去
obj=getCallbackBuffer(env,&mCallbackBuffers,size);
}else{
ALOGD("Allocatingcallbackbuffer");
obj=env->NewByteArray(size);
}
if(obj==NULL){
ALOGE("Couldn'tallocatebytearrayforJPEGdata");
env->ExceptionClear();
}else{
env->SetByteArrayRegion(obj,0,size,data);
}
}else{
ALOGE("imageheapisNULL");
}
}
//postimagedatatoJava
//主要是这句话,调用了一个Java函数,函数名为fields.post_event,post_event被初始化为postEventFromNative()
env->CallStaticVoidMethod(mCameraJClass,fields.post_event,
mCameraJObjectWeak,msgType,0,0,obj);
if(obj){
env->DeleteLocalRef(obj);
}
}
//终于加到Java了
privatestaticvoidpostEventFromNative(Objectcamera_ref,
intwhat,intarg1,intarg2,Objectobj)
{
//就是发送了一个msg,what为CAMERA_MSG_PREVIEW_FRAME。剩下的就不深究了
Messagem=c.mEventHandler.obtainMessage(what,arg1,arg2,obj);
c.mEventHandler.sendMessage(m);
}


不过看到这里,我们好像并没有看出个所以然来:最后的回调仅仅是调用上层的回调函数,把数据往上丢,却不显示也不处理。接下来我们还有两个问题:数据从哪里来?这些数据又是怎么显示的?请看下一篇。

Camera startPreview 流程

上接第一篇,这个过程的主要任务是,如何读取数据的,读取的数据是什么格式,最好能知道是如何去预览的

上层APP调用的是Camera.java的startPreview();,下面列出startPreview的调用流程

//Camera.java
publicnativefinalvoidstartPreview();
//android_hardware_Camera.cpp
staticvoidandroid_hardware_Camera_startPreview(JNIEnv*env,jobjectthiz)
{
//这里的camera变量是,Camera.cpp类
camera->startPreview();
}
//Camera.cpp
status_tCamera::startPreview()
{
//此处的mCamera在CameraService.cpp的connect函数里被设置
//也就是CameraClient类
sp<ICamera>c=mCamera;
returnc->startPreview();
}


CameraClient.cpp里的startPreview函数

status_tCameraClient::startPreview(){
returnstartCameraMode(CAMERA_PREVIEW_MODE);
}
status_tCameraClient::startCameraMode(camera_modemode){
switch(mode){
caseCAMERA_PREVIEW_MODE:
if(mSurface==0&&mPreviewWindow==0){
LOG1("mSurfaceisnotsetyet.");
//stillabletostartpreviewinthiscase.
}
returnstartPreviewMode();
caseCAMERA_RECORDING_MODE:
//...
}
}
//CameraClient.cpp
status_tCameraClient::startPreviewMode(){
mHardware->previewEnabled();
mHardware->setPreviewWindow(mPreviewWindow);
result=mHardware->startPreview();
}
//CameraHardwareInterface.h
status_tstartPreview(){
//mDevice是Cam1DeviceFactory.cppcreateCam1Device()返回的对象
//既是DefaultCam1Device类
returnmDevice->ops->start_preview(mDevice);
}


上面的mDevice->ops->start_preview()函数对应的是DefaultCam1Device的父类Cam1Device类的操作函数。

Cam1DeviceBase::startPreview()
{
//(1)此函数在其子类DefaultCam1Device中实现
//仅是初始化了CameraAdapter
onStartPreview();
//(2)初始化DisplayClient,重要,稍后研究
enableDisplayClient();
//(3)
mpCamClient->startPreview();
//(4)我们通过(3)和(4)开始研究,再返回去看(1)(2)
mpCamAdapter->startPreview();
enableMsgType(CAMERA_MSG_PREVIEW_METADATA);
//
mIsPreviewEnabled=true;
}


mpCamClient->startPreview()

CamClient::startPreview()
{
mpPreviewClient->startPreview();
}
//PreviewClient.cpp
PreviewClient::startPreview()
{
//获得预览参数,这里参数为800*480,yuv420sp
ms8PrvTgtFmt=mpParamsMgr->getPreviewFormat();
mpParamsMgr->getPreviewSize(&mi4PrvWidth,&mi4PrvHeight);
//初始化预览Buf
initBuffers();
//
returnonStateChanged();
}
PreviewClient::initBuffers()
{
//预览数据的Buf
mpImgBufMgr=ImgBufManager::alloc(ms8PrvTgtFmt,mi4PrvWidth,
mi4PrvHeight,eMAX_PREVIEW_BUFFER_NUM,
"PreviewClientCb",mpCamMsgCbInfo->mRequestMemory,
0,0);
//预览数据的处理类,这里只是保留了一个处理接口
//里面并没有什么东西,可自行填充
mpExtImgProc=ExtImgProc::createInstance();
mpExtImgProc->init();
}
PreviewClient::onStateChanged()
{//发送了一个eID_WAKEUP的消息
postCommand(Command(Command::eID_WAKEUP));
}
//接收eID_WAKEUP消息
PreviewClient::threadLoop()
{
Commandcmd;
if(getCommand(cmd))
{
switch(cmd.eId)
{
caseCommand::eID_WAKEUP:
caseCommand::eID_PREVIEW_FRAME:
caseCommand::eID_POSTVIEW_FRAME:
onClientThreadLoop(cmd);
break;
//
caseCommand::eID_EXIT:
//...
}
}
//开始处理数据Buf
PreviewClient::onClientThreadLoop(Commandconst&rCmd)
{
//(1)Getreferencestopool/queuebeforestarting,sothatnothingwillbefreeduringoperations.
sp<ImgBufManager>pBufMgr=NULL;
sp<IImgBufQueue>pBufQueue=NULL;
{
Mutex::Autolock_l(mModuleMtx);
//
pBufMgr=mpImgBufMgr;
pBufQueue=mpImgBufQueue;
if(pBufMgr==0||pBufQueue==0||!isEnabledState())
}
//(2)stop&clearallbufferssothatwewon'tdequeanyundefinedbuffer.
pBufQueue->stopProcessor();
//(3)PrepareallTODObuffers.准备buf
if(!prepareAllTodoBuffers(pBufQueue,pBufMgr))
//(4)Start这个函数只是发出了一个广播,通知等待者
if(!pBufQueue->startProcessor())
//(5)Dountilallwantedmessagesaredisabled.
while(1)
{
//(.1)阻塞等待通知,并开始处理buf
waitAndHandleReturnBuffers(pBufQueue);
//(.2)breakifdisabled.
//addisProcessorRunningtomakesuretheformerpauseProcessor
//issucessfullyprocessed.
if(!isEnabledState()||!pBufQueue->isProcessorRunning())
{
MY_LOGI("Previewclientdisabled");
break;
}
//(.3)re-prepareallTODObuffers,ifpossible,
//sincesomeDONE/CANCELbuffersreturn.把Buf放回队列里
prepareAllTodoBuffers(pBufQueue,pBufMgr);
}
//(6)stop.
pBufQueue->pauseProcessor();
pBufQueue->flushProcessor();//clear"TODO"
pBufQueue->stopProcessor();//clear"DONE"
//
//(7)Cancelallun-returnedbuffers.
cancelAllUnreturnBuffers();
}
PreviewClient::waitAndHandleReturnBuffers(sp<IImgBufQueue>const&rpBufQueue)
{
Vector<ImgBufQueNode>vQueNode;
//(1)dequebuffersfromprocessor.阻塞等待通知
rpBufQueue->dequeProcessor(vQueNode);
//(2)handlebuffersdequedfromprocessor.
ret=handleReturnBuffers(vQueNode);
}


我们找到了哪里开始处理数据,那问题来了,是如何处理数据,数据又是如何被显示的,而这些数据又是从哪里来的

先来看数据是如何处理的

PreviewClient::handleReturnBuffers(Vector<ImgBufQueNode>const&rvQueNode)
{
//(1)determinetheindexofthelatestDONEbufferforcallback.
int32_tidxToCallback=0;
for(idxToCallback=rvQueNode.size()-1;idxToCallback>=0;idxToCallback--)
{
if(rvQueNode[idxToCallback].isDONE())
break;
}
//ShowTimeduration.
if(0<=idxToCallback)
{
nsecs_tconst_timestamp1=rvQueNode[idxToCallback].getImgBuf()->getTimestamp();
mProfile_buffer_timestamp.pulse(_timestamp1);
nsecs_tconst_msDuration_buffer_timestamp=::ns2ms(mProfile_buffer_timestamp.getDuration());
mProfile_buffer_timestamp.reset(_timestamp1);
//
mProfile_dequeProcessor.pulse();
nsecs_tconst_msDuration_dequeProcessor=::ns2ms(mProfile_dequeProcessor.getDuration());
mProfile_dequeProcessor.reset();
}
//
//(2)RemovefromListandpeformcallback,onebyone.
int32_tconstqueSize=rvQueNode.size();
for(int32_ti=0;i<queSize;i++)
{
ImgBufQueNodeconst&rQueNode=rvQueNode[i];
sp<IImgBuf>const&rpQueImgBuf=rQueNode.getImgBuf();//ImgBufinQueue.
sp<ICameraImgBuf>pListImgBuf=NULL;
ImgBufNodeconstListNode=*mImgBufList.begin();//NodeinList.
pListImgBuf=ListNode.getImgBuf();//ImgBufinList.
//(.4)Performcallback.
if(i==idxToCallback){
//
if(mpExtImgProc!=NULL)
{
if(mpExtImgProc->getImgMask()&ExtImgProc::BufType_PreviewCB)
{
IExtImgProc::ImgInfoimg;
//
img.bufType=ExtImgProc::BufType_PreviewCB;
img.format=rpQueImgBuf->getImgFormat();
img.width=rpQueImgBuf->getImgWidth();
img.height=rpQueImgBuf->getImgHeight();
img.stride[0]=rpQueImgBuf->getImgWidthStride(0);
img.stride[1]=rpQueImgBuf->getImgWidthStride(1);
img.stride[2]=rpQueImgBuf->getImgWidthStride(2);
img.virtAddr=(MUINT32)(rpQueImgBuf->getVirAddr());
img.bufSize=rpQueImgBuf->getBufSize();
//预留的处理函数,现在这里是空
mpExtImgProc->doImgProc(img);
}
}
//对数据进行处理
performPreviewCallback(pListImgBuf,rQueNode.getCookieDE());
}
}
}
PreviewClient::performPreviewCallback(sp<ICameraImgBuf>const&pCameraImgBuf,int32_tconstmsgType)
{
if(pCameraImgBuf!=0)
{
//[2]Callback
sp<CamMsgCbInfo>pCamMsgCbInfo;
{
pCamMsgCbInfo=mpCamMsgCbInfo;
}
//调用处理函数
//这个mDataCb回调函数藏得很深
//在CameraClient.cpp的initialize()函数里
//mHardware->setCallbacks(notifyCallback,dataCallback,dataCallbackTimestamp,(void*)mCameraId);
//这句话其中的dataCallback,设置了mDataCb函数
pCamMsgCbInfo->mDataCb(
0!=msgType?msgType:(int32_t)CAMERA_MSG_PREVIEW_FRAME,
pCameraImgBuf->get_camera_memory(),
pCameraImgBuf->getBufIndex(),
NULL,
pCamMsgCbInfo->mCbCookie
);
}
}


最后调用了mDataCb()回调函数。如果在APP层用了setPreviewCallback(),则会在此时调用回调函数,并把数据传回去给APP。注意,这里的msgType被设置成了CAMERA_MSG_PREVIEW_FRAME。

Cam1DeviceBase 的 setCallbacks() 设置了很多回调函数,这些回调函数应该是挺有用的,有空可以了解一下。它和 startPreview() 类似,是提供给 Frameworks 层调用的,在 Frameworks 的 CameraClient::initialize() 中被调用。

//设置Camera的各种回调函数
Cam1DeviceBase::setCallbacks(
camera_notify_callbacknotify_cb,
camera_data_callbackdata_cb,
camera_data_timestamp_callbackdata_cb_timestamp,
camera_request_memoryget_memory,
void*user
)
{
mpCamMsgCbInfo->mCbCookie=user;
mpCamMsgCbInfo->mNotifyCb=notify_cb;
mpCamMsgCbInfo->mDataCb=data_cb;
mpCamMsgCbInfo->mDataCbTimestamp=data_cb_timestamp;
mpCamMsgCbInfo->mRequestMemory=get_memory;
//
if(mpCamClient!=0)
{
mpCamClient->setCallbacks(mpCamMsgCbInfo);
}
//
if(mpCamAdapter!=0)
{
mpCamAdapter->setCallbacks(mpCamMsgCbInfo);
}
}


Frameworks:

status_tCameraClient::initialize(camera_module_t*module){
mHardware=newCameraHardwareInterface(camera_device_name);
res=mHardware->initialize(&module->common);
//Cam1DeviceBase的setCallbacks()
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
(void*)mCameraId);
}


dataCallback 是个回调函数,也定义在 CameraClient 中。此函数接收多种 msgType,这表示数据回调中可以有多种数据类型:有 RAW 的、有 COMPRESSED 的,等等。

voidCameraClient::dataCallback(int32_tmsgType,
constsp<IMemory>&dataPtr,camera_frame_metadata_t*metadata,void*user){
switch(msgType&~CAMERA_MSG_PREVIEW_METADATA){
//!++
#if1//defined(MTK_CAMERA_BSP_SUPPORT)
caseMTK_CAMERA_MSG_EXT_DATA:
client->handleMtkExtData(dataPtr,metadata);
break;
#endif
//!--
caseCAMERA_MSG_PREVIEW_FRAME:
client->handlePreviewData(msgType,dataPtr,metadata);
break;
caseCAMERA_MSG_POSTVIEW_FRAME:
client->handlePostview(dataPtr);
break;
caseCAMERA_MSG_RAW_IMAGE:
client->handleRawPicture(dataPtr);
break;
caseCAMERA_MSG_COMPRESSED_IMAGE:
client->handleCompressedPicture(dataPtr);
break;
default:
client->handleGenericData(msgType,dataPtr,metadata);
break;
}
}


我们的msgType是CAMERA_MSG_PREVIEW_FRAME。调用了handlePreviewData函数

voidCameraClient::handlePreviewData(int32_tmsgType,
constsp<IMemory>&mem,
camera_frame_metadata_t*metadata){
//获取mem
sp<IMemoryHeap>heap=mem->getMemory(&offset,&size);
//iscallbackenabled?判断
if(!(flags&CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)){
//Iftheenablebitisoff,thecopy-outandone-shotbitsareignored
LOG2("framecallbackisdisabled");
return;
}
//holdastrongpointertotheclient
sp<ICameraClient>c=mRemoteCallback;
//clearcallbackflagsifnoclientorone-shotmode判断
if(c==0||(mPreviewCallbackFlag&CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)){
LOG2("Disablepreviewcallback");
}
//有两种方式把mem发送出去,一种是copy一种是直接把地址发送出去
if(c!=0){
//Isthereceivedframecopiedoutornot?
if(flags&CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK){
LOG2("frameiscopied");
copyFrameAndPostCopiedFrame(msgType,c,heap,offset,size,metadata);
}else{
LOG2("frameisforwarded");
c->dataCallback(msgType,mem,metadata);
}
}
}


最后是通过 sp&lt;ICameraClient&gt; c = mRemoteCallback; 即 mRemoteCallback->dataCallback 发送出去的。mRemoteCallback 是 ICameraClient 类型,它和 CameraClient 两者都有 dataCallback,两者又是什么关系?如果有关系,这里岂不是会死循环?搜一下 mRemoteCallback,以为是在 CameraService::connect() 里被初始化的,于是在那里找,找了大半天也没找到在哪里被初始化。被坑了半天,原来是在 CameraClient 的构造函数里。

我们返回到Camera.cpp的connect函数里。发现,还是挺好玩的。再从Camera.cpp的connect()细跟一下

sp<Camera>Camera::connect(intcameraId,constString16&clientPackageName,
intclientUid)
{
returnCameraBaseT::connect(cameraId,clientPackageName,clientUid);
}
//CameraBaseT在CameraBase被定义成typedefCameraBase<TCam>CameraBaseT;
//CameraBase在Camera被初始化为CameraBase<Camera>,所以上面就相应于调用了CameraBase<Camera>::connect()
template<typenameTCam,typenameTCamTraits>
sp<TCam>CameraBase<TCam,TCamTraits>::connect(intcameraId,
constString16&clientPackageName,intclientUid)
{
//把所有的TCam替换成Camera,就是刚才Camera.cpp里的camera在这里被构造
sp<TCam>c=newTCam(cameraId);
//TCamCallbacks在Camera.h里被定义为ICameraClient,被赋值为Camera。Camera继承ICameraClient
sp<TCamCallbacks>cl=c;
constsp<ICameraService>&cs=getCameraService();
//fnConnectService在Camera被初始化为ICameraService::connect()
TCamConnectServicefnConnectService=TCamTraits::fnConnectService;
//下面调用了CameraService::connect()c和cl是同一个值,做为两个不同的参数传进了CameraService::connect()
status=(cs.get()->*fnConnectService)(cl,cameraId,clientPackageName,clientUid,/*out*/c->mCamera);
}
//CameraService::connect()构造了一个CameraClient,又是一个CameraClient,
//但是和上面的ICameraClient没有半毛钱关系,只是名字相似,非常容易混淆。
status_tCameraService::connect(constsp<ICameraClient>&cameraClient,intcameraId,
constString16&clientPackageName,intclientUid,/*out*/sp<ICamera>&device){
//CameraService::connec()就做了两件事情,初始化Camera里的mCamera和把Camera传给CameraClient
//CameraClient的构造函数会传到他的父类构造函数Client()里,Client却是在CameraService。Camera类就是mRemoteCallback
client=newCameraClient(this,cameraClient,clientPackageName,cameraId,
facing,callingPid,clientUid,getpid());
device=client;
returnOK;}


所以,上面的c->dataCallback。调用的是Camera的dataCallback()

//callbackfromcameraservicewhenframeorimageisready
voidCamera::dataCallback(int32_tmsgType,constsp<IMemory>&dataPtr,
camera_frame_metadata_t*metadata)
{
sp<CameraListener>listener;
listener=mListener;
listener->postData(msgType,dataPtr,metadata);
}


这里的 mListener 是在 android_hardware_Camera_native_setup() 里被设置的,类型是 MtkJNICameraContext。所以 listener->postData() 调用的是 MtkJNICameraContext 的 postData()。

voidMtkJNICameraContext::postData(int32_tmsgType,constsp<IMemory>&dataPtr,
camera_frame_metadata_t*metadata)
{
JNICameraContext::postData(msgType,dataPtr,metadata);
}
voidJNICameraContext::postData(int32_tmsgType,constsp<IMemory>&dataPtr,
camera_frame_metadata_t*metadata)
{
switch(dataMsgType){
caseCAMERA_MSG_VIDEO_FRAME:
//...
caseCAMERA_MSG_RAW_IMAGE:
//Thereisnodata.
case0:
break;
default:
ALOGV("dataCallback(%d,%p)",dataMsgType,dataPtr.get());
copyAndPost(env,dataPtr,dataMsgType);
break;
}
//postframemetadatatoJava
if(metadata&&(msgType&CAMERA_MSG_PREVIEW_METADATA)){
postMetadata(env,CAMERA_MSG_PREVIEW_METADATA,metadata);
}
}
voidJNICameraContext::copyAndPost(JNIEnv*env,constsp<IMemory>&dataPtr,intmsgType)
{
jbyteArrayobj=NULL;
//allocateJavabytearrayandcopydata
if(dataPtr!=NULL){
sp<IMemoryHeap>heap=dataPtr->getMemory(&offset,&size);
uint8_t*heapBase=(uint8_t*)heap->base();
constjbyte*data=reinterpret_cast<constjbyte*>(heapBase+offset);
if(msgType==CAMERA_MSG_RAW_IMAGE){
obj=getCallbackBuffer(env,&mRawImageCallbackBuffers,size);
}elseif(msgType==CAMERA_MSG_PREVIEW_FRAME&&mManualBufferMode){
//再次构造一个Buffer,并发送出去
obj=getCallbackBuffer(env,&mCallbackBuffers,size);
}else{
ALOGD("Allocatingcallbackbuffer");
obj=env->NewByteArray(size);
}
if(obj==NULL){
ALOGE("Couldn'tallocatebytearrayforJPEGdata");
env->ExceptionClear();
}else{
env->SetByteArrayRegion(obj,0,size,data);
}
}else{
ALOGE("imageheapisNULL");
}
}
//postimagedatatoJava
//主要是这句话,调用了一个Java函数,函数名为fields.post_event,post_event被初始化为postEventFromNative()
env->CallStaticVoidMethod(mCameraJClass,fields.post_event,
mCameraJObjectWeak,msgType,0,0,obj);
if(obj){
env->DeleteLocalRef(obj);
}
}
//终于加到Java了
privatestaticvoidpostEventFromNative(Objectcamera_ref,
intwhat,intarg1,intarg2,Objectobj)
{
//就是发送了一个msg,what为CAMERA_MSG_PREVIEW_FRAME。剩下的就不深究了
Messagem=c.mEventHandler.obtainMessage(what,arg1,arg2,obj);
c.mEventHandler.sendMessage(m);
}


不过看到这里,我们好像并没有看出个所以然来:最后的回调仅仅是调用上层的回调函数,把数据往上丢,却不显示也不处理。接下来我们还有两个问题:数据从哪里来?这些数据又是怎么显示的?请看下一篇。
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: