CAMERA Data Analysis (Part 2) ------- Passing the Data Upward


 

Continuing from the previous article: once a frame has been captured, the data is handed to the bufProvider shown below:

Step 4.2.6:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\platform\mt8127\v1\hal\adapter\MtkDefault\Preview\PreviewBufMgr.cpp

 

    case eBuf_Disp:
        {
            sp<IImgBufProvider> bufProvider = mspImgBufProvidersMgr->getDisplayPvdr();
            if( bufProvider != 0 )
            {
                bufProvider->enqueProvider(node);
            }
        }

 

What we need to figure out now is exactly who this bufProvider is. Let's look at the definition of getDisplayPvdr:

 

//  get [Display] ImageBuffer Provider
sp<IImgBufProvider> getDisplayPvdr() const  { return getProvider(IImgBufProvider::eID_DISPLAY); }

 

So bufProvider is the eID_DISPLAY provider fetched from mspImgBufProvidersMgr, and that provider is the one we created in the earlier analysis. Do you still remember it?
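To make the lookup concrete, here is a minimal, self-contained sketch of what an ID-indexed provider table such as ImgBufProvidersMgr presumably boils down to. The names and internals below are my own illustration (plain std types instead of Android's sp/Vector), not the real MTK implementation:

// Minimal sketch (NOT the real ImgBufProvidersMgr) of an ID-indexed provider table:
// setProvider() stores a provider under its ID, and getDisplayPvdr() is just
// getProvider(eID_DISPLAY), i.e. a table lookup.
#include <array>
#include <cstdint>
#include <memory>
#include <utility>

struct ImgBufProviderSketch {                      // stand-in for IImgBufProvider
    enum ID { eID_DISPLAY = 0, eID_RECORD = 1, eID_MAX = 8 };
    virtual ~ImgBufProviderSketch() = default;
};

class ProvidersMgrSketch {
public:
    // BaseCamAdapter::onImgBufProviderCreated() effectively ends up doing this: store by ID.
    void setProvider(int32_t id, std::shared_ptr<ImgBufProviderSketch> p) {
        if (id >= 0 && id < ImgBufProviderSketch::eID_MAX) mProviders[id] = std::move(p);
    }
    // PreviewBufMgr's getDisplayPvdr() effectively ends up doing this: fetch by ID.
    std::shared_ptr<ImgBufProviderSketch> getProvider(int32_t id) const {
        return (id >= 0 && id < ImgBufProviderSketch::eID_MAX) ? mProviders[id] : nullptr;
    }
    std::shared_ptr<ImgBufProviderSketch> getDisplayPvdr() const {
        return getProvider(ImgBufProviderSketch::eID_DISPLAY);
    }
private:
    std::array<std::shared_ptr<ImgBufProviderSketch>, ImgBufProviderSketch::eID_MAX> mProviders{};
};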

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.cpp

bool
DisplayClient::
init()
{
    bool ret = false;
    //
    MY_LOGD("+");
    //
    ret =   createDisplayThread()
        &&  createImgBufQueue()
            ;
    //
    MY_LOGD("- ret(%d)", ret);
    return  ret;
}

bool
DisplayClient::
createImgBufQueue()
{
    bool ret = false;
    //
    MY_LOGD("+");
    //
    {
        Mutex::Autolock _l(mModuleMtx);
        mpImgBufQueue = new ImgBufQueue(IImgBufProvider::eID_DISPLAY, "CameraDisplay@ImgBufQue");
        if  ( mpImgBufQueue == 0 )
        {
            MY_LOGE("Fail to new ImgBufQueue");
            goto lbExit;
        }
    }
    //
    mpExtImgProc = ExtImgProc::createInstance();
    if(mpExtImgProc != NULL)
    {
        mpExtImgProc->init();
    }
    //
    ret = true;
lbExit:
    MY_LOGD("-");
    return  ret;
}

 

Right, this eID_DISPLAY IImgBufProvider is created when DisplayClient is initialized. Later, when DisplayClient::enableDisplay is called, this provider is handed over via setImgBufProviderClient(rpClient), and the rpClient argument passed in is mpCamAdapter:

mpDisplayClient->enableDisplay(previewSize.width, previewSize.height, queryDisplayBufCount(), mpCamAdapter)

Let's still follow it step by step so the flow is clearer, starting from setPreviewWindow. For how the call reaches setPreviewWindow in the first place, see the explanation in the earlier article.

Step 1:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\device\Cam1DeviceBase.cpp

status_t
Cam1DeviceBase::
setPreviewWindow(preview_stream_ops* window)
{
    CAM_TRACE_CALL();
    MY_LOGI("+ window(%p)", window);
    //
    status_t status = initDisplayClient(window);
    if  ( OK == status && previewEnabled() && mpDisplayClient != 0 )
    {
        status = enableDisplayClient();
        if(mbWindowReady)
        {
            waitStartPreviewDone();
        }
    }
    //
    return  status;
}

 

Step 2:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\device\Cam1DeviceBase.cpp

 

status_t
Cam1DeviceBase::
enableDisplayClient()
{
    status_t status = OK;
    Size previewSize;
    //
    MY_LOGD("+");
    //
    //  [1] Get preview size.
    if  ( ! queryPreviewSize(previewSize.width, previewSize.height) )
    {
        MY_LOGE("queryPreviewSize");
        status = DEAD_OBJECT;
        goto lbExit;
    }
    //
    if(mpParamsMgr->getIfFirstPreviewFrameAsBlack())
    {
        mpDisplayClient->setFirstFrameBlack();
        mpParamsMgr->set(MtkCameraParameters::KEY_FIRST_PREVIEW_FRAME_BLACK, 0);
    }
    //  [2] Enable
    if  ( ! mpDisplayClient->enableDisplay(previewSize.width, previewSize.height, queryDisplayBufCount(), mpCamAdapter) )
    {
        MY_LOGE("mpDisplayClient(%p)->enableDisplay()", mpDisplayClient.get());
        status = INVALID_OPERATION;
        goto lbExit;
    }
    //
    status = OK;
lbExit:
    MY_LOGD("- status(%d)", status);
    return  status;
}

 

Step 3:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.c

bool
DisplayClient::
enableDisplay(
    int32_t const   i4Width,
    int32_t const   i4Height,
    int32_t const   i4BufCount,
    sp<IImgBufProviderClient>const& rpClient
)
{
    bool ret = false;
    preview_stream_ops* pStreamOps = mpStreamOps;
    //
    //  [1] Re-configurate this instance if any setting changes.
    if  ( ! checkConfig(i4Width, i4Height, i4BufCount, rpClient) )
    {
        MY_LOGW(" Uninit the current DisplayClient(%p) and re-config...", this);
        //
        //  [.1] uninitialize
        uninit();
        //
        //  [.2] initialize
        if  ( ! init() )
        {
            MY_LOGE("re-init() failed");
            goto lbExit;
        }
        //
        //  [.3] set related window info.
        if  ( ! setWindow(pStreamOps, i4Width, i4Height, i4BufCount) )
        {
            goto lbExit;
        }
        //
        //  [.4] set Image Buffer Provider Client.
        if  ( ! setImgBufProviderClient(rpClient) )
        {
            goto lbExit;
        }
    }
    //
    //  [2] Enable.
    if  ( ! enableDisplay() )
    {
        goto lbExit;
    }
    //
    ret = true;
lbExit:
    return  ret;
}

 

Step 4:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.c

 

bool
DisplayClient::
setImgBufProviderClient(sp<IImgBufProviderClient>const& rpClient)
{
    bool ret = false;
    //
    MY_LOGD("+ ImgBufProviderClient(%p), mpImgBufQueue.get(%p)", rpClient.get(), mpImgBufQueue.get());
    //
    if  ( rpClient == 0 )
    {
        MY_LOGE("NULL ImgBufProviderClient");
        mpImgBufPvdrClient = NULL;
        goto lbExit;
    }
    //
    if  ( mpImgBufQueue != 0 )
    {
/*
This mpImgBufQueue is the eID_DISPLAY bufProvider we talked about at the very beginning; it was created inside DisplayClient::init().
The rpClient passed in is mpCamAdapter, so the next thing to look at is mpCamAdapter's onImgBufProviderCreated method.
*/
        if  ( ! rpClient->onImgBufProviderCreated(mpImgBufQueue) )
        {
            goto lbExit;
        }
        mpImgBufPvdrClient = rpClient;
    }
    //
    ret = true;
lbExit:
    MY_LOGD("-");
    return  ret;
};

 

Step 5:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\platform\mt8127\v1\hal\adapter\BaseCamAdapter.cpp

 

bool
BaseCamAdapter::
onImgBufProviderCreated(sp<IImgBufProvider>const& rpProvider)
{
    if  ( rpProvider == 0 )
    {
        MY_LOGW("NULL provider");
        return  false;
    }
    //
/*
Here rpProvider is mpImgBufQueue, so rpProvider->getProviderId() returns eID_DISPLAY.
*/
    int32_t const i4ProviderId = rpProvider->getProviderId();
    if  ( (size_t)i4ProviderId >= mpImgBufProvidersMgr->getProvidersSize() )
    {
        MY_LOGE("bad ProviderId=%x >= %d", i4ProviderId, mpImgBufProvidersMgr->getProvidersSize());
        return  false;
    }
    //
/*
Put rpProvider, i.e. mpImgBufQueue, into mpImgBufProvidersMgr.
*/
    mpImgBufProvidersMgr->setProvider(i4ProviderId, rpProvider);
    //
    //
    MY_LOGI("- id=%d, ImgBufProvider=%p", i4ProviderId, rpProvider.get());
    return  true;
}

 

This now matches up with the first article, where the captured data was enqueued into bufProvider = mspImgBufProvidersMgr->getDisplayPvdr(). getDisplayPvdr() returns the eID_DISPLAY provider, so the data captured in the first article actually ends up inside DisplayClient's mpImgBufQueue. Next, let's look at how this mpImgBufQueue passes its data further up.

We said earlier that DisplayClient::init() creates the buffer queue used to hold this data; as you can see, it also creates a thread via createDisplayThread, and that thread is what displays these buffers. The sketch right below illustrates the two sides of this queue; after that we follow the thread upward through the real code:
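Before diving into the code, here is a simplified model (my own sketch, not the real ImgBufQueue) of the queue that sits between the camera adapter and DisplayClient. Only enqueProvider() and dequeProcessor() appear in the traced code; the TODO-side names are assumptions used for illustration:

// Simplified model of the display buffer queue: the adapter side returns filled
// buffers with enqueProvider(), the DisplayClient thread collects them with
// dequeProcessor(). The TODO side (empty buffers handed to the adapter) uses
// assumed names here.
#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>
#include <vector>

struct ImgBufSketch { /* gralloc-backed preview buffer */ };

class ImgBufQueueSketch {
public:
    // DisplayClient side: hand an empty "TODO" buffer to the queue (assumed name).
    void enqueTodo(std::shared_ptr<ImgBufSketch> buf) {
        std::lock_guard<std::mutex> lk(mMtx);
        mTodo.push_back(std::move(buf));
    }
    // Adapter side: take a TODO buffer to fill with camera data (assumed name).
    std::shared_ptr<ImgBufSketch> dequeTodo() {
        std::lock_guard<std::mutex> lk(mMtx);
        if (mTodo.empty()) return nullptr;
        auto buf = mTodo.front(); mTodo.pop_front();
        return buf;
    }
    // Adapter side: return a filled buffer -- this is what enqueProvider() does.
    void enqueProvider(std::shared_ptr<ImgBufSketch> filled) {
        { std::lock_guard<std::mutex> lk(mMtx); mDone.push_back(std::move(filled)); }
        mCond.notify_one();
    }
    // DisplayClient thread: block until filled buffers arrive -- what
    // dequeProcessor() does inside waitAndHandleReturnBuffers().
    std::vector<std::shared_ptr<ImgBufSketch>> dequeProcessor() {
        std::unique_lock<std::mutex> lk(mMtx);
        mCond.wait(lk, [this] { return !mDone.empty(); });
        std::vector<std::shared_ptr<ImgBufSketch>> out(mDone.begin(), mDone.end());
        mDone.clear();
        return out;
    }
private:
    std::mutex mMtx;
    std::condition_variable mCond;
    std::deque<std::shared_ptr<ImgBufSketch>> mTodo;  // empty buffers waiting for camera data
    std::deque<std::shared_ptr<ImgBufSketch>> mDone;  // filled buffers waiting for display
};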

 

Step 1:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.c

 

bool
DisplayClient::
init()
{
    bool ret = false;
    //
    MY_LOGD("+");
    //
    ret =   createDisplayThread()
        &&  createImgBufQueue()
            ;
    //
    MY_LOGD("- ret(%d)", ret);
    return  ret;
}

 

Step 2:

 

bool
DisplayClient::
createDisplayThread()
{
    bool    ret = false;
    status_t status = OK;
    //
    Mutex::Autolock _l(mModuleMtx);
    //
    mpDisplayThread = IDisplayThread::createInstance(this);
    if  (
            mpDisplayThread == 0
        ||  OK != (status = mpDisplayThread->run())
        )
    {
        MY_LOGE(
            "Fail to run DisplayThread - mpDisplayThread.get(%p), status[%s(%d)]",
            mpDisplayThread.get(), ::strerror(-status), -status
        );
        goto lbExit;
    }
    //
    ret = true;
lbExit:
    return  ret;
}

 

Step 3:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.cpp

/*
The thread's processing routine. As covered earlier in the startup flow, once this thread is running an eID_WAKEUP command is sent over to it.
*/
bool
DisplayThread::
threadLoop()
{
    Command cmd;
    if  ( getCommand(cmd) )
    {
        switch  (cmd.eId)
        {
        case Command::eID_EXIT:
            MY_LOGD("Command::%s", cmd.name());
            break;
        //
        case Command::eID_WAKEUP:
        default:
            if  ( mpThreadHandler != 0 )
            {
/*
1.  This calls the handler's onThreadLoop.
2.  We already know from earlier that DisplayThread's handler was set to DisplayClient,
    i.e. DisplayClient::onThreadLoop() in DisplayClient.BufOps.cpp.
I don't fully follow this part yet -- all these onThreadLoop layers are a bit dizzying --
so let's quietly skip over it for now and analyze it further later.
*/
                mpThreadHandler->onThreadLoop(cmd);
            }
            else
            {
                MY_LOGE("cannot handle cmd(%s) due to mpThreadHandler==NULL", cmd.name());
            }
            break;
        }
    }
    //
    MY_LOGD("- mpThreadHandler.get(%p)", mpThreadHandler.get());
    return  true;
}

 

Step 4:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.BufOps.cpp

 

bool
DisplayClient::
onThreadLoop(Command const& rCmd)
{
    //  (0) lock Processor.
    sp<IImgBufQueue> pImgBufQueue;
    {
        Mutex::Autolock _l(mModuleMtx);
/*
mpImgBufQueue is finally used here: this is exactly where the data we obtained above is stored.
*/
        pImgBufQueue = mpImgBufQueue;
        if  ( pImgBufQueue == 0 || ! isDisplayEnabled() )
        {
            MY_LOGW("pImgBufQueue.get(%p), isDisplayEnabled(%d)", pImgBufQueue.get(), isDisplayEnabled());
            return  true;
        }
    }

    //  (1) Prepare all TODO buffers.
    if  ( ! prepareAllTodoBuffers(pImgBufQueue) )
    {
        return  true;
    }

    //  (2) Start
    if  ( ! pImgBufQueue->startProcessor() )
    {
        return  true;
    }
    //
    {
        Mutex::Autolock _l(mStateMutex);
        mState = eState_Loop;
        mStateCond.broadcast();
    }
    //
    //  (3) Do until disabled.
    while   ( 1 )
    {
        //  (.1)
        waitAndHandleReturnBuffers(pImgBufQueue);

        //  (.2) break if disabled.
        if  ( ! isDisplayEnabled() )
        {
            MY_LOGI("Display disabled");
            break;
        }

        //  (.3) re-prepare all TODO buffers, if possible,
        //  since some DONE/CANCEL buffers return.
        prepareAllTodoBuffers(pImgBufQueue);
    }
    //
    //  (4) Stop
    pImgBufQueue->pauseProcessor();
    pImgBufQueue->flushProcessor();
    pImgBufQueue->stopProcessor();
    //
    //  (5) Cancel all un-returned buffers.
    cancelAllUnreturnBuffers();
    //
    {
        Mutex::Autolock _l(mStateMutex);
        mState = eState_Suspend;
        mStateCond.broadcast();
    }
    //
    return  true;
}

 

Step 5:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.BufOps.cpp

 

bool
DisplayClient::
waitAndHandleReturnBuffers(sp<IImgBufQueue>const& rpBufQueue)
{
    bool ret = false;
    Vector<ImgBufQueNode> vQueNode;
    //
    MY_LOGD_IF((1<=miLogLevel), "+");
    //
    {
        Mutex::Autolock _l(mModuleMtx);
        if  ( ! isDisplayEnabled() )
        {
            MY_LOGD("Display is already disabled");
            goto lbExit;
        }
    }
    //  (1) deque buffers from processor.
    rpBufQueue->dequeProcessor(vQueNode);
    if  ( vQueNode.empty() ) {
        MY_LOGW("vQueNode.empty()");
        goto lbExit;
    }

    //  (2) handle buffers dequed from processor.
    ret = handleReturnBuffers(vQueNode);

lbExit:
    //
    MY_LOGD_IF((2<=miLogLevel), "- ret(%d)", ret);
    return ret;
}

 

Step 6:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.BufOps.cpp

 

bool
DisplayClient::
handleReturnBuffers(Vector<ImgBufQueNode>const& rvQueNode)
{
    /*
     * Notes:
     *  For 30 fps, we just enque (display) the latest frame,
     *  and cancel the others.
     *  For frame rate > 30 fps, we should judge the timestamp here or source.
     */
    //  (1) determine the latest DONE buffer index to display; otherwise CANCEL.
    nsecs_t _msDuration_buffer_timestamp = 0, _msDuration_dequeProcessor = 0;
    int32_t idxToDisp = 0;
    for ( idxToDisp = rvQueNode.size()-1; idxToDisp >= 0; idxToDisp--)
    {
        if  ( rvQueNode[idxToDisp].isDONE() )
            break;
    }
    if  ( rvQueNode.size() > 1 )
    {
        MY_LOGW("(%d) display frame count > 1 --> select %d to display", rvQueNode.size(), idxToDisp);
    }
    //
    //  Show Time duration.
    if  ( 0 <= idxToDisp )
    {
        nsecs_t const _timestamp1 = rvQueNode[idxToDisp].getImgBuf()->getTimestamp();
        mProfile_buffer_timestamp.pulse(_timestamp1);
        _msDuration_buffer_timestamp = ::ns2ms(mProfile_buffer_timestamp.getDuration());
        mProfile_buffer_timestamp.reset(_timestamp1);
        //
        mProfile_dequeProcessor.pulse();
        _msDuration_dequeProcessor = ::ns2ms(mProfile_dequeProcessor.getDuration());
        mProfile_dequeProcessor.reset();
        //
        MY_LOGD_IF(
            0, "+ %s(%lld) %s(%lld)",
            (_msDuration_buffer_timestamp < 0 ) ? "time inversion!" : "", _msDuration_buffer_timestamp,
            (_msDuration_dequeProcessor > 34) ? "34ms < Duration" : "", _msDuration_dequeProcessor
        );
    }
    //
    //  (2) Lock
    Mutex::Autolock _l(mModuleMtx);
    //
    //  (3) Remove from List and enquePrvOps/cancelPrvOps, one by one.
    int32_t const queSize = rvQueNode.size();
    for (int32_t i = 0; i < queSize; i++)
    {
        sp<IImgBuf>const& rpQueImgBuf = rvQueNode[i].getImgBuf();       //  ImgBuf in Queue.
        sp<StreamImgBuf>const pStreamImgBuf = *mStreamBufList.begin();  //  ImgBuf in List.
        //  (.1)  Check valid pointers to image buffers in Queue & List
        if  ( rpQueImgBuf == 0 || pStreamImgBuf == 0 )
        {
            MY_LOGW("Bad ImgBuf:(Que[%d], List.begin)=(%p, %p)", i, rpQueImgBuf.get(), pStreamImgBuf.get());
            continue;
        }
        //  (.2)  Check the equality of image buffers between Queue & List.
/*
This looks like a hand-off of the data from rpQueImgBuf to pStreamImgBuf, yet all it does is compare
the virtual addresses and then pass the baton to pStreamImgBuf. Presumably the queue node and the local
stream list track the same underlying buffers, so this is just a consistency check before the buffer is
handed on; I don't fully understand it yet.
*/
        if  ( rpQueImgBuf->getVirAddr() != pStreamImgBuf->getVirAddr() )
        {
            MY_LOGW("Bad address in ImgBuf:(Que[%d], List.begin)=(%p, %p)", i, rpQueImgBuf->getVirAddr(), pStreamImgBuf->getVirAddr());
            continue;
        }
        //  (.3)  Every check is ok. Now remove the node from the list.
        mStreamBufList.erase(mStreamBufList.begin());
        //
        //  (.4)  enquePrvOps/cancelPrvOps
        if  ( i == idxToDisp ) {
            MY_LOGD_IF(
                (1<=miLogLevel),
                "+ %s(%lld) %s(%lld), Show frame:%d %d [ion:%d %p/%d %lld]",
                (_msDuration_buffer_timestamp < 0 ) ? "time inversion!" : "", _msDuration_buffer_timestamp,
                (_msDuration_dequeProcessor > 34) ? "34ms < Duration" : "", _msDuration_dequeProcessor,
                i, rvQueNode[i].getStatus(), pStreamImgBuf->getIonFd(),
                pStreamImgBuf->getVirAddr(), pStreamImgBuf->getBufSize(), pStreamImgBuf->getTimestamp()
            );
            //
            if(mpExtImgProc != NULL)
            {
                if(mpExtImgProc->getImgMask() & ExtImgProc::BufType_Display)
                {
                    IExtImgProc::ImgInfo img;
                    //
                    img.bufType     = ExtImgProc::BufType_Display;
                    img.format      = pStreamImgBuf->getImgFormat();
                    img.width       = pStreamImgBuf->getImgWidth();
                    img.height      = pStreamImgBuf->getImgHeight();
                    img.stride[0]   = pStreamImgBuf->getImgWidthStride(0);
                    img.stride[1]   = pStreamImgBuf->getImgWidthStride(1);
                    img.stride[2]   = pStreamImgBuf->getImgWidthStride(2);
                    img.virtAddr    = (MUINTPTR)(pStreamImgBuf->getVirAddr());
                    img.bufSize     = pStreamImgBuf->getBufSize();
                    //
                    mpExtImgProc->doImgProc(img);
                }
            }
            // For Display Rotation
            if(rvQueNode[i].getOrientation() != 0)
                pStreamImgBuf->setNeedDisplayRotation(true);
            else
                pStreamImgBuf->setNeedDisplayRotation(false);
            //
            enquePrvOps(pStreamImgBuf);
        }
        else {
            MY_LOGW(
                "Drop frame:%d %d [ion:%d %p/%d %lld]",
                i, rvQueNode[i].getStatus(), pStreamImgBuf->getIonFd(),
                pStreamImgBuf->getVirAddr(), pStreamImgBuf->getBufSize(), pStreamImgBuf->getTimestamp()
            );
            cancelPrvOps(pStreamImgBuf);
        }
    }
    //
    MY_LOGD_IF((1<=miLogLevel), "-");
    return  true;
}

 

Step 7:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.Stream.cpp

 

void
DisplayClient::
enquePrvOps(sp<StreamImgBuf>const& rpImgBuf)
{
    mProfile_enquePrvOps.pulse();
    if  ( mProfile_enquePrvOps.getDuration() >= ::s2ns(2) ) {
        mProfile_enquePrvOps.updateFps();
        mProfile_enquePrvOps.showFps();
        mProfile_enquePrvOps.reset();
    }
    //
    status_t    err = 0;
    //
    CamProfile profile(__FUNCTION__, "DisplayClient");
    profile.print_overtime(
        ((1<=miLogLevel) ? 0 : 1000),
        "+ locked buffer count(%d), rpImgBuf(%p,%p), Timestamp(%lld)",
        mStreamBufList.size(), rpImgBuf.get(), rpImgBuf->getVirAddr(), rpImgBuf->getTimestamp()
    );
    //
    //  [1] unlock buffer before sending to display
    GraphicBufferMapper::get().unlock(rpImgBuf->getBufHndl());
    profile.print_overtime(1, "GraphicBufferMapper::unlock");
    //
    //  [2] Dump image if wanted.
    dumpImgBuf_If(rpImgBuf);
    //
    //  [3] set timestamp.
    err = mpStreamOps->set_timestamp(mpStreamOps, rpImgBuf->getTimestamp());
    profile.print_overtime(2, "mpStreamOps->set_timestamp, Timestamp(%lld)", rpImgBuf->getTimestamp());
    if  ( err )
    {
        MY_LOGE(
            "mpStreamOps->set_timestamp failed: status[%s(%d)], rpImgBuf(%p), Timestamp(%lld)",
            ::strerror(-err), -err, rpImgBuf.get(), rpImgBuf->getTimestamp()
        );
    }
    //
    //  [4] set gralloc buffer type & dirty
#if ('1'==MTKCAM_HAVE_GRALLOC_EXTRA)
    {
        gralloc_extra_ion_sf_info_t info;
        gralloc_extra_query(rpImgBuf->getBufHndl(), GRALLOC_EXTRA_GET_IOCTL_ION_SF_INFO, &info);
        gralloc_extra_sf_set_status(&info,
            GRALLOC_EXTRA_MASK_TYPE | GRALLOC_EXTRA_MASK_DIRTY | GRALLOC_EXTRA_MASK_YUV_COLORSPACE,
            GRALLOC_EXTRA_BIT_TYPE_CAMERA | GRALLOC_EXTRA_BIT_DIRTY | GRALLOC_EXTRA_BIT_YUV_BT601_FULL
        );
        gralloc_extra_perform(rpImgBuf->getBufHndl(), GRALLOC_EXTRA_SET_IOCTL_ION_SF_INFO, &info);
        //
        int setOrientation = rpImgBuf->getOrientation();
        int preOrientation = 0;
        int afterOrientation = 0;
        if( !rpImgBuf->getNeedDisplayRotation() )
            setOrientation = 0;
        gralloc_extra_query(rpImgBuf->getBufHndl(), GRALLOC_EXTRA_GET_ORIENTATION, &preOrientation);
        gralloc_extra_perform(rpImgBuf->getBufHndl(), GRALLOC_EXTRA_SET_ORIENTATION, &setOrientation);
        gralloc_extra_query(rpImgBuf->getBufHndl(), GRALLOC_EXTRA_GET_ORIENTATION, &afterOrientation);
        MY_LOGD_IF((2<=miLogLevel), "gralloc orientation(old,set,new)=(%d,%d,%d)", preOrientation, setOrientation, afterOrientation);
    }
#endif
    //
    //  [5] unlock and post the buffer to display.
    err = mpStreamOps->enqueue_buffer(mpStreamOps, rpImgBuf->getBufHndlPtr());
    profile.print_overtime(10, "mpStreamOps->enqueue_buffer, Timestamp(%lld)", rpImgBuf->getTimestamp());
    if  ( err )
    {
        MY_LOGE(
            "mpStreamOps->enqueue_buffer failed: status[%s(%d)], rpImgBuf(%p,%p)",
            ::strerror(-err), -err, rpImgBuf.get(), rpImgBuf->getVirAddr()
        );
    }
}

 

/*
Having traced the code this far, we have to go find out what mpStreamOps actually is, and then see what its enqueue_buffer concretely does -- that is, how mpStreamOps gets tied to the surface in the app. Let's follow it step by step.
*/
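For reference, mpStreamOps is a preview_stream_ops, the camera HAL v1 preview-window callback table defined in AOSP's hardware/libhardware/include/hardware/camera.h. Abridged from memory (check your own tree for the exact definition), it looks roughly like this:

// preview_stream_ops (abridged): the function table the HAL uses to talk to the
// preview window. DisplayClient drives the window through mpStreamOps, i.e. these hooks.
typedef struct preview_stream_ops {
    int (*dequeue_buffer)(struct preview_stream_ops* w,
                          buffer_handle_t** buffer, int* stride);
    int (*enqueue_buffer)(struct preview_stream_ops* w, buffer_handle_t* buffer);
    int (*cancel_buffer)(struct preview_stream_ops* w, buffer_handle_t* buffer);
    int (*set_buffer_count)(struct preview_stream_ops* w, int count);
    int (*set_buffers_geometry)(struct preview_stream_ops* pw,
                                int w, int h, int format);
    int (*set_crop)(struct preview_stream_ops* w,
                    int left, int top, int right, int bottom);
    int (*set_usage)(struct preview_stream_ops* w, int usage);
    int (*set_swap_interval)(struct preview_stream_ops* w, int interval);
    int (*get_min_undequeued_buffer_count)(const struct preview_stream_ops* w,
                                           int* count);
    int (*lock_buffer)(struct preview_stream_ops* w, buffer_handle_t* buffer);
    int (*set_timestamp)(struct preview_stream_ops* w, int64_t timestamp);
} preview_stream_ops_t;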

 

Step 1:

The call made in the application:

mCameraDevice.setPreviewDisplay(holder);

/*
    private SurfaceHolder holder;
This holder comes from the application layer; exactly how it is used is left for later study. At this point we are already in pure application territory.
*/

Step 2:

\frameworks\base\core\java\android\hardware\Camera.java

 

    public final void setPreviewDisplay(SurfaceHolder holder) throws IOException {
        if (holder != null) {
            setPreviewSurface(holder.getSurface());
        } else {
            setPreviewSurface((Surface)null);
        }
    }

/*
    public native final void setPreviewSurface(Surface surface) throws IOException;
setPreviewSurface is a native method, implemented in android_hardware_Camera.cpp.
(The snippet traced below is actually the setPreviewTexture variant from that same file; the setPreviewSurface path is analogous and likewise ends up in camera->setPreviewTarget().)
*/

Step 3:

frameworks\base\core\jni\android_hardware_Camera.cpp

 

static void android_hardware_Camera_setPreviewTexture(JNIEnv *env,
        jobject thiz, jobject jSurfaceTexture)
{
    ALOGV("setPreviewTexture");
    sp<Camera> camera = get_native_camera(env, thiz, NULL);
    if (camera == 0) return;
/*
A producer of type IGraphicBufferProducer is obtained here; it is tied to the app-side surface. We will not worry about the details for now -- just treat producer as standing for the surface the application handed in.
*/
    sp<IGraphicBufferProducer> producer = NULL;
    if (jSurfaceTexture != NULL) {
        producer = SurfaceTexture_getProducer(env, jSurfaceTexture);
        if (producer == NULL) {
            jniThrowException(env, "java/lang/IllegalArgumentException",
                    "SurfaceTexture already released in setPreviewTexture");
            return;
        }
    }

    if (camera->setPreviewTarget(producer) != NO_ERROR) {
        jniThrowException(env, "java/io/IOException",
                "setPreviewTexture failed");
    }
}

 

Step 4:

\frameworks\av\camera\ICamera.cpp

    // pass the buffered IGraphicBufferProducer to the camera service
    status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
    {
        ALOGV("setPreviewTarget");
        Parcel data, reply;
        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
        sp<IBinder> b(IInterface::asBinder(bufferProducer));
        data.writeStrongBinder(b);
        remote()->transact(SET_PREVIEW_TARGET, data, &reply);
        return reply.readInt32();
    }

 

Step 5:

\frameworks\av\camera\ICamera.cpp

 

status_t BnCamera::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case DISCONNECT: {
            ALOGV("DISCONNECT");
            CHECK_INTERFACE(ICamera, data, reply);
            disconnect();
            reply->writeNoException();
            return NO_ERROR;
        } break;
        case SET_PREVIEW_TARGET: {
            ALOGV("SET_PREVIEW_TARGET");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<IGraphicBufferProducer> st =
                interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
            reply->writeInt32(setPreviewTarget(st));
            return NO_ERROR;
        } break;

/*
This setPreviewTarget ultimately lands in setPreviewTarget of CameraClient on the camera service side. How exactly the call crosses the process boundary is again a Binder-mechanism topic to be studied later, so we won't go into the details here.
*/

 

Step 6:

\frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp

// set the buffer consumer that the preview will use
status_t CameraClient::setPreviewTarget(
        const sp<IGraphicBufferProducer>& bufferProducer) {
    LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(),
            getCallingPid());

    sp<IBinder> binder;
    sp<ANativeWindow> window;
    if (bufferProducer != 0) {
        binder = IInterface::asBinder(bufferProducer);
        // Using controlledByApp flag to ensure that the buffer queue remains in
        // async mode for the old camera API, where many applications depend
        // on that behavior.
        window = new Surface(bufferProducer, /*controlledByApp*/ true);
    }
    return setPreviewWindow(binder, window);
}

 

Step 7:

\frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp

 

status_t CameraClient::setPreviewWindow(const sp<IBinder>& binder,
        const sp<ANativeWindow>& window) {
    Mutex::Autolock lock(mLock);
    status_t result = checkPidAndHardware();
    if (result != NO_ERROR) return result;

    // return if no change in surface.
    if (binder == mSurface) {
        return NO_ERROR;
    }

    if (window != 0) {
        result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
        if (result != NO_ERROR) {
            ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result),
                    result);
            return result;
        }
    }

    // If preview has been already started, register preview buffers now.
    if (mHardware->previewEnabled()) {
        if (window != 0) {
            native_window_set_scaling_mode(window.get(),
                    NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
            native_window_set_buffers_transform(window.get(), mOrientation);
            result = mHardware->setPreviewWindow(window);
        }
    }
    //!++
    else if ( window == 0 ) {
        result = mHardware->setPreviewWindow(window); // Notify CamDevice the window is null
    }
    //!--
/*
window is passed in from the application, so it will not be null; this is how the application's window ends up in mPreviewWindow. Understanding how mPreviewWindow is initialized matters, because it is used again later in startPreview. We can think of mPreviewWindow as the window the camera APP uses to display the camera data, e.g. the stock camera app.
*/
    if (result == NO_ERROR) {
        // Everything has succeeded.  Disconnect the old window and remember the
        // new window.
        disconnectWindow(mPreviewWindow);
        mSurface = binder;
        mPreviewWindow = window;
    } else {
        // Something went wrong after we connected to the new window, so
        // disconnect here.
        disconnectWindow(window);
    }

    return result;
}

 

/*
Now that mPreviewWindow is clear, let's go back and look at startPreview again. Previously we skipped over the data handling; this time we go through it from the top.
*/

 

Step 8:

\frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp

 

// start preview mode
status_t CameraClient::startPreview() {
    LOG1("startPreview (pid %d)", getCallingPid());
    return startCameraMode(CAMERA_PREVIEW_MODE);
}

 

Step 9:

 

// start preview or recording
status_t CameraClient::startCameraMode(camera_mode mode) {
    LOG1("startCameraMode(%d)", mode);
    Mutex::Autolock lock(mLock);
    status_t result = checkPidAndHardware();
    if (result != NO_ERROR) return result;

    switch(mode) {
        case CAMERA_PREVIEW_MODE:
            if (mSurface == 0 && mPreviewWindow == 0) {
                LOG1("mSurface is not set yet.");
                // still able to start preview in this case.
            }
            return startPreviewMode();
        case CAMERA_RECORDING_MODE:
            if (mSurface == 0 && mPreviewWindow == 0) {
                ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
                return INVALID_OPERATION;
            }
            return startRecordingMode();
        default:
            return UNKNOWN_ERROR;
    }
}

 

Step 10:

 

\frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp

 

status_t CameraClient::startPreviewMode() {
    LOG1("startPreviewMode");
    status_t result = NO_ERROR;

    // if preview has been enabled, nothing needs to be done
    if (mHardware->previewEnabled()) {
        return NO_ERROR;
    }

    if (mPreviewWindow != 0) {
        native_window_set_scaling_mode(mPreviewWindow.get(),
                NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
        native_window_set_buffers_transform(mPreviewWindow.get(),
                mOrientation);
    }

/*
Previously we focused on the call flow of mHardware->startPreview(). Now that we are analyzing the data path, we need to follow setPreviewWindow and see how the upper-layer window is pushed down.
*/
    mHardware->setPreviewWindow(mPreviewWindow);
    result = mHardware->startPreview();

    return result;
}

/*
How mHardware ends up corresponding to Cam1DeviceBase was analyzed earlier.
*/

Step 11:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\device\Cam1DeviceBase.cpp

status_t
Cam1DeviceBase::
setPreviewWindow(preview_stream_ops* window)
{
    CAM_TRACE_CALL();
    MY_LOGI("+ window(%p)", window);
    //
    status_t status = initDisplayClient(window);
    if  ( OK == status && previewEnabled() && mpDisplayClient != 0 )
    {
        status = enableDisplayClient();
        if(mbWindowReady)
        {
            waitStartPreviewDone();
        }
    }
    //
    return status;
}

 

Step 12:

 

status_t
Cam1DeviceBase::
initDisplayClient(preview_stream_ops* window)
{
    CAM_TRACE_CALL();
#if '1'!=MTKCAM_HAVE_DISPLAY_CLIENT
    #warning "Not Build DisplayClient"
    MY_LOGD("Not Build DisplayClient");
    return OK;
#else
    status_t status = OK;
    Size previewSize;
    //
    MY_LOGD("+ window(%p)", window);
    //
    //
    // [1] Check to see whether the passed window is NULL or not.
    if  ( ! window )
    {
        MY_LOGW("NULL window is passed into...");
        mbWindowReady = false;
        //
        if ( mpDisplayClient != 0 )
        {
            MY_LOGW("destroy the current display client(%p)...", mpDisplayClient.get());
            mpDisplayClient->uninit();
            mpDisplayClient.clear();
        }
        status = OK;
        goto lbExit;
    }
    mbWindowReady = true;
    //
    //
    // [2] Get preview size.
    if  ( ! queryPreviewSize(previewSize.width, previewSize.height) )
    {
        MY_LOGE("queryPreviewSize");
        status = DEAD_OBJECT;
        goto lbExit;
    }
    //
    //
    // [3] Initialize Display Client.
    if  ( mpDisplayClient != 0 )
    {
        if ( previewEnabled() )
        {
            MY_LOGW("Do nothing since Display Client(%p) is already created after startPreview()", mpDisplayClient.get());
//          This method must be called before startPreview(). The one exception is that
//          if the preview surface texture is not set (or set to null) before startPreview() is called,
//          then this method may be called once with a non-null parameter to set the preview surface.
            status = OK;
            goto lbExit;
        }
        else
        {
            MY_LOGW("New window is set after stopPreview or takePicture. Destroy the current display client(%p)...", mpDisplayClient.get());
            mpDisplayClient->uninit();
            mpDisplayClient.clear();
        }
    }
    // [3.1] create a Display Client.
    mpDisplayClient = IDisplayClient::createInstance();
    if  ( mpDisplayClient == 0 )
    {
        MY_LOGE("Cannot create mpDisplayClient");
        status = NO_MEMORY;
        goto lbExit;
    }
    // Display Rotation
    if(mpParamsMgr->getDisplayRotationSupported())
    {
        MY_LOGD("orientation = %d", mOrientation);
        mpDisplayClient->SetOrientationForDisplay(mOrientation);
    }
    // [3.2] initialize the newly-created Display Client.
    if  ( ! mpDisplayClient->init() )
    {
        MY_LOGE("mpDisplayClient init() failed");
        mpDisplayClient->uninit();
        mpDisplayClient.clear();
        status = NO_MEMORY;
        goto lbExit;
    }
    // [3.3] set preview_stream_ops & related window info.
    if  ( ! mpDisplayClient->setWindow(window, previewSize.width, previewSize.height, queryDisplayBufCount()) )
    {
        status = INVALID_OPERATION;
        goto lbExit;
    }
    // [3.4] set Image Buffer Provider Client if it exist.
    if  ( mpCamAdapter != 0 && ! mpDisplayClient->setImgBufProviderClient(mpCamAdapter) )
    {
        status = INVALID_OPERATION;
        goto lbExit;
    }
    //
    //
    status = OK;
    //
lbExit:
    if  ( OK != status )
    {
        MY_LOGD("Cleanup...");
        mpDisplayClient->uninit();
        mpDisplayClient.clear();
    }
    //
    MY_LOGD("- status(%d)", status);
    return status;
#endif  //MTKCAM_HAVE_DISPLAY_CLIENT
}

 

 

Step 13:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.cpp

/******************************************************************************
 * Set the preview_stream_ops to which frames are sent.
 *
 * Notes:
 *  (1) When calling setWindow(), all preview parameters have been decided.
 *     [CameraService]
 *         mHardware->setParameters() -> mHardware->setPreviewWindow() -> mHardware->startPreview()
 *         --> enableDisplay during startPreview()
 *         mHardware->setParameters() -> mHardware->startPreview() -> mHardware->setPreviewWindow()
 *         --> enableDisplay during setPreviewWindow()
 *  (2) During inactive preview, window may be changed by setWindow().
 *******************************************************************************/
bool
DisplayClient::
setWindow(
    preview_stream_ops*const window,
    int32_t const   wndWidth,
    int32_t const   wndHeight,
    int32_t const   i4MaxImgBufCount
)
{
    MY_LOGI("+ window(%p), WxH=%dx%d, count(%d)", window, wndWidth, wndHeight, i4MaxImgBufCount);
    //
    if  ( ! window )
    {
        MY_LOGE("NULL window passed into");
        return false;
    }
    //
    if  ( 0 >= wndWidth || 0 >= wndHeight || 0 >= i4MaxImgBufCount )
    {
        MY_LOGE("bad arguments - WxH=%dx%d, count(%d)", wndWidth, wndHeight, i4MaxImgBufCount);
        return false;
    }
    //
    //
    Mutex::Autolock _l(mModuleMtx);
    return set_preview_stream_ops(window, wndWidth, wndHeight, i4MaxImgBufCount);
}

 

Step 14:

\vendor\mediatek\proprietary\hardware\mtkcam\legacy\v1\client\DisplayClient\DisplayClient.Stream.cpp

 

bool
DisplayClient::
set_preview_stream_ops(
    preview_stream_ops*const window,
    int32_t const   wndWidth,
    int32_t const   wndHeight,
    int32_t const   i4MaxImgBufCount
)
{
    CamProfile profile(__FUNCTION__, "DisplayClient");
    //
    bool       ret = false;
    status_t   err = 0;
    int32_t    min_undequeued_buf_count = 0;
    //
    // (2) Check
    if  ( ! mStreamBufList.empty() )
    {
        MY_LOGE(
            "locked buffer count(%d)!=0, "
            "callers must return all dequeued buffers, "
//            "and then call cleanupQueue()"
            , mStreamBufList.size()
        );
        dumpDebug(mStreamBufList, __FUNCTION__);
        goto lbExit;
    }
    //
    // (3) Save info.
    mpStreamImgInfo.clear();
    if(mi4Orientation != 0)
    {
        // For Display Rotation
        MY_LOGD("mi4Orientation = %d", mi4Orientation);
        mpStreamImgInfo     = new ImgInfo(wndWidth, wndHeight, CAMERA_DISPLAY_FORMAT_ROTATION, CAMERA_DISPLAY_FORMAT_HAL_ROTATION, "Camera@Display", mi4Orientation);
    }
    else
    {
        mpStreamImgInfo     = new ImgInfo(wndWidth, wndHeight, CAMERA_DISPLAY_FORMAT, CAMERA_DISPLAY_FORMAT_HAL, "Camera@Display");
    }
/*
At last we see mpStreamOps: this mpStreamOps is exactly the window in which the data is to be displayed.
*/
    mpStreamOps         = window;
    mi4MaxImgBufCount   = i4MaxImgBufCount;
    //
    //
    // (4.1) Set gralloc usage bits for window.
    if(mi4Orientation != 0)
    {
        // For Display Rotation
        err = mpStreamOps->set_usage(mpStreamOps, CAMERA_GRALLOC_USAGE_ROTATION);
    }
    else
    {
        err = mpStreamOps->set_usage(mpStreamOps, CAMERA_GRALLOC_USAGE);
    }
    if  ( err )
    {
        MY_LOGE("set_usage failed: status[%s(%d)]", ::strerror(-err), -err);
        if ( ENODEV == err )
        {
            MY_LOGD("Preview surface abandoned");
            mpStreamOps = NULL;
        }
        goto lbExit;
    }
    //
    // (4.2) Get minimum undequeue buffer count
    err = mpStreamOps->get_min_undequeued_buffer_count(mpStreamOps, &min_undequeued_buf_count);
    if  ( err )
    {
        MY_LOGE("get_min_undequeued_buffer_count failed: status[%s(%d)]", ::strerror(-err), -err);
        if ( ENODEV == err )
        {
            MY_LOGD("Preview surface abandoned!");
            mpStreamOps = NULL;
        }
        goto lbExit;
    }
    //
    // (4.3) Set the number of buffers needed for display.
    MY_LOGI(
        "set_buffer_count(%d) = wanted_buf_count(%d) + min_undequeued_buf_count(%d)",
        mi4MaxImgBufCount+min_undequeued_buf_count, mi4MaxImgBufCount, min_undequeued_buf_count
    );
    err = mpStreamOps->set_buffer_count(mpStreamOps, mi4MaxImgBufCount+min_undequeued_buf_count);
    if  ( err )
    {
        MY_LOGE("set_buffer_count failed: status[%s(%d)]", ::strerror(-err), -err);
        if ( ENODEV == err )
        {
            MY_LOGD("Preview surface abandoned!");
            mpStreamOps = NULL;
        }
        goto lbExit;
    }
    //
    // (4.4) Set window geometry
    err = mpStreamOps->set_buffers_geometry(
            mpStreamOps,
            mpStreamImgInfo->mu4ImgWidth,
            mpStreamImgInfo->mu4ImgHeight,
            mpStreamImgInfo->mi4ImgFormat
        );
    if  ( err )
    {
        MY_LOGE(
            "set_buffers_geometry(%dx%d@%s/%x) failed: status[%s(%d)]",
            mpStreamImgInfo->mu4ImgWidth, mpStreamImgInfo->mu4ImgHeight,
            mpStreamImgInfo->ms8ImgFormat.string(), mpStreamImgInfo->mi4ImgFormat,
            ::strerror(-err), -err
        );
        if ( ENODEV == err )
        {
            MY_LOGD("Preview surface abandoned!");
            mpStreamOps = NULL;
        }
        goto lbExit;
    }
    //
    //
    ret = true;
lbExit:
    profile.print_overtime(10, "ret(%d)", ret);
    return ret;
}

 

mpStreamOps is now clear; next let's look at its method mpStreamOps->enqueue_buffer. After mHardware->setPreviewWindow(mPreviewWindow) we jumped straight into Cam1DeviceBase.cpp above, but of course the call actually passes through setPreviewWindow() in \frameworks\av\services\camera\libcameraservice\device1\CameraHardwareInterface.h:

    /** Set the ANativeWindow to which preview frames are sent */
    status_t setPreviewWindow(const sp<ANativeWindow>& buf)
    {
        ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get());

        if (mDevice->ops->set_preview_window) {
            //!++
            if ( buf == 0 ) {
                ALOGD("set_preview_window(0) before mPreviewWindow = 0 \r\n");
                mDevice->ops->set_preview_window(mDevice, 0);
                mPreviewWindow = 0;
                return  OK;
            }
            //!--
            mPreviewWindow = buf;
            mHalPreviewWindow.user = this;
            ALOGV("%s &mHalPreviewWindow %p mHalPreviewWindow.user %p", __FUNCTION__,
                    &mHalPreviewWindow, mHalPreviewWindow.user);
            return mDevice->ops->set_preview_window(mDevice,
                    buf.get() ? &mHalPreviewWindow.nw : 0);
        }
        return INVALID_OPERATION;
    }

So the window that is actually passed down is mHalPreviewWindow.nw, whose function table is filled in as follows:

    void initHalPreviewWindow()
    {
        mHalPreviewWindow.nw.cancel_buffer = __cancel_buffer;
        mHalPreviewWindow.nw.lock_buffer = __lock_buffer;
        mHalPreviewWindow.nw.dequeue_buffer = __dequeue_buffer;
        mHalPreviewWindow.nw.enqueue_buffer = __enqueue_buffer;
        mHalPreviewWindow.nw.set_buffer_count = __set_buffer_count;
        mHalPreviewWindow.nw.set_buffers_geometry = __set_buffers_geometry;
        mHalPreviewWindow.nw.set_crop = __set_crop;
        mHalPreviewWindow.nw.set_timestamp = __set_timestamp;
        mHalPreviewWindow.nw.set_usage = __set_usage;
        mHalPreviewWindow.nw.set_swap_interval = __set_swap_interval;

        mHalPreviewWindow.nw.get_min_undequeued_buffer_count =
                __get_min_undequeued_buffer_count;
    }

This means mpStreamOps->enqueue_buffer is __enqueue_buffer in \frameworks\av\services\camera\libcameraservice\device1\CameraHardwareInterface.h:

    static int __enqueue_buffer(struct preview_stream_ops* w,
                      buffer_handle_t* buffer)
    {
        ANativeWindow *a = anw(w);
        return a->queueBuffer(a,
                  container_of(buffer, ANativeWindowBuffer, handle), -1);
    }

With that, the captured buffer has been handed off into the ANativeWindow. What happens from there on is left for the next round of study. As a point of comparison, the sketch below shows the same "fill a buffer, post it to an ANativeWindow" pattern through the public NDK API.
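A minimal sketch for comparison (my own example, not part of the MTK code above): the same producer pattern expressed with the public NDK ANativeWindow API. The HAL path above queues gralloc buffers directly via queueBuffer(), while this sketch uses the CPU lock/unlockAndPost pair, but the idea -- obtain a buffer from the window, fill it, queue it back to the consumer -- is the same. The window setup (e.g. ANativeWindow_fromSurface) is assumed to happen elsewhere.

// Post one solid-gray RGBA frame into an ANativeWindow through the NDK API.
#include <android/native_window.h>
#include <cstdint>
#include <cstring>

// 'window' would come from ANativeWindow_fromSurface() in a real app (assumed setup).
void postOneGrayFrame(ANativeWindow* window, int32_t w, int32_t h) {
    ANativeWindow_setBuffersGeometry(window, w, h, WINDOW_FORMAT_RGBA_8888);

    ANativeWindow_Buffer buf;
    if (ANativeWindow_lock(window, &buf, nullptr) != 0) {   // dequeue + map a buffer
        return;
    }
    // Fill the buffer line by line (stride is in pixels, 4 bytes per RGBA pixel).
    auto* pixels = static_cast<uint8_t*>(buf.bits);
    for (int32_t y = 0; y < buf.height; ++y) {
        std::memset(pixels + y * buf.stride * 4, 0x80, buf.width * 4);
    }
    ANativeWindow_unlockAndPost(window);   // queue the buffer back to the consumer
}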
