Camera startPreview flow. Continuing from part 1, the goal of this part is to work out how the preview data is read, what format it comes in, and ideally how it actually gets previewed.
The application layer calls startPreview() in Camera.java; the call chain of startPreview is laid out below.
//Camera.java
public native final void startPreview();
//android_hardware_Camera.cpp
static void android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz)
{
//The camera variable here is the native Camera class (Camera.cpp)
camera->startPreview();
}
//Camera.cpp
status_t Camera::startPreview()
{
//mCamera here is set in CameraService.cpp's connect(),
//and is the CameraClient object
sp <ICamera> c = mCamera;
return c->startPreview();
}
The startPreview() function in CameraClient.cpp:
status_t CameraClient::startPreview() {
return startCameraMode(CAMERA_PREVIEW_MODE);
}
status_t CameraClient::startCameraMode(camera_mode mode) {
switch(mode) {
case CAMERA_PREVIEW_MODE:
if (mSurface == 0 && mPreviewWindow == 0) {
LOG1("mSurface is not set yet.");
// still able to start preview in this case.
}
return startPreviewMode();
case CAMERA_RECORDING_MODE:
//...
}
}
//CameraClient.cpp
status_t CameraClient::startPreviewMode() {
mHardware->previewEnabled();
mHardware->setPreviewWindow(mPreviewWindow);
result = mHardware->startPreview();
}
//CameraHardwareInterface.h
status_t startPreview(){
//mDevice is the object returned by createCam1Device() in Cam1DeviceFactory.cpp,
//i.e. a DefaultCam1Device
return mDevice->ops->start_preview(mDevice);
}
The mDevice->ops->start_preview() call above dispatches into the base-class implementation of DefaultCam1Device, i.e. Cam1DeviceBase::startPreview(). The usual HAL1 wiring behind such an ops entry is sketched below.
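How does a plain C function pointer in the ops table reach a C++ member function? The common HAL1 pattern is a static trampoline that recovers the C++ object from the camera_device handle and forwards the call. The sketch below only illustrates that pattern; the wrapper and variable names are assumptions, not MTK's actual code (only camera_device_t, its priv field and camera_device_ops_t::start_preview come from hardware/camera.h).
#include <hardware/camera.h>   //camera_device_t, camera_device_ops_t

//Stand-in for the MTK class so the sketch is self-contained;
//the real implementation is Cam1DeviceBase::startPreview().
struct Cam1Device {
    int startPreview();
};

//Trampoline: recover the C++ object from the C handle and forward the call.
static int camdev_start_preview(struct camera_device* device)
{
    Cam1Device* pDev = reinterpret_cast<Cam1Device*>(device->priv);
    return pDev->startPreview();
}

//During camera open, the ops table and the priv pointer get filled in, roughly:
//  device->ops->start_preview = camdev_start_preview;
//  device->priv               = pCam1Device;
//so CameraHardwareInterface's mDevice->ops->start_preview(mDevice) lands in the C++ object.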
Cam1DeviceBase::startPreview()
{
//(1) Implemented in the subclass DefaultCam1Device;
//it only initializes the CameraAdapter
onStartPreview();
//(2) Initialize the DisplayClient; important, to be studied later
enableDisplayClient();
//(3)
mpCamClient->startPreview();
//(4) We start from (3) and (4), then come back to (1) and (2)
mpCamAdapter->startPreview();
enableMsgType(CAMERA_MSG_PREVIEW_METADATA);
//
mIsPreviewEnabled = true;
}
mpCamClient->startPreview():
CamClient::startPreview()
{
mpPreviewClient->startPreview();
}
//PreviewClient.cpp
PreviewClient::startPreview()
{
//Get the preview parameters; here they are 800x480, yuv420sp
ms8PrvTgtFmt = mpParamsMgr->getPreviewFormat();
mpParamsMgr->getPreviewSize(&mi4PrvWidth, &mi4PrvHeight);
//Initialize the preview buffers
initBuffers();
//
return onStateChanged();
}
PreviewClient::initBuffers()
{
//Buffers for the preview data
mpImgBufMgr = ImgBufManager::alloc(ms8PrvTgtFmt, mi4PrvWidth,
mi4PrvHeight, eMAX_PREVIEW_BUFFER_NUM,
"PreviewClientCb", mpCamMsgCbInfo->mRequestMemory,
0, 0);
//Processing class for the preview data; only a hook is reserved here.
//It is essentially empty and can be filled in as needed.
mpExtImgProc = ExtImgProc::createInstance();
mpExtImgProc->init();
}
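As a sanity check on the buffer sizes involved: a yuv420sp (NV21) frame occupies width*height bytes of Y plus width*height/2 bytes of interleaved VU, so the 800x480 preview above needs 576000 bytes per buffer (times eMAX_PREVIEW_BUFFER_NUM buffers). A minimal stand-alone sketch of that arithmetic; the helper name is ours, not MTK's:
#include <cstddef>
#include <cstdio>

//One yuv420sp (NV21) frame: Y plane (w*h bytes) + interleaved VU plane (w*h/2 bytes).
static size_t yuv420spFrameSize(size_t width, size_t height)
{
    return width * height * 3 / 2;
}

int main()
{
    printf("%zu\n", yuv420spFrameSize(800, 480));   //prints 576000
    return 0;
}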
PreviewClient::onStateChanged()
{ //Post an eID_WAKEUP command
postCommand(Command(Command::eID_WAKEUP));
}
//Receive the eID_WAKEUP command
PreviewClient::threadLoop()
{
Command cmd;
if ( getCommand(cmd) )
{
switch (cmd.eId)
{
case Command::eID_WAKEUP:
case Command::eID_PREVIEW_FRAME:
case Command::eID_POSTVIEW_FRAME:
onClientThreadLoop(cmd);
break;
//
case Command::eID_EXIT:
//...
}
}
}
//Start working on the data buffers
PreviewClient::onClientThreadLoop(Command const& rCmd)
{
// (1) Get references to pool/queue before starting, so that nothing will be free during operations.
sp<ImgBufManager> pBufMgr = NULL;
sp<IImgBufQueue> pBufQueue = NULL;
{
Mutex::Autolock _l(mModuleMtx);
//
pBufMgr = mpImgBufMgr;
pBufQueue = mpImgBufQueue;
if ( pBufMgr == 0 || pBufQueue == 0 || ! isEnabledState() )
}
// (2) stop & clear all buffers so that we won't deque any undefined buffer.
pBufQueue->stopProcessor();
// (3) Prepare all TODO buffers.
if ( ! prepareAllTodoBuffers(pBufQueue, pBufMgr) )
// (4) Start. This function merely broadcasts a signal to whoever is waiting.
if ( ! pBufQueue->startProcessor() )
// (5) Do until all wanted messages are disabled.
while (1)
{
// (.1) Block waiting for the signal, then start handling the buffers
waitAndHandleReturnBuffers(pBufQueue);
// (.2) break if disabled.
// add isProcessorRunning to make sure the former pauseProcessor
// is successfully processed.
if ( ! isEnabledState() || ! pBufQueue->isProcessorRunning() )
{
MY_LOGI("Preview client disabled");
break;
}
// (.3) re-prepare all TODO buffers, if possible,
// since some DONE/CANCEL buffers have returned; put the buffers back into the queue
prepareAllTodoBuffers(pBufQueue, pBufMgr);
}
// (6) stop.
pBufQueue->pauseProcessor();
pBufQueue->flushProcessor(); // clear "TODO"
pBufQueue->stopProcessor(); // clear "DONE"
//
// (7) Cancel all un-returned buffers.
cancelAllUnreturnBuffers();
}
PreviewClient::waitAndHandleReturnBuffers(sp<IImgBufQueue>const& rpBufQueue)
{
Vector<ImgBufQueNode> vQueNode;
// (1) Dequeue buffers from the processor; blocks here waiting to be signalled
rpBufQueue->dequeProcessor(vQueNode);
// (2) handle buffers dequeued from processor.
ret = handleReturnBuffers(vQueNode);
}
Now we have found where the data starts being handled. The next questions are: how is the data processed, how does it get displayed, and where does it come from in the first place? Let's look at how it is processed first.
PreviewClient::handleReturnBuffers(Vector<ImgBufQueNode>const& rvQueNode)
{
// (1) determine the index of the latest DONE buffer for callback.
int32_t idxToCallback = 0;
for ( idxToCallback = rvQueNode.size()-1; idxToCallback >= 0; idxToCallback-- )
{
if ( rvQueNode[idxToCallback].isDONE() )
break;
}
// Show Time duration.
if ( 0 <= idxToCallback )
{
nsecs_t const _timestamp1 = rvQueNode[idxToCallback].getImgBuf()->getTimestamp();
mProfile_buffer_timestamp.pulse(_timestamp1);
nsecs_t const _msDuration_buffer_timestamp = ::ns2ms(mProfile_buffer_timestamp.getDuration());
mProfile_buffer_timestamp.reset(_timestamp1);
//
mProfile_dequeProcessor.pulse();
nsecs_t const _msDuration_dequeProcessor = ::ns2ms(mProfile_dequeProcessor.getDuration());
mProfile_dequeProcessor.reset();
}
//
// (2) Remove from List and perform callback, one by one.
int32_t const queSize = rvQueNode.size();
for (int32_t i = 0; i < queSize; i++)
{
ImgBufQueNode const& rQueNode = rvQueNode[i];
sp<IImgBuf>const& rpQueImgBuf = rQueNode.getImgBuf(); // ImgBuf in Queue.
sp<ICameraImgBuf> pListImgBuf = NULL;
ImgBufNode const ListNode = *mImgBufList.begin(); // Node in List.
pListImgBuf = ListNode.getImgBuf(); // ImgBuf in List.
// (.4) Perform callback.
if ( i == idxToCallback ) {
//
if(mpExtImgProc != NULL)
{
if(mpExtImgProc->getImgMask() & ExtImgProc::BufType_PreviewCB)
{
IExtImgProc::ImgInfo img;
//
img.bufType = ExtImgProc::BufType_PreviewCB;
img.format = rpQueImgBuf->getImgFormat();
img.width = rpQueImgBuf->getImgWidth();
img.height = rpQueImgBuf->getImgHeight();
img.stride[0] = rpQueImgBuf->getImgWidthStride(0);
img.stride[1] = rpQueImgBuf->getImgWidthStride(1);
img.stride[2] = rpQueImgBuf->getImgWidthStride(2);
img.virtAddr = (MUINT32)(rpQueImgBuf->getVirAddr());
img.bufSize = rpQueImgBuf->getBufSize();
//Reserved processing hook; empty for now
mpExtImgProc->doImgProc(img);
}
}
//Process the data: perform the preview callback
performPreviewCallback(pListImgBuf, rQueNode.getCookieDE());
}
}
}
PreviewClient::performPreviewCallback(sp<ICameraImgBuf>const& pCameraImgBuf, int32_t const msgType)
{
if ( pCameraImgBuf != 0 )
{
// [2] Callback
sp<CamMsgCbInfo> pCamMsgCbInfo;
{
pCamMsgCbInfo = mpCamMsgCbInfo;
}
//Invoke the data callback.
//This mDataCb is buried quite deep: it is set in CameraClient.cpp's initialize(), via
//mHardware->setCallbacks(notifyCallback, dataCallback, dataCallbackTimestamp, (void *)mCameraId);
//the dataCallback argument there is what ends up in mDataCb
pCamMsgCbInfo->mDataCb(
0 != msgType ? msgType : (int32_t)CAMERA_MSG_PREVIEW_FRAME,
pCameraImgBuf->get_camera_memory(),
pCameraImgBuf->getBufIndex(),
NULL,
pCamMsgCbInfo->mCbCookie
);
}
}
Finally mDataCb() is invoked. If the app layer has registered setPreviewCallback(), this is where that callback fires and the data is handed back to the APP. Note that msgType here is set to CAMERA_MSG_PREVIEW_FRAME.
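For reference, the prototype mDataCb has to match is the camera_data_callback type from the HAL1 header hardware/camera.h; the five arguments above (message type, camera_memory, buffer index, metadata, cookie) line up with it one to one:
typedef void (*camera_data_callback)(int32_t msg_type,
        const camera_memory_t *data, unsigned int index,
        camera_frame_metadata_t *metadata, void *user);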
Cam1DeviceBase::setCallbacks() registers quite a few callbacks; they are worth looking into when time allows. Like startPreview(), it is exposed to the frameworks layer and is invoked from CameraClient::initialize() in frameworks.
//Register the camera's various callbacks
Cam1DeviceBase::setCallbacks(
camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void*user
)
{
mpCamMsgCbInfo->mCbCookie = user;
mpCamMsgCbInfo->mNotifyCb = notify_cb;
mpCamMsgCbInfo->mDataCb = data_cb;
mpCamMsgCbInfo->mDataCbTimestamp= data_cb_timestamp;
mpCamMsgCbInfo->mRequestMemory = get_memory;
//
if ( mpCamClient != 0 )
{
mpCamClient->setCallbacks(mpCamMsgCbInfo);
}
//
if ( mpCamAdapter != 0 )
{
mpCamAdapter->setCallbacks(mpCamMsgCbInfo);
}
}
Frameworks:
status_t CameraClient::initialize(camera_module_t *module) {
mHardware = new CameraHardwareInterface(camera_device_name);
res = mHardware->initialize(&module->common);
//This ends up in Cam1DeviceBase::setCallbacks()
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
(void *)mCameraId);
}
dataCallback is a callback that also lives in CameraClient. It accepts many kinds of msgType, which means the data callback can carry several kinds of data: RAW, COMPRESSED, and so on.
void CameraClient::dataCallback(int32_t msgType,
const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
//!++
#if 1 // defined(MTK_CAMERA_BSP_SUPPORT)
case MTK_CAMERA_MSG_EXT_DATA:
client->handleMtkExtData(dataPtr, metadata);
break;
#endif
//!--
case CAMERA_MSG_PREVIEW_FRAME:
client->handlePreviewData(msgType, dataPtr, metadata);
break;
case CAMERA_MSG_POSTVIEW_FRAME:
client->handlePostview(dataPtr);
break;
case CAMERA_MSG_RAW_IMAGE:
client->handleRawPicture(dataPtr);
break;
case CAMERA_MSG_COMPRESSED_IMAGE:
client->handleCompressedPicture(dataPtr);
break;
default:
client->handleGenericData(msgType, dataPtr, metadata);
break;
}
}
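One thing this excerpt leaves out is where client comes from: the static dataCallback only receives the user cookie, i.e. the (void *)mCameraId passed to setCallbacks above, and has to look the CameraClient instance back up from it. A rough sketch of that elided step; the helper name and the locking are simplified, and the exact code differs between Android versions:
//At the top of CameraClient::dataCallback(), simplified sketch, not verbatim AOSP:
sp<CameraClient> client = getClientFromCookie(user);   //user carries the camera id
if (client == 0) return;                               //client already gone: drop the frame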
Our msgType is CAMERA_MSG_PREVIEW_FRAME, so handlePreviewData() gets called. (The & ~CAMERA_MSG_PREVIEW_METADATA in the switch strips the metadata bit, since preview metadata such as face-detection results can be OR-ed together with a preview frame in one callback.)
void CameraClient::handlePreviewData(int32_t msgType,
const sp<IMemory>& mem,
camera_frame_metadata_t *metadata) {
//Get the memory heap (plus offset and size) backing mem
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
// is the callback enabled?
if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
// If the enable bit is off, the copy-out and one-shot bits are ignored
LOG2("frame callback is disabled");
return;
}
// hold a strong pointer to the client
sp<ICameraClient> c = mRemoteCallback;
// clear the callback flags if there is no client or we are in one-shot mode
if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
LOG2("Disable preview callback");
}
//There are two ways to send mem out: copy the frame, or forward the original memory directly
if (c != 0) {
// Is the received frame copied out or not?
if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
LOG2("frame is copied");
copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
} else {
LOG2("frame is forwarded");
c->dataCallback(msgType, mem, metadata);
}
}
}
In the end the data is sent out through sp<ICameraClient> c = mRemoteCallback; followed by c->dataCallback(...).
mRemoteCallback is of type ICameraClient, and both ICameraClient and CameraClient have a dataCallback. So what is the relationship between the two? If they were the same thing, wouldn't this recurse forever?
Searching for mRemoteCallback, I expected it to be initialized in CameraClient::connect() and dug around there for half a day without finding where it is set. It turns out it is set in CameraClient's constructor. Going back to Camera.cpp's connect() makes the picture clear, and it is actually rather interesting, so let's trace Camera::connect() in detail.
sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
int clientUid)
{
return CameraBaseT::connect(cameraId, clientPackageName, clientUid);
}
//CameraBaseT is defined in CameraBase.h as: typedef CameraBase<TCam> CameraBaseT;
//For Camera the base is CameraBase<Camera>, so the call above is really CameraBase<Camera>::connect()
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
const String16& clientPackageName, int clientUid)
{
//Substitute Camera for TCam: the camera object we saw back in Camera.cpp is constructed right here
sp<TCam> c = new TCam(cameraId);
//TCamCallbacks is defined as ICameraClient in Camera.h; cl is assigned the Camera object, since Camera inherits ICameraClient
sp<TCamCallbacks> cl = c;
const sp<ICameraService>& cs = getCameraService();
//fnConnectService is initialized, for Camera, to &ICameraService::connect
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
//The line below calls CameraService::connect(); c and cl hold the same object, passed in as two different parameters
status = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid, /*out*/ c->mCamera);
}
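For reference, the traits mentioned in the comments above are declared roughly as follows (paraphrased from Camera.h / Camera.cpp; details vary a little between Android versions):
//Camera.h: the specialization used by CameraBase<Camera>
template <>
struct CameraTraits<Camera>
{
    typedef CameraListener  TCamListener;
    typedef ICamera         TCamUser;
    typedef ICameraClient   TCamCallbacks;   //so sp<TCamCallbacks> cl = c works: Camera is an ICameraClient
    typedef status_t (ICameraService::*TCamConnectService)(const sp<ICameraClient>&,
            int, const String16&, int, /*out*/ sp<ICamera>&);
    static TCamConnectService fnConnectService;
};

//Camera.cpp: the member-function pointer that connect() calls through
CameraTraits<Camera>::TCamConnectService CameraTraits<Camera>::fnConnectService =
        &ICameraService::connect;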
//CameraService::connect() constructs a CameraClient, yet another "CameraClient",
//which has nothing to do with the ICameraClient above. Only the names are alike, which is thoroughly confusing.
status_t CameraService::connect(const sp<ICameraClient>& cameraClient, int cameraId,
const String16& clientPackageName, int clientUid, /*out*/sp<ICamera>& device) {
//CameraService::connect() does two things: it fills in Camera's mCamera (the out parameter device) and hands the Camera object, as ICameraClient, over to CameraClient.
//CameraClient's constructor forwards it to its parent-class constructor Client(), which lives inside CameraService; that Camera object is exactly what becomes mRemoteCallback.
client = new CameraClient(this, cameraClient, clientPackageName, cameraId,
facing, callingPid, clientUid, getpid());
device = client;
return OK; }
So the c->dataCallback() above actually invokes Camera's dataCallback():
// callback from camera service when frame or image is ready
void Camera::dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata)
{
sp<CameraListener> listener;
listener = mListener;
listener->postData(msgType, dataPtr, metadata);
}
Here mListener was set in android_hardware_Camera_native_setup() and its type is MtkJNICameraContext, so listener->postData() calls MtkJNICameraContext::postData().
void MtkJNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata)
{
JNICameraContext::postData(msgType, dataPtr, metadata);
}
void JNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata)
{
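//(elided in this excerpt) dataMsgType is computed from msgType a few lines above;
//in AOSP's android_hardware_Camera.cpp it is:
//    int32_t dataMsgType = msgType & ~CAMERA_MSG_PREVIEW_METADATA;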
switch (dataMsgType) {
case CAMERA_MSG_VIDEO_FRAME:
//...
case CAMERA_MSG_RAW_IMAGE:
// There is no data.
case 0:
break;
default:
ALOGV("dataCallback(%d, %p)", dataMsgType, dataPtr.get());
copyAndPost(env, dataPtr, dataMsgType);
break;
}
// post frame metadata to Java
if (metadata && (msgType & CAMERA_MSG_PREVIEW_METADATA)) {
postMetadata(env, CAMERA_MSG_PREVIEW_METADATA, metadata);
}
}
void JNICameraContext::copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int msgType)
{
jbyteArray obj = NULL;
// allocate Java byte array and copy data
if (dataPtr != NULL) {
sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size);
uint8_t *heapBase = (uint8_t*)heap->base();
const jbyte* data = reinterpret_cast<const jbyte*>(heapBase + offset);
if (msgType == CAMERA_MSG_RAW_IMAGE) {
obj = getCallbackBuffer(env, &mRawImageCallbackBuffers, size);
} else if (msgType == CAMERA_MSG_PREVIEW_FRAME && mManualBufferMode) {
//Obtain a callback buffer here and send it out
obj = getCallbackBuffer(env, &mCallbackBuffers, size);
} else {
ALOGD("Allocating callback buffer");
obj = env->NewByteArray(size);
}
if (obj == NULL) {
ALOGE("Couldn't allocate byte array for JPEG data");
env->ExceptionClear();
} else {
env->SetByteArrayRegion(obj, 0, size, data);
}
} else {
ALOGE("image heap is NULL");
}
// post image data to Java
//This is the key call: it invokes a Java static method through fields.post_event,
//which was bound to Camera.postEventFromNative() at setup time
env->CallStaticVoidMethod(mCameraJClass, fields.post_event,
mCameraJObjectWeak, msgType, 0, 0, obj);
if (obj) {
env->DeleteLocalRef(obj);
}
}
//Finally we are back in Java
private static void postEventFromNative(Object camera_ref,
int what, int arg1, int arg2, Object obj)
{
//c is the Camera instance recovered from the camera_ref weak reference (elided in this excerpt).
//All this does is post a Message whose what is CAMERA_MSG_PREVIEW_FRAME; we won't dig deeper here
Message m = c.mEventHandler.obtainMessage(what, arg1, arg2, obj);
c.mEventHandler.sendMessage(m);
}
Having come this far, though, we still have not really got to the bottom of it: the final callback merely calls the upper layer's callback and throws the data upward; it neither displays nor really processes anything. Two questions remain: where does the data come from, and how is it displayed? See the next part.