現在大部分Android Camera 應用都是基於Android標準API1/API2進行開發的,但是其實Android Camera應用也是可以直接在Android Native層進行開發,而且存在兩種方法:
- 直接和CameraService通信進行開發
- 直接和CameraProvider通信進行開發
直接在Android Native 層開發Camera應用的優缺點:
優點:
- 摒棄了Java層調用,在性能方面有一定的優勢
- 由於高性能的算法大部分都是基於c/c++開發的,在Native層開發相機應用可以方便算法集成
缺點:
- 參考資料少,開發難度稍高,會遇到很多bug,當然都是可以解決的。
1. 直接和CameraService通信進行Native Camera開發
1.1 CameraService接口定義
CameraService 的所有對外接口都定義在 ICameraService.aidl 文件中。
主要定義的接口有:
// Excerpt from ICameraService.aidl — the camera service's public binder interface.
/**
 * Open a camera device through the new camera API
 * Only supported for device HAL versions >= 3.2
 */
ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
String cameraId,
String opPackageName,
int clientUid);
/**
 * Add listener for changes to camera device and flashlight state.
 *
 * Also returns the set of currently-known camera IDs and state of each device.
 * Adding a listener will trigger the torch status listener to fire for all
 * devices that have a flash unit
 */
CameraStatus[] addListener(ICameraServiceListener listener);
/**
 * Read the static camera metadata for a camera device.
 * Only supported for device HAL versions >= 3.2
 */
CameraMetadataNative getCameraCharacteristics(String cameraId);
1.2 CameraService 獲取
首先需要從IServiceManager中獲取CameraService,代碼如下:
// Ask the service manager for the camera service binder ("media.camera")
// and cast it to the typed ICameraService interface.
sp<IServiceManager> serviceManager = defaultServiceManager();
sp<IBinder> cameraServiceBinder = serviceManager->getService(String16("media.camera"));
sp<ICameraService> mspService = interface_cast<ICameraService>(cameraServiceBinder);
1.3 打開相機
// Open the camera through CameraService. On success `device` holds a
// BpCameraDeviceUser proxy representing the remote camera device.
sp<ICameraDeviceUser> device;
binder::Status res = mspService->connectDevice(callbacks, cameraId, clientPackageName,
ICameraService::USE_CALLING_UID, &device);
connectDevice會返回一個BpCameraDeviceUser 代理對象,表示一個相機設備,其繼承自ICameraDeviceUser,ICameraDeviceUser定義如下:
// Excerpt from ICameraDeviceUser.aidl — the per-device binder interface
// returned by ICameraService.connectDevice().
// Close the connection to this camera device.
void disconnect();
const int NO_IN_FLIGHT_REPEATING_FRAMES = -1;
// Submit one capture request; `streaming` marks it as a repeating request.
SubmitInfo submitRequest(in CaptureRequest request, boolean streaming);
// Submit a batch of capture requests in a single binder call.
SubmitInfo submitRequestList(in CaptureRequest[] requestList, boolean streaming);
/**
 * Cancel the repeating request specified by requestId
 * Returns the frame number of the last frame that will be produced from this
 * repeating request, or NO_IN_FLIGHT_REPEATING_FRAMES if no frames were produced
 * by this repeating request.
 *
 * Repeating request may be stopped by camera device due to an error. Canceling a stopped
 * repeating request will trigger ERROR_ILLEGAL_ARGUMENT.
 */
long cancelRequest(int requestId);
/**
 * Begin the device configuration.
 *
 * <p>
 * beginConfigure must be called before any call to deleteStream, createStream,
 * or endConfigure. It is not valid to call this when the device is not idle.
 * <p>
 */
void beginConfigure();
// End the configuration started by beginConfigure().
void endConfigure(int operatingMode);
// Delete a stream previously created with createStream().
void deleteStream(int streamId);
/**
 * Create an output stream
 *
 * <p>Create an output stream based on the given output configuration</p>
 *
 * @param outputConfiguration size, format, and other parameters for the stream
 * @return new stream ID
 */
int createStream(in OutputConfiguration outputConfiguration);
1.4 創建Stream流
在打開相機後就可以創建或者刪除Stream了,在創建流Stream之前需要先準備Surface資源
1.4.1 Surface資源準備
- 如果預覽數據不需要顯示到真實的物理屏上,就可以通過
BufferQueue::createBufferQueue
創建出surface來,方法是:
/**
 * Prepare the preview surface from a locally created BufferQueue (the path
 * used when preview frames are consumed in-process rather than displayed).
 *
 * Creates a producer/consumer pair, wraps the consumer side in a
 * BufferItemConsumer configured with m_iWidth x m_iHeight / m_iFormat, and
 * registers mPreviewListener so frame arrival triggers a callback.
 */
void setupPreviewSurface()
{
ALOGD("%s,%d E .", __FUNCTION__,__LINE__);
// Setup a buffer queue: mspGbProducer is later handed to the camera as the
// stream target, mspGbConsumer receives the produced frames.
BufferQueue::createBufferQueue(&mspGbProducer, &mspGbConsumer);
sp<BufferItemConsumer> consumer;
consumer = new BufferItemConsumer(mspGbConsumer, GRALLOC_USAGE_SW_READ_NEVER,
/*maxImages*/ MAX_BUFFER_NUM, /*controlledByApp*/ true);
String8 consumerName = String8::format("ImageReader-%dx%df%xm%d-%d",
m_iWidth, m_iHeight, m_iFormat, MAX_BUFFER_NUM, getpid());
// Create the frame-available callback listener.
// NOTE(review): ConsumerBase::FrameAvailableListener declares a pure-virtual
// onFrameAvailable() in AOSP, so instantiating it directly may not compile —
// a concrete subclass is presumably intended here; verify against the tree.
mPreviewListener = new ConsumerBase::FrameAvailableListener();
consumer->setName(consumerName);
consumer->setDefaultBufferSize(m_iWidth, m_iHeight);
consumer->setDefaultBufferFormat(m_iFormat);
// Register the frame-available callback with the consumer.
consumer->setFrameAvailableListener(mPreviewListener);
ALOGD("%s,%d X .", __FUNCTION__,__LINE__);
}
- 如果預覽幀數據需要顯示到真實的物理屏上,就需要向
SurfaceFlinger
申請一個Surface
/**
 * Create a Surface backed by SurfaceFlinger so that preview frames can be
 * rendered on the physical display.
 *
 * Queries the main display's dimensions (swapping width/height when the
 * panel is rotated 90/270 degrees), creates a full-screen RGBX_8888 layer
 * placed on top of everything else, shows it, and returns its window.
 *
 * @return the ANativeWindow of the newly created on-screen surface
 */
sp<ANativeWindow> getSurfaceFromSF() {
    status_t err;
    sp<SurfaceComposerClient> surfaceComposerClient = new SurfaceComposerClient;
    err = surfaceComposerClient->initCheck();
    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    uint32_t width, height;
    if (mainDpyInfo.orientation != DISPLAY_ORIENTATION_0 &&
            mainDpyInfo.orientation != DISPLAY_ORIENTAT180) {
        // Display is rotated 90/270: swap the reported dimensions.
        width = mainDpyInfo.h;
        height = mainDpyInfo.w;
    } else {
        width = mainDpyInfo.w;
        height = mainDpyInfo.h;
    }
    sp<SurfaceControl> sc = surfaceComposerClient->createSurface(
            String8("mytest"), width, height,
            PIXEL_FORMAT_RGBX_8888, ISurfaceComposerClient::eOpaque);
    SurfaceComposerClient::openGlobalTransaction();
    err = sc->setLayer(0x7FFFFFFF); // always on top
    err = sc->show();
    SurfaceComposerClient::closeGlobalTransaction();
    sp<ANativeWindow> anw = sc->getSurface();
    return anw;  // BUGFIX: the original snippet was missing the semicolon here
}
當然也存在其他方法,歡迎補充啊
1.4.2 Stream創建
int configureDevice()
{
//preview surface準備
setupPreviewSurface();
//capture surface準備
setupCaptureSurface();
ALOGD("waitUntilIdle E");
res = mspDevice->waitUntilIdle();
ALOGD("waitUntilIdle X");
res = mspDevice->beginConfigure();
ALOGD("create preview Stream E");
OutputConfiguration previewOutPut(mspGbProducer, /*rotation*/ 0);
res = mspDevice->createStream(previewOutPut, &mPreviewStreamId);
ALOGD("create preview Stream mPreviewStreamId = %d X",mPreviewStreamId);
ALOGD("create capture Stream E");
OutputConfiguration jpegOutPut = OutputConfiguration(mspJpegGbpProducer, /*rotation*/ 0);
res = mspDevice->createStream(jpegOutPut, &mCaptureStreamId);
ALOGD("create capture Stream mCaptureStreamId = %d X",mCaptureStreamId);
ALOGD("endConfigure E .");
res = mspDevice->endConfigure(/*isConstrainedHighSpeed*/ false);
ALOGD("endConfigure X .");
return 0;
}
1.5 預覽申請
創建流成功後,就可以開啓startPreview和takePicture了
int startPreview()
{
CameraMetadata mRequestTemplate;
int templateId = camera2::ICameraDeviceUser::TEMPLATE_PREVIEW;
//createDefaultRequest獲取的metadata只包含一些默認值
//如果想更新其裏邊的值,就需要更新metadata了
res = mspDevice->createDefaultRequest(templateId, /*out*/ &mRequestTemplate);
//for rbg camera, open qcom faceDetect feature defaultily.
if (m_iCameraId == CAMERA_RGB_ID) {
uint8_t faceDetectType = TYPE_BYTE;
uint8_t faceDetectValue = ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE;
updateMetaData("android.statistics.faceDetectMode",faceDetectType,&faceDetectValue,1);
}
std::vector< ::android::hardware::camera2::CaptureRequest>requestList;
sp<Surface> previewSurface = new Surface(mspGbProducer, /*controlledByApp*/ true);
//preview only
camera2::CaptureRequest previewTargetRequest;
previewTargetRequest.mMetadata = requestMeta;
previewTargetRequest.mIsReprocess = false;
previewTargetRequest.mSurfaceList.add(previewSurface);
requestList.push_back(previewTargetRequest);
camera2::utils::SubmitInfo info;
//申請開啓預覽
res = mspDevice->submitRequestList(requestList, /*streaming*/ true, /*out*/ &info);
int32_t requestId = info.mRequestId;
int64_t lastFrameNumber = info.mLastFrameNumber;
return 0;
}
1.6 Metadata更新
在需要修改默認的MetaData時,就需要更新CameraMetadata,更新方法如下
/**
 * Update a single camera-metadata entry, resolving the tag by its string
 * name (vendor tags are looked up through the global VendorTagDescriptorCache).
 *
 * @param metaData metadata to modify
 * @param key      fully qualified tag name, e.g. "android.statistics.faceDetectMode"
 * @param type     camera_metadata type of `data` (TYPE_BYTE, TYPE_INT32, ...)
 * @param data     pointer to `count` values of the declared type
 * @param count    number of values pointed to by `data`
 * @return OK (0) on success, or a status_t error code
 */
int updateMetaData(CameraMetadata* metaData, char* key, uint8_t type,
        void* data, int count) {
    status_t ret;
    uint32_t tag = 0;
    sp<VendorTagDescriptor> vTags;
    sp<VendorTagDescriptorCache> cache = VendorTagDescriptorCache::getGlobalVendorTagCache();
    if (cache.get()) {
        // Vendor tags are resolved relative to the metadata's vendor id.
        const camera_metadata_t *metaBuffer = metaData->getAndLock();
        metadata_vendor_id_t vendorId = get_camera_metadata_vendor_id(metaBuffer);
        metaData->unlock(metaBuffer);
        cache->getVendorTagDescriptor(vendorId, &vTags);
    }
    ret = CameraMetadata::getTagFromName(key, vTags.get(), &tag);
    if (ret != 0) {
        return ret;  // unknown tag name — nothing to update
    }
    // BUGFIX: the original cast used an undefined template parameter `T` in a
    // non-template function. Dispatch on the declared metadata type so the
    // matching CameraMetadata::update() overload is invoked.
    switch (type) {
        case TYPE_BYTE:
            ret = metaData->update(tag, static_cast<const uint8_t*>(data), count);
            break;
        case TYPE_INT32:
            ret = metaData->update(tag, static_cast<const int32_t*>(data), count);
            break;
        case TYPE_FLOAT:
            ret = metaData->update(tag, static_cast<const float*>(data), count);
            break;
        case TYPE_INT64:
            ret = metaData->update(tag, static_cast<const int64_t*>(data), count);
            break;
        case TYPE_DOUBLE:
            ret = metaData->update(tag, static_cast<const double*>(data), count);
            break;
        case TYPE_RATIONAL:
            ret = metaData->update(tag, static_cast<const camera_metadata_rational_t*>(data), count);
            break;
        default:
            ret = BAD_VALUE;  // unsupported metadata type tag
            break;
    }
    return ret;
}
至此完成了直接和CameraService通信進行Native Camera開發的簡單介紹。
2. 直接和CameraProvider通信進行Native Camera開發
待續。。。。