本文首先研究HFR功能的基本流程,然後在此基礎上研究並實現了在不開啓錄像的情況下的HFR功能
API2 啓動HFR功能學習總結:
1、HFR 配置流
snapdragoncamera 配置HFR流
//hfr開啓且最大幀率大於NORMAL_SESSION_MAX_FPS(60fps)
//創建的是createConstrainedHighSpeedCaptureSession
//否則是createCaptureSession
if (mHighSpeedCapture && ((int) mHighSpeedFPSRange.getUpper() > NORMAL_SESSION_MAX_FPS)) {
//創建高速流
mCameraDevice[cameraId].createConstrainedHighSpeedCaptureSession(surfaces,..);
} else {
//普通流,雖然是普通流,但是該過程設置了setOpModeForVideoStream,會導致config->operation_mode變化。
mCameraDevice[cameraId].createCaptureSession(surfaces,mCCSSateCallback,null);
}
進入到CameraDeviceImpl
//frameworks\base\core\java\android\hardware\camera2\impl\CameraDeviceImpl.java
public void createConstrainedHighSpeedCaptureSession(List<Surface> outputs,
android.hardware.camera2.CameraCaptureSession.StateCallback callback, Handler handler)
throws CameraAccessException {
if (outputs == null || outputs.size() == 0 || outputs.size() > 2) {
throw new IllegalArgumentException(
"Output surface list must not be null and the size must be no more than 2");
}
//獲取設備屬性
StreamConfigurationMap config =
getCharacteristics().get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
//1、檢查格式是否正確
//2、檢查fps是否是有效範圍等
SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputs, /*fpsRange*/null, config);
List<OutputConfiguration> outConfigurations = new ArrayList<>(outputs.size());
for (Surface surface : outputs) {
outConfigurations.add(new OutputConfiguration(surface));
}
//創建session
createCaptureSessionInternal(null, outConfigurations, callback, handler,
/*operatingMode*/ICameraDeviceUser.CONSTRAINED_HIGH_SPEED_MODE);
}
SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputs, /*fpsRange*/null, config);
主要是功能是:
- 檢查格式是否正確
- 檢查fps是否是有效範圍等
代碼如下:
//frameworks\base\core\java\android\hardware\camera2\utils\SurfaceUtils.java
public static void checkConstrainedHighSpeedSurfaces(Collection<Surface> surfaces,
Range<Integer> fpsRange, StreamConfigurationMap config) {
//hfr 的輸出流只能是1個或2個
if (surfaces == null || surfaces.size() == 0 || surfaces.size() > 2) {
throw new IllegalArgumentException("Output target surface list must not be null and"
+ " the size must be 1 or 2");
}
//檢查hfr設置的幀率範圍是否有效
List<Size> highSpeedSizes = null;
if (fpsRange == null) {
highSpeedSizes = Arrays.asList(config.getHighSpeedVideoSizes());
} else {
// Check the FPS range first if provided
Range<Integer>[] highSpeedFpsRanges = config.getHighSpeedVideoFpsRanges();
if(!Arrays.asList(highSpeedFpsRanges).contains(fpsRange)) {
throw new IllegalArgumentException("Fps range " + fpsRange.toString() + " in the"
+ " request is not a supported high speed fps range " +
Arrays.toString(highSpeedFpsRanges));
}
highSpeedSizes = Arrays.asList(config.getHighSpeedVideoSizesFor(fpsRange));
}
for (Surface surface : surfaces) {
checkHighSpeedSurfaceFormat(surface);
// Surface size must be supported high speed sizes.
//檢查surface時候支持hfr設置的尺寸大小
Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
if (!highSpeedSizes.contains(surfaceSize)) {
throw new IllegalArgumentException("Surface size " + surfaceSize.toString() + " is"
+ " not part of the high speed supported size list " +
Arrays.toString(highSpeedSizes.toArray()));
}
// Each output surface must be either preview surface or recording surface.
//hfr的輸出流只能是預覽流或者是錄像編碼流
if (!SurfaceUtils.isSurfaceForPreview(surface) &&
!SurfaceUtils.isSurfaceForHwVideoEncoder(surface)) {
throw new IllegalArgumentException("This output surface is neither preview nor "
+ "hardware video encoding surface");
}
if (SurfaceUtils.isSurfaceForPreview(surface) &&
SurfaceUtils.isSurfaceForHwVideoEncoder(surface)) {
throw new IllegalArgumentException("This output surface can not be both preview"
+ " and hardware video encoding surface");
}
}
// For 2 output surface case, they shouldn't be same type.
//hfr 的2個流不能相同
if (surfaces.size() == 2) {
// Up to here, each surface can only be either preview or recording.
Iterator<Surface> iterator = surfaces.iterator();
boolean isFirstSurfacePreview =
SurfaceUtils.isSurfaceForPreview(iterator.next());
boolean isSecondSurfacePreview =
SurfaceUtils.isSurfaceForPreview(iterator.next());
if (isFirstSurfacePreview == isSecondSurfacePreview) {
throw new IllegalArgumentException("The 2 output surfaces must have different"
+ " type");
}
}
}
檢查是不是預覽流的方法是:
//frameworks\base\core\java\android\hardware\camera2\legacy\LegacyCameraDevice.java
//檢查是不是預覽流的方法
//usageFlags 不能包含GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT | GRALLOC_USAGE_SW_READ_OFTEN
//usageFlags 只能是GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |GRALLOC_USAGE_HW_RENDER
//之一
public static boolean isPreviewConsumer(Surface output) {
int usageFlags = detectSurfaceUsageFlags(output);
int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT |
GRALLOC_USAGE_SW_READ_OFTEN;
//GRALLOC_USAGE_HW_TEXTURE見hardware/libhardware/include/hardware/gralloc.h
int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
GRALLOC_USAGE_HW_RENDER;
boolean previewConsumer = ((usageFlags & disallowedFlags) == 0 &&
(usageFlags & allowedFlags) != 0);
int surfaceFormat = ImageFormat.UNKNOWN;
try {
surfaceFormat = detectSurfaceType(output);
} catch(BufferQueueAbandonedException e) {
throw new IllegalArgumentException("Surface was abandoned", e);
}
return previewConsumer;
}
GRALLOC_USAGE_HW_TEXTURE
定義見hardware/libhardware/include/hardware/gralloc.h
檢查是不是錄像編碼流
//frameworks\base\core\java\android\hardware\camera2\legacy\LegacyCameraDevice.java
public static boolean isVideoEncoderConsumer(Surface output) {
int usageFlags = detectSurfaceUsageFlags(output);
int disallowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
GRALLOC_USAGE_RENDERSCRIPT | GRALLOC_USAGE_SW_READ_OFTEN;
//usageFlags 只能是GRALLOC_USAGE_HW_VIDEO_ENCODER
int allowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER;
boolean videoEncoderConsumer = ((usageFlags & disallowedFlags) == 0 &&
(usageFlags & allowedFlags) != 0);
int surfaceFormat = ImageFormat.UNKNOWN;
try {
surfaceFormat = detectSurfaceType(output);
} catch(BufferQueueAbandonedException e) {
throw new IllegalArgumentException("Surface was abandoned", e);
}
return videoEncoderConsumer;
}
繼續分析創建session createCaptureSessionInternal
//frameworks\base\core\java\android\hardware\camera2\impl\CameraDeviceImpl.java
private void createCaptureSessionInternal(InputConfiguration inputConfig,
List<OutputConfiguration> outputConfigurations,
CameraCaptureSession.StateCallback callback, Handler handler,
int operatingMode) throws CameraAccessException {
synchronized(mInterfaceLock) {
if (DEBUG) {
Log.d(TAG, "createCaptureSessionInternal");
}
checkIfCameraClosedOrInError();
//createConstrainedHighSpeedCaptureSession創建時,isConstrainedHighSpeed爲true
boolean isConstrainedHighSpeed =
(operatingMode == ICameraDeviceUser.CONSTRAINED_HIGH_SPEED_MODE);
if (isConstrainedHighSpeed && inputConfig != null) {
throw new IllegalArgumentException("Constrained high speed session doesn't support"
+ " input configuration yet.");
}
// Notify current session that it's going away, before starting camera operations
// After this call completes, the session is not allowed to call into CameraDeviceImpl
if (mCurrentSession != null) {
mCurrentSession.replaceSessionClose();
}
// TODO: dont block for this
boolean configureSuccess = true;
CameraAccessException pendingException = null;
Surface input = null;
try {
// configure streams and then block until IDLE
//開始配置高速流
configureSuccess = configureStreamsChecked(inputConfig, outputConfigurations,
operatingMode);
if (configureSuccess == true && inputConfig != null) {
input = mRemoteDevice.getInputSurface();
}
} catch (CameraAccessException e) {
configureSuccess = false;
pendingException = e;
input = null;
if (DEBUG) {
Log.v(TAG, "createCaptureSession - failed with exception ", e);
}
}
// Fire onConfigured if configureOutputs succeeded, fire onConfigureFailed otherwise.
//如果是isConstrainedHighSpeed成功,則通過onConfigured
//返回相機應用CameraConstrainedHighSpeedCaptureSessionImpl對象
//否則返回相機應用CameraCaptureSessionImpl
//可見,相機應用createConstrainedHighSpeedCaptureSession
//獲取的是CameraConstrainedHighSpeedCaptureSessionImpl對象
//createCaptureSession獲取的是CameraCaptureSessionImpl對象
CameraCaptureSessionCore newSession = null;
if (isConstrainedHighSpeed) {
newSession = new CameraConstrainedHighSpeedCaptureSessionImpl(mNextSessionId++,
callback, handler, this, mDeviceHandler, configureSuccess,
mCharacteristics);
} else {
newSession = new CameraCaptureSessionImpl(mNextSessionId++, input,
callback, handler, this, mDeviceHandler,
configureSuccess);
}
// TODO: wait until current session closes, then create the new session
mCurrentSession = newSession;
if (pendingException != null) {
throw pendingException;
}
mSessionStateCallback = mCurrentSession.getDeviceStateCallback();
}
}
繼續分析configureStreamsChecked
//frameworks\base\core\java\android\hardware\camera2\impl\CameraDeviceImpl.java
public boolean configureStreamsChecked(InputConfiguration inputConfig,
List<OutputConfiguration> outputs, int operatingMode)
throws CameraAccessException {
// Treat a null input the same an empty list
...
try {
waitUntilIdle();
// 開始配置
mRemoteDevice.beginConfigure();
...
// Delete all streams first (to free up HW resources)
for (Integer streamId : deleteList) {
//刪除流
mRemoteDevice.deleteStream(streamId);
mConfiguredOutputs.delete(streamId);
}
// Add all new streams
for (OutputConfiguration outConfig : outputs) {
if (addSet.contains(outConfig)) {
//創建流
int streamId = mRemoteDevice.createStream(outConfig);
mConfiguredOutputs.put(streamId, outConfig);
}
}
//customOpMode 可以通過setOpModeForVideoStream改變
//CameraConstrainedHighSpeedCaptureSessionImpl沒有改變該值
operatingMode = (operatingMode | (customOpMode << 16));
//結束配置流
//mRemoteDevice類型是ICameraDeviceUserWrapper
//是在在打開相機時獲取的。
mRemoteDevice.endConfigure(operatingMode);
success = true;
} catch (IllegalArgumentException e) {
....
}
}
.....
return success;
}
上述,mRemoteDevice對象是ICameraDeviceUserWrapper類型,是在打開相機時獲取的,代碼如下:
//frameworks\base\core\java\android\hardware\camera2\CameraManager.java
private CameraDevice openCameraDeviceUserAsync(String cameraId,
CameraDevice.StateCallback callback, Handler handler, final int uid)
throws CameraAccessException {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
CameraDevice device = null;
synchronized (mLock) {
ICameraDeviceUser cameraUser = null;
//創建CameraDeviceImpl對象
android.hardware.camera2.impl.CameraDeviceImpl deviceImpl =
new android.hardware.camera2.impl.CameraDeviceImpl(
cameraId,
callback,
handler,
characteristics,
mContext.getApplicationInfo().targetSdkVersion);
ICameraDeviceCallbacks callbacks = deviceImpl.getCallbacks();
try {
if (supportsCamera2ApiLocked(cameraId)) {
// Use cameraservice's cameradeviceclient implementation for HAL3.2+ devices
//獲取cameraService代理對象
ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
//通過cameraService代理對象打開相機,獲取ICameraDeviceUser cameraUser對象
cameraUser = cameraService.connectDevice(callbacks, cameraId,
mContext.getOpPackageName(), uid);
} else {
// Use legacy camera implementation for HAL1 devices
...
}
} catch (ServiceSpecificException e) {
...
}
// TODO: factor out callback to be non-nested, then move setter to constructor
// For now, calling setRemoteDevice will fire initial
// onOpened/onUnconfigured callbacks.
// This function call may post onDisconnected and throw CAMERA_DISCONNECTED if
// cameraUser dies during setup.
//將打開相機獲取的cameraUser對象設置到CameraDeviceImpl deviceImpl對象中
deviceImpl.setRemoteDevice(cameraUser);
device = deviceImpl;
}
//返回CameraDeviceImpl對象deviceImpl
return device;
}
繼續分析configureStreamsChecked,其主要分爲3步:
- mRemoteDevice.beginConfigure();//開始配置
- mRemoteDevice.deleteStream(streamId)和mRemoteDevice.createStream(outConfig)//創建、刪除流
- mRemoteDevice.endConfigure(operatingMode);//結束配置,前兩步是準備工作,這裏纔是真正配置流的地方
主要分析下endConfigure
//frameworks\base\core\java\android\hardware\camera2\impl\ICameraDeviceUserWrapper.java
public void endConfigure(int operatingMode) throws CameraAccessException {
try {
mRemoteDevice.endConfigure(operatingMode);//Binder IPC
} catch (Throwable t) {
CameraManager.throwAsPublicException(t);
throw new UnsupportedOperationException("Unexpected exception", t);
}
}
**mRemoteDevice.endConfigure(operatingMode)** 經過Binder IPC,進入cameraservice中的 **CameraDeviceClient.cpp** 類。
//frameworks\av\services\camera\libcameraservice\api2\CameraDeviceClient.cpp
binder::Status CameraDeviceClient::endConfigure(int operatingMode) {
ATRACE_CALL();
ALOGE("%s: ending configure (%d input stream, %zu output surfaces)",
__FUNCTION__, mInputStream.configured ? 1 : 0,
mStreamMap.size());
.....
// Sanitize the high speed session against necessary capability bit.
//檢查是不是CONSTRAINED_HIGH_SPEED_MODE模式
bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
//檢查是否支持CONSTRAINED_HIGH_SPEED_MODE
if (isConstrainedHighSpeed) {
CameraMetadata staticInfo = mDevice->info();
camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
bool isConstrainedHighSpeedSupported = false;
for(size_t i = 0; i < entry.count; ++i) {
uint8_t capability = entry.data.u8[i];
if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
isConstrainedHighSpeedSupported = true;
break;
}
}
if (!isConstrainedHighSpeedSupported) {
String8 msg = String8::format(
"Camera %s: Try to create a constrained high speed configuration on a device"
" that doesn't support it.", mCameraIdStr.string());
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
msg.string());
}
}
//檢查通過後開始配置流
status_t err = mDevice->configureStreams(operatingMode);
....
return res;
}
進入 Camera3Device::configureStreamsLocked()
//frameworks\av\services\camera\libcameraservice\device3\Camera3Device.cpp
status_t Camera3Device::configureStreamsLocked(int operatingMode) {
ATRACE_CALL();
status_t res;
...
// 檢查是否是isConstrainedHighSpeed模式
bool isConstrainedHighSpeed =
static_cast<int>(StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE) ==
operatingMode;
...
// Start configuring the streams
//開始配置流
ALOGE("%s: Camera %s: mytest Starting stream configuration operatingMode = %0x,isConstrainedHighSpeed =%d", __FUNCTION__, mId.string(),operatingMode,isConstrainedHighSpeed);
camera3_stream_configuration config;
config.operation_mode = mOperatingMode;/將mOperatingMode賦值給config.operation_mode
config.num_streams = (mInputStream != NULL) + mOutputStreams.size();
Vector<camera3_stream_t*> streams;
streams.setCapacity(config.num_streams);
//輸入流配置,從來沒搞過輸入流,需要以後研究下
if (mInputStream != NULL) {
camera3_stream_t *inputStream;
inputStream = mInputStream->startConfiguration();
if (inputStream == NULL) {
CLOGE("Can't start input stream configuration");
cancelStreamsConfigurationLocked();
return INVALID_OPERATION;
}
streams.add(inputStream);
}
//輸出流配置
for (size_t i = 0; i < mOutputStreams.size(); i++) {
// Don't configure bidi streams twice, nor add them twice to the list
if (mOutputStreams[i].get() ==
static_cast<Camera3StreamInterface*>(mInputStream.get())) {
config.num_streams--;
continue;
}
camera3_stream_t *outputStream;
outputStream = mOutputStreams.editValueAt(i)->startConfiguration();
if (outputStream == NULL) {
CLOGE("Can't start output stream configuration");
cancelStreamsConfigurationLocked();
return INVALID_OPERATION;
}
streams.add(outputStream);
}
config.streams = streams.editArray();
// Do the HAL configuration; will potentially touch stream
// max_buffers, usage, priv fields.
//開始通知HAL層配置流
res = mInterface->configureStreams(&config);
....
return OK;
}
最終配置流的相關信息如下:
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:855 configure_streams() Number of streams: 2
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:871 configure_streams() stream[0] = 0xe5fad694 - info:
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:874 configure_streams() format : 34, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:876 configure_streams() width : 640
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:878 configure_streams() height : 480
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:881 configure_streams() stream_type : 00000000, CAMERA3_STREAM_OUTPUT
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:883 configure_streams() usage : 00000900
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:885 configure_streams() max_buffers : 0
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:888 configure_streams() rotation : 00000000, CAMERA3_STREAM_ROTATION_0
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:891 configure_streams() data_space : 00000000, HAL_DATASPACE_UNKNOWN
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:893 configure_streams() priv : 0x0
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:871 configure_streams() stream[1] = 0xe5fad6f4 - info:
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:874 configure_streams() format : 34, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:876 configure_streams() width : 640
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:878 configure_streams() height : 480
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:881 configure_streams() stream_type : 00000000, CAMERA3_STREAM_OUTPUT
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:883 configure_streams() usage : 00010000
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:885 configure_streams() max_buffers : 0
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:888 configure_streams() rotation : 00000000, CAMERA3_STREAM_ROTATION_0
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:891 configure_streams() data_space : 00000103, HAL_DATASPACE_BT601_525
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:893 configure_streams() priv : 0x0
06-22 12:33:54.059 586 1032 I CamX : [ INFO][HAL ] camxhal3.cpp:901 configure_streams() operation_mode: 1
總結:
HFR配置流時做的限制有:
- 通過createConstrainedHighSpeedCaptureSession配置高速流
- 只能配置一個或者兩個流,一個預覽流,一個錄像編碼流
- 對預覽流的限制是usage爲
GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_RENDER
- 對錄像流的限制是usage爲
GRALLOC_USAGE_HW_VIDEO_ENCODER
2、HFR配置流時,surface的準備
在createConstrainedHighSpeedCaptureSession時,需要準備1-2個surface。其必須滿足HFR配置流的限制。
snapdragonCamera 爲HFR提供兩個surface的方法是:
- 預覽流的surface是通過surfaceview獲取的
- 編碼流的surface是通過
MediaRecorder
的querySurfaceMediaSourceFromMediaServer
獲取的
MediaRecorder獲取surface的流程會單獨在“mediarecorder啓動流程學習總結”這篇文章中總結。
本文研究的主要的是,如何在不依賴surfaceview
和MediaRecorder
的情況下,在Native層實現HFR的配置
2.1 預覽流surface的準備
//參考代碼見frameworks\av\media\libstagefright\omx\GraphicBufferSource.cpp
void configPreviewSurface()
{
ALOGD("%s,%d E .", __FUNCTION__,__LINE__);
// Setup a buffer queue
BufferQueue::createBufferQueue(&mspGbProducer, &mspGbConsumer);
mPreviewSurface = new Surface(mspGbProducer);
String8 consumerName = String8::format("ImageReader-%dx%df%xm%d-%d",
m_iWidth, m_iHeight, m_iFormat, MAX_BUFFER_NUM, getpid());
mspGbConsumer->setConsumerName(consumerName);
// Set default size and format
mspGbConsumer->setDefaultBufferSize(640, 480);
mspGbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_YCbCr_420_888);
uint32_t consumerUsage;
consumerUsage |= GRALLOC_USAGE_HW_TEXTURE|GRALLOC_USAGE_HW_COMPOSER;
mspGbConsumer->setConsumerUsageBits(consumerUsage);
int32_t dataSpace= 0;
ALOGD("setting dataspace: %#x, acquired=%d", dataSpace, mNumOutstandingAcquires);
mspGbConsumer->setDefaultBufferDataSpace((android_dataspace)dataSpace);
wp<BufferQueue::ConsumerListener> listener =
static_cast<BufferQueue::ConsumerListener*>(this);
sp<IConsumerListener> proxy =
new BufferQueue::ProxyConsumerListener(listener);
mInitCheck = mspGbConsumer->consumerConnect(proxy, false);
ALOGD("%s,%d X .", __FUNCTION__,__LINE__);
}
2.2 錄像流surface的準備
//參考代碼見frameworks\av\media\libstagefright\omx\GraphicBufferSource.cpp
void configVideoSurface()
{
ALOGD("%s,%d E .", __FUNCTION__,__LINE__);
// Setup a buffer queue
BufferQueue::createBufferQueue(&mspGbProducer, &mspGbConsumer);
mVideoSurface = new Surface(mspGbProducer);
String8 consumerName = String8::format("ImageReader-%dx%df%xm%d-%d",
m_iWidth, m_iHeight, m_iFormat, MAX_BUFFER_NUM, getpid());
mspGbConsumer->setConsumerName(consumerName);
// Set default size and format
mspGbConsumer->setDefaultBufferSize(640, 480);
mspGbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_YCbCr_420_888);
uint32_t consumerUsage;
consumerUsage |= GRALLOC_USAGE_HW_VIDEO_ENCODER;
mspGbConsumer->setConsumerUsageBits(consumerUsage);
int32_t dataSpace= HAL_DATASPACE_BT601_525;
ALOGD("setting dataspace: %#x, acquired=%d", dataSpace, mNumOutstandingAcquires);
mspGbConsumer->setDefaultBufferDataSpace((android_dataspace)dataSpace);
wp<BufferQueue::ConsumerListener> listener =
static_cast<BufferQueue::ConsumerListener*>(this);
sp<IConsumerListener> proxy =
new BufferQueue::ProxyConsumerListener(listener);
mInitCheck = mspGbConsumer->consumerConnect(proxy, false);
ALOGD("%s,%d X .", __FUNCTION__,__LINE__);
}
通過上述兩步即完成了預覽和錄像流的準備工作,而且實現了HFR和surfaceview和MediaRecorder的解耦
3、HFR預覽和錄像數據申請
配置完成高速流,就可以申請預覽和錄像數據,
HFR 預覽流的幀率和錄像流的幀率是成倍關係,倍數被稱爲batchSize,其相關信息是在HAL層做的定義:
//vendor\qcom\proprietary\camx\src\hwl\titan17x/camxtitan17xcontext.cpp
CamxResult Titan17xContext::GetStaticCaps(
PlatformStaticCaps* pCaps)
{
CamxResult result = CamxResultSuccess;
CSLCameraPlatform CSLPlatform = {};
UINT32 size = 0;
UINT32 numIPEs = 2;
UINT32 HFRPreviewFPS = 30;
// Initialize platform specific static capabilities
...
CAMX_LOG_INFO(CamxLogGroupHWL, "HFR Preview FPS %d", HFRPreviewFPS);
// Availiable HFR configurations
pCaps->numDefaultHFRVideoSizes = NumSupportedHFRVideoSizes;
for (UINT8 i = 0; i < pCaps->numDefaultHFRVideoSizes; i++)
{
pCaps->defaultHFRVideoSizes[i].width = SupportedHFRVideoSizes[i].width;
pCaps->defaultHFRVideoSizes[i].height = SupportedHFRVideoSizes[i].height;
pCaps->defaultHFRVideoSizes[i].minFPS = SupportedHFRVideoSizes[i].minFPS;
pCaps->defaultHFRVideoSizes[i].maxFPS = SupportedHFRVideoSizes[i].maxFPS;
pCaps->defaultHFRVideoSizes[i].batchSizeMax = SupportedHFRVideoSizes[i].maxFPS / HFRPreviewFPS;
}
...
return result;
}
通過上邊的信息可以得出的結論是:
HFR(high frame rate:高幀率)高的其實是錄像流的幀率,對預覽流的幀率並沒啥影響。
爲實現錄像流是預覽流的batchSize倍數,cameraAPI2 的實現方案是:
通過調用setRepeatingBurst
(java層的方法,對應的native方法是submitRequestList
)每次同時提交多個request申請。代碼如下:
camera2::CaptureRequest singleTargetRequest;//recoding
singleTargetRequest.mMetadata = mRequestTemplate;
//request.mSurfaceList.add(mspSurface);
singleTargetRequest.mSurfaceList.add(videoSurface);
singleTargetRequest.mIsReprocess = false;
camera2::CaptureRequest doubleTargetRequest;//recording+preview
doubleTargetRequest.mMetadata = mRequestTemplate;
doubleTargetRequest.mSurfaceList.add(mspSurface);
doubleTargetRequest.mSurfaceList.add(videoSurface);
doubleTargetRequest.mIsReprocess = false;
// 我做的demo preview爲 30fps,videorecording 爲120fps,batchsize 是4,即一次submitRequestList
//需要提交4個request,一個preview+recording,三個recording
const ::std::vector< ::android::hardware::camera2::CaptureRequest>requestList{
doubleTargetRequestBuilder/*first Request must be doubleTargetRequest*/,
singleTargetRequestBuilder,singleTargetRequestBuilder,singleTargetRequestBuilder};
camera2::utils::SubmitInfo info;
res = mspDevice->submitRequestList(requestList, /*streaming*/ true, /*out*/ &info);
至此HFR學習總結完成