前文回顾
上一篇文章简单讲述了 initialize 的过程。
一、 configure_streams 流程
1.1 原文解读
* 4. The framework calls camera3_device_t->ops->configure_streams() with a list
* of input/output streams to the HAL device.
*
* 5. <= CAMERA_DEVICE_API_VERSION_3_1:
*
* The framework allocates gralloc buffers and calls
* camera3_device_t->ops->register_stream_buffers() for at least one of the
* output streams listed in configure_streams. The same stream is registered
* only once.
*
* >= CAMERA_DEVICE_API_VERSION_3_2:
*
* camera3_device_t->ops->register_stream_buffers() is not called and must
* be NULL.
4、配流:framework 调用 camera3_device_t->ops->configure_streams() 进行配流,并把 input stream 和 output stream 的列表作为参数传到 HAL 层。
5、注册流buffer:API3.1->framework分配gralloc buffer和在configure_streams中,调用camera_device方法ops调用camera3_device_ops的register_stream_buffers方法注册stream buffer,camera3_device_t-> ops-> register_stream_buffers()至少有一个输出流。 同一流仅注册一次。API3.2-> 没有调用camera3_device_t-> ops-> register_stream_buffers(),并且必须为NULL。
1.2 官网文档
《80-pc212-1_a_chi_api_specifications_for_qualcomm_spectra_2xx_camera.pdf》简单介绍configure stream的过程
1.3 代码分析
配流uml内容来自Android : Camera之camx hal架构 - sheldon_blogs - 博客园
vendor/qcom/proprietary/camx/src/core/hal/camxhal3entry.cpp
int configure_streams(
const struct camera3_device* pCamera3DeviceAPI,
camera3_stream_configuration_t* pStreamConfigsAPI)
{
JumpTableHAL3* pHAL3 = static_cast<JumpTableHAL3*>(g_dispatchHAL3.GetJumpTable());
return pHAL3->configure_streams(pCamera3DeviceAPI, pStreamConfigsAPI);
}
(1)camxhal3entry.cpp这块 ops都是用JumpTable来获取到 camxhal3.cpp 中的JumpTableHAL3的跳转,configure_streams也不例外
vendor/qcom/proprietary/camx/src/core/hal/camxhal3.cpp
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// configure_streams
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// configure_streams: HAL3 camera3_device_ops entry. Validates the framework's
// stream list, logs every stream before and after configuration, delegates the
// real work to HALDevice::ConfigureStreams(), and maps the CamxResult to the
// errno convention the HAL interface requires (0 / -EINVAL / -ENODEV).
static int configure_streams(
const struct camera3_device* pCamera3DeviceAPI,
camera3_stream_configuration_t* pStreamConfigsAPI)
{
CAMX_ENTRYEXIT_SCOPE(CamxLogGroupHAL, SCOPEEventHAL3ConfigureStreams);
CamxResult result = CamxResultSuccess;
// Debug-only sanity checks; release builds rely on the runtime checks below.
CAMX_ASSERT(NULL != pCamera3DeviceAPI);
CAMX_ASSERT(NULL != pCamera3DeviceAPI->priv);
CAMX_ASSERT(NULL != pStreamConfigsAPI);
CAMX_ASSERT(pStreamConfigsAPI->num_streams > 0);
CAMX_ASSERT(NULL != pStreamConfigsAPI->streams);
if ((NULL != pCamera3DeviceAPI) &&
(NULL != pCamera3DeviceAPI->priv) &&
(NULL != pStreamConfigsAPI) &&
(pStreamConfigsAPI->num_streams > 0) &&
(NULL != pStreamConfigsAPI->streams))
{
CAMX_LOG_INFO(CamxLogGroupHAL, "Number of streams: %d", pStreamConfigsAPI->num_streams);
// Recover the HALDevice instance associated with this camera3_device.
HALDevice* pHALDevice = GetHALDevice(pCamera3DeviceAPI);
CAMX_LOG_CONFIG(CamxLogGroupHAL, "HalOp: Begin CONFIG: %p, logicalCameraId: %d, cameraId: %d",
pCamera3DeviceAPI, pHALDevice->GetCameraId(), pHALDevice->GetFwCameraId());
// Pass 1: dump every requested stream as received from the framework and
// clear its reserved fields so the layers below start from a clean slate.
for (UINT32 stream = 0; stream < pStreamConfigsAPI->num_streams; stream++)
{
CAMX_ASSERT(NULL != pStreamConfigsAPI->streams[stream]);
if (NULL == pStreamConfigsAPI->streams[stream])
{
CAMX_LOG_ERROR(CamxLogGroupHAL, "Invalid argument 2 for configure_streams()");
// HAL interface requires -EINVAL (EInvalidArg) for invalid arguments
result = CamxResultEInvalidArg;
break;
}
else
{
CAMX_LOG_INFO(CamxLogGroupHAL, " stream[%d] = %p - info:", stream,
pStreamConfigsAPI->streams[stream]);
CAMX_LOG_INFO(CamxLogGroupHAL, " format : %d, %s",
pStreamConfigsAPI->streams[stream]->format,
FormatToString(pStreamConfigsAPI->streams[stream]->format));
CAMX_LOG_INFO(CamxLogGroupHAL, " width : %d",
pStreamConfigsAPI->streams[stream]->width);
CAMX_LOG_INFO(CamxLogGroupHAL, " height : %d",
pStreamConfigsAPI->streams[stream]->height);
CAMX_LOG_INFO(CamxLogGroupHAL, " stream_type : %08x, %s",
pStreamConfigsAPI->streams[stream]->stream_type,
StreamTypeToString(pStreamConfigsAPI->streams[stream]->stream_type));
CAMX_LOG_INFO(CamxLogGroupHAL, " usage : %08x",
pStreamConfigsAPI->streams[stream]->usage);
CAMX_LOG_INFO(CamxLogGroupHAL, " max_buffers : %d",
pStreamConfigsAPI->streams[stream]->max_buffers);
CAMX_LOG_INFO(CamxLogGroupHAL, " rotation : %08x, %s",
pStreamConfigsAPI->streams[stream]->rotation,
RotationToString(pStreamConfigsAPI->streams[stream]->rotation));
CAMX_LOG_INFO(CamxLogGroupHAL, " data_space : %08x, %s",
pStreamConfigsAPI->streams[stream]->data_space,
DataSpaceToString(pStreamConfigsAPI->streams[stream]->data_space));
CAMX_LOG_INFO(CamxLogGroupHAL, " priv : %p",
pStreamConfigsAPI->streams[stream]->priv);
#if (defined(CAMX_ANDROID_API) && (CAMX_ANDROID_API >= 28)) // Android-P or better
CAMX_LOG_INFO(CamxLogGroupHAL, " physical_camera_id : %s",
pStreamConfigsAPI->streams[stream]->physical_camera_id);
#endif // Android-P or better
// Reset reserved fields; reserved[0] is later read back as a
// Camera3HalStream (see the FINAL pass below).
pStreamConfigsAPI->streams[stream]->reserved[0] = NULL;
pStreamConfigsAPI->streams[stream]->reserved[1] = NULL;
}
}
CAMX_LOG_INFO(CamxLogGroupHAL, " operation_mode: %d", pStreamConfigsAPI->operation_mode);
// Delegate the actual stream configuration to the HALDevice / CHI layer.
Camera3StreamConfig* pStreamConfigs = reinterpret_cast<Camera3StreamConfig*>(pStreamConfigsAPI);
result = pHALDevice->ConfigureStreams(pStreamConfigs);
if ((CamxResultSuccess != result) && (CamxResultEInvalidArg != result))
{
// HAL interface requires -ENODEV (EFailed) if a fatal error occurs
result = CamxResultEFailed;
}
// Pass 2: on success, dump the final (possibly overridden) stream
// properties that will be returned to the framework.
if (CamxResultSuccess == result)
{
for (UINT32 stream = 0; stream < pStreamConfigsAPI->num_streams; stream++)
{
CAMX_ASSERT(NULL != pStreamConfigsAPI->streams[stream]);
if (NULL == pStreamConfigsAPI->streams[stream])
{
CAMX_LOG_ERROR(CamxLogGroupHAL, "Invalid argument 2 for configure_streams()");
// HAL interface requires -EINVAL (EInvalidArg) for invalid arguments
result = CamxResultEInvalidArg;
break;
}
else
{
CAMX_LOG_CONFIG(CamxLogGroupHAL, " FINAL stream[%d] = %p - info:", stream,
pStreamConfigsAPI->streams[stream]);
CAMX_LOG_CONFIG(CamxLogGroupHAL, " format : %d, %s",
pStreamConfigsAPI->streams[stream]->format,
FormatToString(pStreamConfigsAPI->streams[stream]->format));
CAMX_LOG_CONFIG(CamxLogGroupHAL, " width : %d",
pStreamConfigsAPI->streams[stream]->width);
CAMX_LOG_CONFIG(CamxLogGroupHAL, " height : %d",
pStreamConfigsAPI->streams[stream]->height);
CAMX_LOG_CONFIG(CamxLogGroupHAL, " stream_type : %08x, %s",
pStreamConfigsAPI->streams[stream]->stream_type,
StreamTypeToString(pStreamConfigsAPI->streams[stream]->stream_type));
CAMX_LOG_CONFIG(CamxLogGroupHAL, " usage : %08x",
pStreamConfigsAPI->streams[stream]->usage);
CAMX_LOG_CONFIG(CamxLogGroupHAL, " max_buffers : %d",
pStreamConfigsAPI->streams[stream]->max_buffers);
CAMX_LOG_CONFIG(CamxLogGroupHAL, " rotation : %08x, %s",
pStreamConfigsAPI->streams[stream]->rotation,
RotationToString(pStreamConfigsAPI->streams[stream]->rotation));
CAMX_LOG_CONFIG(CamxLogGroupHAL, " data_space : %08x, %s",
pStreamConfigsAPI->streams[stream]->data_space,
DataSpaceToString(pStreamConfigsAPI->streams[stream]->data_space));
CAMX_LOG_CONFIG(CamxLogGroupHAL, " priv : %p",
pStreamConfigsAPI->streams[stream]->priv);
CAMX_LOG_CONFIG(CamxLogGroupHAL, " reserved[0] : %p",
pStreamConfigsAPI->streams[stream]->reserved[0]);
CAMX_LOG_CONFIG(CamxLogGroupHAL, " reserved[1] : %p",
pStreamConfigsAPI->streams[stream]->reserved[1]);
// reserved[0] is expected to have been populated during
// ConfigureStreams() with per-stream override info (Camera3HalStream);
// apply the HAL format override if the static setting enables it.
Camera3HalStream* pHalStream =
reinterpret_cast<Camera3HalStream*>(pStreamConfigsAPI->streams[stream]->reserved[0]);
if (pHalStream != NULL)
{
if (TRUE == HwEnvironment::GetInstance()->GetStaticSettings()->enableHALFormatOverride)
{
pStreamConfigsAPI->streams[stream]->format =
static_cast<HALPixelFormat>(pHalStream->overrideFormat);
}
CAMX_LOG_CONFIG(CamxLogGroupHAL,
" pHalStream: %p format : 0x%x, overrideFormat : 0x%x consumer usage: %llx, producer usage: %llx",
pHalStream, pStreamConfigsAPI->streams[stream]->format,
pHalStream->overrideFormat, pHalStream->consumerUsage, pHalStream->producerUsage);
}
}
}
}
CAMX_LOG_CONFIG(CamxLogGroupHAL, "HalOp: End CONFIG, logicalCameraId: %d, cameraId: %d",
pHALDevice->GetCameraId(), pHALDevice->GetFwCameraId());
}
else
{
CAMX_LOG_ERROR(CamxLogGroupHAL, "Invalid argument(s) for configure_streams()");
// HAL interface requires -EINVAL (EInvalidArg) for invalid arguments
result = CamxResultEInvalidArg;
}
// Translate the internal CamxResult to the negative errno the HAL expects.
return Utils::CamxResultToErrno(result);
}
(1)根据pCamera3DeviceAPI 获取 HalDevice的指针
(2)在配流前后,都会打印流的结构体信息
vendor/qcom/proprietary/camx/src/core/hal/camxhaldevice.cpp
// Validates the requested stream configuration, derives the HFR batching
// state, then tears down any previous CHI session and (re)initializes the
// CHI override module for the new configuration.
//
// Returns CamxResultSuccess when CHI accepted the streams, an error otherwise.
CamxResult HALDevice::ConfigureStreams(
    Camera3StreamConfig* pStreamConfigs)
{
    // Validate the incoming stream configurations first.
    CamxResult result = CheckValidStreamConfig(pStreamConfigs);

    // Batch-frame bookkeeping runs regardless of the validation outcome,
    // exactly as before.
    const BOOL isHFRMode = ((StreamConfigModeConstrainedHighSpeed == pStreamConfigs->operationMode) ||
                            (StreamConfigModeSuperSlowMotionFRC   == pStreamConfigs->operationMode));

    if (isHFRMode)
    {
        SearchNumBatchedFrames (pStreamConfigs, &m_usecaseNumBatchedFrames, &m_FPSValue);
        CAMX_ASSERT(m_usecaseNumBatchedFrames > 1);
    }
    else
    {
        // Not a HFR usecase; batch frames value needs to be 1.
        m_usecaseNumBatchedFrames = 1;
    }

    if (CamxResultSuccess == result)
    {
        ClearFrameworkRequestBuffer();
        m_numPipelines = 0;

        // A previous configure call left a live CHI session: tear it down
        // before handing over the new stream set.
        if (TRUE == m_bCHIModuleInitialized)
        {
            GetCHIAppCallbacks()->chi_teardown_override_session(reinterpret_cast<camera3_device*>(&m_camera3Device), 0, NULL);
        }

        m_bCHIModuleInitialized = CHIModuleInitialize(pStreamConfigs);

        if (TRUE == m_bCHIModuleInitialized)
        {
            CAMX_LOG_VERBOSE(CamxLogGroupHAL, "CHI Module configured streams ... CHI is in control!");
        }
        else
        {
            CAMX_LOG_ERROR(CamxLogGroupHAL, "CHI Module failed to configure streams");
            result = CamxResultEFailed;
        }
    }

    return result;
}
(1)如果之前有过配流的操作,m_bCHIModuleInitialized会被赋值,然后销毁 session的操作
(2)同文件下调用CHIModuleInitialize函数操作,然后m_bCHIModuleInitialized赋值
// Hands the new stream configuration to the CHI override module (when one is
// present) and reports whether the override took control of the session.
BOOL HALDevice::CHIModuleInitialize(
    Camera3StreamConfig* pStreamConfigs)
{
    BOOL isOverrideEnabled = FALSE;

    if (TRUE == HAL3Module::GetInstance()->IsCHIOverrideModulePresent())
    {
        /// @todo (CAMX-1518) Handle private data from Override module
        VOID* pPrivateData;

        // The override session writes TRUE into isOverrideEnabled on success.
        GetCHIAppCallbacks()->chi_initialize_override_session(
            GetCameraId(),
            reinterpret_cast<const camera3_device_t*>(&m_camera3Device),
            &m_HALCallbacks,
            reinterpret_cast<camera3_stream_configuration_t*>(pStreamConfigs),
            &isOverrideEnabled,
            &pPrivateData);
    }

    return isOverrideEnabled;
}
vendor/qcom/proprietary/chi-cdk/vendor/chioverride/default/chxextensioninterface.cpp
// CHI callback invoked by the CamX HAL to initialize an override session for
// the given camera with the framework's stream configuration.
//
// Fix: the original discarded the result of
// ExtensionModule::InitializeOverrideSession() and unconditionally returned
// CDKResultSuccess, hiding real failures (e.g. insufficient HW resources)
// from the caller. Propagate the actual result instead; callers that only
// inspect *override_config are unaffected, since it stays FALSE on failure.
static CDKResult chi_initialize_override_session(
    uint32_t                        cameraId,
    const camera3_device_t*         camera3_device,
    const chi_hal_ops_t*            chiHalOps,
    camera3_stream_configuration_t* stream_config,
    int*                            override_config,
    void**                          priv)
{
    ExtensionModule* pExtensionModule = ExtensionModule::GetInstance();

    return pExtensionModule->InitializeOverrideSession(cameraId, camera3_device, chiHalOps,
                                                       stream_config, override_config, priv);
}
vendor/qcom/proprietary/chi-cdk/vendor/chioverride/default/chxextensionmodule.cpp
// Initializes (or re-initializes) the CHI override session for a logical
// camera: decodes the packed operation mode, derives FPS / batching / power
// hints, checks the HW resource budget, selects a matching usecase and
// creates the usecase object. On success *pIsOverrideEnabled is set to TRUE
// so CHI takes control of the session; a copy of the stream configuration is
// retained for recovery.
CDKResult ExtensionModule::InitializeOverrideSession(
uint32_t logicalCameraId,
const camera3_device_t* pCamera3Device,
const chi_hal_ops_t* chiHalOps,
camera3_stream_configuration_t* pStreamConfig,
int* pIsOverrideEnabled,
VOID** pPrivate)
{
CDKResult result = CDKResultSuccess;
UINT32 modeCount = 0;
ChiSensorModeInfo* pAllModes = NULL;
UINT32 fps = *m_pDefaultMaxFPS;
BOOL isVideoMode = FALSE;
uint32_t operation_mode;
static BOOL fovcModeCheck = EnableFOVCUseCase();
UsecaseId selectedUsecaseId = UsecaseId::NoMatch;
UINT minSessionFps = 0;
UINT maxSessionFps = 0;
// Reset per-session outputs and flags before any work is done.
*pPrivate = NULL;
*pIsOverrideEnabled = FALSE;
m_aFlushInProgress = FALSE;
m_firstResult = FALSE;
m_hasFlushOccurred = FALSE;
if (NULL == m_hCHIContext)
{
m_hCHIContext = g_chiContextOps.pOpenContext();
}
ChiVendorTagsOps vendorTagOps = { 0 };
g_chiContextOps.pTagOps(&vendorTagOps);
// The upper 16 bits of operation_mode carry a vendor hint (low nibble is a
// 1-based sensor-mode index); strip them so the standard HAL mode remains.
operation_mode = pStreamConfig->operation_mode >> 16;
operation_mode = operation_mode & 0x000F;
pStreamConfig->operation_mode = pStreamConfig->operation_mode & 0xFFFF;
// Treat the session as video if any stream is consumed by the HW encoder.
for (UINT32 stream = 0; stream < pStreamConfig->num_streams; stream++)
{
if (0 != (pStreamConfig->streams[stream]->usage & GrallocUsageHwVideoEncoder))
{
isVideoMode = TRUE;
break;
}
}
// For video with an explicit sensor-mode hint, take the FPS of that mode.
if ((isVideoMode == TRUE) && (operation_mode != 0))
{
UINT32 numSensorModes = m_logicalCameraInfo[logicalCameraId].m_cameraCaps.numSensorModes;
CHISENSORMODEINFO* pAllSensorModes = m_logicalCameraInfo[logicalCameraId].pSensorModeInfo;
if ((operation_mode - 1) >= numSensorModes)
{
result = CDKResultEOverflow;
CHX_LOG_ERROR("operation_mode: %d, numSensorModes: %d", operation_mode, numSensorModes);
}
else
{
fps = pAllSensorModes[operation_mode - 1].frameRate;
}
}
// HW resource budget check: refuse to open this logical camera if its IFE
// cost does not fit in what is left of the total budget.
// NOTE(review): CamxResultEResource (a CamxResult value) is stored into a
// CDKResult here — the enums appear interchangeable in this codebase; confirm.
m_pResourcesUsedLock->Lock();
if (m_totalResourceBudget > CostOfAnyCurrentlyOpenLogicalCameras())
{
UINT32 myLogicalCamCost = CostOfLogicalCamera(logicalCameraId, pStreamConfig);
if (myLogicalCamCost > (m_totalResourceBudget - CostOfAnyCurrentlyOpenLogicalCameras()))
{
CHX_LOG_ERROR("Insufficient HW resources! myLogicalCamCost = %d, remaining cost = %d",
myLogicalCamCost, (m_totalResourceBudget - CostOfAnyCurrentlyOpenLogicalCameras()));
result = CamxResultEResource;
}
else
{
m_IFEResourceCost[logicalCameraId] = myLogicalCamCost;
m_resourcesUsed += myLogicalCamCost;
}
}
else
{
CHX_LOG_ERROR("Insufficient HW resources! TotalResourceCost = %d, CostOfAnyCurrentlyOpencamera = %d",
m_totalResourceBudget, CostOfAnyCurrentlyOpenLogicalCameras());
result = CamxResultEResource;
}
m_pResourcesUsedLock->Unlock();
if (CDKResultSuccess == result)
{
#if defined(CAMX_ANDROID_API) && (CAMX_ANDROID_API >= 28) //Android-P or better
// Read the AE target FPS range out of the optional session parameters.
camera_metadata_t *metadata = const_cast<camera_metadata_t*>(pStreamConfig->session_parameters);
camera_metadata_entry_t entry = { 0 };
entry.tag = ANDROID_CONTROL_AE_TARGET_FPS_RANGE;
// The client may choose to send NULL sesssion parameter, which is fine. For example, torch mode
// will have NULL session param.
if (metadata != NULL)
{
int ret = find_camera_metadata_entry(metadata, entry.tag, &entry);
if(ret == 0) {
minSessionFps = entry.data.i32[0];
maxSessionFps = entry.data.i32[1];
m_usecaseMaxFPS = maxSessionFps;
}
}
#endif
// Query stream-based preview/video FPS hints from vendor tags in the
// static characteristics (best effort; failures just leave them at 0).
CHIHANDLE staticMetaDataHandle = const_cast<camera_metadata_t*>(
m_logicalCameraInfo[logicalCameraId].m_cameraInfo.static_camera_characteristics);
UINT32 metaTagPreviewFPS = 0;
UINT32 metaTagVideoFPS = 0;
// NOTE(review): this local shadows the ChiVendorTagsOps declared above —
// presumably intentional (different ops table); confirm.
CHITAGSOPS vendorTagOps;
m_previewFPS = 0;
m_videoFPS = 0;
GetInstance()->GetVendorTagOps(&vendorTagOps);
result = vendorTagOps.pQueryVendorTagLocation("org.quic.camera2.streamBasedFPS.info", "PreviewFPS",
&metaTagPreviewFPS);
if (CDKResultSuccess == result)
{
vendorTagOps.pGetMetaData(staticMetaDataHandle, metaTagPreviewFPS, &m_previewFPS,
sizeof(m_previewFPS));
}
result = vendorTagOps.pQueryVendorTagLocation("org.quic.camera2.streamBasedFPS.info", "VideoFPS", &metaTagVideoFPS);
if (CDKResultSuccess == result)
{
vendorTagOps.pGetMetaData(staticMetaDataHandle, metaTagVideoFPS, &m_videoFPS,
sizeof(m_videoFPS));
}
// HFR / SSM sessions: derive batch size and pick an aggressive power hint.
if ((StreamConfigModeConstrainedHighSpeed == pStreamConfig->operation_mode) ||
(StreamConfigModeSuperSlowMotionFRC == pStreamConfig->operation_mode))
{
SearchNumBatchedFrames(logicalCameraId, pStreamConfig,
&m_usecaseNumBatchedFrames, &m_usecaseMaxFPS, maxSessionFps);
if (480 > m_usecaseMaxFPS)
{
m_CurrentpowerHint = PERF_LOCK_POWER_HINT_VIDEO_ENCODE_HFR;
}
else
{
// For 480FPS or higher, require more aggresive power hint
m_CurrentpowerHint = PERF_LOCK_POWER_HINT_VIDEO_ENCODE_HFR_480FPS;
}
}
else
{
// Not a HFR usecase, batch frames value need to be set to 1.
m_usecaseNumBatchedFrames = 1;
if (maxSessionFps == 0)
{
m_usecaseMaxFPS = fps;
}
if (TRUE == isVideoMode)
{
if (30 >= m_usecaseMaxFPS)
{
m_CurrentpowerHint = PERF_LOCK_POWER_HINT_VIDEO_ENCODE;
}
else
{
m_CurrentpowerHint = PERF_LOCK_POWER_HINT_VIDEO_ENCODE_60FPS;
}
}
else
{
m_CurrentpowerHint = PERF_LOCK_POWER_HINT_PREVIEW;
}
}
// Release the old perf lock when the power hint changed.
if ((NULL != m_pPerfLockManager[logicalCameraId]) && (m_CurrentpowerHint != m_previousPowerHint))
{
m_pPerfLockManager[logicalCameraId]->ReleasePerfLock(m_previousPowerHint);
}
// Example [B == batch]: (240 FPS / 4 FPB = 60 BPS) / 30 FPS (Stats frequency goal) = 2 BPF i.e. skip every other stats
*m_pStatsSkipPattern = m_usecaseMaxFPS / m_usecaseNumBatchedFrames / 30;
if (*m_pStatsSkipPattern < 1)
{
*m_pStatsSkipPattern = 1;
}
m_VideoHDRMode = (StreamConfigModeVideoHdr == pStreamConfig->operation_mode);
m_torchWidgetUsecase = (StreamConfigModeQTITorchWidget == pStreamConfig->operation_mode);
// this check is introduced to avoid set *m_pEnableFOVC == 1 if fovcEnable is disabled in
// overridesettings & fovc bit is set in operation mode.
// as well as to avoid set,when we switch Usecases.
if (TRUE == fovcModeCheck)
{
*m_pEnableFOVC = ((pStreamConfig->operation_mode & StreamConfigModeQTIFOVC) == StreamConfigModeQTIFOVC) ? 1 : 0;
}
// Record HAL callbacks and the camera3_device for this logical camera,
// then ask the usecase selector for the best-matching usecase.
SetHALOps(chiHalOps, logicalCameraId);
m_logicalCameraInfo[logicalCameraId].m_pCamera3Device = pCamera3Device;
selectedUsecaseId = m_pUsecaseSelector->GetMatchingUsecase(&m_logicalCameraInfo[logicalCameraId],
pStreamConfig);
CHX_LOG_CONFIG("Session_parameters FPS range %d:%d, previewFPS %d, videoFPS %d,"
"BatchSize: %u FPS: %u SkipPattern: %u,"
"cameraId = %d selected use case = %d",
minSessionFps,
maxSessionFps,
m_previewFPS,
m_videoFPS,
m_usecaseNumBatchedFrames,
m_usecaseMaxFPS,
*m_pStatsSkipPattern,
logicalCameraId,
selectedUsecaseId);
// FastShutter mode supported only in ZSL usecase.
if ((pStreamConfig->operation_mode == StreamConfigModeFastShutter) &&
(UsecaseId::PreviewZSL != selectedUsecaseId))
{
pStreamConfig->operation_mode = StreamConfigModeNormal;
}
m_operationMode[logicalCameraId] = pStreamConfig->operation_mode;
}
// Create the usecase object for the selected id; keep a private copy of the
// stream configuration for session recovery.
if (UsecaseId::NoMatch != selectedUsecaseId)
{
m_pSelectedUsecase[logicalCameraId] =
m_pUsecaseFactory->CreateUsecaseObject(&m_logicalCameraInfo[logicalCameraId],
selectedUsecaseId, pStreamConfig);
if (NULL != m_pSelectedUsecase[logicalCameraId])
{
// NOTE(review): CHX_CALLOC results are dereferenced without NULL
// checks below — confirm allocation cannot fail here or add checks.
m_pStreamConfig[logicalCameraId] = static_cast<camera3_stream_configuration_t*>(
CHX_CALLOC(sizeof(camera3_stream_configuration_t)));
m_pStreamConfig[logicalCameraId]->streams = static_cast<camera3_stream_t**>(
CHX_CALLOC(sizeof(camera3_stream_t*) * pStreamConfig->num_streams));
m_pStreamConfig[logicalCameraId]->num_streams = pStreamConfig->num_streams;
for (UINT32 i = 0; i< m_pStreamConfig[logicalCameraId]->num_streams; i++)
{
m_pStreamConfig[logicalCameraId]->streams[i] = pStreamConfig->streams[i];
}
m_pStreamConfig[logicalCameraId]->operation_mode = pStreamConfig->operation_mode;
// use camera device / used for recovery only for regular session
m_SelectedUsecaseId[logicalCameraId] = (UINT32)selectedUsecaseId;
CHX_LOG_CONFIG("Logical cam Id = %d usecase addr = %p", logicalCameraId, m_pSelectedUsecase[
logicalCameraId]);
m_pCameraDeviceInfo[logicalCameraId].m_pCamera3Device = pCamera3Device;
*pIsOverrideEnabled = TRUE;
}
else
{
CHX_LOG_ERROR("For cameraId = %d CreateUsecaseObject failed", logicalCameraId);
m_logicalCameraInfo[logicalCameraId].m_pCamera3Device = NULL;
m_pResourcesUsedLock->Lock();
// Reset the m_resourcesUsed & m_IFEResourceCost if usecase creation failed
if (m_resourcesUsed >= m_IFEResourceCost[logicalCameraId]) // to avoid underflow
{
m_resourcesUsed -= m_IFEResourceCost[logicalCameraId]; // reduce the total cost
m_IFEResourceCost[logicalCameraId] = 0;
}
m_pResourcesUsedLock->Unlock();
}
}
return result;
}
(1) 判断是否是视频模式,做帧率的操作
(2) 获取camera资源消耗情况,并对相关参数赋值
(3)根据logicalCameraId 匹配 usecase
(4)根据usecaseId 创建usecase
vendor/qcom/proprietary/chi-cdk/vendor/chioverride/default/chxusecaseutils.cpp
UsecaseId UsecaseSelector::GetMatchingUsecase(
const LogicalCameraInfo* pCamInfo,
camera3_stream_configuration_t* pStreamConfig)
{
UsecaseId usecaseId = UsecaseId::Default;
UINT32 VRDCEnable = ExtensionModule::GetInstance()->GetDCVRMode();
if ((pStreamConfig->num_streams == 2) && IsQuadCFASensor(pCamInfo) &&
(LogicalCameraType_Default == pCamInfo->logicalCameraType))
{
// need to validate preview size <= binning size, otherwise return error
/// If snapshot size is less than sensor binning size, select defaut zsl usecase.
/// Only if snapshot size is larger than sensor binning size, select QuadCFA usecase.
/// Which means for snapshot in QuadCFA usecase,
/// - either do upscale from sensor binning size,
/// - or change sensor mode to full size quadra mode.
if (TRUE == QuadCFAMatchingUsecase(pCamInfo, pStreamConfig))
{
usecaseId = UsecaseId::QuadCFA;
CHX_LOG_CONFIG("Quad CFA usecase selected");
return usecaseId;
}
}
if (pStreamConfig->operation_mode == StreamConfigModeSuperSlowMotionFRC)
{
usecaseId = UsecaseId::SuperSlowMotionFRC;
CHX_LOG_CONFIG("SuperSlowMotionFRC usecase selected");
return usecaseId;
}
/// Reset the usecase flags
VideoEISV2Usecase = 0;
VideoEISV3Usecase = 0;
GPURotationUsecase = FALSE;
GPUDownscaleUsecase = FALSE;
if ((NULL != pCamInfo) && (pCamInfo->numPhysicalCameras > 1) && VRDCEnable)
{
CHX_LOG_CONFIG("MultiCameraVR usecase selected");
usecaseId = UsecaseId::MultiCameraVR;
}
else if ((NULL != pCamInfo) && (pCamInfo->numPhysicalCameras > 1) && (pStreamConfig->num_streams > 1))
{
CHX_LOG_CONFIG("MultiCamera usecase selected");
usecaseId = UsecaseId::MultiCamera;
}
else
{
switch (pStreamConfig->num_streams)
{
case 2:
if (TRUE == IsRawJPEGStreamConfig(pStreamConfig))
{
CHX_LOG_CONFIG("Raw + JPEG usecase selected");
usecaseId = UsecaseId::RawJPEG;
break;
}
/// @todo Enable ZSL by setting overrideDisableZSL to FALSE
if (FALSE == m_pExtModule->DisableZSL())
{
if (TRUE == IsPreviewZSLStreamConfig(pStreamConfig))
{
usecaseId = UsecaseId::PreviewZSL;
CHX_LOG_CONFIG("ZSL usecase selected");
}
}