////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2017-2018 Qualcomm Technologies, Inc.
// All Rights Reserved.
// Confidential and Proprietary - Qualcomm Technologies, Inc.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @file  camxchicontext.cpp
/// @brief Definitions for ChiContext class containing CHI API specific information
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

#include "camxcommontypes.h"
#include "camxchi.h"
#include "camxchisession.h"
#include "camxhwcontext.h"
#include "camxhwenvironment.h"
#include "camximagesensormoduledata.h"
#include "camximagesensordata.h"
#include "camxpipeline.h"
#include "camxsession.h"
#include "camxsettingsmanager.h"
#include "camxvendortags.h"
#include "g_camxsettings.h"
#include "camxchicontext.h"

CAMX_NAMESPACE_BEGIN

// These macros match the definitions found in hardware/camera_common.h and hardware/hardware.h
#define CAMERA_DEVICE_HALAPI_VERSION(major, minor) ((((major) & 0xFF) << 8) | ((minor) & 0xFF))
#define CAMERA_DEVICE_API_VERSION_3_3           CAMERA_DEVICE_HALAPI_VERSION(3, 3)

static const UINT MaxTrackedSessions = 128;
static const UINT MaxTrackedPipelines = 512;

// We can allocate a max of 8*8 = 64 buffers for the 240fps HFR case,
// but we cap it at 48 because the framework allows a max of 64 buffers
// total, which includes both the camera HAL buffers and the video buffers (count of 9)
static const UINT MaxNumberOfBuffersAllowed = 48;

static const UINT MaxNumberOfOpenFiles      = 2048;

/// @brief Bundles the information needed to notify a client when a CHI fence is signaled
struct ChiFenceCallbackData
{
    ChiFence*           pChiFence;  ///< Chi fence being tracked; its signal triggers pCallback
    PFNCHIFENCECALLBACK pCallback;  ///< Client callback invoked when pChiFence is signaled
    VOID*               pUserData;  ///< Opaque client context passed back through pCallback
};

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::Create
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
ChiContext* ChiContext::Create()
{
    // Two-phase construction: allocate first, then initialize, so that a
    // partially-constructed context can be torn down cleanly on failure.
    ChiContext* pNewContext = CAMX_NEW ChiContext;

    if (NULL == pNewContext)
    {
        return NULL;
    }

    if (CamxResultSuccess != pNewContext->Initialize())
    {
        // Initialize failed; Destroy releases anything partially set up and deletes the object
        pNewContext->Destroy();
        pNewContext = NULL;
    }

    return pNewContext;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::Initialize
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::Initialize()
{
    CamxResult result = CamxResultSuccess;

    m_pHwEnvironment = HwEnvironment::GetInstance();

    if (NULL == m_pHwEnvironment)
    {
        result = CamxResultENoMemory;
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Out of memory");
    }
    else
    {
        UINT32 threadCount = GetStaticSettings()->numberOfCHIThreads;

        if (TRUE == GetStaticSettings()->enableTOFInterface)
        {
            /// The TOF sensor needs a dedicated thread on top of the configured CHI thread count
            threadCount += 1;
        }

        result = ThreadManager::Create(&m_pThreadManager, "SoloThreadManager", threadCount);
    }

    // Deferred request queue depends on the thread manager created above
    if (CamxResultSuccess == result)
    {
        DeferredRequestQueueCreateData deferredCreateData;

        deferredCreateData.numPipelines      = 0;
        deferredCreateData.pThreadManager    = m_pThreadManager;
        deferredCreateData.requestQueueDepth = DefaultRequestQueueDepth;

        m_pDeferredRequestQueue = DeferredRequestQueue::Create(&deferredCreateData);

        if (NULL == m_pDeferredRequestQueue)
        {
            result = CamxResultENoMemory;
            CAMX_LOG_ERROR(CamxLogGroupHAL, "Out of memory");
        }
    }

    // Create the HW context bound to the HW environment
    if (CamxResultSuccess == result)
    {
        HwContextCreateData createData = { 0 };

        createData.pHwEnvironment = m_pHwEnvironment;

        result = HwContext::Create(&createData);

        if (CamxResultSuccess == result)
        {
            CAMX_ASSERT(NULL != createData.pHwContext);

            m_pHwContext = createData.pHwContext;
        }
    }

    if (CamxResultSuccess == result)
    {
        /// Default max num of open files is 1024; raise it to MaxNumberOfOpenFiles (2048)
        CAMX_LOG_INFO(CamxLogGroupHAL, "Set max open files to %d", MaxNumberOfOpenFiles);
        result = OsUtils::SetFDLimit(MaxNumberOfOpenFiles);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::~ChiContext
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Intentionally empty: all teardown (metadata pools, HW context, deferred queue,
// thread manager) is performed in Destroy(), which also deletes this object.
ChiContext::~ChiContext()
{
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::Destroy
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID ChiContext::Destroy()
{
    // Tear down the per-camera static metadata pools first
    for (UINT32 cameraId = 0; cameraId < MaxNumCameras; cameraId++)
    {
        MetadataPool* pStaticMetadataPool = m_perCameraInfo[cameraId].pStaticMetadataPool;

        if (NULL != pStaticMetadataPool)
        {
            pStaticMetadataPool->Destroy();
            // Clear the member so no stale pointer survives; the local copy is about to
            // go out of scope, so re-nulling it (as the old code did) was a dead store.
            m_perCameraInfo[cameraId].pStaticMetadataPool = NULL;
        }
    }

    if (NULL != m_pHwContext)
    {
        m_pHwContext->Destroy();
        m_pHwContext = NULL;
    }

    if (NULL != m_pDeferredRequestQueue)
    {
        m_pDeferredRequestQueue->Destroy();
        m_pDeferredRequestQueue = NULL;
    }

    /// @todo (CAMX-2491) Is there a need to wait for all threads to retire or is it good to assume so
    if (NULL != m_pThreadManager)
    {
        m_pThreadManager->Destroy();
        m_pThreadManager = NULL;
    }

    // This object owns itself; Destroy is the only deletion path (see ~ChiContext)
    CAMX_DELETE this;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::GetNumCameras
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
UINT32 ChiContext::GetNumCameras()
{
    // Thin pass-through: the HW environment owns the enumerated camera count
    const UINT32 cameraCount = m_pHwEnvironment->GetNumCameras();

    return cameraCount;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::EnumerateSensorModes
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Fill pSensorModeInfo with one entry per sensor resolution mode for the given camera.
///
/// @param  cameraId        Camera whose sensor modes are enumerated
/// @param  numSensorModes  Number of entries the caller allocated in pSensorModeInfo; must match the
///                         sensor's resolutionDataCount
/// @param  pSensorModeInfo Output array of mode descriptors, populated from the IMAGE stream of each mode
///
/// @return CamxResultSuccess on success, CamxResultEInvalidArg on count mismatch or missing module data
CamxResult ChiContext::EnumerateSensorModes(
    UINT32             cameraId,
    UINT32             numSensorModes,
    ChiSensorModeInfo* pSensorModeInfo)
{
    ImageSensorModuleData*       pImageSensorModuleData =
    // NOWHINE CP036a: Since the function is const, had to add the const_cast
        const_cast<ImageSensorModuleData*>(m_pHwEnvironment->GetImageSensorModuleData(cameraId));

    CAMX_ASSERT(NULL != pSensorModeInfo);

    // Guard against a missing sensor module before dereferencing it
    if (NULL == pImageSensorModuleData)
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Can't enumerate all the sensor modes");
        return CamxResultEInvalidArg;
    }

    ImageSensorData*             pImageSensorData       = pImageSensorModuleData->GetSensorDataObject();
    const ResolutionInformation* pResolutionInfo        = pImageSensorData->GetResolutionInfo();
    ResolutionData*              pResolutionData        = pResolutionInfo->resolutionData;

    CAMX_ASSERT(numSensorModes == pResolutionInfo->resolutionDataCount);

    if (numSensorModes != pResolutionInfo->resolutionDataCount)
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Can't enumerate all the sensor modes");
        return CamxResultEInvalidArg;
    }
    else
    {
        for (UINT i = 0; i < pResolutionInfo->resolutionDataCount; i++)
        {
            UINT streamCount = pResolutionData[i].streamInfo.streamConfigurationCount;

            for (UINT streamIndex = 0; streamIndex < streamCount; streamIndex++)
            {
                StreamConfiguration* pStreamConfiguration = &pResolutionData[i].streamInfo.streamConfiguration[streamIndex];

                // Sensor mode is selected based on IMAGE type only
                if (StreamType::IMAGE == pStreamConfiguration->type)
                {
                    pSensorModeInfo[i].modeIndex             = i;
                    pSensorModeInfo[i].frameDimension.left   = pStreamConfiguration->frameDimension.xStart;
                    pSensorModeInfo[i].frameDimension.top    = pStreamConfiguration->frameDimension.yStart;
                    pSensorModeInfo[i].frameDimension.width  = pStreamConfiguration->frameDimension.width;
                    pSensorModeInfo[i].frameDimension.height = pStreamConfiguration->frameDimension.height;
                    pSensorModeInfo[i].streamtype            = static_cast<CHISENSORSTREAMTYPE>(pStreamConfiguration->type);
                    pSensorModeInfo[i].frameRate             = static_cast<UINT32>(pResolutionData[i].frameRate);
                    pSensorModeInfo[i].bpp                   = pStreamConfiguration->bitWidth;

                    // Clear once, then OR in every advertised capability for this mode.
                    // (Previously the clear was inside the loop, so only the LAST capability
                    // of a multi-capability mode survived.)
                    pSensorModeInfo[i].sensorModeCaps.value = 0;

                    for (UINT j = 0; j < pResolutionData[i].capabilityCount; j++)
                    {
                        switch (pResolutionData[i].capability[j])
                        {
                            case SensorCapability::NORMAL:
                                pSensorModeInfo[i].sensorModeCaps.u.Normal  = 1;
                                break;
                            case SensorCapability::HFR:
                                pSensorModeInfo[i].sensorModeCaps.u.HFR     = 1;
                                break;
                            case SensorCapability::IHDR:
                                pSensorModeInfo[i].sensorModeCaps.u.IHDR    = 1;
                                break;
                            case SensorCapability::PDAF:
                                pSensorModeInfo[i].sensorModeCaps.u.PDAF    = 1;
                                break;
                            case SensorCapability::QUADCFA:
                                pSensorModeInfo[i].sensorModeCaps.u.QuadCFA = 1;
                                break;
                            case SensorCapability::ZZHDR:
                                pSensorModeInfo[i].sensorModeCaps.u.ZZHDR   = 1;
                                break;
                            default:
                                CAMX_LOG_ERROR(CamxLogGroupHAL, "Unsupported capability");
                                break;
                        }
                    }

                    // NOTE(review): width/height are derived by subtracting the right/bottom crop
                    // minus 1 from the frame dimension; this assumes cropInfo.right/bottom are
                    // offsets from the frame edge — confirm against the sensor driver convention.
                    pSensorModeInfo[i].cropInfo.left         = pResolutionData[i].cropInfo.left;
                    pSensorModeInfo[i].cropInfo.top          = pResolutionData[i].cropInfo.top;
                    pSensorModeInfo[i].cropInfo.width        =
                        pStreamConfiguration->frameDimension.width - pResolutionData[i].cropInfo.right - 1;
                    pSensorModeInfo[i].cropInfo.height       =
                        pStreamConfiguration->frameDimension.height - pResolutionData[i].cropInfo.bottom - 1;

                }
            }
        }
    }

    return CamxResultSuccess;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// ChiContext::ProcessCameraOpen
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::ProcessCameraOpen(
    UINT32 cameraId)
{
    // Validate the request with early-out guards before mutating any state
    if (cameraId >= GetNumCameras())
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Invalid camera id: %d", cameraId);
        return CamxResultEInvalidArg;
    }

    if (TRUE == IsCameraOpened(cameraId))
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Camera id already in use: %d", cameraId);
        return CamxResultEBusy;
    }

    if ((m_numCamerasOpened + 1) > MaxConcurrentDevices)
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Too many concurrent devices to open camera id: %d", cameraId);
        return CamxResultETooManyUsers;
    }

    // All checks passed: mark this camera opened and bump the global open count
    m_perCameraInfo[cameraId].isCameraOpened = TRUE;
    m_numCamerasOpened++;

    return CamxResultSuccess;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// ChiContext::ProcessCameraClose
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::ProcessCameraClose(
    UINT32 cameraId)
{
    // Validate the request with early-out guards before mutating any state
    if (cameraId >= GetNumCameras())
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Invalid camera id: %d", cameraId);
        return CamxResultEInvalidArg;
    }

    if (FALSE == IsCameraOpened(cameraId))
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Camera id is closed already: %d", cameraId);
        return CamxResultEBusy;
    }

    // Mark this camera closed and drop the global open count
    m_perCameraInfo[cameraId].isCameraOpened = FALSE;
    m_numCamerasOpened--;

    return CamxResultSuccess;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::InitializeStaticMetadataPool
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::InitializeStaticMetadataPool(
    UINT32  cameraId)
{
    CamxResult    result     = CamxResultSuccess;
    HwCameraInfo  cameraInfo = {};

    CAMX_ASSERT(cameraId <  GetNumCameras());
    CAMX_ASSERT(NULL     != m_pHwEnvironment);

    result = m_pHwEnvironment->GetCameraInfo(cameraId, &cameraInfo);

    // Populate static metadata from platform capabilities.
    if (CamxResultSuccess == result)
    {
        HwCameraInfo* pCameraInfo = &cameraInfo;

        // Static Pool has only one slot ever
        MetadataSlot* pStaticMetadataSlot = m_perCameraInfo[cameraId].pStaticMetadataPool->GetSlot(0);

        pStaticMetadataSlot->SetSlotRequestId(1);

        CAMX_ASSERT(NULL != pStaticMetadataSlot);

        /// @todo (CAMX-961) Add check to see if valid static metadata is already filled.

        // Fill in all supported characteristics keys
        UINT numCharacteristicsKeys = cameraInfo.pPlatformCaps->numCharacteristicsKeys;

        // NOWHINE GR004 {} Suspress the switch case indent problem with curly brackets
        for (UINT key = 0; key < numCharacteristicsKeys; key++)
        {
            UINT count = 0;
            /// @todo (CAMX-79) - Populate static capabilities from sensor module.
            /// @todo (CAMX-961)- Fill in the remaining metadata.
            // NOWHINE CP036a {} Google API requires const type
            switch (pCameraInfo->pPlatformCaps->characteristicsKeys[key])
            {
                // Color correction
                case ColorCorrectionAvailableAberrationModes:
                {
                    UINT8 availableAbberationsModes[ColorCorrectionAberrationModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numAbberationsModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableAbberationsModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->abberationModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ColorCorrectionAvailableAberrationModes,
                                                                   static_cast<VOID*>(availableAbberationsModes),
                                                                   count);
                    break;

                }

                // Control
                case ControlAEAvailableAntibandingModes:
                {
                    UINT8 availableAntibandingModes[ControlAEAntibandingModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numAntibandingModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableAntibandingModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->antibandingModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAEAvailableAntibandingModes,
                                                                   static_cast<VOID*>(availableAntibandingModes),
                                                                   count);
                    break;

                }

                case ControlAEAvailableModes:
                {
                    UINT8 availableAEModes[ControlAEModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numAEModes;

                    UINT capsCount = 0;

                    for (UINT i = 0; i < count; i++)
                    {
                        if ((ControlAEModeOnAutoFlash       == pCameraInfo->pPlatformCaps->AEModes[i]) ||
                            (ControlAEModeOnAlwaysFlash     == pCameraInfo->pPlatformCaps->AEModes[i]) ||
                            (ControlAEModeOnAutoFlashRedeye == pCameraInfo->pPlatformCaps->AEModes[i]))
                        {
                            if (TRUE == pCameraInfo->pSensorCaps->hasFlash)
                            {
                                availableAEModes[capsCount++] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->AEModes[i]);
                            }
                        }
                        else
                        {
                            availableAEModes[capsCount++] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->AEModes[i]);
                        }
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAEAvailableModes,
                                                                   static_cast<VOID*>(availableAEModes),
                                                                   capsCount);
                    break;

                }

                case ControlAEAvailableTargetFPSRanges:
                {
                    count = pCameraInfo->pHwEnvironmentCaps->numAETargetFPSRanges * (sizeof(RangeINT32) / sizeof(INT32));

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAEAvailableTargetFPSRanges,
                                                                   static_cast<VOID*>(const_cast<RangeINT32*>(
                                                                   pCameraInfo->pHwEnvironmentCaps->AETargetFPSRanges)),
                                                                   count);
                    break;

                }

                case ControlAECompensationRange:
                {
                    // Update.AE compensation range. min and max values
                    INT32 availableAECompensationRange[] =
                    {
                        pCameraInfo->pPlatformCaps->minAECompensationValue,
                        pCameraInfo->pPlatformCaps->maxAECompensationValue
                    };
                    count = sizeof(availableAECompensationRange) / sizeof(availableAECompensationRange[0]);

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAECompensationRange,
                                                                   static_cast<VOID*>(availableAECompensationRange),
                                                                   count);
                    break;

                }

                case ControlAECompensationStep:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAECompensationStep,
                                                                   static_cast<VOID*>(const_cast<Rational*>
                                                                   (&(pCameraInfo->pPlatformCaps->AECompensationSteps))),
                                                                   1);
                    break;

                }

                case ControlAFAvailableModes:
                {
                    UINT8 availableAFModes[ControlAFModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numAFModes;

                    if (FALSE == pCameraInfo->pSensorCaps->isFixedFocus)
                    {
                        for (UINT i = 0; i < count; i++)
                        {
                            availableAFModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->AFModes[i]);
                        }
                    }
                    else
                    {
                        count = 1;
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAFAvailableModes,
                                                                   static_cast<VOID*>(availableAFModes),
                                                                   count);
                    break;

                }

                case ControlAvailableEffects:
                {
                    UINT8 availableEffectModes[ControlEffectModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numEffectModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableEffectModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->effectModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAvailableEffects,
                                                                   static_cast<VOID*>(availableEffectModes),
                                                                   count);
                    break;

                }

                case ControlAvailableSceneModes:
                {
                    UINT8 availableSceneModes[ControlSceneModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numSceneModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableSceneModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->sceneModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAvailableSceneModes,
                                                                   static_cast<VOID*>(availableSceneModes),
                                                                   count);
                    break;

                }

                case ControlAvailableVideoStabilizationModes:
                {
                    UINT8 availableVideoStabilizationModes[ControlVideoStabilizationModeEnd] = { 0 };
                    count = 0;

                    for (UINT i = 0; i < pCameraInfo->pPlatformCaps->numVideoStabilizationModes; i++)
                    {
                        if ((ImageSensorFacingExternal       != pCameraInfo->imageSensorFacing) ||
                            (ControlVideoStabilizationModeOn != pCameraInfo->pPlatformCaps->videoStabilizationsModes[i]))
                        {
                            availableVideoStabilizationModes[i] = static_cast<UINT8>(
                                pCameraInfo->pPlatformCaps->videoStabilizationsModes[i]);
                            count++;
                        }
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAvailableVideoStabilizationModes,
                                                                   static_cast<VOID*>(availableVideoStabilizationModes),
                                                                   count);
                    break;

                }

                case ControlAWBAvailableModes:
                {
                    UINT8 availableAWBModes[ControlAWBModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numAWBModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableAWBModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->AWBModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAWBAvailableModes,
                                                                   static_cast<VOID*>(availableAWBModes),
                                                                   count);
                    break;

                }

                case ControlMaxRegions:
                {
                    // Update.Available Max Regions for AE, AWB and AF respectively.
                    INT32 availableMaxRegions[] =
                    {
                        pCameraInfo->pPlatformCaps->maxRegionsAE,
                        pCameraInfo->pPlatformCaps->maxRegionsAWB,
                        pCameraInfo->pPlatformCaps->maxRegionsAF,
                    };

                    if (TRUE == pCameraInfo->pSensorCaps->isFixedFocus)
                    {
                        availableMaxRegions[2] = 0;
                    }

                    count = sizeof(availableMaxRegions) / sizeof(availableMaxRegions[0]);

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlMaxRegions,
                                                                   static_cast<VOID*>(availableMaxRegions),
                                                                   count);
                    break;

                }

                case ControlSceneModeOverrides:
                {
                    // Update Available Scene Mode overrides for AE, AWB and AF.
                    UINT8 availableSceneModeOverrides[ControlSceneModeEnd][3];
                    count = pCameraInfo->pPlatformCaps->numSceneModes * 3;

                    for (UINT i = 0; i < pCameraInfo->pPlatformCaps->numSceneModes; i++)
                    {
                        availableSceneModeOverrides[i][0] =
                            static_cast<UINT8>(pCameraInfo->pPlatformCaps->sceneModeOverride[i].AEModeOverride);
                        availableSceneModeOverrides[i][1] =
                            static_cast<UINT8>(pCameraInfo->pPlatformCaps->sceneModeOverride[i].AWBModeOverride);
                        availableSceneModeOverrides[i][2] =
                            static_cast<UINT8>(pCameraInfo->pPlatformCaps->sceneModeOverride[i].AFModeOverride);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlSceneModeOverrides,
                                                                   static_cast<VOID*>(availableSceneModeOverrides),
                                                                   count);
                    break;

                }

                case ControlAvailableHighSpeedVideoConfigurations:
                {
                    if (pCameraInfo->pHwEnvironmentCaps->numHFRRanges > 0)
                    {
                        count = pCameraInfo->pHwEnvironmentCaps->numHFRRanges *
                                    (sizeof(HFRConfigurationParams) / sizeof(INT32));

                        result = pStaticMetadataSlot->SetMetadataByTag(ControlAvailableHighSpeedVideoConfigurations,
                                                                       static_cast<VOID*>(const_cast
                                                                       <HFRConfigurationParams*>(
                                                                       pCameraInfo->pHwEnvironmentCaps->HFRVideoSizes)),
                                                                       count);
                    }
                    break;
                }

                case ControlAELockAvailable:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAELockAvailable,
                                                                   static_cast<VOID*>(const_cast<BOOL*>(
                                                                   &(pCameraInfo->pPlatformCaps->lockAEAvailable))),
                                                                   1);
                    break;

                }

                case ControlAWBLockAvailable:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAWBLockAvailable,
                                                                   static_cast<VOID*>(const_cast<BOOL*>(
                                                                   &(pCameraInfo->pPlatformCaps->lockAWBAvailable))),
                                                                   1);
                    break;

                }

                case ControlAvailableModes:
                {
                    UINT8 availableModes[ControlModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numAvailableModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->availableModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlAvailableModes,
                                                                   static_cast<VOID*>(availableModes),
                                                                   count);
                    break;

                }

                case ControlPostRawSensitivityBoostRange:
                {
                    RangeINT32 sensitivityRange;

                    sensitivityRange.min = pCameraInfo->pPlatformCaps->minPostRawSensitivityBoost;
                    sensitivityRange.max = pCameraInfo->pPlatformCaps->maxPostRawSensitivityBoost;

                    result = pStaticMetadataSlot->SetMetadataByTag(ControlPostRawSensitivityBoostRange,
                                                                   static_cast<VOID*>(&sensitivityRange),
                                                                   sizeof(RangeINT32) / sizeof(INT32));
                    break;

                }

                // Edge
                case EdgeAvailableEdgeModes:
                {
                    // Publish the platform's supported edge (sharpening) modes as a UINT8 list;
                    // the enum values are narrowed to UINT8 to match the metadata tag type.
                    UINT8 availableEdgeModes[EdgeModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numEdgeModes;

                    /// @todo (CAMX-1015)- check efficient way to eliminate array copy.
                    /// do the same thing for other enum array in this file.
                    for (UINT i = 0; i < count; i++)
                    {
                        availableEdgeModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->edgeModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(EdgeAvailableEdgeModes,
                                                                   static_cast<VOID*>(availableEdgeModes),
                                                                   count);
                    break;
                }

                // Flash
                case FlashInfoAvailable:
                {
                    // Single BOOL: whether the sensor module reports a flash unit.
                    result = pStaticMetadataSlot->SetMetadataByTag(FlashInfoAvailable,
                                                                   static_cast<VOID*>(const_cast<BOOL*>(
                                                                   &(pCameraInfo->pSensorCaps->hasFlash))),
                                                                   1);
                    break;
                }
                // The following flash tags have no static value here; log an error and publish nothing.
                case FlashInfoChargeDuration:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for FlashInfoChargeDuration");
                    break;

                case FlashColorTemperature:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for FlashColorTemperature");
                    break;

                case FlashMaxEnergy:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for FlashMaxEnergy");
                    break;

                // Hot pixel
                case HotPixelAvailableHotPixelModes:
                {
                    // Publish the platform's supported hot-pixel correction modes as a UINT8 list.
                    UINT8 availableHotPixelModes[HotPixelModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numHotPixelModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableHotPixelModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->hotPixelModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(HotPixelAvailableHotPixelModes,
                                                                   static_cast<VOID*>(availableHotPixelModes),
                                                                   count);

                    break;
                }

                // JPEG
                case JPEGAvailableThumbnailSizes:
                {
                    // count is expressed in INT32 units: number of sizes times the
                    // number of INT32 fields per DimensionCap.
                    count = pCameraInfo->pPlatformCaps->numJPEGThumbnailSizes * (sizeof(DimensionCap) / sizeof(INT32));

                    result = pStaticMetadataSlot->SetMetadataByTag(JPEGAvailableThumbnailSizes,
                                                                   static_cast<VOID*>(const_cast<DimensionCap*>(
                                                                   pCameraInfo->pPlatformCaps->JPEGThumbnailSizes)),
                                                                   count);
                    break;

                }

                case JPEGMaxSize:
                {
                    // Single INT32: maximum JPEG output size in bytes, from the HW environment caps.
                    result = pStaticMetadataSlot->SetMetadataByTag(JPEGMaxSize,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pHwEnvironmentCaps->JPEGMaxSizeInBytes))),
                                                                   1);
                    break;

                }

                // Lens
                case LensInfoAvailableApertures:
                {
                    // FLOAT list of supported aperture f-numbers.
                    // NOTE: "Aperatures" spelling comes from the sensor-caps struct fields.
                    count   = pCameraInfo->pSensorCaps->numAperatures;
                    CAMX_ASSERT(count <= MaxTagValues);

                    result = pStaticMetadataSlot->SetMetadataByTag(LensInfoAvailableApertures,
                                                                   static_cast<VOID*>(const_cast<FLOAT*>(
                                                                   &(pCameraInfo->pSensorCaps->aperatures[0]))),
                                                                   count);
                    break;

                }

                case LensInfoAvailableFilterDensities:
                {
                    // FLOAT list of supported neutral-density filter (NDF) values.
                    count   = pCameraInfo->pSensorCaps->numNDFs;
                    CAMX_ASSERT(count <= MaxTagValues);

                    result = pStaticMetadataSlot->SetMetadataByTag(LensInfoAvailableFilterDensities,
                                                                   static_cast<VOID*>(const_cast<FLOAT*>(
                                                                   &(pCameraInfo->pSensorCaps->NDFs[0]))),
                                                                   count);
                    break;

                }

                case LensInfoAvailableFocalLengths:
                {
                    // Publish focal lengths only when the sensor reports at least one;
                    // otherwise log an error and leave the tag unset.
                    count   = pCameraInfo->pSensorCaps->numFocalLengths;
                    if (count > 0)
                    {
                        CAMX_ASSERT(count <= MaxTagValues);

                        result = pStaticMetadataSlot->SetMetadataByTag(LensInfoAvailableFocalLengths,
                                                                       static_cast<VOID*>(const_cast<FLOAT*>(
                                                                       &(pCameraInfo->pSensorCaps->focalLengths[0]))),
                                                                       count);
                    }
                    else
                    {
                        CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for LensInfoAvailableFocalLengths");
                    }

                    break;
                }

                case LensInfoAvailableOpticalStabilization:
                {
                    // OFF is always advertised; ON is appended only when the lens has OIS
                    // (count selects how many leading entries of this array are published).
                    UINT8 availableOisModes[LensOpticalStabilizationModeEnd] =
                    {
                        LensOpticalStabilizationModeOff,
                        LensOpticalStabilizationModeOn
                    };

                    // HAL must support LensOpticalStabilizationModeOff, and check if lens support optical stabilization
                    count = (TRUE == pCameraInfo->pSensorCaps->hasOIS) ? 2 : 1;
                    CAMX_ASSERT(count <= LensOpticalStabilizationModeEnd);

                    result = pStaticMetadataSlot->SetMetadataByTag(LensInfoAvailableOpticalStabilization,
                                                                   static_cast<VOID*>(availableOisModes),
                                                                   count);
                    break;

                }

                case LensInfoHyperfocalDistance:
                {
                    // Single FLOAT: hyperfocal distance reported by the sensor caps.
                    result = pStaticMetadataSlot->SetMetadataByTag(LensInfoHyperfocalDistance,
                                                                   static_cast<VOID*>(const_cast<FLOAT*>(
                                                                   &(pCameraInfo->pSensorCaps->hyperfocalDistance))),
                                                                   1);
                    break;
                }

                case LensInfoMinimumFocusDistance:
                {
                    // Fixed-focus lenses publish 0.0 (the convention for "fixed focus");
                    // otherwise use the sensor-reported minimum focus distance.
                    FLOAT   minimumFocusDistance = 0.0;
                    if (FALSE == pCameraInfo->pSensorCaps->isFixedFocus)
                    {
                        minimumFocusDistance = pCameraInfo->pSensorCaps->minimumFocusDistance;
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(LensInfoMinimumFocusDistance,
                                                                   static_cast<VOID*>(const_cast<FLOAT*>(
                                                                   &(minimumFocusDistance))),
                                                                   1);
                    break;
                }

                case LensInfoShadingMapSize:
                {
                    // count in INT32 units (width + height of the lens shading map grid).
                    count = sizeof(DimensionCap) / sizeof(INT32);

                    result = pStaticMetadataSlot->SetMetadataByTag(LensInfoShadingMapSize,
                                                                   static_cast<VOID*>(const_cast<DimensionCap*>(
                                                                   &(pCameraInfo->pSensorCaps->lensShadingMapSize))),
                                                                   count);
                    break;
                }

                case LensInfoFocusDistanceCalibration:
                {
                    // Single enum value describing how focus distances are calibrated.
                    result = pStaticMetadataSlot->SetMetadataByTag(LensInfoFocusDistanceCalibration,
                                                                   static_cast<VOID*>
                                                                   (const_cast<LensInfoFocusDistanceCalibrationValues*>
                                                                   (&(pCameraInfo->pSensorCaps->focusDistanceCalibration))),
                                                                   1);
                    break;
                }

                case LensFacing:
                {
                    // Map the internal image-sensor facing enum to the metadata LensFacing enum;
                    // an unknown value asserts and publishes LensFacingInvalid.
                    LensFacingValues lensFacing = LensFacingInvalid;

                    // NOWHINE CF007 {} Ignore warning as whiner expect 8 spaces indent between switch and case below
                    switch (pCameraInfo->imageSensorFacing)
                    {
                        case ImageSensorFacingBack:
                            lensFacing = LensFacingBack;
                            break;
                        case ImageSensorFacingFront:
                            lensFacing = LensFacingFront;
                            break;
                        case ImageSensorFacingExternal:
                            lensFacing = LensFacingExternal;
                            break;
                        default:
                            CAMX_ASSERT_ALWAYS_MESSAGE("Invalid Lens facing info: %d", pCameraInfo->imageSensorFacing);
                            break;
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(LensFacing,
                        static_cast<VOID*>(&lensFacing),
                        1);
                    break;

                }

                // The following lens tags have no static value here; log an error and publish nothing.
                case LensPoseRotation:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for LensPoseRotation");
                    break;

                case LensPoseTranslation:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for LensPoseTranslation");
                    break;

                case LensIntrinsicCalibration:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for LensIntrinsicCalibration");
                    break;

                case LensRadialDistortion:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for LensRadialDistortion");
                    break;

                // Noise reduction
                case NoiseReductionAvailableNoiseReductionModes:
                {
                    // Publish the platform's supported noise-reduction modes as a UINT8 list.
                    UINT8 availableNoiseReductionMode[NoiseReductionEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numNoiseReductionModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableNoiseReductionMode[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->noiseReductionModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(NoiseReductionAvailableNoiseReductionModes,
                                                                   static_cast<VOID*>(availableNoiseReductionMode),
                                                                   count);
                    break;
                }

                // Request
                case RequestMaxNumOutputStreams:
                {
                    // Update Available Max output streams for Raw formats,
                    // non stalling Processed streams and stalling Processed streams respectively.
                    INT32 availableMaxNumOutputStreams[] =
                    {
                        pCameraInfo->pPlatformCaps->maxRawStreams,
                        pCameraInfo->pPlatformCaps->maxProcessedStreams,
                        pCameraInfo->pPlatformCaps->maxProcessedStallingStreams,
                    };

                    count = sizeof(availableMaxNumOutputStreams) / sizeof(availableMaxNumOutputStreams[0]);

                    result = pStaticMetadataSlot->SetMetadataByTag(RequestMaxNumOutputStreams,
                                                                   static_cast<VOID*>(availableMaxNumOutputStreams),
                                                                   count);
                    break;
                }

                // Reprocess streams tag has no static value here; log and publish nothing.
                case RequestMaxNumReprocessStreams:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for RequestMaxNumReprocessStreams");
                    break;

                case RequestMaxNumInputStreams:
                {
                    // Single INT32: maximum simultaneous input streams supported.
                    result = pStaticMetadataSlot->SetMetadataByTag(RequestMaxNumInputStreams,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pPlatformCaps->maxInputStreams))),
                                                                   1);
                    break;
                }

                case RequestPipelineMaxDepth:
                {
                    // Single UINT8: maximum number of in-flight requests in the pipeline.
                    result = pStaticMetadataSlot->SetMetadataByTag(RequestPipelineMaxDepth,
                                                                   static_cast<VOID*>(const_cast<UINT8*>(
                                                                   &(pCameraInfo->pPlatformCaps->maxPipelineDepth))),
                                                                   1);
                    break;
                }

                case RequestPartialResultCount:
                {
                    // Partial result count is the platform capability clamped by the
                    // numMetadataResults static setting (whichever is smaller).
                    const StaticSettings* pStaticSettings = GetStaticSettings();
                    INT32 numPartialResultCount =
                        Utils::MinUINT32(pStaticSettings->numMetadataResults, pCameraInfo->pPlatformCaps->partialResultCount);

                    result = pStaticMetadataSlot->SetMetadataByTag(RequestPartialResultCount,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(numPartialResultCount))),
                                                                   1);
                    break;
                }

                case RequestAvailableCapabilities:
                {
                    // Build the advertised capability list by filtering the platform's
                    // capability set against settings and sensor properties.
                    UINT8 availableRequestCaps[RequestAvailableCapabilitiesEnd] = { 0 };
                    const StaticSettings* pStaticSettings = GetStaticSettings();
                    UINT numRequestCaps = pCameraInfo->pPlatformCaps->numRequestCaps;
                    UINT capsCount = 0;
                    BOOL isHighSpeedVideoSupported = FALSE;

                    // HFR is considered supported if any sensor config exceeds 30fps.
                    // NOTE(review): maxFPS is truncated to UINT before the comparison, so
                    // e.g. 30.5fps does not count as high-speed — confirm this is intended.
                    for (UINT configIndex = 0; configIndex < pCameraInfo->pSensorCaps->numSensorConfigs; configIndex++)
                    {
                        if (static_cast<UINT>(pCameraInfo->pSensorCaps->sensorConfigs[configIndex].maxFPS) > 30)
                        {
                            isHighSpeedVideoSupported = TRUE;
                            break;
                        }
                    }

                    for (UINT i = 0; i < numRequestCaps; i++)
                    {
                        UINT8 requestCap = static_cast<UINT8>(pCameraInfo->pPlatformCaps->requestCaps[i]);

                        // Skip RAW capability when RAW processing is disabled in settings.
                        if ((FALSE == pStaticSettings->enableRAWProcessing) &&
                            (requestCap == RequestAvailableCapabilitiesRaw))
                        {
                            continue;
                        }

                        // Skip depth-output capability for non-depth sensors.
                        if (!pCameraInfo->pSensorCaps->isDepthSensor &&
                            (requestCap == RequestAvailableCapabilitiesDepthOutput))
                        {
                            continue;
                        }

                        if ((FALSE == isHighSpeedVideoSupported) &&
                            (RequestAvailableCapabilitiesConstrainedHighSpeedVideo == requestCap))
                        {
                            // skip HFR supported if max fps is <=30
                            continue;
                        }

                        availableRequestCaps[capsCount++] = requestCap;
                    }
                    result = pStaticMetadataSlot->SetMetadataByTag(RequestAvailableCapabilities,
                                                                   static_cast<VOID*>(availableRequestCaps),
                                                                   capsCount);
                    break;
                }

                case RequestAvailableRequestKeys:
                {
                    // Publish the list of request metadata keys; AF regions are omitted
                    // for fixed-focus cameras, so the published count may be < numRequestKeys.
                    INT32 availableRequestKeys[MaxMetadataTagCount] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numRequestKeys;

                    UINT capsCount = 0;

                    for (UINT i = 0; i < count; i++)
                    {
                        if ((TRUE == pCameraInfo->pSensorCaps->isFixedFocus) &&
                            (ControlAFRegions == pCameraInfo->pPlatformCaps->requestKeys[i]))
                        {
                            // skip adding AF regions if camera is a fixed-focus
                            continue;
                        }
                        else
                        {
                            availableRequestKeys[capsCount++] =
                                static_cast<INT32>(pCameraInfo->pPlatformCaps->requestKeys[i]);
                        }
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(RequestAvailableRequestKeys,
                                                                   static_cast<VOID*>(availableRequestKeys),
                                                                   capsCount);
                    break;
                }

                case RequestAvailableResultKeys:
                {
                    // Same filtering as the request-key list: drop AF regions for fixed-focus.
                    INT32 availableResultKeys[MaxMetadataTagCount] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numResultKeys;

                    UINT capsCount = 0;
                    for (UINT i = 0; i < count; i++)
                    {
                        if ((TRUE == pCameraInfo->pSensorCaps->isFixedFocus) &&
                            (ControlAFRegions == pCameraInfo->pPlatformCaps->resultKeys[i]))
                        {
                            // skip adding AF regions if camera is a fixed-focus
                            continue;
                        }
                        else
                        {
                            availableResultKeys[capsCount++] = static_cast<INT32>(pCameraInfo->pPlatformCaps->resultKeys[i]);
                        }
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(RequestAvailableResultKeys,
                                                                   static_cast<VOID*>(availableResultKeys),
                                                                   capsCount);
                    break;
                }

                case RequestAvailableCharacteristicsKeys:
                {
                    // Characteristics keys are published unfiltered.
                    INT32 availableCharacteristicsKeys[MaxMetadataTagCount] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numCharacteristicsKeys;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableCharacteristicsKeys[i] = static_cast<INT32>(pCameraInfo->pPlatformCaps->
                                                                                characteristicsKeys[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(RequestAvailableCharacteristicsKeys,
                                                                   static_cast<VOID*>(availableCharacteristicsKeys),
                                                                   count);
                    break;

                }

                // Scaler
                case ScalerAvailableJPEGSizes:
                {
                    /// @todo (CAMX-961)- Check if this tag should be deprecated.
                    // count is in INT32 units: sizes times the INT32 fields per DimensionCap.
                    DimensionCap  JPEGSizes[MaxResolutions];
                    count = pCameraInfo->pPlatformCaps->numDefaultImageSizes * (sizeof(DimensionCap) / sizeof(INT32));

                    for (UINT i = 0; i < pCameraInfo->pPlatformCaps->numDefaultImageSizes; i++)
                    {
                        JPEGSizes[i].width  = pCameraInfo->pPlatformCaps->defaultImageSizes[i].width;
                        JPEGSizes[i].height = pCameraInfo->pPlatformCaps->defaultImageSizes[i].height;
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerAvailableJPEGSizes,
                                                                   static_cast<VOID*>(JPEGSizes),
                                                                   count);
                    break;

                }

                case ScalerAvailableFormats:
                {
                    /// @todo (CAMX-961)- Check if this tag should be deprecated.
                    // INT32 list of pixel formats the scaler supports.
                    INT32 availablescalerFormats[MaxScalerFormats] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numScalerFormats;

                    for (UINT i = 0; i < count; i++)
                    {
                        availablescalerFormats[i] = static_cast<INT32>(pCameraInfo->pPlatformCaps->scalerFormats[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerAvailableFormats,
                                                                   static_cast<VOID*>(availablescalerFormats),
                                                                   count);
                    break;

                }

                case ScalerAvailableMaxDigitalZoom:
                {
                    // Single FLOAT: maximum digital zoom ratio.
                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerAvailableMaxDigitalZoom,
                                                                   static_cast<VOID*>(const_cast<FLOAT*>(
                                                                   &(pCameraInfo->pPlatformCaps->maxDigitalZoom))),
                                                                   1);
                    break;


                }

                case ScalerAvailableInputOutputFormatsMap:
                {
                    // Flat INT32 map of input format -> supported output formats.
                    count = pCameraInfo->pPlatformCaps->numInputOutputFormatMaps;

                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerAvailableInputOutputFormatsMap,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   (pCameraInfo->pPlatformCaps->inputOutputFormatMap))),
                                                                   count);
                    break;

                }

                case ScalerAvailableStreamConfigurations:
                {
                    // count in INT32 units: configs times the INT32 fields per ScalerStreamConfig.
                    count = pCameraInfo->pHwEnvironmentCaps->numStreamConfigs *
                            (sizeof(ScalerStreamConfig) / sizeof(INT32));

                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerAvailableStreamConfigurations,
                                                                   static_cast<VOID*>(const_cast<
                                                                   ScalerStreamConfig*>(pCameraInfo->
                                                                   pHwEnvironmentCaps->streamConfigs)),
                                                                   count);
                    break;

                }

                case ScalerAvailableMinFrameDurations:
                {
                    // count in INT64 units: durations times the INT64 fields per entry.
                    count = pCameraInfo->pHwEnvironmentCaps->numMinFrameDurations *
                            (sizeof(ScalerFrameDurationINT64) / sizeof(INT64));

                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerAvailableMinFrameDurations,
                                                                   static_cast<VOID*>(const_cast<
                                                                   ScalerFrameDurationINT64*>(pCameraInfo->
                                                                   pHwEnvironmentCaps->minFrameDurations)),
                                                                   count);
                    break;

                }

                case ScalerAvailableStallDurations:
                {
                    // NOTE(review): the data published for stall durations comes from
                    // minStallDurations while the count comes from numStallDurations —
                    // confirm the field pairing is intentional.
                    count = pCameraInfo->pHwEnvironmentCaps->numStallDurations *
                            (sizeof(ScalerStallDurationINT64) / sizeof(INT64));

                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerAvailableStallDurations,
                                                                   static_cast<VOID*>(const_cast<
                                                                   ScalerStallDurationINT64*>(pCameraInfo->
                                                                   pHwEnvironmentCaps->minStallDurations)),
                                                                   count);
                    break;

                }

                case ScalerCroppingType:
                {
                    // Single enum value: cropping type (e.g. centered/freeform) supported.
                    result = pStaticMetadataSlot->SetMetadataByTag(ScalerCroppingType,
                                                                   static_cast<VOID*>
                                                                   (const_cast<ScalerCroppingTypeValues*>
                                                                   (&(pCameraInfo->pPlatformCaps->croppingSupport))),
                                                                   1);
                    break;
                }

                // Sensor
                case SensorInfoActiveArraySize:
                {
                    // Region published in INT32 units (xMin, yMin, width, height).
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoActiveArraySize,
                                                                   static_cast<VOID*>(const_cast<Region*>(
                                                                   &(pCameraInfo->pSensorCaps->activeArraySize))),
                                                                   sizeof(Region) / sizeof(INT32));
                    break;

                }

                case SensorInfoSensitivityRange:
                {
                    // ISO sensitivity range [min, max] from the sensor caps.
                    RangeINT32 sensitivityRange;

                    sensitivityRange.min = pCameraInfo->pSensorCaps->minISOSensitivity;
                    sensitivityRange.max = pCameraInfo->pSensorCaps->maxISOSensitivity;

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoSensitivityRange,
                                                                   static_cast<VOID*>(&sensitivityRange),
                                                                   sizeof(RangeINT32) / sizeof(INT32));
                    break;

                }

                case SensorInfoColorFilterArrangement:
                {
                    // Single enum value: CFA pattern (e.g. Bayer arrangement) of the sensor.
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoColorFilterArrangement,
                                                                   static_cast<VOID*>(
                                                                   const_cast<SensorInfoColorFilterArrangementValues*>(
                                                                   &(pCameraInfo->pSensorCaps->colorFilterArrangement))),
                                                                   1);
                    break;
                }

                case SensorInfoExposureTimeRange:
                {
                    if (InfoSupportedHardwareLevelFull == (pCameraInfo->pPlatformCaps->supportedHwLevel))
                    {
                        // For FULL capability devices (android.info.supportedHardwareLevel == FULL),
                        // the maximum of the range SHOULD be at least 1 second (1e9), MUST be at least 100ms.
                        CAMX_ASSERT(pCameraInfo->pSensorCaps->maxExposureTime >= 100000000);
                    }

                    // Exposure time range [min, max] in INT64 units (nanoseconds per the assert above).
                    RangeINT64 timeRange;

                    timeRange.min = pCameraInfo->pSensorCaps->minExposureTime;
                    timeRange.max = pCameraInfo->pSensorCaps->maxExposureTime;

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoExposureTimeRange,
                                                                   static_cast<VOID*>(&timeRange),
                                                                   sizeof(RangeINT64) / sizeof(INT64));
                    break;

                }

                case SensorInfoMaxFrameDuration:
                {
                    if (InfoSupportedHardwareLevelFull == (pCameraInfo->pPlatformCaps->supportedHwLevel))
                    {
                        // For FULL capability devices(android.info.supportedHardwareLevel == FULL),
                        // the maximum of the range SHOULD be at least 1 second(1e9), MUST be at least 100ms(100e6).
                        CAMX_ASSERT(pCameraInfo->pSensorCaps->maxFrameDuration >= 100000000);
                    }

                    // android.sensor.info.maxFrameDuration must be greater or equal to the
                    // android.sensor.info.exposureTimeRange max value (since exposure time overrides frame duration).
                    CAMX_ASSERT(pCameraInfo->pSensorCaps->maxFrameDuration >= pCameraInfo->pSensorCaps->maxExposureTime);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoMaxFrameDuration,
                                                                   static_cast<VOID*>(const_cast<UINT64*>(
                                                                   &(pCameraInfo->pSensorCaps->maxFrameDuration))),
                                                                   1);
                    break;

                }

                case SensorInfoPhysicalSize:
                {
                    // Physical sensor dimensions published in FLOAT units (width, height).
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoPhysicalSize,
                                                                   static_cast<VOID*>(const_cast<DimensionCapFloat*>(
                                                                   &(pCameraInfo->pSensorCaps->physicalSensorSize))),
                                                                   sizeof(DimensionCapFloat) / sizeof(FLOAT));
                    break;

                }

                case SensorInfoPixelArraySize:
                {
                    // Full pixel array dimensions, published in INT32 units (width, height).
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoPixelArraySize,
                                                                   static_cast<VOID*>(const_cast<DimensionCap*>(
                                                                   &(pCameraInfo->pSensorCaps->pixelArraySize))),
                                                                   sizeof(DimensionCap) / sizeof(INT32));
                    break;

                }

                case SensorInfoWhiteLevel:
                {
                    // Single INT32: sensor white level.
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoWhiteLevel,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pSensorCaps->whiteLevel))),
                                                                   1);
                    break;

                }

                case SensorInfoTimestampSource:
                {
                    // Single enum value: clock source used for sensor timestamps.
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoTimestampSource,
                                                                   static_cast<VOID*>(
                                                                   const_cast<SensorInfoTimestampSourceValues*>(
                                                                   &(pCameraInfo->pPlatformCaps->timestampSource))),
                                                                   1);
                    break;

                }

                case SensorInfoLensShadingApplied:
                {
                    // Single BOOL: whether lens shading correction is applied in-sensor.
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoLensShadingApplied,
                                                                   static_cast<VOID*>(const_cast<BOOL*>(
                                                                   &(pCameraInfo->pSensorCaps->lensShadingAppliedInSensor))),
                                                                   1);
                    break;

                }

                case SensorInfoPreCorrectionActiveArraySize:
                {
                    // Sanity-check the pre-correction region lies within the pixel array.
                    CAMX_ASSERT(pCameraInfo->pSensorCaps->preCorrectionActiveArraySize.xMin >= 0);
                    CAMX_ASSERT(pCameraInfo->pSensorCaps->preCorrectionActiveArraySize.yMin >= 0);
                    CAMX_ASSERT(pCameraInfo->pSensorCaps->preCorrectionActiveArraySize.width <=
                                pCameraInfo->pSensorCaps->pixelArraySize.width);
                    CAMX_ASSERT(pCameraInfo->pSensorCaps->preCorrectionActiveArraySize.height <=
                                pCameraInfo->pSensorCaps->pixelArraySize.height);
                    Region preCorrectionArraySize = pCameraInfo->pSensorCaps->preCorrectionActiveArraySize;

                    // For Quad CFA sensors, publish half of the Quad CFA dimensions
                    // (the binned output resolution).
                    if (TRUE == pCameraInfo->pSensorCaps->isQuadCFASensor)
                    {
                        preCorrectionArraySize.width  = pCameraInfo->pSensorCaps->QuadCFADim.width >> 1;
                        preCorrectionArraySize.height = pCameraInfo->pSensorCaps->QuadCFADim.height >> 1;
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorInfoPreCorrectionActiveArraySize,
                                                                   static_cast<VOID*>(const_cast<Region*>(
                                                                   &(preCorrectionArraySize))),
                                                                   sizeof(Region) / sizeof(INT32));
                    break;

                }

                case SensorReferenceIlluminant1:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorReferenceIlluminant1,
                                                                   static_cast<VOID*>(
                                                                   const_cast<SensorReferenceIlluminantValues*>(
                                                                   &(pCameraInfo->pSensorCaps->referenceIlluminant1))),
                                                                   1);
                    break;

                }

                case SensorReferenceIlluminant2:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorReferenceIlluminant2,
                                                                   static_cast<VOID*>(
                                                                   const_cast<SensorReferenceIlluminantValues*>(
                                                                   &(pCameraInfo->pSensorCaps->referenceIlluminant2))),
                                                                   1);
                    break;

                }

                case SensorCalibrationTransform1:
                {
                    count = sizeof(pCameraInfo->pSensorCaps->calibrationTransform1) / sizeof(Rational);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorCalibrationTransform1,
                                                                   static_cast<VOID*>(const_cast<Rational*>(
                                                                   &(pCameraInfo->pSensorCaps->
                                                                   calibrationTransform1[0][0]))),
                                                                   count);
                    break;

                }

                case SensorCalibrationTransform2:
                {
                    count = sizeof(pCameraInfo->pSensorCaps->calibrationTransform2) / sizeof(Rational);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorCalibrationTransform2,
                                                                   static_cast<VOID*>(const_cast<Rational*>(
                                                                   &(pCameraInfo->pSensorCaps->
                                                                   calibrationTransform2[0][0]))),
                                                                   count);
                    break;

                }

                case SensorColorTransform1:
                {
                    count = sizeof(pCameraInfo->pSensorCaps->colorTransform1) / sizeof(Rational);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorColorTransform1,
                                                                   static_cast<VOID*>(const_cast<Rational*>(
                                                                   &(pCameraInfo->pSensorCaps->colorTransform1[0][0]))),
                                                                   count);
                    break;

                }

                case SensorColorTransform2:
                {
                    count = sizeof(pCameraInfo->pSensorCaps->colorTransform2) / sizeof(Rational);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorColorTransform2,
                                                                   static_cast<VOID*>(const_cast<Rational*>(
                                                                   &(pCameraInfo->pSensorCaps->colorTransform2[0][0]))),
                                                                   count);
                    break;

                }

                case SensorForwardMatrix1:
                {
                    count = sizeof(pCameraInfo->pSensorCaps->forwardMatrix1) / sizeof(Rational);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorForwardMatrix1,
                                                                   static_cast<VOID*>(const_cast<Rational*>(
                                                                   &(pCameraInfo->pSensorCaps->forwardMatrix1[0][0]))),
                                                                   count);
                    break;

                }

                case SensorForwardMatrix2:
                {
                    count = sizeof(pCameraInfo->pSensorCaps->forwardMatrix2) / sizeof(Rational);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorForwardMatrix2,
                                                                   static_cast<VOID*>(const_cast<Rational*>(
                                                                   &(pCameraInfo->pSensorCaps->forwardMatrix2[0][0]))),
                                                                   count);
                    break;

                }

                case SensorBaseGainFactor:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for SensorBaseGainFactor");
                    break;

                case SensorBlackLevelPattern:
                {
                    count = sizeof(pCameraInfo->pSensorCaps->blackLevelPattern) / sizeof(INT32);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorBlackLevelPattern,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pSensorCaps->blackLevelPattern[0]))),
                                                                   count);
                    break;

                }

                case SensorMaxAnalogSensitivity:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorMaxAnalogSensitivity,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pSensorCaps->maxAnalogSensitivity))),
                                                                   1);
                    break;

                }

                case SensorOrientation:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(SensorOrientation,
                                                                   static_cast<VOID*>(&(pCameraInfo->imageOrientation)),
                                                                   1);
                    break;

                }

                case SensorProfileHueSaturationMapDimensions:
                {
                    count = sizeof(ProfileHueSaturationMapDimensions) / sizeof(INT32);

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorProfileHueSaturationMapDimensions,
                                                                   static_cast<VOID*>(
                                                                   const_cast<ProfileHueSaturationMapDimensions*>(
                                                                   &(pCameraInfo->pSensorCaps->
                                                                   profileHueSaturationMapDimensions))),
                                                                   count);
                    break;

                }

                case SensorAvailableTestPatternModes:
                {
                    count = pCameraInfo->pSensorCaps->numTestPatterns;

                    result = pStaticMetadataSlot->SetMetadataByTag(SensorAvailableTestPatternModes,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pSensorCaps->testPatterns[0]))),
                                                                   count);
                    break;

                }

                // Shading
                case ShadingAvailableModes:
                {
                    UINT8 availableShadingModes[HotPixelModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numShadingModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableShadingModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->shadingModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(ShadingAvailableModes,
                                                                   static_cast<VOID*>(availableShadingModes),
                                                                   count);

                    break;
                }

                // Statistics
                case StatisticsInfoAvailableFaceDetectModes:
                {
                    UINT8 availableFaceDetectionModes[StatisticsFaceDetectModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numFaceDetectModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableFaceDetectionModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->faceDetectModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(StatisticsInfoAvailableFaceDetectModes,
                                                                   static_cast<VOID*>(availableFaceDetectionModes),
                                                                   count);

                    break;
                }

                case StatisticsInfoHistogramBucketCount:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for StatisticsInfoHistogramBucketCount");
                    break;

                case StatisticsInfoMaxFaceCount:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(StatisticsInfoMaxFaceCount,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pPlatformCaps->maxFaceCount))),
                                                                   1);

                    break;
                }

                case StatisticsInfoMaxHistogramCount:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for StatisticsInfoMaxHistogramCount");
                    break;

                case StatisticsInfoMaxSharpnessMapValue:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for StatisticsInfoMaxSharpnessMapValue");
                    break;

                case StatisticsInfoSharpnessMapSize:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for StatisticsInfoSharpnessMapSize");
                    break;

                case StatisticsInfoAvailableHotPixelMapModes:
                {
                    UINT8 availableHotPixelMapModes[StatisticsHotPixelMapModeEnd] =
                    {
                        StatisticsHotPixelMapModeOff,
                        StatisticsHotPixelMapModeOn
                    };

                    // HAL must support StatisticsHotPixelMapModeOff; also advertise On if the sensor provides a hot pixel map
                    count = (TRUE == pCameraInfo->pSensorCaps->hotPixelMapAvailable) ? 2 : 1;
                    CAMX_ASSERT(count <= StatisticsHotPixelMapModeEnd);

                    result = pStaticMetadataSlot->SetMetadataByTag(StatisticsInfoAvailableHotPixelMapModes,
                                                                   static_cast<VOID*>(availableHotPixelMapModes),
                                                                   count);
                    break;
                }

                case StatisticsInfoAvailableLensShadingMapModes:
                {
                    UINT8 availableLensShadingMapModes[StatisticsLensShadingMapModeEnd] =
                    {
                        StatisticsLensShadingMapModeOff,
                        StatisticsLensShadingMapModeOn
                    };

                    // HAL must support StatisticsLensShadingMapModeOff; also advertise On if lens shading is applied in the sensor
                    count = (TRUE == pCameraInfo->pSensorCaps->lensShadingAppliedInSensor) ? 2 : 1;
                    CAMX_ASSERT(count <= StatisticsLensShadingMapModeEnd);

                    result = pStaticMetadataSlot->SetMetadataByTag(StatisticsInfoAvailableLensShadingMapModes,
                                                                   static_cast<VOID*>(availableLensShadingMapModes),
                                                                   count);
                    break;
                }

                // Tonemap
                case TonemapMaxCurvePoints:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(TonemapMaxCurvePoints,
                                                                   static_cast<VOID*>(const_cast<UINT*>(
                                                                   &(pCameraInfo->pPlatformCaps->maxTonemapCurvePoints))),
                                                                   1);

                    break;
                }

                case TonemapAvailableToneMapModes:
                {
                    UINT8 availableToneMapModes[TonemapModeEnd] = { 0 };
                    count = pCameraInfo->pPlatformCaps->numTonemapModes;

                    for (UINT i = 0; i < count; i++)
                    {
                        availableToneMapModes[i] = static_cast<UINT8>(pCameraInfo->pPlatformCaps->tonemapModes[i]);
                    }

                    result = pStaticMetadataSlot->SetMetadataByTag(TonemapAvailableToneMapModes,
                                                                   static_cast<VOID*>(availableToneMapModes),
                                                                   count);

                    break;
                }

                // LED
                case LedAvailableLeds:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for LedAvailableLeds");
                    break;

                // Info
                case InfoSupportedHardwareLevel:
                {
                    UINT8 pSupportedHwLevel = static_cast<UINT8>(pCameraInfo->pPlatformCaps->supportedHwLevel);

                    result = pStaticMetadataSlot->SetMetadataByTag(InfoSupportedHardwareLevel,
                                                                   static_cast<VOID*>(&pSupportedHwLevel),
                                                                   1);
                    break;

                }

                // Sync
                case SyncMaxLatency:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(SyncMaxLatency,
                                                                   static_cast<VOID*>
                                                                   (const_cast<SyncMaxLatencyValues*>
                                                                   (&(pCameraInfo->pPlatformCaps->syncMaxLatency))),
                                                                   1);
                    break;

                }

                // Reprocess
                case ReprocessMaxCaptureStall:
                {
                    result = pStaticMetadataSlot->SetMetadataByTag(ReprocessMaxCaptureStall,
                                                                   static_cast<VOID*>(const_cast<INT32*>(
                                                                   &(pCameraInfo->pPlatformCaps->maxCaptureStall))),
                                                                   1);
                    break;
                }

                // Depth
                case DepthMaxDepthSamples:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for DepthMaxDepthSamples");
                    break;

                case DepthAvailableDepthStreamConfigurations:
                {
                    if (pCameraInfo->pSensorCaps->isDepthSensor)
                    {
                        count = pCameraInfo->pHwEnvironmentCaps->numStreamConfigs *
                            (sizeof(ScalerStreamConfig) / sizeof(INT32));

                        result = pStaticMetadataSlot->SetMetadataByTag(DepthAvailableDepthStreamConfigurations,
                                                                       static_cast<VOID*>(const_cast<
                                                                       ScalerStreamConfig*>(pCameraInfo->
                                                                       pHwEnvironmentCaps->streamConfigs)),
                                                                       count);
                    }
                    break;
                }

                case DepthAvailableDepthMinFrameDurations:
                {
                    if (pCameraInfo->pSensorCaps->isDepthSensor)
                    {
                        count = pCameraInfo->pHwEnvironmentCaps->numMinFrameDurations *
                            (sizeof(ScalerFrameDurationINT64) / sizeof(INT64));

                        result = pStaticMetadataSlot->SetMetadataByTag(DepthAvailableDepthMinFrameDurations,
                                                                       static_cast<VOID*>(const_cast<
                                                                       ScalerFrameDurationINT64*>(pCameraInfo->
                                                                       pHwEnvironmentCaps->minFrameDurations)),
                                                                       count);
                    }
                    break;
                }

                case DepthAvailableDepthStallDurations:
                {
                    if (pCameraInfo->pSensorCaps->isDepthSensor)
                    {
                        count = pCameraInfo->pHwEnvironmentCaps->numStallDurations *
                            (sizeof(ScalerStallDurationINT64) / sizeof(INT64));

                        result = pStaticMetadataSlot->SetMetadataByTag(DepthAvailableDepthStallDurations,
                                                                       static_cast<VOID*>(const_cast<
                                                                       ScalerStallDurationINT64*>(pCameraInfo->
                                                                       pHwEnvironmentCaps->minStallDurations)),
                                                                       count);
                    }

                    break;
                }

                case DepthDepthIsExclusive:
                    CAMX_LOG_ERROR(CamxLogGroupHAL, "No static metadata value for DepthDepthIsExclusive");
                    break;

                default:
                {
                    CAMX_ASSERT_ALWAYS_MESSAGE("Invalid static metadata key: %d",
                                                pCameraInfo->pPlatformCaps->characteristicsKeys[key]);
                    break;
                }
            }

            if (CamxResultSuccess != result)
            {
                CAMX_LOG_ERROR(CamxLogGroupHAL, "Initialize StaticMetadata key: %d failed with error %s",
                                pCameraInfo->pPlatformCaps->characteristicsKeys[key],
                                CamxResultStrings[result]);
                break;
            }
        }

        UINT32      exposureMeteringVendorTag;
        UINT32      saturationVendorTag;
        UINT32      isoVendorTag;
        UINT32      EEPROMInfoTag;
        CamxResult  resultTag                   = CamxResultSuccess;
        UINT        count                       = 0;
        UINT32      sharpnessVendorTag;
        UINT32      histogramBucketsVendorTag;
        UINT32      histogramCountVendorTag;
        UINT32      instantAecVendorTag;

        // Saturation Range
        INT32 availableSaturationRange[4] = { 0 };
        availableSaturationRange[0]       = pCameraInfo->pPlatformCaps->saturationRange.minValue;
        availableSaturationRange[1]       = pCameraInfo->pPlatformCaps->saturationRange.maxValue;
        availableSaturationRange[2]       = pCameraInfo->pPlatformCaps->saturationRange.defaultValue;
        availableSaturationRange[3]       = pCameraInfo->pPlatformCaps->saturationRange.step;
        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.saturation", "range",
                                                             &saturationVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for saturation");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(saturationVendorTag,
                                                              static_cast<VOID*>(availableSaturationRange),
                                                              4);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to write vendor tag saturation to static metadata.");
        }

        // Exposure Metering Modes
        INT32   availableExposureMetering[ExposureMeteringEnd]  = { 0 };
        count = pCameraInfo->pPlatformCaps->numExposureMeteringModes;
        CAMX_ASSERT(count < ExposureMeteringEnd);
        for (UINT i = 0; i < count; i++)
        {
            availableExposureMetering[i] = static_cast<INT32>(pCameraInfo->pPlatformCaps->exposureMeteringModes[i]);
        }
        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.exposure_metering", "available_modes",
                                                             &exposureMeteringVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for exposure_metering");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(exposureMeteringVendorTag,
                                                              static_cast<VOID*>(availableExposureMetering),
                                                              count);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                                "Failed to write vendor tag Exposure Metering to static metadata.");
        }

        // ISO Modes
        INT32   availableISOModes[ISOModeEnd] = { 0 };
        count = pCameraInfo->pPlatformCaps->numISOAvailableModes;
        for (UINT i = 0; i < count; i++)
        {
            availableISOModes[i] = static_cast<INT32>(pCameraInfo->pPlatformCaps->ISOAvailableModes[i]);
        }
        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.iso_exp_priority",
                                                             "iso_available_modes",
                                                             &isoVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for iso_available_modes");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(isoVendorTag,
                                                              static_cast<VOID*>(availableISOModes),
                                                              count);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to write vendor tag ISO to static metadata.");
        }
        // Exposure Time
        INT64   availExposureTimeRange[2] = { 0 };
        UINT32  exposureTimeVendorTag;
        availExposureTimeRange[0] = pCameraInfo->pSensorCaps->minExposureTime;
        availExposureTimeRange[1] = pCameraInfo->pSensorCaps->maxExposureTime;

        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.iso_exp_priority",
                                                             "exposure_time_range",
                                                             &exposureTimeVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for exposure_time_range");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(exposureTimeVendorTag,
                                                              static_cast<VOID*>(availExposureTimeRange),
                                                              2);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                                "Failed to write vendor tag Exposure Time to static metadata.");
        }

        EEPROMInformation   EEPROMInfo = { { 0 } };

        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.sensor_meta_data",
                                                             "EEPROMInformation",
                                                             &EEPROMInfoTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag for EEPROMInfo");

        Utils::Memcpy(&EEPROMInfo, &pCameraInfo->pSensorCaps->OTPData.EEPROMInfo, sizeof(EEPROMInformation));

        resultTag = pStaticMetadataSlot->SetMetadataByTag(EEPROMInfoTag,
                                                          static_cast<VOID*>(&EEPROMInfo), sizeof(EEPROMInformation));
        /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to write vendor tag EEPROMInfo to static metadata.");

        // Sharpness Range
        INT32 availableSharpnessRange[2] = { 0 };
        availableSharpnessRange[0]       = pCameraInfo->pPlatformCaps->sharpnessRange.minValue;
        availableSharpnessRange[1]       = pCameraInfo->pPlatformCaps->sharpnessRange.maxValue;
        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.sharpness", "range",
                                                             &sharpnessVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for Sharpness");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(sharpnessVendorTag,
                                                              static_cast<VOID*>(availableSharpnessRange),
                                                              2);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to write vendor tag Sharpness to static metadata.");
        }
        // Histogram Buckets
        INT32 histBuckets = pCameraInfo->pPlatformCaps->histogramBuckets;
        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.histogram", "buckets",
                                                             &histogramBucketsVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for Histogram Buckets");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(histogramBucketsVendorTag,
                                                              static_cast<VOID*>(&histBuckets),
                                                              1);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                                "Failed to write vendor tag Histogram Buckets to static metadata.");
        }
        // Histogram Counts
        INT32 histCount = pCameraInfo->pPlatformCaps->histogramCount;
        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.histogram", "max_count",
                                                             &histogramCountVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for Histogram Counts");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(histogramCountVendorTag,
                                                              static_cast<VOID*>(&histCount),
                                                              1);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                                "Failed to write vendor tag Histogram Counts to static metadata.");
        }

        // Instant AEC Modes
        INT32   availableInstantAec[InstantAecEnd]  = { 0 };
        count = pCameraInfo->pPlatformCaps->numInstantAecModes;
        CAMX_ASSERT(InstantAecEnd > count);
        for (UINT i = 0; i < count; i++)
        {
            availableInstantAec[i] = static_cast<INT32>(pCameraInfo->pPlatformCaps->instantAecAvailableModes[i]);
        }
        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.instant_aec",
                                                             "instant_aec_available_modes",
                                                             &instantAecVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for Instant AEC");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(instantAecVendorTag,
                                                              static_cast<VOID*>(availableInstantAec),
                                                              count);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to write vendor tag Instant AEC to static metadata.");
        }

        // Publish mount angle in the vendor tags
        UINT32 mountAngleTag;
        UINT32 mountAngle;
        resultTag = VendorTagManager::QueryVendorTagLocation(
            "org.codeaurora.qcamera3.sensor_meta_data", "mountAngle", &mountAngleTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag for mountAngle");
        mountAngle = pCameraInfo->mountAngle;
        resultTag = pStaticMetadataSlot->SetMetadataByTag(
            mountAngleTag, static_cast<VOID*>(&mountAngle), 1);
        /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to write vendor tag mountAngle to static metadata.");

        // Publish camera position in the vendor tags
        UINT32 cameraPositionTag;
        UINT   cameraPosition;
        resultTag = VendorTagManager::QueryVendorTagLocation(
            "org.codeaurora.qcamera3.sensor_meta_data", "cameraPosition", &cameraPositionTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag for cameraPosition");
        cameraPosition = pCameraInfo->pSensorCaps->position;
        resultTag = pStaticMetadataSlot->SetMetadataByTag(
            cameraPositionTag, static_cast<VOID*>(&cameraPosition), 1);
        /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to write vendor tag cameraPosition to static metadata.");

        if (pCameraInfo->pSensorCaps->isQuadCFASensor)
        {
            CAMX_LOG_INFO(CamxLogGroupHAL, "Add Quad CFA flag and Quad CFA dim (%dx%d) to vendor tag.",
                pCameraInfo->pSensorCaps->QuadCFADim.width,
                pCameraInfo->pSensorCaps->QuadCFADim.height);

            UINT32 QuadCFASensorVendorTag;
            /// @note This result is needed only for setting metadata
            resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.quadra_cfa",
                                                                 "is_qcfa_sensor",
                                                                 &QuadCFASensorVendorTag);
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for is_qcfa_sensor");
            if (CamxResultSuccess == resultTag)
            {
                UINT8  isQuadCFASensor = TRUE;
                /// @note This can be ignored because all standard tags are set properly
                resultTag = pStaticMetadataSlot->SetMetadataByTag(QuadCFASensorVendorTag,
                                                                  static_cast<VOID*>(&isQuadCFASensor),
                                                                  1);
                /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
                CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                                    "Failed to write vendor tag is_qcfa_sensor to static metadata.");
            }

            UINT32 QuadCFADimVendorTag;
            /// @note This result is needed only for setting metadata
            resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.quadra_cfa",
                                                                 "qcfa_dimension",
                                                                 &QuadCFADimVendorTag);
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for qcfa_dimension");
            if (CamxResultSuccess == resultTag)
            {
                INT32 dim[2] = { 0 };
                dim[0]       = pCameraInfo->pSensorCaps->QuadCFADim.width;
                dim[1]       = pCameraInfo->pSensorCaps->QuadCFADim.height;

                /// @note This can be ignored because all standard tags are set properly
                resultTag = pStaticMetadataSlot->SetMetadataByTag(QuadCFADimVendorTag,
                                                                  static_cast<VOID*>(dim),
                                                                  2);
                /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
                CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                                    "Failed to write vendor tag qcfa_dimension to static metadata.");
            }
        }
        if (pCameraInfo->pSensorCaps->isZZHDRSupported)
        {
            UINT32 videoHdrVendorTag;
            /// @note This result is needed only for setting metadata
            resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.available_video_hdr_modes",
                                                                 "video_hdr_modes",
                                                                 &videoHdrVendorTag);
            if (CamxResultSuccess == resultTag)
            {
                INT32 modes[3] = {0};
                modes[0] = VideoHdrOff;
                modes[1] = VideoHdrOn;
                modes[2] = VideoHdrEnd;
                resultTag = pStaticMetadataSlot->SetMetadataByTag(videoHdrVendorTag,
                                                                  static_cast<VOID*>(modes),
                                                                  2);
                CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                    "Failed to write vendor tag video_hdr_modes to static metadata.");
            }
        }
        // Color Temperature Range
        UINT32 colorTemperatureVendorTag;
        INT32 availableColorTemperatureRange[2] = { 0 };
        availableColorTemperatureRange[0] = pCameraInfo->pPlatformCaps->colorTemperatureRange.minValue;
        availableColorTemperatureRange[1] = pCameraInfo->pPlatformCaps->colorTemperatureRange.maxValue;

        /// @note This result is needed only for setting metadata
        resultTag = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.manualWB",
                                                             "color_temperature_range",
                                                             &colorTemperatureVendorTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag location for Color Temperature");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(colorTemperatureVendorTag,
                                                              static_cast<VOID*>(availableColorTemperatureRange),
                                                              2);
            /// @note This failure means the app has failed to call get_vendor_tag_ops (HAL or CHI)
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                                "Failed to write vendor tag Color Temperature to static metadata.");
        }

        // Publish Sensor Modes in Vendor Tags
        UINT32 sensorModeTag;
        // Table Contains sensor modes (width, height, fps)
        // first 2 elements of table are Number of sensor modes and size of each entries
        // corresponding to sensor mode each mode can be accessed by i * SensorModeTableEntrySize + 2
        INT32 availableCustomVideoFpsVals[MaxResolutions * SensorModeTableEntrySize + 2];

        ImageSensorModuleData*       pImageSensorModuleData =
            // NOWHINE CP036a: Since the function is const, had to add the const_cast
            const_cast<ImageSensorModuleData*>
            (m_pHwEnvironment->GetImageSensorModuleData(cameraId));
        ImageSensorData*             pImageSensorData       =
            pImageSensorModuleData->GetSensorDataObject();
        const ResolutionInformation* pResolutionInfo        = pImageSensorData->GetResolutionInfo();
        UINT32 numSensorModes = pResolutionInfo->resolutionDataCount;
        availableCustomVideoFpsVals[0] = numSensorModes;
        availableCustomVideoFpsVals[1] = SensorModeTableEntrySize;
        ChiSensorModeInfo* pSensorModeInfo;
        pSensorModeInfo = static_cast<CHISENSORMODEINFO*>
            (CAMX_CALLOC(sizeof(CHISENSORMODEINFO) * numSensorModes));  // Release the buffer
        EnumerateSensorModes(cameraId, numSensorModes, pSensorModeInfo);
        INT j = 2;
        for (UINT i = 0; i < numSensorModes ; i++, j++)
        {
            availableCustomVideoFpsVals[j++] =
                static_cast<INT32>(pSensorModeInfo[i].frameDimension.width);
            availableCustomVideoFpsVals[j++] =
                static_cast<INT32>(pSensorModeInfo[i].frameDimension.height);
            availableCustomVideoFpsVals[j] = static_cast<INT32>(pSensorModeInfo[i].frameRate);
        }
        resultTag = VendorTagManager::QueryVendorTagLocation(
            "org.quic.camera2.sensormode.info", "SensorModeTable", &sensorModeTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get vendor tag");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored because all standard tags are set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(sensorModeTag,
                static_cast<VOID*>(availableCustomVideoFpsVals),
                (numSensorModes * SensorModeTableEntrySize + 2));
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                    "Failed to write vendor tag Color Temperature to static metadata.");
        }
        CAMX_FREE(pSensorModeInfo);

        // Publish Supported 60, 90 Fps resolution in Vendor Tags
        UINT32 customHFRFpsTag;
        HFRCustomParams customHFRfps[MaxCustomHFRSizes];
        count = pCameraInfo->pHwEnvironmentCaps->numCustomHFRParams *
            (sizeof(HFRCustomParams) / sizeof(INT32));
        for (UINT i = 0; i < pCameraInfo->pHwEnvironmentCaps->numCustomHFRParams; i++)
        {
            customHFRfps[i] = pCameraInfo->pHwEnvironmentCaps->customHFRParams[i];
        }
        // publish table has each entry <width, height, fps>
        resultTag = VendorTagManager::QueryVendorTagLocation(
            "org.quic.camera2.customhfrfps.info", "CustomHFRFpsTable", &customHFRFpsTag);
        CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag, "Failed to get Custom HFR vendor tag");
        if (CamxResultSuccess == resultTag)
        {
            /// @note This can be ignored becaues all standard tag set properly
            resultTag = pStaticMetadataSlot->SetMetadataByTag(customHFRFpsTag,
                static_cast<VOID*>(customHFRfps),
                count);
            CAMX_ASSERT_MESSAGE(CamxResultSuccess == resultTag,
                "Failed to write vendor tag Custom HFR to static metadata.");
        }
    }
    else
    {
        CAMX_ASSERT_ALWAYS_MESSAGE("Failed to initialize static metadata pool!");
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// ChiContext::GetStaticMetadataPool
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Return the (lazily created) static metadata pool for a camera, creating and initializing it on first use.
///
/// @param  cameraId  Camera index; must be less than MaxNumCameras
///
/// @return Pointer to the camera's static metadata pool, or NULL if creation/initialization failed
MetadataPool* ChiContext::GetStaticMetadataPool(
    UINT32 cameraId)
{
    CAMX_ASSERT(cameraId < MaxNumCameras);

    CamxResult result = CamxResultSuccess;

    if (NULL == m_perCameraInfo[cameraId].pStaticMetadataPool)
    {
        // The static metadata pool does not belong to any pipeline, so pass UINT32_MAX as the pipeline index,
        // and do not parallelize the creation (thread count of 1)
        m_perCameraInfo[cameraId].pStaticMetadataPool = MetadataPool::Create(PoolType::Static, UINT32_MAX, NULL, 1);

        CAMX_ASSERT(NULL != m_perCameraInfo[cameraId].pStaticMetadataPool);

        if (NULL != m_perCameraInfo[cameraId].pStaticMetadataPool)
        {
            result = InitializeStaticMetadataPool(cameraId);

            if (CamxResultSuccess != result)
            {
                // Do not cache or return a half-initialized pool; destroy it so a later call can retry cleanly.
                // (The original code kept and returned the pool even when initialization failed.)
                m_perCameraInfo[cameraId].pStaticMetadataPool->Destroy();
                m_perCameraInfo[cameraId].pStaticMetadataPool = NULL;
            }
        }
    }

    if (CamxResultSuccess != result)
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Unable to Initialize Static Metadata Pool!");
    }

    return m_perCameraInfo[cameraId].pStaticMetadataPool;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// ChiContext::GetStaticMetadata
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Return the static metadata for a camera (slot 0 of its static metadata pool), or NULL if the pool is
///         unavailable.
const Metadata* ChiContext::GetStaticMetadata(
    UINT32 cameraId)
{
    MetadataPool* pPool = GetStaticMetadataPool(cameraId);

    if (NULL == pPool)
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "Unable to get Static Metadata Pool!");
        return NULL;
    }

    // Static metadata always lives in slot 0 of the static pool
    return pPool->GetSlot(0)->GetMetadata();
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::InitializeStaticMetadata
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Fill in the legacy camera info and the CHI sensor position for one camera, and point the reported static
///         camera info at the camera's static metadata.
///
/// @param  cameraId        Camera index; validated against GetNumCameras()
/// @param  pChiCameraInfo  CHI camera info to populate; its pLegacy member must point at a CameraInfo structure
///
/// @return CamxResultSuccess, CamxResultEInvalidArg for bad arguments, or the failure code from GetCameraInfo
CamxResult ChiContext::InitializeStaticMetadata(
    UINT32         cameraId,
    ChiCameraInfo* pChiCameraInfo)
{
    CamxResult   result     = CamxResultSuccess;
    HwCameraInfo cameraInfo = {};

    // Validate arguments before querying the HW environment for this camera's capabilities
    if ((NULL != pChiCameraInfo) && (cameraId < GetNumCameras()))
    {
        result = m_pHwEnvironment->GetCameraInfo(cameraId, &cameraInfo);
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL,
                       "Invalid arguments cameraId = %d, m_numImageSensors = %d, pCameraInfo = %p",
                       cameraId,
                       GetNumCameras(),
                       pChiCameraInfo);

        result = CamxResultEInvalidArg;
    }

    if (CamxResultSuccess == result)
    {
        CameraInfo* pCameraInfo = static_cast<CameraInfo*>(pChiCameraInfo->pLegacy);

        pCameraInfo->imageSensorFacing        = cameraInfo.imageSensorFacing;
        pCameraInfo->imageOrientation         = cameraInfo.imageOrientation;
        pCameraInfo->deviceVersion            = CAMERA_DEVICE_API_VERSION_3_3;    /// Only support CHI
        /// @todo (CAMX-79) - Build static metadata from ImageSensorData and assign to pCameraInfo->pStaticCameraInfo.
        ///                   Static metadata must stay valid and not change for the life of the CHIModule object.
        pCameraInfo->pStaticCameraInfo        = NULL;                             /// Use static metadata from above
        /// @todo (CAMX-541) Populate resource costing and conflicting devices.
        pCameraInfo->resourceCost             = 100;
        pCameraInfo->ppConflictingDevices     = NULL;
        pCameraInfo->conflictingDevicesLength = 0;

        // Finally update the static metadata info pointer to report it to frameworks.
        const Metadata* pMetadata = GetStaticMetadata(cameraId);

        pCameraInfo->pStaticCameraInfo = pMetadata;

        // CHI position values are offset by one from the sensor driver's position value
        // (presumably to reserve 0 — confirm against the CHISENSORPOSITIONTYPE definition)
        pChiCameraInfo->sensorCaps.positionType = static_cast<CHISENSORPOSITIONTYPE>(
            cameraInfo.pSensorCaps->position + 1);

        // This setting enables multi camera support:
        // two physical and one logical camera will be visible.
        if (GetStaticSettings()->multiCameraEnable)
        {
            CAMX_LOG_INFO(CamxLogGroupHAL, "Enabled multi camera %d Logical with front %d",
                             GetStaticSettings()->multiCameraEnable,
                             GetStaticSettings()->multiCameraEnableFront);
            if ((TRUE == GetStaticSettings()->multiCameraEnableFront) &&
                (FRONT == pChiCameraInfo->sensorCaps.positionType))
            {
                // Re-map the front sensor to REAR_AUX so it can form a logical camera with the rear sensor
                pChiCameraInfo->sensorCaps.positionType = REAR_AUX;
            }
        }
        else
        {
            // Multi camera disabled: collapse AUX positions onto their primary counterparts so only
            // the primary front/rear cameras are exposed
            if (FRONT_AUX == pChiCameraInfo->sensorCaps.positionType)
            {
                pChiCameraInfo->sensorCaps.positionType = FRONT;
            }
            else if (REAR_AUX == pChiCameraInfo->sensorCaps.positionType)
            {
                pChiCameraInfo->sensorCaps.positionType = REAR;
            }
        }

        CAMX_LOG_VERBOSE(CamxLogGroupHAL, "CameraId:%d has position:%d",
            cameraId,
            pChiCameraInfo->sensorCaps.positionType);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::GetCameraInfo
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Populate the CHI camera info for a camera: static metadata, sensor mode count, and sensor/lens capabilities.
///
/// @param  cameraId     Camera index; validated against GetNumCameras()
/// @param  pCameraInfo  Output CHI camera info; must be non-NULL
///
/// @return CamxResultSuccess on success, CamxResultEInvalidArg on bad arguments, or the failure code from
///         InitializeStaticMetadata
CamxResult ChiContext::GetCameraInfo(
    UINT32         cameraId,
    ChiCameraInfo* pCameraInfo)
{
    CamxResult result = CamxResultSuccess;

    if ((NULL == pCameraInfo) || (cameraId >= GetNumCameras()))
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL,
                       "Invalid arguments cameraId = %d, m_numImageSensors = %d, pCameraInfo = %p",
                       cameraId,
                       GetNumCameras(),
                       pCameraInfo);

        result = CamxResultEInvalidArg;
    }

    if (CamxResultSuccess == result)
    {
        result = InitializeStaticMetadata(cameraId, pCameraInfo);
    }

    // Only fill in the capability fields when static metadata initialization succeeded.
    // (The original code assigned the result but populated the structure regardless of failure.)
    if (CamxResultSuccess == result)
    {
        ImageSensorModuleData*       pImageSensorModuleData =
            // NOWHINE CP036a: Since the function is const, had to add the const_cast
            const_cast<ImageSensorModuleData*>(m_pHwEnvironment->GetImageSensorModuleData(cameraId));
        ImageSensorData*             pImageSensorData       = pImageSensorModuleData->GetSensorDataObject();
        const ResolutionInformation* pResolutionInfo        = pImageSensorData->GetResolutionInfo();

        pCameraInfo->numSensorModes = pResolutionInfo->resolutionDataCount;

        SensorModuleStaticCaps staticCaps = { 0 };
        pImageSensorData->GetSensorStaticCapability(&staticCaps, cameraId);

        // Sensor capabilities: pixel size, active pixel array geometry and raw OTP (EEPROM) data
        pCameraInfo->sensorCaps.pixelSize          = staticCaps.pixelSize;
        pCameraInfo->sensorCaps.activeArray.left   = staticCaps.activeArraySize.xMin;
        pCameraInfo->sensorCaps.activeArray.top    = staticCaps.activeArraySize.yMin;
        pCameraInfo->sensorCaps.activeArray.width  = staticCaps.activeArraySize.width;
        pCameraInfo->sensorCaps.activeArray.height = staticCaps.activeArraySize.height;
        pCameraInfo->sensorCaps.pRawOTPData        = staticCaps.OTPData.EEPROMInfo.rawOTPData.pRawData;
        pCameraInfo->sensorCaps.rawOTPDataSize     = staticCaps.OTPData.EEPROMInfo.rawOTPData.rawDataSize;
        pCameraInfo->sensorCaps.size               = sizeof(CHISENSORCAPS);

        // Lens capabilities are only available when the sensor module declares lens info
        if (NULL != pImageSensorModuleData->GetLensInfo())
        {
            pCameraInfo->lensCaps.focalLength  = static_cast<FLOAT>(pImageSensorModuleData->GetLensInfo()->focalLength);
            pCameraInfo->lensCaps.horViewAngle = static_cast<FLOAT>(pImageSensorModuleData->GetLensInfo()->horizontalViewAngle);
        }
        pCameraInfo->lensCaps.size          = sizeof(CHILENSCAPS);
        // A sensor module without an actuator cannot adjust focus, i.e. it is fixed-focus
        pCameraInfo->lensCaps.isFixedFocus  = (pImageSensorModuleData->GetActuatorDataObject() == NULL) ? TRUE : FALSE;

        pCameraInfo->size = sizeof(CHICAMERAINFO);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SetPipelineDescriptorOutput
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Record the output (sink) ports of a pipeline descriptor from the caller-provided buffer descriptors.
VOID ChiContext::SetPipelineDescriptorOutput(
    PipelineDescriptor*      pPipelineDescriptor,
    UINT                     numOutputs,
    ChiPortBufferDescriptor* pOutputBufferDescriptor)
{
    pPipelineDescriptor->numOutputs = numOutputs;

    for (UINT output = 0; output < numOutputs; output++)
    {
        const ChiPortBufferDescriptor* pDescriptor = &pOutputBufferDescriptor[output];

        // Copy the node/port binding for this output
        Utils::Memcpy(&pPipelineDescriptor->outputData[output].nodePort,
                      &pDescriptor->nodePort,
                      sizeof(ChiLinkNodeDescriptor));

        // The stream wrapper was stashed in the stream's private info when the stream was wrapped
        pPipelineDescriptor->outputData[output].pOutputStreamWrapper =
            reinterpret_cast<ChiStreamWrapper*>(pDescriptor->pStream->pPrivateInfo);
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SetPipelineDescriptorInputOptions
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Record the input ports of a pipeline descriptor: each input's node binding and buffer requirements.
VOID ChiContext::SetPipelineDescriptorInputOptions(
    PipelineDescriptor*      pPipelineDescriptor,
    UINT                     numInputs,
    ChiPipelineInputOptions* pChiPipelineInputOptions)
{
    pPipelineDescriptor->numInputs = numInputs;

    for (UINT input = 0; input < numInputs; input++)
    {
        const ChiPipelineInputOptions* pOptions   = &pChiPipelineInputOptions[input];
        PipelineInputData*             pInputData = &pPipelineDescriptor->inputData[input];

        // Node/port this input feeds
        Utils::Memcpy(&pInputData->nodePort, &pOptions->nodePort, sizeof(ChiLinkNodeDescriptor));

        // Buffer size/format constraints for this input
        Utils::Memcpy(&pInputData->bufferOptions, &pOptions->bufferOptions, sizeof(ChiBufferOptions));
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SetPipelineDescriptorInputStream
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Attach a stream wrapper to the pipeline input whose node/port matches the given buffer descriptor.
///
/// @param  pPipelineDescriptor  Pipeline descriptor whose inputs are searched
/// @param  pBufferDescriptor    Buffer descriptor identifying the node/port and carrying the stream
/// @param  isWrapperOwner       TRUE if this descriptor owns (and must later delete) the wrapper
VOID ChiContext::SetPipelineDescriptorInputStream(
    PipelineDescriptor*            pPipelineDescriptor,
    const ChiPortBufferDescriptor* pBufferDescriptor,
    BOOL                           isWrapperOwner)
{
    for (UINT input = 0; input < pPipelineDescriptor->numInputs; input++)
    {
        PipelineInputData* pPipelineInputData = &pPipelineDescriptor->inputData[input];

        /// @todo (CAMX-1015) Add a function IsSameNodePort since its used in other places as well
        if ((pPipelineInputData->nodePort.nodeId         == pBufferDescriptor->nodePort.nodeId)         &&
            (pPipelineInputData->nodePort.nodeInstanceId == pBufferDescriptor->nodePort.nodeInstanceId) &&
            (pPipelineInputData->nodePort.nodePortId     == pBufferDescriptor->nodePort.nodePortId))
        {
            // The wrapper was stashed in the stream's private info when the stream was wrapped
            pPipelineInputData->pInputStreamWrapper =
                reinterpret_cast<ChiStreamWrapper*>(pBufferDescriptor->pStream->pPrivateInfo);
            /// @todo (CAMX-1512) Session can contain all the created Wrappers that it can clean up when it is destroyed
            // Note: the original code assigned isWrapperOwner twice through two aliases of the same array element;
            // a single assignment is sufficient
            pPipelineInputData->isWrapperOwner = isWrapperOwner;
            break;
        }
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::CreatePipelineDescriptor
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Allocate and populate a pipeline descriptor from a CHI create descriptor: wrap the output streams, convert
///         the create descriptor into internal form, probe the pipeline for its input buffer requirements, and track
///         the descriptor for later cleanup.
///
/// @param  pPipelineCreateDescriptor  CHI pipeline create descriptor (nodes, links, flags)
/// @param  numOutputs                 Number of entries in pOutputBufferDescriptor
/// @param  pOutputBufferDescriptor    Output port/stream bindings
/// @param  numInputs                  Number of entries available in pPipelineInputOptions
/// @param  pPipelineInputOptions      Filled with the pipeline's input buffer requirements
///
/// @return The new pipeline descriptor, or NULL on failure
PipelineDescriptor* ChiContext::CreatePipelineDescriptor(
    const ChiPipelineCreateDescriptor* pPipelineCreateDescriptor,
    UINT32                             numOutputs,
    ChiPortBufferDescriptor*           pOutputBufferDescriptor,
    UINT32                             numInputs,
    CHIPIPELINEINPUTOPTIONS*           pPipelineInputOptions)
{
    CamxResult               result                    = CamxResultSuccess;
    PipelineCreateInputData  pipelineCreateInputData   = { 0 };
    PipelineCreateOutputData pipelineCreateOutputData  = { 0 };
    PipelineDescriptor*      pPipelineDescriptor       = NULL;

    pPipelineDescriptor = static_cast<PipelineDescriptor*>(CAMX_CALLOC(sizeof(PipelineDescriptor)));

    if (NULL != pPipelineDescriptor)
    {
        pPipelineDescriptor->flags.isRealTime = pPipelineCreateDescriptor->isRealTime;

        UINT numBatchedFrames   = pPipelineCreateDescriptor->numBatchedFrames;
        UINT maxFPSValue        = pPipelineCreateDescriptor->maxFPSValue;

        CAMX_LOG_INFO(CamxLogGroupHAL, "numBatchedFrames:%d maxFPSValue:%d", numBatchedFrames, maxFPSValue);

        pPipelineDescriptor->numBatchedFrames = numBatchedFrames;
        pPipelineDescriptor->maxFPSValue      = maxFPSValue;
        pPipelineDescriptor->cameraId         = pPipelineCreateDescriptor->cameraId;

        // Wrap every output stream in a ChiStreamWrapper and stash the wrapper in the stream's private info
        // so later code (SetPipelineDescriptorOutput/InputStream) can retrieve it
        for (UINT streamId = 0; streamId < numOutputs; streamId++)
        {
            /// @todo (CAMX-1797) Need to fix the reinterpret_cast
            ChiStream*          pChiStream          = pOutputBufferDescriptor[streamId].pStream;
            Camera3Stream*      pHAL3Stream         = reinterpret_cast<Camera3Stream*>(pChiStream);
            ChiStreamWrapper*   pChiStreamWrapper   = NULL;
            Format              selectedFormat      = SelectFormat(pChiStream,
                                                        pOutputBufferDescriptor[streamId].bIsOverrideImplDefinedWithRaw);

            pChiStreamWrapper = CAMX_NEW ChiStreamWrapper(pHAL3Stream, streamId, selectedFormat);

            // Any protected gralloc output forces the whole pipeline into secure mode
            if (0 != (pChiStream->grallocUsage & GrallocUsageProtected))
            {
                pPipelineDescriptor->flags.isSecureMode = TRUE;
            }

            // Batching more than one frame per request implies HFR operation
            if (numBatchedFrames > 1)
            {
                pPipelineDescriptor->flags.isHFRMode = TRUE;
            }

            CAMX_ASSERT(NULL != pChiStreamWrapper);

            if (NULL != pChiStreamWrapper)
            {
                SetChiStreamInfo(pChiStreamWrapper, numBatchedFrames);
                pChiStream->pPrivateInfo = pChiStreamWrapper;
            }
            else
            {
                // NOTE(review): wrappers created in earlier iterations are referenced only via
                // pChiStream->pPrivateInfo at this point; DestroyPipelineDescriptor below deletes only
                // wrappers recorded in outputData (populated on the success path), so they may leak
                // here — TODO confirm
                result = CamxResultENoMemory;
                break;
            }
        }
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupChi, "Out of memory");
        result = CamxResultENoMemory;
    }

    if (result == CamxResultSuccess)
    {
        // Unfortunately, we don't know the lifetime of the objects being pointed to, so we have to assume they will not
        // exist after this function call, and certainly not by the call to CamX::Session::Initialize, so we might as well
        // perform the conversion here, and keep the data in the format we expect
        ProcessPipelineCreateDesc(pPipelineCreateDescriptor,
                                  numOutputs,
                                  pOutputBufferDescriptor,
                                  pPipelineDescriptor);

        SetPipelineDescriptorOutput(pPipelineDescriptor, numOutputs, pOutputBufferDescriptor);

        pipelineCreateInputData.pPipelineDescriptor    = pPipelineDescriptor;
        pipelineCreateInputData.pChiContext            = this;
        pipelineCreateOutputData.pPipelineInputOptions = pPipelineInputOptions;

        // Create a throwaway pipeline purely to discover its input buffer requirements
        // (written into pPipelineInputOptions); the pipeline object itself is destroyed right away
        result = Pipeline::Create(&pipelineCreateInputData, &pipelineCreateOutputData);

        if (CamxResultSuccess == result)
        {
            CAMX_ASSERT(NULL != pipelineCreateOutputData.pPipeline);

            pipelineCreateOutputData.pPipeline->Destroy();
            pipelineCreateOutputData.pPipeline = NULL;
        }
    }

    if (CamxResultSuccess == result)
    {
        // Offline pipelines must be given at least as many input slots as the pipeline actually requires
        if ((FALSE == pPipelineCreateDescriptor->isRealTime) && (numInputs < pipelineCreateOutputData.numInputs))
        {
            CAMX_LOG_ERROR(CamxLogGroupHAL, "Number inputs %d are not matching per pipeline descriptor", numInputs);
        }
        SetPipelineDescriptorInputOptions(pPipelineDescriptor, pipelineCreateOutputData.numInputs, pPipelineInputOptions);

        // Track the descriptor so DestroyPipelineDescriptor can unlink it later
        LDLLNode* pNode = static_cast<LDLLNode*>(CAMX_CALLOC(sizeof(LDLLNode)));

        if (NULL != pNode)
        {
            pNode->pData = pPipelineDescriptor;
            m_pipelineTracking.InsertToTail(pNode);
        }
    }
    else
    {
        DestroyPipelineDescriptor(pPipelineDescriptor);
        pPipelineDescriptor = NULL;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Pipeline descriptor creation failed");
    }

    return pPipelineDescriptor;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::DestroyPipelineDescriptor
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Tear down a pipeline descriptor: free per-node properties, delete owned stream wrappers, unlink the
///         descriptor from the tracking list, and free its memory. Safe to call with NULL.
VOID ChiContext::DestroyPipelineDescriptor(
    PipelineDescriptor* pPipelineDescriptor)
{
    if (NULL == pPipelineDescriptor)
    {
        return;
    }

    // Free every node's property value blobs, then the property array itself
    for (UINT node = 0; node < pPipelineDescriptor->pipelineInfo.numNodes; node++)
    {
        PerNodeInfo* pNodeInfo = &pPipelineDescriptor->pipelineInfo.nodeInfo[node];

        if (NULL != pNodeInfo->pNodeProperties)
        {
            for (UINT property = 0; property < pNodeInfo->nodePropertyCount; property++)
            {
                CAMX_FREE(pNodeInfo->pNodeProperties[property].pValue);
                pNodeInfo->pNodeProperties[property].pValue = NULL;
            }

            CAMX_FREE(pNodeInfo->pNodeProperties);
            pNodeInfo->pNodeProperties = NULL;
        }
    }

    // Output stream wrappers are always owned by the descriptor
    for (UINT output = 0; output < pPipelineDescriptor->numOutputs; output++)
    {
        ChiStreamWrapper* pWrapper = pPipelineDescriptor->outputData[output].pOutputStreamWrapper;

        if (NULL != pWrapper)
        {
            CAMX_DELETE pWrapper;
            pPipelineDescriptor->outputData[output].pOutputStreamWrapper = NULL;
        }
    }

    // Input stream wrappers are deleted only when this descriptor owns them
    for (UINT input = 0; input < pPipelineDescriptor->numInputs; input++)
    {
        PipelineInputData* pInputData = &pPipelineDescriptor->inputData[input];

        if ((TRUE == pInputData->isWrapperOwner) && (NULL != pInputData->pInputStreamWrapper))
        {
            CAMX_DELETE pInputData->pInputStreamWrapper;
            pInputData->pInputStreamWrapper = NULL;
            pInputData->isWrapperOwner      = FALSE;
        }
    }

    // Stop tracking the descriptor, then release it
    m_pipelineTracking.RemoveByValue(pPipelineDescriptor);
    CAMX_FREE(pPipelineDescriptor);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::CreatePipelineFromDesc
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
Pipeline* ChiContext::CreatePipelineFromDesc(
    PipelineDescriptor* pPipelineDescriptor,
    UINT                pipelineIndex)
{
    // Instantiates a Pipeline object from a previously-built pipeline descriptor.
    // Returns NULL if the descriptor is NULL or Pipeline::Create fails.
    CamxResult result    = CamxResultSuccess;
    Pipeline*  pPipeline = NULL;

    if (NULL != pPipelineDescriptor)
    {
        // Input/output data consumed and produced by Pipeline::Create
        PipelineCreateInputData  pipelineCreateInputData  = { 0 };
        PipelineCreateOutputData pipelineCreateOutputData = { 0 };
        // Per-input buffer requirement options, filled in by Pipeline::Create
        ChiPipelineInputOptions  pipelineInputOptions[MaxPipelineInputs];

        /// @todo (CAMX-1015) Duplicated code

        // Populate the pipeline create input data
        pipelineCreateInputData.pPipelineDescriptor    = pPipelineDescriptor;
        pipelineCreateInputData.pChiContext            = this;
        pipelineCreateInputData.isSecureMode           = pPipelineDescriptor->flags.isSecureMode;
        pipelineCreateInputData.pipelineIndex          = pipelineIndex;

        // MaxPipelineInputs is the number of elements in the array below. Pipeline::Create call will fill in only those
        // many entries based on the actual number of pipeline buffer inputs or 1 entry for sensor input

        // Populate the pipeline create output data
        pipelineCreateOutputData.numInputs             = MaxPipelineInputs;
        pipelineCreateOutputData.pPipelineInputOptions = &pipelineInputOptions[0];

        result = Pipeline::Create(&pipelineCreateInputData, &pipelineCreateOutputData);

        if (CamxResultSuccess == result)
        {
            pPipeline = pipelineCreateOutputData.pPipeline;
        }
    }

    return pPipeline;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::CreateChiFence
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::CreateChiFence(
    CHIFENCECREATEPARAMS*   pInfo,
    CHIFENCEHANDLE*         phChiFence)
{
    // Allocates a ChiFence wrapper and initializes it for the requested type:
    // internal fences get a freshly created CSL private fence, while EGL/native
    // fences wrap the sync object / fd supplied by the caller.
    //
    // On success *phChiFence receives the new handle (the ChiFence pointer
    // itself) with an initial reference count of 1; callers release it via
    // ReleaseChiFence(). On any failure the allocation is freed before return.
    CamxResult  result      = CamxResultSuccess;
    ChiFence*   pChiFence   = NULL;

    CAMX_ASSERT(NULL != pInfo);
    CAMX_ASSERT(NULL != phChiFence);

    pChiFence = static_cast<ChiFence*>(CAMX_CALLOC(sizeof(ChiFence)));
    if (NULL == pChiFence)
    {
        result = CamxResultENoMemory;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Out of memory");
    }
    else
    {
        // The CHI handle handed back to the client is simply the ChiFence pointer
        pChiFence->hChiFence    = static_cast<CHIFENCEHANDLE>(pChiFence);
        pChiFence->type         = pInfo->type;
        pChiFence->aRefCount    = 1;
        pChiFence->resultState  = ChiFenceSuccess;
    }

    if (CamxResultSuccess == result)
    {
        if (ChiFenceTypeInternal == pInfo->type)
        {
            result = CSLCreatePrivateFence("ChiInternalFence", &pChiFence->hFence);
            if (CamxResultSuccess != result)
            {
                CAMX_LOG_ERROR(CamxLogGroupChi, "Failed to create CSL fence: %d", result);
                CAMX_FREE(pChiFence);
                pChiFence = NULL;
            }
        }
        else if (ChiFenceTypeEGL == pInfo->type)
        {
            pChiFence->eglSync = pInfo->eglSync;
        }
        else if (ChiFenceTypeNative == pInfo->type)
        {
            pChiFence->nativeFenceFD = pInfo->nativeFenceFD;
        }
        else
        {
            result = CamxResultEInvalidArg;
            CAMX_LOG_ERROR(CamxLogGroupChi, "Invalid Chi fence type requested: %d", pInfo->type);
            CAMX_FREE(pChiFence);
            pChiFence = NULL;
        }
    }
    if (CamxResultSuccess == result)
    {
        *phChiFence = pChiFence->hChiFence;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::ReleaseChiFence
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::ReleaseChiFence(
    CHIFENCEHANDLE hChiFence)
{
    // Drops one reference on the Chi fence. When the reference count reaches
    // zero the underlying CSL fence (internal fences only) is released and the
    // ChiFence wrapper allocated in CreateChiFence() is freed.
    //
    // Returns CamxResultEInvalidArg for a NULL handle or an internal fence with
    // an invalid CSL handle; otherwise the CSLReleaseFence() result (or success).
    CamxResult  result      = CamxResultSuccess;
    ChiFence*   pChiFence   = static_cast<ChiFence*>(hChiFence);

    if (NULL == pChiFence)
    {
        result = CamxResultEInvalidArg;
        CAMX_LOG_ERROR(CamxLogGroupChi, "hChiFence is invalid");
    }
    else
    {
        if (0 == CamxAtomicDec(&pChiFence->aRefCount))
        {
            if (ChiFenceTypeInternal == pChiFence->type)
            {
                if (CSLInvalidHandle == pChiFence->hFence)
                {
                    result = CamxResultEInvalidArg;
                    CAMX_LOG_ERROR(CamxLogGroupChi, "hChiFence is invalid");
                }
                else
                {
                    result = CSLReleaseFence(pChiFence->hFence);
                    if (CamxResultSuccess != result)
                    {
                        CAMX_LOG_ERROR(CamxLogGroupChi, "Failed to release CSL fence %d", pChiFence->hFence);
                    }
                }
            }

            // Free the wrapper for ALL fence types. Previously only internal
            // fences were freed here, leaking the ChiFence allocation made in
            // CreateChiFence() for EGL and native fences.
            CAMX_FREE(pChiFence);
            pChiFence = NULL;
        }
        else
        {
            CAMX_ASSERT(pChiFence->aRefCount > 0);
        }
    }
    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SignalChiFence
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::SignalChiFence(
    CHIFENCEHANDLE hChiFence,
    CamxResult     resultStatus)
{
    // Signals an internal Chi fence, recording resultStatus on the fence first so
    // that waiters can query success/failure via GetChiFenceResult(). Only
    // internal fences can be signaled; EGL/native fences are rejected.
    CamxResult  result      = CamxResultSuccess;
    ChiFence*   pChiFence   = static_cast<ChiFence*>(hChiFence);

    if (NULL == pChiFence)
    {
        result = CamxResultEInvalidArg;
        CAMX_LOG_ERROR(CamxLogGroupChi, "hChiFence is invalid");
    }
    else if (ChiFenceTypeInternal != pChiFence->type)
    {
        result = CamxResultEUnsupported;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Signal not supported for this type of fence: %p", hChiFence);
    }
    else if (CSLInvalidHandle == pChiFence->hFence)
    {
        result = CamxResultEInvalidState;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Internal Chi fence %p has invalid state", hChiFence);
    }
    else
    {
        // Set the CHI fence error status and based on which node(s) waiting for the signal
        // is/are expected to retrigger processing or take some other appropriate action
        // NOTE: Fence signal is treated successful, even if CHI fence error status is not
        //       in order for the DRQ to continue to function properly

        result = SetChiFenceResult(hChiFence, resultStatus);
        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupChi,
                "Failed to set chi fence (%d) status (%d) in order to notify waiting the node(s)",
                pChiFence->hFence,
                resultStatus);
        }

        // Deliberately overwrites any SetChiFenceResult() failure (see NOTE above):
        // the fence is always signaled with success so the DRQ keeps functioning.
        result = CSLFenceSignal(pChiFence->hFence, CSLFenceResultSuccess);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SetChiFenceResult
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::SetChiFenceResult(
    CHIFENCEHANDLE hChiFence,
    CamxResult     lResult)
{
    // Records lResult on an internal Chi fence as ChiFenceSuccess/ChiFenceFailed.
    // A fence that fails validation (wrong type or invalid CSL handle) is marked
    // ChiFenceInvalid instead, and the corresponding error code is returned.
    CamxResult  result    = CamxResultSuccess;
    ChiFence*   pChiFence = static_cast<ChiFence*>(hChiFence);

    if (NULL == pChiFence)
    {
        result = CamxResultEInvalidArg;
        CAMX_LOG_ERROR(CamxLogGroupChi, "pChiFence is NULL");
    }
    else if (ChiFenceTypeInternal != pChiFence->type)
    {
        // Only internal fences carry a result state that can be set
        pChiFence->resultState = ChiFenceInvalid;
        result                 = CamxResultEUnsupported;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Signal not supported for this type of fence: %p", hChiFence);
    }
    else if (CSLInvalidHandle == pChiFence->hFence)
    {
        pChiFence->resultState = ChiFenceInvalid;
        result                 = CamxResultEInvalidState;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Internal Chi fence %p has invalid state", hChiFence);
    }
    else
    {
        pChiFence->resultState = (CamxResultSuccess == lResult) ? ChiFenceSuccess : ChiFenceFailed;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::GetChiFenceResult
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::GetChiFenceResult(
    CHIFENCEHANDLE hChiFence,
    CDKResult*     pResult)
{
    // Queries the recorded result of an internal Chi fence.
    //
    // *pResult receives CDKResultSuccess/CDKResultEFailed from the fence's result
    // state, or CDKResultEInvalidState when the fence itself is invalid. Returns
    // CamxResultEInvalidArg for NULL arguments, CamxResultEUnsupported for
    // non-internal fence types, CamxResultEInvalidState for an invalid CSL handle.
    CamxResult  result    = CamxResultSuccess;
    ChiFence*   pChiFence = static_cast<ChiFence*>(hChiFence);

    if (NULL == pResult)
    {
        // BUGFIX: pResult was previously dereferenced unconditionally below,
        // crashing on a NULL output pointer
        result = CamxResultEInvalidArg;
        CAMX_LOG_ERROR(CamxLogGroupChi, "pResult is NULL");
    }
    else if (NULL == pChiFence)
    {
        result = CamxResultEInvalidArg;
        CAMX_LOG_ERROR(CamxLogGroupChi, "hChiFence is invalid");
    }
    else if (ChiFenceTypeInternal != pChiFence->type)
    {
        result = CamxResultEUnsupported;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Signal not supported for this type of fence: %p", hChiFence);
    }
    else if (CSLInvalidHandle == pChiFence->hFence)
    {
        result = CamxResultEInvalidState;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Internal Chi fence %p has invalid state", hChiFence);
    }

    if (NULL != pResult)
    {
        if (CamxResultSuccess == result)
        {
            *pResult = (ChiFenceSuccess == pChiFence->resultState) ? CDKResultSuccess : CDKResultEFailed;
        }
        else
        {
            *pResult = CDKResultEInvalidState;
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::WaitChiFence
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::WaitChiFence(
    CHIFENCEHANDLE          hChiFence,
    PFNCHIFENCECALLBACK     pCallback,
    VOID*                   pUserData)
{
    // Waits on a Chi fence. For internal fences a NULL pCallback blocks the
    // caller on the CSL fence; a non-NULL pCallback registers an asynchronous
    // dependency with the deferred request queue (DRQ) and returns immediately.
    // EGL/native fences always go through the DRQ dependency path.
    CamxResult  result      = CamxResultSuccess;
    ChiFence*   pChiFence   = static_cast<ChiFence*>(hChiFence);

    if (NULL == pChiFence)
    {
        result = CamxResultEInvalidArg;
        CAMX_LOG_ERROR(CamxLogGroupChi, "hChiFence is invalid");
    }
    else if (ChiFenceTypeInternal == pChiFence->type)
    {
        if (CSLInvalidHandle == pChiFence->hFence)
        {
            result = CamxResultEInvalidState;
            CAMX_LOG_ERROR(CamxLogGroupChi, "Internal Chi fence %p has invalid state", hChiFence);
        }
        else
        {
            if (NULL == pCallback)
            {
                // Synchronous wait: block until the CSL fence signals
                result = CSLFenceWait(pChiFence->hFence, UINT64_MAX);
            }
            else
            {
                // Asynchronous wait via the DRQ.
                // BUGFIX: the return value was previously discarded, so the
                // error check below always tested the stale 'result' and could
                // never report an async registration failure (the EGL/native
                // path below correctly captures it).
                result = m_pDeferredRequestQueue->WaitForFenceDependency(&pChiFence, 1, pCallback, pUserData);
            }

            if (CamxResultSuccess != result)
            {
                CAMX_LOG_ERROR(CamxLogGroupChi, "Failed to wait on Chi fence %p", hChiFence);
            }
        }
    }
    else if ((ChiFenceTypeEGL == pChiFence->type) || (ChiFenceTypeNative == pChiFence->type))
    {
        result = m_pDeferredRequestQueue->WaitForFenceDependency(&pChiFence, 1, pCallback, pUserData);
    }
    else
    {
        result = CamxResultEInvalidArg;
        CAMX_LOG_ERROR(CamxLogGroupChi, "Invalid Chi fence (%p) type: %d", pChiFence, pChiFence->type);
    }

    return result;
}


////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::CreateSession
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CHISession* ChiContext::CreateSession(
    UINT             numPipelines,
    ChiPipelineInfo* pPipelineInfo,
    ChiCallBacks*    pCallbacks,
    VOID*            pPrivateCallbackData,
    CHISESSIONFLAGS  sessionCreateflags)
{
    // Creates a CHI session wrapping the given pipelines and tracks it in
    // m_sessionTracking so it can be cleaned up with the context.
    // Returns NULL on failure.
    CAMX_UNREFERENCED_PARAM(sessionCreateflags);

    // Session object to be returned
    CHISession*          pChiSession = NULL;
    // Data needed to create the session
    CHISessionCreateData createData  = {};

    /// @todo (CAMX-1512) HFR support for CHI
    createData.sessionCreateData.pChiContext             = this;
    createData.sessionCreateData.pHwContext              = m_pHwContext;
    createData.sessionCreateData.pThreadManager          = GetThreadManager();

    // If the (first) pipeline input is a sensor, use the sensor mode's batched
    // frame count; otherwise no batching.
    // NOTE(review): pPipelineInfo is dereferenced without a NULL check here —
    // presumably callers guarantee it; confirm before relying on it.
    if (TRUE == pPipelineInfo->pipelineInputInfo.isInputSensor)
    {
        createData.sessionCreateData.usecaseNumBatchedFrames =
            pPipelineInfo->pipelineInputInfo.sensorInfo.pSensorModeInfo->batchedFrames;
    }
    else
    {
        createData.sessionCreateData.usecaseNumBatchedFrames = 1;
    }

    // Populate the remaining create data members
    createData.sessionCreateData.pChiAppCallBacks        = pCallbacks;
    createData.sessionCreateData.numPipelines            = numPipelines;
    createData.sessionCreateData.pPipelineInfo           = pPipelineInfo;
    createData.sessionCreateData.pPrivateCbData          = pPrivateCallbackData;

    pChiSession = CHISession::Create(&createData);

    // Track the newly created session; if tracking cannot be set up the session
    // is destroyed rather than left untracked.
    if (NULL != pChiSession)
    {
        LDLLNode* pNode = static_cast<LDLLNode*>(CAMX_CALLOC(sizeof(LDLLNode)));
        if (NULL != pNode)
        {
            pNode->pData = pChiSession;
            m_sessionTracking.InsertToTail(pNode);
        }
        else
        {
            CAMX_LOG_ERROR(CamxLogGroupChi, "Out of memory");
            pChiSession->Destroy(TRUE);
            pChiSession = NULL;
        }
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupChi, "Unable to create session");
    }

    return pChiSession;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SelectFormat
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
Format ChiContext::SelectFormat(
    ChiStream* pStream,
    BOOL       bIsOverrideImplDefinedWithRaw)
{
    // Maps a CHI stream's HAL pixel format (plus stream type, gralloc usage and
    // dataspace) to the internal CamX Format used by the pipeline.
    /// @todo (CAMX-1512) Fix the format selection
    Format format = Format::Blob;

    if (NULL == pStream)
    {
        CAMX_LOG_ERROR(CamxLogGroupChi, "pStream is NULL");
    }
    else
    {
        const ChiStreamFormat pixelFormat        = pStream->format;
        const GrallocUsage    streamGrallocUsage = pStream->grallocUsage;
        const CHISTREAMTYPE   streamType         = pStream->streamType;

        switch (pixelFormat)
        {
            case ChiStreamFormatYCrCb420_SP:
                format = Format::YUV420NV21;
                break;

            case ChiStreamFormatYCbCr420_888:
                // NV21 for CPU-read output streams, NV12 otherwise
                if ((ChiStreamTypeOutput == streamType) &&
                    (GrallocUsageSwReadOften == (GrallocUsageSwReadOften & streamGrallocUsage)))
                {
                    format = Format::YUV420NV21;
                }
                else
                {
                    format = Format::YUV420NV12;
                }
                break;

            case ChiStreamFormatP010:
                format = Format::P010;
                break;

            case ChiStreamFormatImplDefined:
                // Input/bidirectional/ZSL streams pick RAW or a YUV flavor from
                // the static settings; pure output streams defer to
                // GetImplDefinedFormat()
                if ((ChiStreamTypeBidirectional == streamType) || (ChiStreamTypeInput == streamType) ||
                    (GrallocUsageHwCameraZSL == (streamGrallocUsage & GrallocUsageHwCameraZSL)))
                {
                    if (TRUE == bIsOverrideImplDefinedWithRaw)
                    {
                        format = Format::RawMIPI;
                    }
                    else
                    {
                        const StaticSettings* pStaticSettings = m_pHwEnvironment->GetSettingsManager()->GetStaticSettings();

                        if (OutputFormatYUV420NV21 == pStaticSettings->outputFormat)
                        {
                            format = Format::YUV420NV21;
                        }
                        else if ((OutputFormatUBWCNV12 == pStaticSettings->outputFormat) &&
                                 (TRUE == pStaticSettings->multiCameraVREnable))
                        {
                            // VR Camera stitch node currently supports UBWC format for preview
                            // but other Dual Camera nodes such as SAT and RTB doesnt suppport UBWC format.
                            format = Format::UBWCNV12;
                        }
                        else
                        {
                            format = Format::YUV420NV12;
                        }
                    }
                }
                else
                {
                    format = static_cast<Format>(GetImplDefinedFormat(pStream, bIsOverrideImplDefinedWithRaw));
                }
                CAMX_LOG_INFO(CamxLogGroupChi, "select ChiStreamFormatImplDefined format = %d", format);
                break;

            /// @todo (CAMX-1797) Fix format selection
            case ChiStreamFormatRaw16:
                format = Format::RawPlain16;
                break;

            case ChiStreamFormatRawOpaque:
            case ChiStreamFormatRaw10:
                format = Format::RawMIPI;
                break;

            case ChiStreamFormatY8:
                // Y8 is a true luma plane only for depth streams
                format = (DataspaceDepth == pStream->dataspace) ? Format::Y8 : Format::RawMIPI;
                break;

            case ChiStreamFormatBlob:
                // JFIF dataspace means the blob carries a JPEG
                if ((DataspaceJFIF == pStream->dataspace) ||
                    (DataspaceV0JFIF == pStream->dataspace))
                {
                    format = Format::Jpeg;
                }
                else
                {
                    format = Format::Blob;
                }
                break;

            case ChiStreamFormatPD10:
                format = Format::PD10;
                CAMX_LOG_INFO(CamxLogGroupChi, "Select PD10 format = %d", format);
                break;

            case ChiStreamFormatUBWCTP10:
                format = Format::UBWCTP10;
                CAMX_LOG_INFO(CamxLogGroupChi, "Select UBWCTP10 format = %d", format);
                break;

            case ChiStreamFormatY16:
                format = Format::Y16;
                break;

            default:
                CAMX_ASSERT_ALWAYS_MESSAGE("ERROR: CHI failed to pick the correct output format in CreatePipeline!");
                break;
        }
    }

    return format;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::GetImplDefinedFormat
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
Format ChiContext::GetImplDefinedFormat(
    ChiStream* pStream,
    BOOL       bIsOverrideImplDefinedWithRaw)
{
    // Resolves HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for an output stream to a
    // concrete CamX Format based on the static outputFormat setting, then applies
    // gralloc-usage-driven overrides. Defaults to linear NV12.
    Format       chosenImplDefinedFormat = Format::YUV420NV12;
    GrallocUsage grallocUsage            = 0U;

    if (NULL != pStream)
    {
        grallocUsage = pStream->grallocUsage;

        switch (m_pHwEnvironment->GetSettingsManager()->GetStaticSettings()->outputFormat)
        {
            case OutputFormatYUV420NV12:
                // Input/bidirectional/ZSL streams become NV21 (or RAW when overridden)
                if ((ChiStreamTypeInput == pStream->streamType)         ||
                    (ChiStreamTypeBidirectional == pStream->streamType) ||
                    (GrallocUsageHwCameraZSL == (grallocUsage & GrallocUsageHwCameraZSL)))
                {
                    chosenImplDefinedFormat =
                        (TRUE == bIsOverrideImplDefinedWithRaw) ? Format::RawMIPI : Format::YUV420NV21;
                }
                else
                {
                    chosenImplDefinedFormat = Format::YUV420NV12;
                }
                break;

            case OutputFormatYUV420NV21:
                chosenImplDefinedFormat = Format::YUV420NV21;
                break;

            case OutputFormatUBWCNV12:
                chosenImplDefinedFormat = Format::UBWCNV12;
                break;

            case OutputFormatUBWCTP10:
                chosenImplDefinedFormat = Format::UBWCTP10;
                break;

            case OutputFormatRAWPLAIN16:
                chosenImplDefinedFormat = Format::RawPlain16;
                break;

            case OutputFormatPD10:
                chosenImplDefinedFormat = Format::PD10;
                break;

            case OutputFormatP010:
                chosenImplDefinedFormat = Format::P010;
                break;

            default:
                chosenImplDefinedFormat = Format::YUV420NV12;
                break;
        }

        // A CPU-reading consumer cannot handle UBWC; fall back to linear NV12
        if (((Format::UBWCNV12 == chosenImplDefinedFormat)    ||
             (Format::UBWCTP10 == chosenImplDefinedFormat)    ||
             (Format::UBWCNV124R == chosenImplDefinedFormat)) &&
            (GrallocUsageSwReadOften == (grallocUsage & GrallocUsageSwReadOften)))
        {
            chosenImplDefinedFormat = Format::YUV420NV12;
        }
    }

    // Gralloc-flag based overrides. When pStream is NULL, grallocUsage stays 0
    // and both checks are no-ops, preserving the NV12 default.
    if ((0 != (grallocUsage & GrallocUsagePrivate0)) &&
        (0 != (grallocUsage & GrallocUsageTP10))     &&
        (Format::UBWCTP10 != chosenImplDefinedFormat))
    {
        chosenImplDefinedFormat = Format::UBWCTP10;
        CAMX_LOG_VERBOSE(CamxLogGroupChi, "Overwrite chosen format to %d", chosenImplDefinedFormat);
    }

    if (0 != (grallocUsage & GrallocUsageP010))
    {
        chosenImplDefinedFormat = Format::P010;
    }

    CAMX_LOG_VERBOSE(CamxLogGroupChi, "Setting format type %d", chosenImplDefinedFormat);

    return chosenImplDefinedFormat;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SetChiStreamInfo
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID ChiContext::SetChiStreamInfo(
    ChiStreamWrapper* pChiStreamWrapper,
    UINT              numBatchFrames)
{
    // Finalizes the HAL-facing fields of a stream: ORs in the gralloc usage
    // flags implied by the stream direction and the wrapper's internal format,
    // and sets the maximum number of buffers the framework may have dequeued.

    /// @todo (CAMX-1512) Fix the stream - NativeStream returns Camera3Stream
    ChiStream* pStream = reinterpret_cast<ChiStream*>(pChiStreamWrapper->GetNativeStream());

    if (NULL != pStream)
    {
        GrallocUsage grallocUsage = pStream->grallocUsage;

        switch (pStream->streamType)
        {
            case StreamTypeOutput:

                // Video-encoder consumers additionally get rare CPU access flags
                if (grallocUsage & GrallocUsageHwVideoEncoder)
                {
                    grallocUsage |= (GrallocUsageSwReadRarely | GrallocUsageSwWriteRarely | GrallocUsageHwCameraWrite);
                }
                else
                {
                    grallocUsage |= GrallocUsageHwCameraWrite;
                }

                if (ChiStreamFormatImplDefined == pStream->format)
                {
                    // setting gralloc usage flag as private0 for UBWC NV12
                    switch (pChiStreamWrapper->GetInternalFormat())
                    {
                        case Format::UBWCNV12:
                            grallocUsage |= GrallocUsagePrivate0;
                            break;

                        case Format::UBWCTP10:
                            grallocUsage |= GrallocUsagePrivate0;
                            grallocUsage |= GrallocUsageTP10;
                            break;

                        case Format::P010:
                            grallocUsage |= GrallocUsageP010;
                            break;

                        case Format::YUV420NV21:
                            // NV21 producer flag depends on whether the consumer is the encoder
                            if (0 != (GrallocUsageHwVideoEncoder & grallocUsage))
                            {
                                grallocUsage |= GrallocUsageProducerVideoNV21Encoder;
                            }
                            else
                            {
                                grallocUsage |= GrallocUsageProducerCamera;
                            }
                            break;

                        default:
                            // Non-UBWC formats must not carry the private UBWC flags
                            grallocUsage &= ~(GrallocUsagePrivate0 | GrallocUsagePrivate3);
                            break;
                    }
                }

                break;

            case StreamTypeInput:
            case StreamTypeBidirectional:
                grallocUsage |= (GrallocUsageHwCameraRead | GrallocUsageHwCameraWrite);
                break;

            default:
                break;
        }

        pChiStreamWrapper->SetNativeGrallocUsage(grallocUsage);
    }

    UINT32 maxNumBuffers = DefaultRequestQueueDepth;

    // HFR batching needs more in-flight buffers, capped at MaxNumberOfBuffersAllowed
    // so HAL + video buffers stay within the framework's 64-buffer limit
    if (numBatchFrames> 1)
    {
        maxNumBuffers = RequestQueueDepth * numBatchFrames;

        if (maxNumBuffers > MaxNumberOfBuffersAllowed)
        {
            maxNumBuffers = MaxNumberOfBuffersAllowed;
        }
    }
    // Secure (protected) streams use the dedicated secure-camera buffer budget
    if ((NULL != pStream) && (0 != (pStream->grallocUsage & GrallocUsageProtected)))
    {
        maxNumBuffers = GetStaticSettings()->maxBuffersSecureCamera;
    }
    pChiStreamWrapper->SetNativeMaxNumBuffers(maxNumBuffers);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// ChiContext::CloneNodeProperties
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID ChiContext::CloneNodeProperties(
    ChiNode*      pChiNode,
    PerNodeInfo*  pPerNodeInfo)
{
    // Deep-copies the CHI node's property list into the pipeline descriptor so
    // the descriptor does not reference client-owned memory. String-valued
    // properties are cloned with their NUL terminator; UINT-valued properties
    // are cloned by value. Unrecognized property ids are left with pValue NULL.
    // The clones are freed in DestroyPipelineDescriptor().
    pPerNodeInfo->nodePropertyCount = pChiNode->numProperties;
    pPerNodeInfo->pNodeProperties =
        static_cast<PerNodeProperty*>(CAMX_CALLOC(sizeof(PerNodeProperty) * pChiNode->numProperties));
    if (NULL == pPerNodeInfo->pNodeProperties)
    {
        CAMX_LOG_ERROR(CamxLogGroupHAL, "No memory allocated for pNodeProperties");
        return;
    }
    for (UINT i = 0; i < pChiNode->numProperties; i++)
    {
        SIZE_T length = 0;

        pPerNodeInfo->pNodeProperties[i].id = pChiNode->pNodeProperties[i].id;

        // Determine the clone size per property id; the allocation and copy are
        // shared below (previously each case duplicated the CALLOC/Memcpy pair).
        switch (pChiNode->pNodeProperties[i].id)
        {
            // String-valued properties: copy the NUL-terminated string
            case NodePropertyCustomLib:
            case NodePropertyProfileId:
            case NodePropertyStabilizationType:
            case NodePropertyProcessingType:
            case NodePropertyIPEDownscale:
            case NodePropertyIPEDownscaleWidth:
            case NodePropertyIPEDownscaleHeight:
            case NodePropertyIFECSIDHeight:
            case NodePropertyIFECSIDWidth:
            case NodePropertyIFECSIDTop:
            case NodePropertyIFECSIDLeft:
            case NodePropertyNodeClass:
            case NodePropertyGPUCapsMaskType:
            case NodePropertyEnbaleIPECHICropDependency:
            case NodePropertyStitchMaxJpegSize:
                length = OsUtils::StrLen(static_cast<const CHAR*>(pChiNode->pNodeProperties[i].pValue)) + 1;
                break;

            // UINT-valued properties
            case NodePropertyStatsSkipPattern:
            case NodePropertyEnableFOVC:
                length = sizeof(UINT);
                break;

            default:
                // Unknown id: nothing to clone; pValue stays NULL (zeroed by CALLOC)
                break;
        }

        if (0 < length)
        {
            pPerNodeInfo->pNodeProperties[i].pValue = CAMX_CALLOC(length);

            // BUGFIX: the allocation was previously passed to Memcpy unchecked,
            // dereferencing NULL on allocation failure
            if (NULL != pPerNodeInfo->pNodeProperties[i].pValue)
            {
                Utils::Memcpy(pPerNodeInfo->pNodeProperties[i].pValue, pChiNode->pNodeProperties[i].pValue, length);
            }
            else
            {
                CAMX_LOG_ERROR(CamxLogGroupHAL, "No memory allocated for node property %d value",
                               pChiNode->pNodeProperties[i].id);
            }
        }
    }
    return;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// ChiContext::ProcessPipelineCreateDesc
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Translates a CHI pipeline create descriptor into the internal PipelineDescriptor:
///         copies per-node info, clones node properties, classifies nodes, and resolves the
///         output/input port links between nodes.
///
/// @param  pPipelineCreateDescriptor   Client-supplied pipeline description (nodes + links)
/// @param  numOutputs                  Unused
/// @param  pOutputBufferDescriptor     Unused
/// @param  pPipelineDescriptor         Internal descriptor populated by this function
VOID ChiContext::ProcessPipelineCreateDesc(
    const ChiPipelineCreateDescriptor* pPipelineCreateDescriptor,
    UINT                               numOutputs,
    ChiPortBufferDescriptor*           pOutputBufferDescriptor,
    PipelineDescriptor*                pPipelineDescriptor)
{
    CAMX_UNREFERENCED_PARAM(numOutputs);
    CAMX_UNREFERENCED_PARAM(pOutputBufferDescriptor);

    PerPipelineInfo* pPipelineInfo = &pPipelineDescriptor->pipelineInfo;

    // Start from a clean slate; everything below fills in the pipeline info
    Utils::Memset(pPipelineInfo, 0, sizeof(PerPipelineInfo));

    pPipelineInfo->numNodes = pPipelineCreateDescriptor->numNodes;

    const ChiNodeLink* pChiNodeLinkages = pPipelineCreateDescriptor->pLinks;

    // First pass: copy per-node info, classify each node, and resolve its output port links
    for (UINT node = 0; node < pPipelineInfo->numNodes; node++)
    {
        PerNodeInfo*  pPerNodeInfo   = &pPipelineInfo->nodeInfo[node];
        ChiNode*      pChiNode       = &pPipelineCreateDescriptor->pNodes[node];
        ChiNodePorts* pChiPorts      = &pChiNode->nodeAllPorts;
        UINT32        numInputPorts  = pChiPorts->numInputPorts;
        UINT32        numOutputPorts = pChiPorts->numOutputPorts;

        pPerNodeInfo->nodeId               = pChiNode->nodeId;
        pPerNodeInfo->instanceId           = pChiNode->nodeInstanceId;
        pPerNodeInfo->inputPorts.numPorts  = numInputPorts;
        pPerNodeInfo->outputPorts.numPorts = numOutputPorts;

        // Deep-copy the CHI node properties so the descriptor owns its own copies
        CloneNodeProperties(pChiNode, pPerNodeInfo);

        // IFE is always the first HW Node (thereby checking on HwNodeIDStart). Any topology that has sensor node
        // would be a real time and with same analogy, any topology that has IFE is also real time as sensor is
        // hooked directly to IFE. Since there would be some usecases where there is only IFE that is hooked with
        // external sensor then it makes sense to check for IFE
        if ((Sensor == pPerNodeInfo->nodeId) || (HwNodeIDStart == pPerNodeInfo->nodeId))
        {
            pPipelineDescriptor->flags.isSensorInput = TRUE;
            pPipelineDescriptor->flags.isRealTime    = TRUE;
        }

        /// @todo (CAMX-3119) remove Torch check below and handle this in generic way.
        if (Torch == pPerNodeInfo->nodeId)
        {
            pPipelineDescriptor->flags.isTorchWidget = TRUE;
        }

        // Set the node class (Bypass/Inplace/Default) from the NodePropertyNodeClass property.
        // Default first, and only change the class when the NodePropertyNodeClass entry is seen.
        // Previously the else branch reset nodeClass to Default for every non-NodeClass property,
        // so a Bypass/Inplace classification was lost whenever NodePropertyNodeClass was not the
        // last entry in the property list; the result is now order-independent.
        pPerNodeInfo->nodeClass = NodeClass::Default;

        for (UINT8 index = 0; index < pPerNodeInfo->nodePropertyCount; index++)
        {
            if (NodePropertyNodeClass == pPerNodeInfo->pNodeProperties[index].id)
            {
                const CHAR* pNodeClassValue =
                    static_cast<const CHAR*>(pPerNodeInfo->pNodeProperties[index].pValue);

                if (0 == OsUtils::StrCmp(pNodeClassValue, "1"))
                {
                    pPerNodeInfo->nodeClass = NodeClass::Bypass;
                }
                else if (0 == OsUtils::StrCmp(pNodeClassValue, "2"))
                {
                    pPerNodeInfo->nodeClass = NodeClass::Inplace;
                }
            }
        }

        // Go through the output ports
        for (UINT outputPort = 0; outputPort < numOutputPorts; outputPort++)
        {
            ChiOutputPortDescriptor* pChiOutputPortDescriptor = &pChiPorts->pOutputPorts[outputPort];
            OutputPortInfo*          pOutputPortInfo          = &pPerNodeInfo->outputPorts.portInfo[outputPort];

            pOutputPortInfo->portId               = pChiOutputPortDescriptor->portId;
            pOutputPortInfo->portSourceTypeId     = pChiOutputPortDescriptor->portSourceTypeId;
            pOutputPortInfo->numSourceIdsMapped   = pChiOutputPortDescriptor->numSourceIdsMapped;
            pOutputPortInfo->pMappedSourcePortIds = pChiOutputPortDescriptor->pMappedSourceIds;

            // Inplace nodes output directly to a stream buffer as well
            if ((FALSE != pChiOutputPortDescriptor->isOutputStreamBuffer) ||
                (NodeClass::Inplace == pPerNodeInfo->nodeClass))
            {
                pOutputPortInfo->flags.isSinkBuffer = TRUE;
            }
            else if (FALSE != pChiOutputPortDescriptor->isSinkPort)
            {
                pOutputPortInfo->flags.isSinkNoBuffer = TRUE;
            }

            // Find the link whose source is this node/port and copy its buffer properties
            for (UINT link = 0; link < pPipelineCreateDescriptor->numLinks; link++)
            {
                ///@ todo (CAMX-1797) Disable setting
                if (TRUE == GetStaticSettings()->forceDisableUBWCOnIfeIpeLink)
                {
                    for (UINT dst = 0; dst < pChiNodeLinkages[link].numDestNodes; dst++)
                    {
                        // 65536/65538 are the IFE/IPE node ids; downgrade UBWCTP10 on IFE->IPE
                        // links to NV12 when the override setting requests it
                        if ((pChiNodeLinkages[link].srcNode.nodeId         == 65536) &&
                            (pChiNodeLinkages[link].pDestNodes[dst].nodeId == 65538))
                        {
                            if (Format::UBWCTP10 == static_cast<Format>(pChiNodeLinkages[link].bufferProperties.bufferFormat))
                            {
                                UINT32* pOverrideFormat =
                                    // NOWHINE CP036a: Since the function is const, had to add the const_cast
                                    const_cast<UINT32*>(&(pChiNodeLinkages[link].bufferProperties.bufferFormat));

                                *pOverrideFormat = static_cast<UINT32>(Format::YUV420NV12);
                                break;
                            }
                        }
                    }
                }

                if ((pOutputPortInfo->portId  == pChiNodeLinkages[link].srcNode.nodePortId) &&
                    (pPerNodeInfo->nodeId     == pChiNodeLinkages[link].srcNode.nodeId)     &&
                    (pPerNodeInfo->instanceId == pChiNodeLinkages[link].srcNode.nodeInstanceId))
                {
                    const ChiLinkBufferProperties* pChiLinkBufferProperties = &pChiNodeLinkages[link].bufferProperties;
                    const ChiLinkProperties*       pChiLinkProperties       = &pChiNodeLinkages[link].linkProperties;

                    pOutputPortInfo->portLink.numInputPortsConnected = pChiNodeLinkages[link].numDestNodes;

                    if (FALSE != pChiLinkProperties->isBatchedMode)
                    {
                        pOutputPortInfo->portLink.linkProperties.isBatchMode = TRUE;
                    }

                    pOutputPortInfo->portLink.linkBufferProperties.format               =
                        static_cast<Format>(pChiLinkBufferProperties->bufferFormat);
                    pOutputPortInfo->portLink.linkBufferProperties.immediateAllocCount  =
                        pChiLinkBufferProperties->bufferImmediateAllocCount;

                    pOutputPortInfo->portLink.linkBufferProperties.sizeBytes  = pChiLinkBufferProperties->bufferSize;
                    pOutputPortInfo->portLink.linkBufferProperties.queueDepth = pChiLinkBufferProperties->bufferQueueDepth;
                    pOutputPortInfo->portLink.linkBufferProperties.heap       = pChiLinkBufferProperties->bufferHeap;

                    // Translate the CHI memory-flag bitmask into the internal flag array;
                    // each handled bit is cleared so the assert below catches unknown flags
                    UINT memFlags    = pChiLinkBufferProperties->bufferFlags;
                    UINT numMemFlags = 0;

                    /// @todo (CAMX-1015) Optimize this
                    if (0 != (memFlags & BufferMemFlagHw))
                    {
                        pOutputPortInfo->portLink.linkBufferProperties.memFlags[numMemFlags] = BufferMemFlag::Hw;
                        numMemFlags++;
                        memFlags = (memFlags & (~BufferMemFlagHw));
                    }

                    if (0 != (memFlags & BufferMemFlagProtected))
                    {
                        pOutputPortInfo->portLink.linkBufferProperties.memFlags[numMemFlags] = BufferMemFlag::Protected;
                        numMemFlags++;
                        memFlags = (memFlags & (~BufferMemFlagProtected));
                    }

                    if (0 != (memFlags & BufferMemFlagCache))
                    {
                        pOutputPortInfo->portLink.linkBufferProperties.memFlags[numMemFlags] = BufferMemFlag::Cache;
                        numMemFlags++;
                        memFlags = (memFlags & (~BufferMemFlagCache));
                    }

                    if (0 != (memFlags & BufferMemFlagLockable))
                    {
                        pOutputPortInfo->portLink.linkBufferProperties.memFlags[numMemFlags] = BufferMemFlag::UMDAccess;
                        numMemFlags++;
                        memFlags = (memFlags & (~BufferMemFlagLockable));
                    }

                    CAMX_ASSERT(0 == memFlags);

                    pOutputPortInfo->portLink.linkBufferProperties.numMemFlags = numMemFlags;

                    break;
                }
            }
        }
    }

    // Second pass: resolve each input port to the (parent node index, parent output port)
    // that feeds it, now that all node ids/instance ids are populated above
    /// @todo (CAMX-1797) Remove hardcoding to pipeline index 0 - pPerUsecase->pipelineInfo[0]
    for (UINT nodeIndex = 0; nodeIndex < pPipelineInfo->numNodes; nodeIndex++)
    {
        PerNodeInfo*  pPerNodeInfo  = &pPipelineInfo->nodeInfo[nodeIndex];
        ChiNode*      pChiNode      = &pPipelineCreateDescriptor->pNodes[nodeIndex];
        ChiNodePorts* pChiPorts     = &pChiNode->nodeAllPorts;
        UINT32        numInputPorts = pChiPorts->numInputPorts;

        // Go through the input ports
        for (UINT inputPort = 0; inputPort < numInputPorts; inputPort++)
        {
            ChiInputPortDescriptor* pChiInputPortDescriptor = &pChiPorts->pInputPorts[inputPort];
            UINT                    inputPortId             = pChiInputPortDescriptor->portId;
            InputPortInfo*          pInputPortInfo          = &pPerNodeInfo->inputPorts.portInfo[inputPort];

            pInputPortInfo->portId           = inputPortId;
            pInputPortInfo->portSourceTypeId = pChiInputPortDescriptor->portSourceTypeId;

            if (FALSE != pChiInputPortDescriptor->isInputStreamBuffer)
            {
                // Fed directly by a source stream buffer; no parent node to resolve
                pInputPortInfo->flags.isSourceBuffer = TRUE;
            }
            else
            {
                BOOL matchFound = FALSE;

                for (UINT link = 0; ((link < pPipelineCreateDescriptor->numLinks) && (FALSE == matchFound)); link++)
                {
                    for (UINT dest = 0; ((dest < pChiNodeLinkages[link].numDestNodes) && (FALSE == matchFound)); dest++)
                    {
                        if ((inputPortId              == pChiNodeLinkages[link].pDestNodes[dest].nodePortId) &&
                            (pPerNodeInfo->nodeId     == pChiNodeLinkages[link].pDestNodes[dest].nodeId)     &&
                            (pPerNodeInfo->instanceId == pChiNodeLinkages[link].pDestNodes[dest].nodeInstanceId))
                        {
                            // Map the link's source node back to its index in nodeInfo[]
                            for (UINT index = 0; index < pPipelineInfo->numNodes; index++)
                            {
                                UINT32 nodeId         = pPipelineInfo->nodeInfo[index].nodeId;
                                UINT32 nodeInstanceId = pPipelineInfo->nodeInfo[index].instanceId;

                                if ((nodeId         == pChiNodeLinkages[link].srcNode.nodeId) &&
                                    (nodeInstanceId == pChiNodeLinkages[link].srcNode.nodeInstanceId))
                                {
                                    pInputPortInfo->parentNodeIndex    = index;
                                    pInputPortInfo->parentOutputPortId = pChiNodeLinkages[link].srcNode.nodePortId;

                                    matchFound = TRUE;
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::SubmitRequest
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Validates every capture request in the batch (streaming on pipelines where needed) and then
// submits the whole batch to the session. For multi-camera, requests targeting different
// pipelines are submitted to the session together.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::SubmitRequest(
    CHISession*         pSession,
    ChiPipelineRequest* pRequest)
{
    CamxResult result = CamxResultSuccess;

    // Refresh the session's multi-request synchronization bookkeeping for this batch
    pSession->UpdateMultiRequestSyncData(pRequest);

    // Validate each capture request before anything is submitted
    for (UINT requestIndex = 0; requestIndex < pRequest->numRequests; requestIndex++)
    {
        if (0 == pRequest->pCaptureRequests[requestIndex].hPipelineHandle)
        {
            // No pipeline attached to this entry; nothing to validate
            continue;
        }

        result = pSession->CheckValidInputRequest(&pRequest->pCaptureRequests[requestIndex]);

        if (CamxResultSuccess != result)
        {
            CAMX_ASSERT_ALWAYS_MESSAGE("Request batch index %u is not valid.", requestIndex);
            break;
        }

        // Fall back to non optimized stream on before PCR logic if 0 or for non real time pipelines
        if ((FALSE == pSession->IsPipelineRealTime(pRequest->pCaptureRequests[requestIndex].hPipelineHandle)) ||
            (0 == GetStaticSettings()->numPCRsBeforeStreamOn))
        {
            pSession->StreamOn(pRequest->pCaptureRequests[requestIndex].hPipelineHandle);
        }
    }

    // Only submit if the entire batch validated cleanly
    if (CamxResultSuccess == result)
    {
        result = pSession->ProcessCaptureRequest(pRequest);

        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupHAL, "Request %llu failed when submitting requests.",
                pRequest->pCaptureRequests[0].frameNumber);
        }
        CAMX_ASSERT(CamxResultSuccess == result);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::ActivatePipeline
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Activates (streams on) the given pipeline on the session. Thin wrapper over
// CHISession::StreamOn; returns whatever result the session reports.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::ActivatePipeline(
    CHISession*         pChiSession,
    CHIPIPELINEHANDLE   hPipelineDescriptor)
{
    CAMX_ASSERT(NULL != pChiSession);
    CAMX_ASSERT(NULL != hPipelineDescriptor);

    return pChiSession->StreamOn(hPipelineDescriptor);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::DeactivatePipeline
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Deactivates (streams off) the given pipeline on the session, with the deactivate mode
// bitmask forwarded as-is. Thin wrapper over CHISession::StreamOff.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult ChiContext::DeactivatePipeline(
    CHISession*                 pChiSession,
    CHIPIPELINEHANDLE           hPipelineDescriptor,
    CHIDEACTIVATEPIPELINEMODE   modeBitmask)
{
    CAMX_ASSERT(NULL != pChiSession);
    CAMX_ASSERT(NULL != hPipelineDescriptor);

    return pChiSession->StreamOff(hPipelineDescriptor, modeBitmask);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::DestroySession
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Removes the session from this context's tracking list and destroys it. A NULL session is
// rejected with an error log. isForced is forwarded to CHISession::Destroy.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID ChiContext::DestroySession(
    CHISession* pChiSession,
    BOOL isForced)
{
    if (NULL != pChiSession)
    {
        // Stop tracking before destruction so DumpState never walks a dying session
        m_sessionTracking.RemoveByValue(pChiSession);

        pChiSession->Destroy(isForced);
        // NOTE: the previous "pChiSession = NULL;" only nulled the by-value parameter's
        // local copy and had no effect for the caller; the dead assignment was removed
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupChi, "Invalid input arguments");
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::FlushSession
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Flushes the session. When the disableChiFlush override is set, a timed wait on outstanding
// requests (FlushRequests) is enforced instead of a full flush. isForced is forwarded to the
// session in both paths.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID ChiContext::FlushSession(
    CHISession* pChiSession,
    BOOL isForced)
{
    // Guard against NULL like DestroySession does, instead of dereferencing blindly
    if (NULL == pChiSession)
    {
        CAMX_LOG_ERROR(CamxLogGroupChi, "Invalid input arguments");
        return;
    }

    if (FALSE == GetStaticSettings()->disableChiFlush)
    {
        CAMX_LOG_INFO(CamxLogGroupChi, "Processing Flush Request.");
        pChiSession->Flush(isForced);
    }
    else
    {
        CAMX_LOG_WARN(CamxLogGroupChi, "Flush Request is dropped and we are enforcing a timed wait!");
        pChiSession->FlushRequests(isForced);
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::GetThreadManager
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
ThreadManager* ChiContext::GetThreadManager()
{
    // Accessor for the context-owned thread manager; caller does not take ownership
    return m_pThreadManager;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::GetHwContext
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
HwContext* ChiContext::GetHwContext() const
{
    // Accessor for the context-owned HW context; caller does not take ownership
    return m_pHwContext;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::GetStaticSettings
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
const StaticSettings* ChiContext::GetStaticSettings() const
{
    // Convenience accessor for the static settings held by the HW environment's settings manager
    return m_pHwEnvironment->GetSettingsManager()->GetStaticSettings();
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// ChiContext::DumpState
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Writes the context's diagnostic state to the given file descriptor: high-level session and
// pipeline counts, the per-session state of every tracked session, and the deferred request
// queue.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID ChiContext::DumpState(
    INT fd)
{
    static const UINT32 Indent = 2;

    // High-level counters first
    CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");
    CAMX_LOG_TO_FILE(fd, Indent, "+         Chi statistics                                           +");
    CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");
    CAMX_LOG_TO_FILE(fd, Indent, "+ Number of open sessions: %d", m_sessionTracking.NumNodes());
    CAMX_LOG_TO_FILE(fd, Indent, "+ Number of open pipeline descriptors: %d", m_pipelineTracking.NumNodes());
    CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");
    CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");

    // Walk every tracked session; each session dumps its attached pipelines and open requests
    // (which in turn expose buffer/fence state)
    for (LDLLNode* pSessionNode = m_sessionTracking.Head();
         NULL != pSessionNode;
         pSessionNode = LightweightDoublyLinkedList::NextNode(pSessionNode))
    {
        CHISession* pTrackedSession = static_cast<CHISession*>(pSessionNode->pData);

        CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");
        CAMX_LOG_TO_FILE(fd, Indent, "+           Chi Session                                            +");
        CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");
        CAMX_LOG_TO_FILE(fd, Indent, "+ Session: %p", pTrackedSession);
        pTrackedSession->DumpState(fd, Indent + 2);
    }

    // Finally, the context's deferred request queue
    CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");
    CAMX_LOG_TO_FILE(fd, Indent, "+          ChiContext DRQ                                          +");
    CAMX_LOG_TO_FILE(fd, Indent, "+------------------------------------------------------------------+");
    m_pDeferredRequestQueue->DumpState(fd, Indent + 2);

    // Dump the threadpool state
    // Dump current MemSpy state
    // Dump command buffers...
}



CAMX_NAMESPACE_END
