////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2017-2018 Qualcomm Technologies, Inc.
// All Rights Reserved.
// Confidential and Proprietary - Qualcomm Technologies, Inc.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @file  camxipenode.cpp
/// @brief IPE Node class implementation
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

#include "camxtrace.h"
#include "camxpipeline.h"
#include "camxcdmdefs.h"
#include "camxcslicpdefs.h"
#include "camxcslresourcedefs.h"
#include "camxhal3metadatautil.h"
#include "camxhal3module.h"
#include "camxhwcontext.h"
#include "camximagebuffer.h"
#include "camximageformatutils.h"
#include "camxtuningdatamanager.h"
#include "parametertuningtypes.h"
#include "camxtitan17xcontext.h"
#include "camxtitan17xdefs.h"
#include "camxipe2dlut10.h"
#include "camxipeanr10.h"
#include "camxipeasf30.h"
#include "camxipecac22.h"
#include "camxipechromaenhancement12.h"
#include "camxipechromasuppression20.h"
#include "camxipecolorcorrection13.h"
#include "camxipecolortransform12.h"
#include "camxipegamma15.h"
#include "camxipegrainadder10.h"
#include "camxipeica10.h"
#include "camxipeltm13.h"
#include "camxipesce11.h"
#include "camxipetf10.h"
#include "camxipeupscaler20.h"
#include "camxipenode.h"
#include "camxiqinterface.h"
#include "ipdefs.h"
#include "titan170_base.h"
#include "parametertuningtypes.h"
#include "camxtranslator.h"
// NOWHINE ENTIRE FILE PR002 <- Win32 definition
// This function needs to be outside the CAMX_NAMESPACE because firmware uses "ImageFormat" that is used in UMD as well
ImageFormat TranslateFormatToFirmwareImageFormat(
    CamX::Format format)
{
    // Map a UMD (CamX) image format onto its firmware equivalent; any format
    // without a firmware counterpart reports IMAGE_FORMAT_INVALID.
    switch (format)
    {
        case CamX::Format::YUV420NV12:
        case CamX::Format::YUV420NV21:
        case CamX::Format::FDYUV420NV12:
            return IMAGE_FORMAT_LINEAR_NV12;
        case CamX::Format::UBWCTP10:
            return IMAGE_FORMAT_UBWC_TP_10;
        case CamX::Format::UBWCNV12:
            return IMAGE_FORMAT_UBWC_NV_12;
        case CamX::Format::UBWCNV124R:
            return IMAGE_FORMAT_UBWC_NV12_4R;
        case CamX::Format::YUV420NV12TP10:
        case CamX::Format::YUV420NV21TP10:
            return IMAGE_FORMAT_LINEAR_TP_10;
        case CamX::Format::PD10:
            return IMAGE_FORMAT_PD_10;
        case CamX::Format::P010:
            return IMAGE_FORMAT_LINEAR_P010;
        default:
            return IMAGE_FORMAT_INVALID;
    }
}

CAMX_NAMESPACE_BEGIN

static const UINT IPEMaxInput                    = 8;    ///< Number of Input Ports : 4 Input for image buffers and 4 for ref
static const UINT IPEMaxOutput                   = 6;    ///< Number of Output Ports: Display,  Video plus 4 ref output ports
static const UINT IPEMaxTopCmdBufferPatchAddress = 546;  ///< Number of Max address patching for top level payload
static const UINT IPEMaxPreLTMPatchAddress       = 16;   ///< Number of Max address patching for preLTM IQ modules
static const UINT IPEMaxPostLTMPatchAddress      = 16;   ///< Number of Max address patching for postLTM IQ modules
static const UINT IPEMaxDMIPatchAddress          = 52;   ///< Number of Max address patching for DMI headers
static const UINT IPEMaxNPSPatchAddress          = 8;    ///< Number of Max address patching for NPS (ANR/TF)
// NOTE(review): IPEMaxDMIPatchAddress is declared above but deliberately(?) NOT included in this
// sum -- confirm whether DMI patches are accounted elsewhere or were omitted by mistake.
static const UINT IPEMaxPatchAddress             = IPEMaxTopCmdBufferPatchAddress +
                                                   IPEMaxPreLTMPatchAddress       +
                                                   IPEMaxPostLTMPatchAddress      +
                                                   IPEMaxNPSPatchAddress;         ///< Max address patches for packet;
static const UINT IPEDefaultDownScalarMode       = 1;    ///< Default downscaler mode selector
static const UINT IPEMidDownScalarMode           = 2;    ///< Mid downscaler mode selector
static const UINT IPECustomDownScalarMode        = 3;    ///< Custom downscaler mode selector
static const UINT IPEMidDownScalarWidth          = 4928; ///< Width associated with mid downscaler mode -- TODO confirm units/usage
static const UINT IPEMidDownScalarHeight         = 3808; ///< Height associated with mid downscaler mode -- TODO confirm units/usage
static const UINT IPEMFNRCmdBlobCount            = 2;    ///< Number of command buffers for MFNR enabled IPE instance

static const FLOAT IPEDownscaleThresholdMin      = 1.0f; ///< Min scale ratio above which downscaling causes IQ issues
static const FLOAT IPEDownscaleThresholdMax      = 1.2f; ///< Max scale ratio below which downscaling causes IQ issues

UINT64              IPENode::s_debugDataRequestId;       ///< Last request id used for debug-data dumping (shared across instances)
UINT32              IPENode::s_debugDataWriterCounter;   ///< Count of IPE instances sharing s_pDebugDataWriter
DebugDataWriter*    IPENode::s_pDebugDataWriter;         ///< Debug/tuning data writer shared by all IPE node instances



/// @brief Mapping entry from a tuning effect sub-mode to the Android control effect mode.
struct IPEAndroidToCamxEffect
{
    ModeEffectSubModeType   from;           ///< Effect mode value (tuning sub-mode)
    ControlEffectModeValues to;             ///< Control effect Mode Value (Android control enum)
};

/// @brief Mapping entry from a tuning scene sub-mode to the Android control scene mode.
struct IPEAndroidToCamxScene
{
    ModeSceneSubModeType   from;            ///< Scene mode value (tuning sub-mode)
    ControlSceneModeValues to;              ///< Control scene mode value (Android control enum)
};

// Map effects from CamX to Android. Entries with no Android counterpart
// (Emboss, Sketch, Neon) fall back to ControlEffectModeOff.
static IPEAndroidToCamxEffect IPEEffectMap[] =
{
    { ModeEffectSubModeType::None,       ControlEffectModeOff },
    { ModeEffectSubModeType::Mono,       ControlEffectModeMono },
    { ModeEffectSubModeType::Sepia,      ControlEffectModeSepia },
    { ModeEffectSubModeType::Negative,   ControlEffectModeNegative },
    { ModeEffectSubModeType::Solarize,   ControlEffectModeSolarize },
    { ModeEffectSubModeType::Posterize,  ControlEffectModePosterize },
    { ModeEffectSubModeType::Aqua,       ControlEffectModeAqua },
    { ModeEffectSubModeType::Emboss,     ControlEffectModeOff },       // no Android equivalent
    { ModeEffectSubModeType::Sketch,     ControlEffectModeOff },       // no Android equivalent
    { ModeEffectSubModeType::Neon,       ControlEffectModeOff },       // no Android equivalent
    { ModeEffectSubModeType::Blackboard, ControlEffectModeBlackboard },
    { ModeEffectSubModeType::Whiteboard, ControlEffectModeWhiteboard },
};

// Map scene modes from CamX to Android. Entries with no Android counterpart
// (BackLight, AntiShake, Flowers, AR, BestShot, ...) fall back to ControlSceneModeDisabled.
static IPEAndroidToCamxScene IPESceneMap[] =
{
    { ModeSceneSubModeType::None,          ControlSceneModeDisabled },
    { ModeSceneSubModeType::Landscape,     ControlSceneModeLandscape },
    { ModeSceneSubModeType::Snow,          ControlSceneModeSnow },
    { ModeSceneSubModeType::Beach,         ControlSceneModeBeach },
    { ModeSceneSubModeType::Sunset,        ControlSceneModeSunset },
    { ModeSceneSubModeType::Night,         ControlSceneModeNight },
    { ModeSceneSubModeType::Portrait,      ControlSceneModePortrait },
    { ModeSceneSubModeType::BackLight,     ControlSceneModeDisabled },    // no Android equivalent
    { ModeSceneSubModeType::Sports,        ControlSceneModeSports },
    { ModeSceneSubModeType::AntiShake,     ControlSceneModeDisabled },    // no Android equivalent
    { ModeSceneSubModeType::Flowers,       ControlSceneModeDisabled },    // no Android equivalent
    { ModeSceneSubModeType::CandleLight,   ControlSceneModeCandlelight },
    { ModeSceneSubModeType::Fireworks,     ControlSceneModeFireworks },
    { ModeSceneSubModeType::Party,         ControlSceneModeParty },
    { ModeSceneSubModeType::NightPortrait, ControlSceneModeNightPortrait },
    { ModeSceneSubModeType::Theater,       ControlSceneModeTheatre },
    { ModeSceneSubModeType::Action,        ControlSceneModeAction },
    { ModeSceneSubModeType::AR,            ControlSceneModeDisabled },    // no Android equivalent
    { ModeSceneSubModeType::FacePriority,  ControlSceneModeFacePriority },
    { ModeSceneSubModeType::Barcode,       ControlSceneModeBarcode },
    { ModeSceneSubModeType::BestShot,      ControlSceneModeDisabled },    // no Android equivalent
};

// This list will follow order of modules in real hardware.
// Columns: IQ module type, enable flag (presumably whether the module is instantiated --
// confirm against IPEIQModuleInfo), IPE path (INPUT vs REFERENCE), and factory function.
// Note IPEICA appears twice: once on the input path and once on the reference path.
static IPEIQModuleInfo IQModulesList[] =
{
    { ISPIQModuleType::IPEICA,               TRUE,   IPEPath::INPUT,       IPEICA10::Create               },
    { ISPIQModuleType::IPE2DLUT,             TRUE,   IPEPath::INPUT,       IPE2DLUT10::Create             },
    { ISPIQModuleType::IPEANR,               TRUE,   IPEPath::INPUT,       IPEANR10::Create               },
    { ISPIQModuleType::IPETF,                TRUE,   IPEPath::INPUT,       IPETF10::Create                },
    { ISPIQModuleType::IPEICA,               TRUE,   IPEPath::REFERENCE,   IPEICA10::Create               },
    { ISPIQModuleType::IPECAC,               TRUE,   IPEPath::INPUT,       IPECAC22::Create               },
    { ISPIQModuleType::IPECST,               TRUE,   IPEPath::INPUT,       IPEColorTransform12::Create    },
    { ISPIQModuleType::IPELTM,               TRUE,   IPEPath::INPUT,       IPELTM13::Create               },
    { ISPIQModuleType::IPEColorCorrection,   TRUE,   IPEPath::INPUT,       IPEColorCorrection13::Create   },
    { ISPIQModuleType::IPEGamma,             TRUE,   IPEPath::INPUT,       IPEGamma15::Create             },
    { ISPIQModuleType::IPEChromaEnhancement, TRUE,   IPEPath::INPUT,       IPEChromaEnhancement12::Create },
    { ISPIQModuleType::IPEChromaSuppression, TRUE,   IPEPath::INPUT,       IPEChromaSuppression20::Create },
    { ISPIQModuleType::IPESCE,               FALSE,  IPEPath::INPUT,       IPESCE11::Create               },
    { ISPIQModuleType::IPEASF,               TRUE,   IPEPath::INPUT,       IPEASF30::Create               },
    { ISPIQModuleType::IPEUpscaler,          TRUE,   IPEPath::INPUT,       IPEUpscaler20::Create          },
    { ISPIQModuleType::IPEGrainAdder,        TRUE,   IPEPath::INPUT,       IPEGrainAdder10::Create        },
};

// Size of the frame-process command buffer: the IpeFrameProcess struct, the CDM program arrays,
// and one CdmProgram per pre-LTM and post-LTM program entry appended after it.
// Bug fix: parentheses added so BOTH the pre-LTM and post-LTM program counts are multiplied by
// sizeof(CdmProgram); previously operator precedence multiplied only the post-LTM count, adding
// the pre-LTM count as raw bytes and under-sizing the buffer.
static const UINT CmdBufferFrameProcessSizeBytes = sizeof(IpeFrameProcess) +                    ///< firmware requires different
    (static_cast<UINT>(CDMProgramArrayOrder::ProgramArrayMax) * sizeof(CDMProgramArray)) +      ///< CDM programs in payload
    ((static_cast<UINT>(PreLTMCDMProgramOrder::ProgramIndexMaxPreLTM) +                         ///< are appended to Frame
      static_cast<UINT>(PostLTMCDMProgramOrder::ProgramIndexMaxPostLTM)) * sizeof(CdmProgram)); ///< process data

// Private Static member holding fixed values of Frame buffer offsets within IpeFrameProcess struct, for ease of patching.
// Populated once in the IPENode constructor (see offset precompute loop).
FrameBuffers IPENode::s_frameBufferOffset[IPEMaxSupportedBatchSize][IPE_IO_IMAGES_MAX];

// Size of the generic blob command buffer: one blob header (in dwords) plus the ICP
// clock/bandwidth request payload.
static const UINT CmdBufferGenericBlobSizeInBytes = CSLGenericBlobHeaderSizeInDwords * sizeof(UINT32)
    + sizeof(CSLICPClockBandwidthRequest);
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::IPENode
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
IPENode::IPENode()
{
    Titan17xContext* pContext = static_cast<Titan17xContext*>(GetHwContext());

    m_pNodeName             = "IPE";
    m_OEMICASettingEnable   = pContext->GetTitan17xSettingsManager()->GetTitan17xStaticSettings()->IsICAIQSettingEnable;
    m_OEMIQSettingEnable    = pContext->GetTitan17xSettingsManager()->GetTitan17xStaticSettings()->IsOEMIQSettingEnable;
    m_OEMStatsSettingEnable = GetStaticSettings()->IsOEMStatSettingEnable;
    m_enableIPEHangDump     = GetStaticSettings()->enableIPEHangDump;
    m_compressiononOutput   = FALSE;

    // Precompute, once per (batch, port, plane), the byte offset of every frame-buffer and
    // metadata-buffer pointer inside IpeFrameProcessData, so per-request address patching can
    // index straight into the payload.
    for (UINT batchIndex = 0; batchIndex < IPEMaxSupportedBatchSize; batchIndex++)
    {
        for (UINT port = 0; port < IPE_IO_IMAGES_MAX; port++)
        {
            for (UINT plane = 0; plane < MAX_NUM_OF_IMAGE_PLANES; plane++)
            {
                s_frameBufferOffset[batchIndex][port].bufferPtr[plane] =
                    static_cast<UINT32>(offsetof(IpeFrameProcessData, frameSets[0]) + sizeof(FrameSet) * batchIndex) +
                    static_cast<UINT32>(offsetof(FrameSet, buffers[0]) + sizeof(FrameBuffers) * port) +
                    static_cast<UINT32>(offsetof(FrameBuffers, bufferPtr[0]) + (sizeof(FrameBufferPtr) * plane));

                s_frameBufferOffset[batchIndex][port].metadataBufferPtr[plane] =
                    static_cast<UINT32>(offsetof(IpeFrameProcessData, frameSets[0]) + sizeof(FrameSet) * batchIndex) +
                    static_cast<UINT32>(offsetof(FrameSet, buffers[0]) + sizeof(FrameBuffers) * port) +
                    static_cast<UINT32>(offsetof(FrameBuffers, metadataBufferPtr[0]) + (sizeof(FrameBufferPtr) * plane));
            }
        }
    }

    // Bug fix: these vendor-tag queries were previously executed INSIDE CAMX_ASSERT(...), so on
    // builds where asserts compile out, the queries never ran and the tag locations were left
    // uninitialized. Run the queries unconditionally and report failures explicitly.
    // Names are listed in m_IPEICATAGLocation index order (0..8).
    static const CHAR* ICAVendorTagNames[] =
    {
        "ICAInPerspectiveTransform",
        "ICAInGridTransform",
        "ICAInInterpolationParams",
        "ICARefPerspectiveTransform",
        "ICARefGridTransform",
        "ICARefInterpolationParams",
        "ICAReferenceParams",
        "ICAInPerspectiveTransformLookAhead",
        "ICAInGridTransformLookAhead",
    };

    for (UINT tagIndex = 0; tagIndex < CAMX_ARRAY_SIZE(ICAVendorTagNames); tagIndex++)
    {
        CamxResult result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.ipeicaconfigs",
                                                                     ICAVendorTagNames[tagIndex],
                                                                     &m_IPEICATAGLocation[tagIndex]);
        CAMX_ASSERT(CamxResultSuccess == result);
        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to query vendor tag location for %s",
                           ICAVendorTagNames[tagIndex]);
        }
    }

    CamxResult resultMFNR = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.mfnrconfigs",
                                                                     "MFNRTotalNumFrames",
                                                                     &m_MFNRTotalNumFramesTAGLocation);
    CAMX_ASSERT(CamxResultSuccess == resultMFNR);
    if (CamxResultSuccess != resultMFNR)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to query vendor tag location for MFNRTotalNumFrames");
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::~IPENode
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
IPENode::~IPENode()
{
    // Tear down node-owned resources first, then release the CSL device if this
    // instance still holds an acquisition.
    Cleanup();

    if (IsDeviceAcquired())
    {
        ReleaseDevice();
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// DumpPayload
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
static CamxResult DumpPayload(
    IPECmdBufferId  index,
    CmdBuffer*      pIPECmdBuffer,
    UINT64          requestId)
{
    CamxResult  result = CamxResultSuccess;
    CHAR        filename[100];

    // Dump the raw contents of one IPE command buffer to a per-request file, gated on the
    // dumpIPEFirmwarePayload static setting. Returns CamxResultEInvalidArg for buffer ids
    // that have no dump mapping or for a NULL buffer.
    // Note: the CAMX_UNREFERENCED_PARAM macros previously applied to 'index' and
    // 'pIPECmdBuffer' were removed -- both parameters are used unconditionally below.

    /// @todo (CAMX-2491) Remove this way of getting settings
    if (TRUE == HwEnvironment::GetInstance()->GetStaticSettings()->dumpIPEFirmwarePayload)
    {
        // Guard against a NULL command buffer before dereferencing it for the dump.
        if (NULL == pIPECmdBuffer)
        {
            return CamxResultEInvalidArg;
        }

        // Map the buffer id to the name fragment used in the dump filename.
        const CHAR* pBufferName = NULL;

        switch (index)
        {
            case CmdBufferFrameProcess:
                pBufferName = "FrameProcessData";
                break;
            case CmdBufferStriping:
                pBufferName = "StripingOutput";
                break;
            case CmdBufferIQSettings:
                pBufferName = "IQSettings";
                break;
            case CmdBufferPreLTM:
                pBufferName = "PreLTM";
                break;
            case CmdBufferPostLTM:
                pBufferName = "PostLTM";
                break;
            case CmdBufferDMIHeader:
                pBufferName = "DMIHeader";
                break;
            case CmdBufferNPS:
                pBufferName = "NPS";
                break;
            default:
                result = CamxResultEInvalidArg;
                break;
        }

        if (CamxResultSuccess == result)
        {
            // requestId is UINT64, so use the unsigned conversion specifier (%llu, not %lld).
            CamX::OsUtils::SNPrintF(filename, sizeof(filename), "%s/IPEDump%s_%llu.txt",
                                    ConfigFileDirectory, pBufferName, requestId);

            FILE* pFile = CamX::OsUtils::FOpen(filename, "wb");
            if (NULL != pFile)
            {
                CamX::OsUtils::FWrite(pIPECmdBuffer->GetHostAddr(), pIPECmdBuffer->GetMaxLength(), 1, pFile);
                CamX::OsUtils::FClose(pFile);
            }
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// DumpDebug
///
/// @brief: This is called when firmware signal an error and UMD needs firmware dump
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
static CamxResult DumpDebug(
    IPECmdBufferId          index,
    CmdBuffer*              pBuffer,
    UINT64                  requestId,
    UINT32                  instance,
    UINT32                  realtime,
    IPEInstanceProperty     instanceProperty)
{
    CamxResult      result = CamxResultSuccess;
    CHAR            filename[256];
    static UINT32   output = 0;    // monotonically increasing dump sequence number (not thread-safe)

    // Only the BL memory command buffer is dumped; the original code nested a redundant
    // switch(index) inside this check whose default branch was unreachable -- simplified here.
    if (CmdBufferBLMemory == index)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "dump bl buffer");

        // Guard against a NULL buffer before dereferencing it for the dump.
        if (NULL == pBuffer)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Can't open file");
            return CamxResultEFailed;
        }

#if defined (CAMX_ANDROID_API) && (CAMX_ANDROID_API >= 28)
        CAMX_UNREFERENCED_PARAM(instanceProperty);
        CamX::OsUtils::SNPrintF(filename, sizeof(filename),
            "/data/vendor/camera/IPEBLMemoryDump%llu_%d_instance_%d_realtime_%d.txt",
            requestId, output, instance, realtime);
#else
        CamX::OsUtils::SNPrintF(filename, sizeof(filename),
            "/data/misc/camera/IPEBLMemoryDump%llu_%d_instance_%d_realtime_%d_processingType_%d_profileId_%d.txt",
            requestId, output, instance, realtime, instanceProperty.processingType, instanceProperty.profileId);
#endif // Android-P or later
        output++;

        FILE* pFile = CamX::OsUtils::FOpen(filename, "wb");
        if (NULL == pFile)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Can't open file");
            return CamxResultEFailed;
        }
        CamX::OsUtils::FWrite(pBuffer->GetHostAddr(), pBuffer->GetMaxLength(), 1, pFile);
        CamX::OsUtils::FClose(pFile);
    }
    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::TranslateToFirmwarePortId
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CAMX_INLINE VOID IPENode::TranslateToFirmwarePortId(
    UINT32          portId,
    IPE_IO_IMAGES*  pFirmwarePortId)
{
    // UMD port ids map one-to-one onto firmware IPE_IO_IMAGES values; validate the range,
    // then forward the numeric value as the firmware enum.
    CAMX_ASSERT(static_cast<UINT32>(IPE_IO_IMAGES::IPE_IO_IMAGES_MAX) > portId);

    *pFirmwarePortId = static_cast<IPE_IO_IMAGES>(portId);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::Create
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
IPENode* IPENode::Create(
    const NodeCreateInputData* pCreateInputData,
    NodeCreateOutputData*      pCreateOutputData)
{
    CAMX_UNREFERENCED_PARAM(pCreateOutputData);

    // Bug fix: validate the input BEFORE allocating. The original allocated first and, when
    // pCreateInputData was NULL, returned NULL without deleting the node -- a memory leak.
    if (NULL == pCreateInputData)
    {
        CAMX_LOG_ERROR(CamxLogGroupISP, "Null input pointer");
        return NULL;
    }

    IPENode* pNodeObj = CAMX_NEW IPENode;

    if (NULL == pNodeObj)
    {
        CAMX_LOG_ERROR(CamxLogGroupISP, "Null input pointer");
        return NULL;
    }

    UINT32   propertyCount = pCreateInputData->pNodeInfo->nodePropertyCount;
    INT32    stabType      = 0;

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "nodePropertyCount %d",
        pCreateInputData->pNodeInfo->nodePropertyCount);

    pNodeObj->m_nodePropDisableZoomCrop = FALSE;

    for (UINT32 count = 0; count < propertyCount; count++)
    {
        // There can be multiple IPE instances in a pipeline, each instance can have different IQ modules enabled
        if (NodePropertyProfileId == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            pNodeObj->m_instanceProperty.profileId = static_cast<IPEProfileId>(
                atoi(static_cast<const CHAR*>(
                    pCreateInputData->pNodeInfo->pNodeProperties[count].pValue)));
        }
        // If EIS is enabled, IPE instance needs to know whether it is in the EIS2.0 or EIS3.0 path.
        else if (NodePropertyStabilizationType == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            // Node property value is shifted to use multiple stabilization type together.
            stabType |= (1 << (atoi(static_cast<const CHAR*>(
                    pCreateInputData->pNodeInfo->pNodeProperties[count].pValue))));
            // Check if EIS ICA dependency need to be bypassed.
            // NOTE(review): this reads m_instanceProperty.stabilizationType, which at this point
            // only reflects stabilization properties seen in EARLIER loop iterations -- confirm
            // the ordering assumption is intentional.
            if ((TRUE == HwEnvironment::GetInstance()->GetStaticSettings()->bypassIPEICADependency) &&
                ((IPEStabilizationTypeEIS2 & pNodeObj->m_instanceProperty.stabilizationType) ||
                 (IPEStabilizationTypeEIS3 & pNodeObj->m_instanceProperty.stabilizationType)))
            {
                CAMX_LOG_INFO(CamxLogGroupPProc, "EIS stabalization disabled");
            }
            else
            {
                pNodeObj->m_instanceProperty.stabilizationType = static_cast<IPEStabilizationType>(stabType);
            }
        }
        // If MFNR is enabled, IPE instance needs to know if its in prefilter/blend/scale or postfilter.
        else if (NodePropertyProcessingType == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            pNodeObj->m_instanceProperty.processingType = static_cast<IPEProcessingType>(
                atoi(static_cast<const CHAR*>(
                    pCreateInputData->pNodeInfo->pNodeProperties[count].pValue)));
        }
        // There can be multiple IPE instances in a pipeline, each instance can have different properties
        else if (NodePropertyIPEDownscale == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            pNodeObj->m_instanceProperty.ipeOnlyDownscalerMode =
                atoi(static_cast<const CHAR*>(
                pCreateInputData->pNodeInfo->pNodeProperties[count].pValue));
        }
        else if (NodePropertyIPEDownscaleWidth == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            pNodeObj->m_instanceProperty.ipeDownscalerWidth =
                atoi(static_cast<const CHAR*>(
                pCreateInputData->pNodeInfo->pNodeProperties[count].pValue));
        }
        else if (NodePropertyIPEDownscaleHeight == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            pNodeObj->m_instanceProperty.ipeDownscalerHeight =
                atoi(static_cast<const CHAR*>(
                pCreateInputData->pNodeInfo->pNodeProperties[count].pValue));
        }
        else if (NodePropertyEnbaleIPECHICropDependency == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            pNodeObj->m_instanceProperty.enableCHICropInfoPropertyDependency = static_cast<BOOL>(
                atoi(static_cast<const CHAR*>(pCreateInputData->pNodeInfo->pNodeProperties[count].pValue)));
        }
        else if (NodePropertyEnableFOVC == pCreateInputData->pNodeInfo->pNodeProperties[count].id)
        {
            // FOVC is passed as a binary value rather than an ASCII string, so read it directly.
            pNodeObj->m_instanceProperty.enableFOVC = *static_cast<UINT*>
                (pCreateInputData->pNodeInfo->pNodeProperties[count].pValue);
        }
    }
    CAMX_LOG_INFO(CamxLogGroupPProc, "IPE Instance profileId %d stabilization %d processing: %d"
                  "ipeOnlyDownscalerMode=%d, width=%d, height=%d",
                  pNodeObj->m_instanceProperty.profileId, pNodeObj->m_instanceProperty.stabilizationType,
                  pNodeObj->m_instanceProperty.processingType,
                  pNodeObj->m_instanceProperty.ipeOnlyDownscalerMode,
                  pNodeObj->m_instanceProperty.ipeDownscalerWidth,
                  pNodeObj->m_instanceProperty.ipeDownscalerHeight);

    return pNodeObj;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::Destroy
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::Destroy()
{
    // Nodes are heap-allocated via Create(); Destroy() is the matching self-deleting teardown.
    // The destructor performs Cleanup() and device release.
    CAMX_DELETE this;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// static VOID FillCmdBufferParams
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CAMX_INLINE static VOID FillCmdBufferParams(
    ResourceParams* pParams,
    UINT            resourceSize,
    CmdType         type,
    UINT32          memFlags,
    UINT            maxNumNestedAddrs,
    const INT32*    pDeviceIndex,
    UINT            blobCount)
{
    // Start from a zeroed descriptor so every field not assigned below stays 0.
    *pParams = { 0 };

    // Identify this resource as a command buffer of the requested type.
    pParams->usageFlags.cmdBuffer = 1;
    pParams->cmdParams.type       = type;

    // Sizing: blobCount blobs of resourceSize bytes each in the pool.
    pParams->resourceSize = resourceSize;
    pParams->poolSize     = resourceSize * blobCount;
    pParams->alignment    = CamxCommandBufferAlignmentInBytes;

    // Address patching is enabled only when nested addresses are expected.
    pParams->cmdParams.maxNumNestedAddrs  = maxNumNestedAddrs;
    pParams->cmdParams.enableAddrPatching = (0 < maxNumNestedAddrs) ? 1 : 0;

    // Memory flags and the (optional) single target device.
    pParams->memFlags       = memFlags;
    pParams->pDeviceIndices = pDeviceIndex;
    pParams->numDevices     = (NULL != pParams->pDeviceIndices) ? 1 : 0;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::ProcessingNodeInitialize
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::ProcessingNodeInitialize(
    const NodeCreateInputData* pCreateInputData,
    NodeCreateOutputData*      pCreateOutputData)
{
    CAMX_UNREFERENCED_PARAM(pCreateInputData);

    CamxResult      result                      = CamxResultSuccess;

    INT32           deviceIndex                 = -1;
    UINT            indicesLengthRequired       = 0;

    CAMX_ASSERT(IPE == Type());
    CAMX_ASSERT(NULL != pCreateOutputData);

    pCreateOutputData->maxOutputPorts = IPEMaxOutput;
    pCreateOutputData->maxInputPorts  = IPEMaxInput;

    // Add device indices: the IPE node runs on the ICP device.
    result = HwEnvironment::GetInstance()->GetDeviceIndices(CSLDeviceTypeICP, &deviceIndex, 1, &indicesLengthRequired);

    if (CamxResultSuccess == result)
    {
        CAMX_ASSERT(indicesLengthRequired == 1);
        result = AddDeviceIndex(deviceIndex);
        m_deviceIndex = deviceIndex;
    }

    // Allocate tuning-metadata storage only when the tuning dump size setting is non-zero.
    if ((CamxResultSuccess == result) && (0 != GetStaticSettings()->tuningDumpDataSizeIPE))
    {
        m_pTuningMetadata = static_cast<IPETuningMetadata*>(CAMX_CALLOC(sizeof(IPETuningMetadata)));
        if (NULL == m_pTuningMetadata)
        {
            CAMX_LOG_ERROR(CamxLogGroupISP, "Failed to allocate Tuning metadata.");
            result = CamxResultENoMemory;
        }

        if (CamxResultSuccess == result)
        {
            // The debug-data writer is shared across all IPE instances; create it lazily.
            if (NULL == s_pDebugDataWriter)
            {
                s_pDebugDataWriter = CAMX_NEW TDDebugDataWriter();
                if (NULL == s_pDebugDataWriter)
                {
                    CAMX_LOG_ERROR(CamxLogGroupISP, "Failed to allocate Tuning metadata.");
                    result = CamxResultENoMemory;
                }
            }

            // Keep track of the instances using DebugDataWriter
            if (NULL != s_pDebugDataWriter)
            {
                s_debugDataWriterCounter++;
            }
        }
    }

    m_OEMStatsConfig = GetStaticSettings()->IsOEMStatSettingEnable;
    m_adrcInfo.isADRCEnabled = FALSE;
    m_adrcInfo.percentageOfGTM = 0.0f;

    // Configure IPE Capability.
    // Bug fix: this call previously ran unconditionally, overwriting any earlier failure
    // (e.g. CamxResultENoMemory from the tuning-metadata allocation) with its own result.
    if (CamxResultSuccess == result)
    {
        result = ConfigureIPECapability();
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetGammaOutput
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetGammaOutput(
    ISPInternalData* pISPData,
    UINT32           parentID)
{
    CamxResult result       = CamxResultSuccess;
    UINT32*    pGammaOutput = NULL;
    UINT32     gammaLength  = 0;
    UINT32     metaTag      = 0;
    BOOL       isGammaValid = FALSE;

    // Reads the published GammaInfo for the given property/tag ID and captures the
    // green-channel LUT pointer, its size, and the validity flag. The three callers
    // below previously duplicated this extraction verbatim.
    auto ReadGammaInfo = [&](UINT tagId)
    {
        const UINT properties[]               = { tagId };
        const UINT length                     = CAMX_ARRAY_SIZE(properties);
        VOID*      pData[length]              = { 0 };
        UINT64     propertyDataOffset[length] = { 0 };

        GetDataList(properties, pData, propertyDataOffset, length);

        if (NULL != pData[0])
        {
            GammaInfo* pGammaInfo = reinterpret_cast<GammaInfo*>(pData[0]);

            pGammaOutput = pGammaInfo->gammaG;
            gammaLength  = sizeof(pGammaInfo->gammaG);
            isGammaValid = pGammaInfo->isGammaValid;
        }
    };

    if (parentID == IFE)
    {
        ReadGammaInfo(PropertyIDIFEGammaOutput);
    }
    else if (parentID == BPS)
    {
        ReadGammaInfo(PropertyIDBPSGammaOutput);
    }
    else
    {
        // Parent is neither IFE nor BPS (e.g. an offline/CHI input): gamma info
        // comes from the input metadata vendor tag instead of an internal property.
        result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.gammainfo",
            "GammaInfo",
            &metaTag);
        if (CamxResultSuccess == result)
        {
            ReadGammaInfo(metaTag | InputMetadataSectionMask);
        }
    }

    pISPData->gammaOutput.isGammaValid = isGammaValid;
    if (NULL != pGammaOutput)
    {
        CAMX_ASSERT(gammaLength == sizeof(pISPData->gammaOutput.gammaG));
        Utils::Memcpy(pISPData->gammaOutput.gammaG,
                      pGammaOutput,
                      gammaLength);
    }
    else
    {
        result = CamxResultEResource;
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Error in getting gamma output slot");
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetADRCInfoOutput
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetADRCInfoOutput()
{
    CamxResult result        = CamxResultSuccess;
    UINT       adrcProperty  = 0;
    BOOL       adrcAvailable = FALSE;

    // ADRC info is published by whichever ISP node precedes IPE in this pipeline;
    // pick the matching property ID once instead of duplicating the read logic.
    if (TRUE == IsNodeInPipeline(IFE))
    {
        adrcProperty  = PropertyIDIFEADRCInfoOutput;
        adrcAvailable = TRUE;
    }
    else if (TRUE == IsNodeInPipeline(BPS))
    {
        adrcProperty  = PropertyIDBPSADRCInfoOutput;
        adrcAvailable = TRUE;
    }

    if (TRUE == adrcAvailable)
    {
        const UINT properties[]               = { adrcProperty };
        const UINT length                     = CAMX_ARRAY_SIZE(properties);
        VOID*      pData[length]              = { 0 };
        UINT64     propertyDataOffset[length] = { 0 };

        GetDataList(properties, pData, propertyDataOffset, length);
        if (NULL != pData[0])
        {
            PropertyISPADRCInfo* pADRCInfo = reinterpret_cast<PropertyISPADRCInfo*>(pData[0]);

            m_adrcInfo.isADRCEnabled   = pADRCInfo->isADRCEnabled;
            m_adrcInfo.percentageOfGTM = pADRCInfo->percentageOfGTM;
        }
    }
    else
    {
        // No producer in the pipeline; m_adrcInfo keeps its previous values.
        CAMX_LOG_VERBOSE(CamxLogGroupISP, "Can't get ADRC Info!!!, Due to No IFE/BPS Node exist");
    }
    CAMX_LOG_INFO(CamxLogGroupISP, "adrcEnabled = %d, percentageOfGTM = %f",
            m_adrcInfo.isADRCEnabled, m_adrcInfo.percentageOfGTM);
    return result;
}


////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillIQSetting
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillIQSetting(
    ISPInputData*            pInputData,
    IpeIQSettings*           pIPEIQsettings,
    PerRequestActivePorts*   pPerRequestPorts)
{
    CamxResult             result       = CamxResultSuccess;
    const ImageFormat*     pImageFormat = NULL;
    UINT32                 clampValue   = 0;

    /// @todo (CAMX-729) Implement IPE IQ modules

    // Program output clamp limits for the display and video ports: scan every
    // output buffer and pick the clamp max from its bit depth (10-bit -> Max10BitValue,
    // otherwise Max8BitValue).
    for (UINT portIndex = 0; portIndex < pPerRequestPorts->numOutputPorts; portIndex++)
    {
        PerRequestOutputPortInfo* pOutputPort = &pPerRequestPorts->pOutputPorts[portIndex];

        CAMX_ASSERT(NULL != pOutputPort);

        for (UINT bufferIndex = 0; bufferIndex < pOutputPort->numOutputBuffers; bufferIndex++)
        {
            if (NULL != pOutputPort->pImageBuffer[bufferIndex])
            {
                pImageFormat = pOutputPort->pImageBuffer[bufferIndex]->GetFormat();

                CAMX_ASSERT(NULL != pImageFormat);

                // Clamp value depends on output format. For 10-bit Max clamp value
                // is 0x3FF and for 8-bit it is 0xFF. OEM can choose lower value at the
                // cost of image quality. Clamp value should be read from chromatix
                /// @todo (CAMX-2276) Temporary workaround until pImageBuffer for HAL buffers fix is in place
                if ((NULL != pImageFormat) && (TRUE == ImageFormatUtils::Is10BitFormat(pImageFormat->format)))
                {
                    clampValue = Max10BitValue;
                }
                else
                {
                    clampValue = Max8BitValue;
                }

                // Luma and chroma share the same [0, clampValue] range on both ports.
                if (IPEOutputPortDisplay == pOutputPort->portId)
                {
                    pIPEIQsettings->displayClampParameters.lumaClamp.clampMin   = 0;
                    pIPEIQsettings->displayClampParameters.lumaClamp.clampMax   = clampValue;
                    pIPEIQsettings->displayClampParameters.chromaClamp.clampMin = 0;
                    pIPEIQsettings->displayClampParameters.chromaClamp.clampMax = clampValue;
                }

                if (IPEOutputPortVideo == pOutputPort->portId)
                {
                    pIPEIQsettings->videoClampParameters.lumaClamp.clampMin   = 0;
                    pIPEIQsettings->videoClampParameters.lumaClamp.clampMax   = clampValue;
                    pIPEIQsettings->videoClampParameters.chromaClamp.clampMin = 0;
                    pIPEIQsettings->videoClampParameters.chromaClamp.clampMax = clampValue;
                }
            }
        }
    }

    // DS parameters: downscaler (DS4-to-1) filter coefficients, taken from the
    // chromatix tuning data when available, per pass (DC16 / DC4 / full).
    TuningDataManager* pTuningManager = pInputData->pTuningDataManager;
    CAMX_ASSERT(NULL != pTuningManager);

    ds4to1_1_0_0::chromatix_ds4to1v10Type* pChromatix = NULL;
    if (TRUE == pTuningManager->IsValidChromatix())
    {
        pChromatix = pTuningManager->GetChromatix()->GetModule_ds4to1v10_ipe(
                        reinterpret_cast<TuningMode*>(&pInputData->pTuningData->TuningMode[0]),
                        pInputData->pTuningData->noOfSelectionParameter);

        CAMX_ASSERT(NULL != pChromatix);
        if (NULL != pChromatix)
        {
            // Indices into the per-pass reserve-data array for each trigger pass.
            INT pass_dc16 = static_cast<INT>(ispglobalelements::trigger_pass::PASS_DC16);
            INT pass_dc4 = static_cast<INT>(ispglobalelements::trigger_pass::PASS_DC4);
            INT pass_full = static_cast<INT>(ispglobalelements::trigger_pass::PASS_FULL);
            ds4to1_1_0_0::chromatix_ds4to1v10_reserveType*  pReserveData = &pChromatix->chromatix_ds4to1v10_reserve;

            pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient7 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_dc16].pass_data.coeff_07;
            pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient16 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_dc16].pass_data.coeff_16;
            pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient25 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_dc16].pass_data.coeff_25;

            pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient7 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_dc4].pass_data.coeff_07;
            pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient16 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_dc4].pass_data.coeff_16;
            pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient25 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_dc4].pass_data.coeff_25;

            pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient7 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_full].pass_data.coeff_07;
            pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient16 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_full].pass_data.coeff_16;
            pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient25 =
                pReserveData->mod_ds4to1v10_pass_reserve_data[pass_full].pass_data.coeff_25;
        }
    }

    // No usable chromatix: fall back to hard-coded default filter coefficients,
    // identical for all three passes.
    if (NULL == pChromatix)
    {
        pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient7 = 125;
        pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient16 = 91;
        pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient25 = 144;

        pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient7 = 125;
        pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient16 = 91;
        pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient25 = 144;

        pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient7 = 125;
        pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient16 = 91;
        pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient25 = 144;
    }

    CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                     "DC16: Coeff07 = %d, Coeff16 = %d, Coeff25 = %d",
                     pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient7,
                     pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient16,
                     pIPEIQsettings->ds4Parameters.dc16Parameters.coefficient25);

    CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                     "DC4: Coeff07 = %d, Coeff16 = %d, Coeff25 = %d",
                     pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient7,
                     pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient16,
                     pIPEIQsettings->ds4Parameters.dc4Parameters.coefficient25);

    CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                     "FULL: Coeff07 = %d, Coeff16 = %d, Coeff25 = %d",
                     pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient7,
                     pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient16,
                     pIPEIQsettings->ds4Parameters.fullpassParameters.coefficient25);

    // Force 8-bit UBWC output alignment for the scale profile, or when the
    // override setting requests it globally.
    if ((m_instanceProperty.profileId == IPEProfileId::IPEProfileIdScale) ||
        (TRUE == GetStaticSettings()->force8BitUBWCAlignment))
    {
        pIPEIQsettings->ica1Parameters.eightBitOutputAlignment = 1;
    }

    //  set ICA's invalidPixelModeInterpolationEnabled to be always 1
    //  especially for the DS64 image from IPE (avoid green lines in its bottom)
    pIPEIQsettings->ica1Parameters.invalidPixelModeInterpolationEnabled = 1;

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillFramePerfParams
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillFramePerfParams(
    IpeFrameProcessData* pFrameProcessData)
{
    CamxResult    result = CamxResultSuccess;

    CAMX_ASSERT(NULL != pFrameProcessData);

    // Honor the core-count override setting, but never exceed the number of IPE
    // cores the hardware actually reports.
    if (GetStaticSettings()->numIPECoresToUse <= m_capability.numIPE)
    {
        pFrameProcessData->maxNumOfCoresToUse = GetStaticSettings()->numIPECoresToUse;
    }
    else
    {
        pFrameProcessData->maxNumOfCoresToUse = m_capability.numIPE;
    }

    // With hang dump enabled, ask firmware for a 2 * 100000000 ns (200 ms)
    // completion target; 0 means no deadline.
    if (TRUE == m_enableIPEHangDump)
    {
        pFrameProcessData->targetCompletionTimeInNs = 2 * 100000000;
    }
    else
    {
        pFrameProcessData->targetCompletionTimeInNs = 0;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillFrameUBWCParams
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillFrameUBWCParams(
    IpeFrameProcessData* pFrameProcessData)
{
    // No UBWC stats buffer is provided for this frame: publish a zero size and
    // a null address in the frame-process payload.
    pFrameProcessData->ubwcStatsBufferSize    = 0;
    pFrameProcessData->ubwcStatsBufferAddress = 0;

    return CamxResultSuccess;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::IsValidDimension
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
BOOL IPENode::IsValidDimension(
    IpeZoomWindow* pZoomWindow)
{
    UINT32 IPEMaxInputWidth       = m_capability.maxInputWidth[ICA_MODE_DISABLED];
    UINT32 IPEMaxInputHeight      = m_capability.maxInputHeight[ICA_MODE_DISABLED];
    UINT32 IPEMinInputWidthLimit  = m_capability.minInputWidth;
    UINT32 IPEMinInputHeightLimit = m_capability.minInputHeight;

    // Reject a zoom window with a negative origin, a dimension at or below the
    // minimum supported input size, or a dimension above the maximum input size.
    // NOTE: return the project's BOOL constants (TRUE/FALSE) for consistency with
    // the rest of this file, rather than the C++ bool literals.
    if (pZoomWindow->windowLeft < 0                           ||
        pZoomWindow->windowTop < 0                            ||
        (pZoomWindow->windowHeight <= IPEMinInputHeightLimit  ||
         pZoomWindow->windowWidth <= IPEMinInputWidthLimit)   ||
        (pZoomWindow->windowHeight > IPEMaxInputHeight)       ||
        (pZoomWindow->windowWidth > IPEMaxInputWidth))
    {
        return FALSE;
    }

    return TRUE;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillFrameZoomWindow
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillFrameZoomWindow(
    ISPInputData*     pInputData,
    UINT              parentNodeId)
{
    CamxResult        result             = CamxResultSuccess;
    IpeZoomWindow*    pZoomWindowICA1    = NULL;
    IpeZoomWindow*    pZoomWindowICA2    = NULL;
    IFECropInfo*      pCropInfo          = NULL;
    IFEScalerOutput*  pIFEScalerOutput   = NULL;
    IFECropInfo       cropInfo;
    int32_t           adjustedWidth      = 0;
    int32_t           adjustedHeight     = 0;
    UINT32            adjustedFullWidth  = 0;
    UINT32            adjustedFullHeight = 0;
    FLOAT             cropFactor         = 1.0f;

    IpeIQSettings*      pIPEIQsettings = reinterpret_cast<IpeIQSettings*>(pInputData->pipelineIPEData.pIPEIQSettings);

    CAMX_ASSERT(NULL != pIPEIQsettings);
    if ((IPEProfileIdPPS == m_instanceProperty.profileId)     ||
        (IPEProfileIdDefault == m_instanceProperty.profileId) ||
        (IPEProfileIdHDR10 == m_instanceProperty.profileId))
    {
        // Default to the full input frame; the branches below overwrite this with
        // the residual crop published by the parent node when it is available.
        cropInfo.fullPath.left   = 0;
        cropInfo.fullPath.top    = 0;
        cropInfo.fullPath.width  = m_fullInputWidth;
        cropInfo.fullPath.height = m_fullInputHeight;

        if (parentNodeId == IFE)
        {
            const UINT props[] =
            {
                PropertyIDIFEDigitalZoom,
                PropertyIDFOVCFrameInfo,
                PropertyIDIFEScaleOutput,
            };
            VOID*      pData[CAMX_ARRAY_SIZE(props)]  = { 0 };
            UINT64     offset[CAMX_ARRAY_SIZE(props)] = { 0 };

            GetDataList(props, pData, offset, CAMX_ARRAY_SIZE(props));
            pCropInfo        = reinterpret_cast<IFECropInfo*>(pData[0]);
            pIFEScalerOutput = reinterpret_cast<IFEScalerOutput*>(pData[2]);

            // Log only once both pointers are known non-NULL; previously this
            // dereferenced pCropInfo/pIFEScalerOutput before the NULL check below.
            if ((NULL != pCropInfo) && (NULL != pIFEScalerOutput))
            {
                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "IFE: crop width=%d, crop height=%d, Scaler output ratio = %f",
                                 pCropInfo->fullPath.width, pCropInfo->fullPath.height, pIFEScalerOutput->scalingFactor);
            }

            if (NULL != pCropInfo)
            {
                cropInfo = *pCropInfo;
            }
            pCropInfo = &cropInfo;

            if ((TRUE == m_FOVCEnabled) && (NULL != pData[1]))
            {
                FOVCOutput* pFOVCInfo = reinterpret_cast<FOVCOutput*>(pData[1]);

                // Only latch a new FOV compensation factor once the pipeline has
                // warmed past its maximum delay, and only if the factor is valid.
                if (pFOVCInfo->fieldOfViewCompensationFactor > 0.0f &&
                    (pInputData->frameNum > GetStaticSettings()->maxPipelineDelay))
                {
                    m_prevFOVC = pFOVCInfo->fieldOfViewCompensationFactor;
                }
            }
        }
        // Don't need to add parent check, because ipe just use this property when parent node is chinode.
        else if (TRUE == m_instanceProperty.enableCHICropInfoPropertyDependency)
        {
            UINT32        metaTag = 0;

            result = VendorTagManager::QueryVendorTagLocation("com.qti.cropregions",
                                                              "ChiNodeResidualCrop",
                                                              &metaTag);
            if (CamxResultSuccess == result)
            {
                static const UINT tagInfo[]          = { metaTag };
                const static UINT length             = CAMX_ARRAY_SIZE(tagInfo);
                VOID*             pData[length]      = { 0 };
                UINT64            dataOffset[length] = { 0 };
                CropWindow*       pCropWindow        = NULL;

                GetDataList(tagInfo, pData, dataOffset, length);

                pCropWindow = reinterpret_cast<CropWindow*>(pData[0]);
                if (NULL != pCropWindow)
                {
                    cropInfo.fullPath.left   = pCropWindow->left;
                    cropInfo.fullPath.top    = pCropWindow->top;
                    cropInfo.fullPath.width  = pCropWindow->width;
                    cropInfo.fullPath.height = pCropWindow->height;
                }
                else
                {
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to ChiNodeResidualCrop data");
                }
            }
            else
            {
                CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to query ChiNodeResidualCrop vendor tag");
            }

            pCropInfo = &cropInfo;
        }
        else if ((parentNodeId == BPS) ||
                 ((TRUE == IsPipelineHasSnapshotJPEGStream() && (FALSE == IsRealTime()))) ||
                 (FALSE == IsRealTime()))
        {
            UINT32        metaTag = 0;
            result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.ref.cropsize",
                                                              "RefCropSize",
                                                              &metaTag);

            if (CamxResultSuccess == result)
            {
                metaTag |= InputMetadataSectionMask;
                CropWindow*        pCropWindow     = NULL;
                RefCropWindowSize* pRefCropWindow  = NULL;
                static const UINT  PropertiesIPE[] =
                {
                    InputScalerCropRegion,
                    metaTag
                };
                static const UINT Length                        = CAMX_ARRAY_SIZE(PropertiesIPE);
                VOID*             pData[Length]                 = { 0 };
                UINT64            propertyDataIPEOffset[Length] = { 0 , 0};

                GetDataList(PropertiesIPE, pData, propertyDataIPEOffset, Length);
                pCropWindow    = (static_cast<CropWindow*>(pData[0]));
                pRefCropWindow = (static_cast<RefCropWindowSize*>(pData[1]));
                // Both pointers are required below, so assert AND guard on both;
                // previously the assert used || and the pointers were dereferenced
                // unconditionally.
                CAMX_ASSERT((NULL != pCropWindow) && (NULL != pRefCropWindow));

                if ((NULL != pCropWindow) && (NULL != pRefCropWindow))
                {
                    if ((0 == pRefCropWindow->width) || (0 == pRefCropWindow->height))
                    {
                        pRefCropWindow->width  = m_fullInputWidth;
                        pRefCropWindow->height = m_fullInputHeight;
                    }
                    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "ZDBG IPE crop Window [%d, %d, %d, %d] full size %dX%d active %dX%d",
                                     pCropWindow->left,
                                     pCropWindow->top,
                                     pCropWindow->width,
                                     pCropWindow->height,
                                     m_fullInputWidth,
                                     m_fullInputHeight,
                                     pRefCropWindow->width,
                                     pRefCropWindow->height);

                    // Rescale the crop window from the reference coordinate space
                    // into this node's full input dimensions.
                    cropInfo.fullPath.left   = (pCropWindow->left * m_fullInputWidth) / pRefCropWindow->width;
                    cropInfo.fullPath.top    = (pCropWindow->top * m_fullInputHeight) / pRefCropWindow->height;
                    cropInfo.fullPath.width  = (pCropWindow->width * m_fullInputWidth) / pRefCropWindow->width;
                    cropInfo.fullPath.height = (pCropWindow->height * m_fullInputHeight) / pRefCropWindow->height;

                    // If the crop is within the downscale threshold band of the full
                    // output, snap it to a centered window of exactly the output size.
                    FLOAT widthRatio  = static_cast<FLOAT>(cropInfo.fullPath.width) / static_cast<FLOAT>(m_fullOutputWidth);
                    FLOAT heightRatio = static_cast<FLOAT>(cropInfo.fullPath.height) / static_cast<FLOAT>(m_fullOutputHeight);
                    if ((widthRatio > IPEDownscaleThresholdMin && widthRatio < IPEDownscaleThresholdMax) &&
                        (heightRatio > IPEDownscaleThresholdMin && heightRatio < IPEDownscaleThresholdMax))
                    {
                        cropInfo.fullPath.left     = (cropInfo.fullPath.width - m_fullOutputWidth) / 2;
                        cropInfo.fullPath.top      = (cropInfo.fullPath.height - m_fullOutputHeight) / 2;
                        cropInfo.fullPath.width    = m_fullOutputWidth;
                        cropInfo.fullPath.height   = m_fullOutputHeight;
                    }

                    // A window extending past the input frame is invalid; fall back
                    // to the full input frame.
                    if (((cropInfo.fullPath.left + cropInfo.fullPath.width) >
                        (static_cast<INT32>(m_fullInputWidth)))                                     ||
                        ((cropInfo.fullPath.top + cropInfo.fullPath.height) >
                        (static_cast<INT32>(m_fullInputHeight))))
                    {
                        CAMX_LOG_ERROR(CamxLogGroupPProc, "ZDBG wrong IPE crop Window [%d, %d, %d, %d] full %dX%d",
                                       cropInfo.fullPath.left,
                                       cropInfo.fullPath.top,
                                       cropInfo.fullPath.width,
                                       cropInfo.fullPath.height,
                                       m_fullInputWidth,
                                       m_fullInputHeight);

                        cropInfo.fullPath.left   = 0;
                        cropInfo.fullPath.top    = 0;
                        cropInfo.fullPath.width  = m_fullInputWidth;
                        cropInfo.fullPath.height = m_fullInputHeight;
                    }
                    pCropInfo                = &cropInfo;
                }
                else
                {
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "NULL crop window %p or ref crop window %p", pCropWindow, pRefCropWindow);
                }
            }
            else
            {
                CAMX_LOG_ERROR(CamxLogGroupPProc, "cannot find vendor tag ref.cropsize");
            }
        }
        else
        {
            // Check IFE residual CROP from the result metadata (RT pipeline)
            static const UINT PropertiesIPE[] =
            {
                PropertyIDIFEDigitalZoom,
                PropertyIDIFEScaleOutput,
            };
            static const UINT Length                        = CAMX_ARRAY_SIZE(PropertiesIPE);
            VOID*             pData[Length]                 = { 0 };
            UINT64            propertyDataIPEOffset[Length] = { 0 };

            GetDataList(PropertiesIPE, pData, propertyDataIPEOffset, Length);
            pCropInfo        = reinterpret_cast<IFECropInfo*>(pData[0]);
            pIFEScalerOutput = reinterpret_cast<IFEScalerOutput*>(pData[1]);

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "pCropInfo from RT pipeline %p %p mask 0x%x 0x%x",
                             pCropInfo, pIFEScalerOutput, PropertyIDIFEDigitalZoom, PropertyIDIFEScaleOutput);

            if (pCropInfo == NULL || pIFEScalerOutput == NULL)
            {
                // Query from Input data(Offline Pipeline)
                static const UINT PropertiesOfflineIPE[] =
                {
                    PropertyIDIFEDigitalZoom | InputMetadataSectionMask,
                    PropertyIDIFEScaleOutput | InputMetadataSectionMask
                };

                GetDataList(PropertiesOfflineIPE, pData, propertyDataIPEOffset, Length);
                pCropInfo        = reinterpret_cast<IFECropInfo*>(pData[0]);
                pIFEScalerOutput = reinterpret_cast<IFEScalerOutput*>(pData[1]);

                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "pCropInfo from Offline pipeline %p %p mask 0x%x, 0x%x",
                    pCropInfo, pIFEScalerOutput,
                    PropertyIDIFEDigitalZoom | InputMetadataSectionMask,
                    PropertyIDIFEScaleOutput | InputMetadataSectionMask);
            }

            CAMX_ASSERT(NULL != pCropInfo);
            CAMX_ASSERT(NULL != pIFEScalerOutput);
            // Guard the copy; previously *pCropInfo was dereferenced unconditionally
            // here even though all the code below handles a NULL pCropInfo.
            if (NULL != pCropInfo)
            {
                cropInfo  = *pCropInfo;
                pCropInfo = &cropInfo;
            }
        }

        // Fill in ICA1 Zoom Params
        pZoomWindowICA1    = &pIPEIQsettings->ica1Parameters.zoomWindow;
        adjustedFullWidth  = m_fullInputWidth;
        adjustedFullHeight = m_fullInputHeight;

        //< Update Crop info if EIS is enabled. firmware expects crop window wrt window after margin crop (ICA1 output)
        if ((NULL != pCropInfo) &&
            ((0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType)) ||
            (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType))))
        {
            adjustedFullWidth          = m_fullInputWidth - m_stabilizationMargin.widthPixels;
            adjustedFullHeight         = m_fullInputHeight - m_stabilizationMargin.heightLines;
            cropFactor                 = static_cast<FLOAT>(pCropInfo->fullPath.height) / m_fullInputHeight;
            FLOAT cropFactorOffsetLeft = static_cast<FLOAT>(pCropInfo->fullPath.left) / m_fullInputWidth;
            FLOAT cropFactorOffsetTop  = static_cast<FLOAT>(pCropInfo->fullPath.top) / m_fullInputHeight;

            pCropInfo->fullPath.width  = adjustedFullWidth  * cropFactor;
            pCropInfo->fullPath.height = adjustedFullHeight * cropFactor;
            pCropInfo->fullPath.left   = adjustedFullWidth  * cropFactorOffsetLeft;
            pCropInfo->fullPath.top    = adjustedFullHeight * cropFactorOffsetTop;

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "ZDBG: After IPE ICA1 Zoom Window [%d, %d, %d, %d] full %d x %d "
                             "crop_factor %f, leftOffsetCropFactor %f, topOffsetCropFactor %f, Stabilization type %d",
                             pCropInfo->fullPath.left, pCropInfo->fullPath.top,
                             pCropInfo->fullPath.width, pCropInfo->fullPath.height,
                             adjustedFullWidth, adjustedFullHeight, cropFactor,
                             cropFactorOffsetLeft, cropFactorOffsetTop,
                             m_instanceProperty.stabilizationType);
        }

        // Apply fixed FOV correction requested by stats
        if ((TRUE == m_FOVCEnabled) && (m_prevFOVC > 0.0f) && (BPS != parentNodeId) && (pCropInfo != NULL))
        {
            cropFactor = m_prevFOVC;

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "FFOV:crop factor %f", cropFactor);

            adjustedWidth  = pCropInfo->fullPath.width;
            adjustedHeight = pCropInfo->fullPath.height;

            // Calculate total change in width or height
            adjustedWidth  -= static_cast<int32_t>(adjustedWidth * (1 - cropFactor));
            adjustedHeight -= static_cast<int32_t>(adjustedHeight * (1 - cropFactor));

            // (change in height)/2 is change in top, (change in width)/2 is change in left
            pCropInfo->fullPath.left   += (adjustedWidth / 2);
            pCropInfo->fullPath.top    += (adjustedHeight / 2);
            pCropInfo->fullPath.width  -= adjustedWidth;
            pCropInfo->fullPath.height -= adjustedHeight;

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "ZDBG: After IPE ICA1 Zoom Window [%d, %d, %d, %d] full %d x %d "
                             "FOVC crop_factor %f",
                             pCropInfo->fullPath.left, pCropInfo->fullPath.top,
                             pCropInfo->fullPath.width, pCropInfo->fullPath.height,
                             adjustedFullWidth, adjustedFullHeight, cropFactor);
        }

        // Populate the ICA1 zoom window from the current crop, or from the previous
        // frame's crop if none was obtained this frame.
        if (NULL != pCropInfo)
        {
            pZoomWindowICA1->windowTop    = pCropInfo->fullPath.top;
            pZoomWindowICA1->windowLeft   = pCropInfo->fullPath.left;
            pZoomWindowICA1->windowWidth  = pCropInfo->fullPath.width;
            pZoomWindowICA1->windowHeight = pCropInfo->fullPath.height;
            pZoomWindowICA1->fullWidth    = adjustedFullWidth;
            pZoomWindowICA1->fullHeight   = adjustedFullHeight;
        }
        else if ((0 != m_previousCropInfo.fullPath.width) && (0 != m_previousCropInfo.fullPath.height))
        {
            pZoomWindowICA1->windowTop    = m_previousCropInfo.fullPath.top;
            pZoomWindowICA1->windowLeft   = m_previousCropInfo.fullPath.left;
            pZoomWindowICA1->windowWidth  = m_previousCropInfo.fullPath.width;
            pZoomWindowICA1->windowHeight = m_previousCropInfo.fullPath.height;
            pZoomWindowICA1->fullWidth    = adjustedFullWidth;
            pZoomWindowICA1->fullHeight   = adjustedFullHeight;
        }

        /// @todo (CAMX-2313) Enable ifeZoomWIndow param in IPE IQSettings
        // Fill in ICA2 Zoom Params
        // ICA2 Zoom Window is needed for reference frames. Hence this is populated from previous crop value which is stored
        if ((0 != m_previousCropInfo.fullPath.width) && (0 != m_previousCropInfo.fullPath.height))
        {
            pZoomWindowICA2 = &pIPEIQsettings->ica2Parameters.zoomWindow;
            pZoomWindowICA2->windowTop    = m_previousCropInfo.fullPath.top;
            pZoomWindowICA2->windowLeft   = m_previousCropInfo.fullPath.left;
            pZoomWindowICA2->windowWidth  = m_previousCropInfo.fullPath.width;
            pZoomWindowICA2->windowHeight = m_previousCropInfo.fullPath.height;
            pZoomWindowICA2->fullWidth    = adjustedFullWidth;
            pZoomWindowICA2->fullHeight   = adjustedFullHeight;
            /// @todo (CAMX-2313) Enable ifeZoomWIndow param in IPE IQSettings
        }

        // Save crop info for next frame reference crop information for ICA2
        if (NULL != pCropInfo)
        {
            Utils::Memcpy(&m_previousCropInfo, pCropInfo, sizeof(IFECropInfo));
        }
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "ZDBG: IPE ICA1 Zoom Window [%d, %d, %d, %d] full %d x %d",
            pIPEIQsettings->ica1Parameters.zoomWindow.windowLeft,
            pIPEIQsettings->ica1Parameters.zoomWindow.windowTop,
            pIPEIQsettings->ica1Parameters.zoomWindow.windowWidth,
            pIPEIQsettings->ica1Parameters.zoomWindow.windowHeight,
            pIPEIQsettings->ica1Parameters.zoomWindow.fullWidth,
            pIPEIQsettings->ica1Parameters.zoomWindow.fullHeight);
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "ZDBG: IPE ICA1 IFE Zoom Window [%d, %d, %d, %d] full %d x %d",
            pIPEIQsettings->ica1Parameters.ifeZoomWindow.windowLeft,
            pIPEIQsettings->ica1Parameters.ifeZoomWindow.windowTop,
            pIPEIQsettings->ica1Parameters.ifeZoomWindow.windowWidth,
            pIPEIQsettings->ica1Parameters.ifeZoomWindow.windowHeight,
            pIPEIQsettings->ica1Parameters.ifeZoomWindow.fullWidth,
            pIPEIQsettings->ica1Parameters.ifeZoomWindow.fullHeight);
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "ZDBG: IPE ICA2 Zoom Window [%d, %d, %d, %d] full %d x %d",
            pIPEIQsettings->ica2Parameters.zoomWindow.windowLeft,
            pIPEIQsettings->ica2Parameters.zoomWindow.windowTop,
            pIPEIQsettings->ica2Parameters.zoomWindow.windowWidth,
            pIPEIQsettings->ica2Parameters.zoomWindow.windowHeight,
            pIPEIQsettings->ica2Parameters.zoomWindow.fullWidth,
            pIPEIQsettings->ica2Parameters.zoomWindow.fullHeight);
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "ZDBG: IPE ICA2 IFE Zoom Window [%d, %d, %d, %d] full %d x %d",
            pIPEIQsettings->ica2Parameters.ifeZoomWindow.windowLeft,
            pIPEIQsettings->ica2Parameters.ifeZoomWindow.windowTop,
            pIPEIQsettings->ica2Parameters.ifeZoomWindow.windowWidth,
            pIPEIQsettings->ica2Parameters.ifeZoomWindow.windowHeight,
            pIPEIQsettings->ica2Parameters.ifeZoomWindow.fullWidth,
            pIPEIQsettings->ica2Parameters.ifeZoomWindow.fullHeight);

        if (FALSE == SetScaleRatios(pInputData, parentNodeId, pCropInfo, pIFEScalerOutput))
        {
            CAMX_LOG_WARN(CamxLogGroupPProc, "Cannot Set Scale Ratios! Use default 1.0f");
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillFrameBufferData
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillFrameBufferData(
    CmdBuffer*      pMainCmdBuffer,
    ImageBuffer*    pImageBuffer,
    UINT32          payloadBatchFrameIdx,
    UINT32          bufferBatchFrameIdx,
    UINT32          portId)
{
    CamxResult   result       = CamxResultSuccess;
    SIZE_T       planeOffset  = 0;
    SIZE_T       metadataSize = 0;
    CSLMemHandle hMem;
    const UINT32 numPlanes    = pImageBuffer->GetNumberOfPlanes();

    // Patch the smmu address of every plane of this frame into the firmware payload
    for (UINT32 plane = 0; plane < numPlanes; plane++)
    {
        // Super buffers (multiple batched frames packed into one buffer, e.g. IFE output) need the
        // offset of the requested frame within the buffer. For IPE video port, batch mode is enabled
        // on the link but these are individual HAL buffers, so frame 0 of the buffer is always used.
        const UINT32 frameInBuffer = (1 < pImageBuffer->GetNumFramesInBatch()) ? bufferBatchFrameIdx : 0;

        pImageBuffer->GetPlaneCSLMemHandle(frameInBuffer, plane, &hMem, &planeOffset, &metadataSize);

        const ImageFormat* pFormat = pImageBuffer->GetFormat();

        ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
        //  UBWC memory layout: each plane pair consists of a meta data region followed by its pixel  //
        //  data (Y meta, Y pixel, UV meta, UV pixel; meta plane starts are 4K aligned). The pixel    //
        //  data address is therefore (planeOffset + metadataSize).                                   //
        //  For linear formats metadataSize should be 0, so the same sum works for both layouts.      //
        ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
        if (TRUE == ImageFormatUtils::IsUBWC(pFormat->format))
        {
            // Patch the meta data plane address first
            result = pMainCmdBuffer->AddNestedBufferInfo(
                s_frameBufferOffset[payloadBatchFrameIdx][portId].metadataBufferPtr[plane],
                hMem,
                static_cast<UINT32>(planeOffset));

            if (CamxResultSuccess != result)
            {
                CAMX_LOG_ERROR(CamxLogGroupPProc, "Error in patching address portId %d plane %d", portId, plane);
                break;
            }
        }

        // Patch the pixel data plane address (meta data size is added to skip the meta region)
        result = pMainCmdBuffer->AddNestedBufferInfo(
            s_frameBufferOffset[payloadBatchFrameIdx][portId].bufferPtr[plane],
            hMem,
            (static_cast<UINT32>(planeOffset) + static_cast<UINT32>(metadataSize)));

        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Error in patching address portId %d plane %d", portId, plane);
            break;
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillInputFrameSetData
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillInputFrameSetData(
    CmdBuffer*      pFrameProcessCmdBuffer,
    UINT            portId,
    ImageBuffer*    pImageBuffer,
    UINT32          numFramesInBuffer)
{
    CamxResult result = CamxResultSuccess;

    // For input ports the payload frame index and the buffer frame index are the same;
    // stop at the first frame that fails to patch.
    for (UINT32 frame = 0; (frame < numFramesInBuffer) && (CamxResultSuccess == result); frame++)
    {
        result = FillFrameBufferData(pFrameProcessCmdBuffer,
                                     pImageBuffer,
                                     frame,
                                     frame,
                                     portId);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillInputFrameSetDataForBatchReferencePorts
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillInputFrameSetDataForBatchReferencePorts(
    CmdBuffer*      pFrameProcessCmdBuffer,
    UINT            portId,
    ImageBuffer*    pImageBufferPrevious,
    ImageBuffer*    pImageBufferCurrent,
    UINT32          numFramesInBuffer)
{
    CamxResult result = CamxResultSuccess;

    // Guard against unsigned underflow below: (numFramesInBuffer - 1) would wrap to 0xFFFFFFFF
    // for an empty batch and index far outside the buffer.
    if (0 == numFramesInBuffer)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Invalid numFramesInBuffer 0 for portId %d", portId);
        result = CamxResultEInvalidArg;
    }

    if (CamxResultSuccess == result)
    {
        // First frame in batch has reference buffer from the last frame in output of previous request
        result = FillFrameBufferData(pFrameProcessCmdBuffer,
                                     pImageBufferPrevious,
                                     0,
                                     numFramesInBuffer - 1,
                                     portId);
        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Failure!");
        }
    }

    if (CamxResultSuccess == result)
    {
        // Remaining frames reference the previous frame of the current request's buffer
        for (UINT32 batchedFrameIndex = 1; batchedFrameIndex < numFramesInBuffer; batchedFrameIndex++)
        {
            result = FillFrameBufferData(pFrameProcessCmdBuffer,
                                         pImageBufferCurrent,
                                         batchedFrameIndex,
                                         batchedFrameIndex - 1,
                                         portId);
            if (CamxResultSuccess != result)
            {
                break;
            }
        }
    }

    return result;
}


////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillOutputFrameSetData
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillOutputFrameSetData(
    CmdBuffer*              pFrameProcessCmdBuffer,
    UINT                    portId,
    ImageBuffer*            pImageBuffer,
    UINT32                  batchedFrameIndex)
{
    // Output ports use the same index for the payload slot and the frame within the buffer
    return FillFrameBufferData(pFrameProcessCmdBuffer,
                               pImageBuffer,
                               batchedFrameIndex,
                               batchedFrameIndex,
                               portId);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::PostPipelineCreate
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Creates the IQ modules, all command buffer managers, acquires the ICP device and allocates
// the firmware scratch buffers for this IPE node instance. Called once after pipeline creation.
CamxResult IPENode::PostPipelineCreate()
{
    CamxResult      result   = CamxResultSuccess;
    // Default scratch buffer flags; replaced with protected flags below for secure sessions
    UINT32          memFlags = (CSLMemFlagUMDAccess | CSLMemFlagSharedAccess | CSLMemFlagHw);
    ResourceParams  params   = { 0 };

    // Assemble IPE IQ Modules
    result = CreateIPEIQModules();

    // Start with the default tuning mode selection; per-request tuning data is applied later
    m_tuningData.noOfSelectionParameter = 1;
    m_tuningData.TuningMode[0].mode     = ChiModeType::Default;

    // MFNR-style processing uses a fixed blob count; otherwise one blob per queued request.
    // NOTE(review): this compares the NodePropertyProcessingType property ID against
    // processingType's value — confirm that is the intended condition.
    if (NodePropertyProcessingType == m_instanceProperty.processingType)
    {
        m_IPECmdBlobCount = IPEMFNRCmdBlobCount;
    }
    else
    {
        m_IPECmdBlobCount = GetPipeline()->GetRequestQueueDepth();
    }
    CAMX_LOG_INFO(CamxLogGroupPProc, "IPE Instance ID %d numbufs %d", InstanceID(), m_IPECmdBlobCount);

    if (CamxResultSuccess == result)
    {
        // Command buffer sizes depend on the IQ modules created above
        UpdateIQCmdSize();
        result = InitializeCmdBufferManagerList(IPECmdBufferMaxIds);
    }

    if (CamxResultSuccess == result)
    {
        params.usageFlags.packet               = 1;
        // 1 Command Buffer for all the IQ Modules
        // 2 KMD command buffer
        params.packetParams.maxNumCmdBuffers   = CSLIPECmdBufferMaxIds;

        // 8 Input and 6 Outputs
        params.packetParams.maxNumIOConfigs    = IPEMaxInput + IPEMaxOutput;
        params.packetParams.enableAddrPatching = 1;
        params.packetParams.maxNumPatches      = IPEMaxPatchAddress;
        params.resourceSize                    = Packet::CalculatePacketSize(&params.packetParams);
        params.memFlags                        = CSLMemFlagKMDAccess | CSLMemFlagUMDAccess;
        params.pDeviceIndices                  = &m_deviceIndex;
        params.numDevices                      = 1;

        // Same number as cmd buffers
        params.poolSize                        = m_IPECmdBlobCount * params.resourceSize;
        params.alignment                       = CamxPacketAlignmentInBytes;

        result = CreateCmdBufferManager(&params, &m_pIQPacketManager);
        if (CamxResultSuccess == result)
        {
            // Top-level firmware frame-process command buffer
            FillCmdBufferParams(&params,
                                CmdBufferFrameProcessSizeBytes,
                                CmdType::FW,
                                CSLMemFlagUMDAccess,
                                IPEMaxTopCmdBufferPatchAddress,
                                &m_deviceIndex,
                                m_IPECmdBlobCount);

            /// @todo (CAMX-731) Need to create multiple command buffer managers
            result = CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[CmdBufferFrameProcess]);
            if (CamxResultSuccess == result)
            {
                // Generic blob buffer (UMD+KMD visible)
                FillCmdBufferParams(&params,
                                    CmdBufferGenericBlobSizeInBytes,
                                    CmdType::Generic,
                                    CSLMemFlagUMDAccess | CSLMemFlagKMDAccess,
                                    0,
                                    NULL,
                                    m_IPECmdBlobCount);

                result = CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[CmdBufferGenericBlob]);
            }

            if (CamxResultSuccess == result)
            {
                // Firmware IQ settings structure, one per blob
                FillCmdBufferParams(&params,
                                    sizeof(IpeIQSettings),
                                    CmdType::FW,
                                    CSLMemFlagUMDAccess,
                                    0,
                                    &m_deviceIndex,
                                    m_IPECmdBlobCount);

                result = CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[CmdBufferIQSettings]);
            }

            // The CDM buffers below are only created when the IQ modules reported a non-zero size
            if ((CamxResultSuccess == result) && (m_maxCmdBufferSizeBytes[CmdBufferPreLTM] > 0))
            {
                FillCmdBufferParams(&params,
                                    m_maxCmdBufferSizeBytes[CmdBufferPreLTM],
                                    CmdType::CDMDirect,
                                    CSLMemFlagUMDAccess,
                                    IPEMaxPreLTMPatchAddress,
                                    &m_deviceIndex,
                                    m_IPECmdBlobCount);

                /// @todo (CAMX-731) For IQ modules prepare groups based on common trigger
                result = CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[CmdBufferPreLTM]);
            }

            if ((CamxResultSuccess == result) && (m_maxCmdBufferSizeBytes[CmdBufferPostLTM] > 0))
            {
                FillCmdBufferParams(&params,
                                    m_maxCmdBufferSizeBytes[CmdBufferPostLTM],
                                    CmdType::CDMDirect,
                                    CSLMemFlagUMDAccess,
                                    IPEMaxPostLTMPatchAddress,
                                    &m_deviceIndex,
                                    m_IPECmdBlobCount);

                /// @todo (CAMX-731) For IQ modules prepare groups based on common trigger
                result = CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[CmdBufferPostLTM]);
            }

            if ((CamxResultSuccess == result) && (m_maxCmdBufferSizeBytes[CmdBufferDMIHeader] > 0))
            {
                FillCmdBufferParams(&params,
                                    m_maxCmdBufferSizeBytes[CmdBufferDMIHeader],
                                    CmdType::CDMDMI,
                                    CSLMemFlagUMDAccess,
                                    IPEMaxDMIPatchAddress,
                                    &m_deviceIndex,
                                    m_IPECmdBlobCount);

                result = CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[CmdBufferDMIHeader]);
            }

            if ((CamxResultSuccess == result) && (m_maxCmdBufferSizeBytes[CmdBufferNPS] > 0))
            {
                FillCmdBufferParams(&params,
                                    m_maxCmdBufferSizeBytes[CmdBufferNPS],
                                    CmdType::FW,
                                    CSLMemFlagUMDAccess,
                                    IPEMaxNPSPatchAddress,
                                    &m_deviceIndex,
                                    m_IPECmdBlobCount);

                result = CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[CmdBufferNPS]);
            }
        }
        else
        {
            CAMX_ASSERT_ALWAYS_MESSAGE("%s: Failed to Creat Cmd Buffer Manager", __FUNCTION__);
        }
    }

    // Release anything partially created before continuing
    if (CamxResultSuccess != result)
    {
        Cleanup();
    }

    if (CamxResultSuccess == result)
    {
        // Save required static metadata
        GetStaticMetadata();
    }

    if (CamxResultSuccess == result)
    {
        /// @todo (CAMX-738) Find input port dimensions/format from metadata / use case pool and do acquire.
        result = AcquireDevice();
    }

    if (CamxResultSuccess == result)
    {
        /// @todo (CAMX-732) Get Scratch buffer from topology from loopback port
        m_numScratchBuffers = MaxScratchBuffer;
        for (UINT count = 0; count < m_numScratchBuffers; count++)
        {
            /// @todo (CAMX-886) Add CSLMemFlagSharedAccess once available from memory team
            m_pScratchMemoryBuffer[count] = static_cast<CSLBufferInfo*>(CAMX_CALLOC(sizeof(CSLBufferInfo)));

            if (NULL != m_pScratchMemoryBuffer[count])
            {
                // Secure sessions must use protected scratch memory
                if (TRUE == IsSecureMode())
                {
                    memFlags = (CSLMemFlagProtected | CSLMemFlagHw);
                }

                result = CSLAlloc(NameAndInstanceId(),
                                  m_pScratchMemoryBuffer[count],
                                  m_firmwareScratchMemSize,
                                  CamxCommandBufferAlignmentInBytes,
                                  memFlags,
                                  &DeviceIndices()[0],
                                  1);
                if (CamxResultSuccess == result)
                {
                    CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                                     "CSLAlloc returned m_pScratchMemoryBuffer[%d].fd=%d",
                                     count,
                                     m_pScratchMemoryBuffer[count]->fd);
                }
                else
                {
                    CAMX_ASSERT_ALWAYS_MESSAGE("%s: Failed to Alloc scratch", __FUNCTION__);
                    break;
                }
                CAMX_ASSERT(CSLInvalidHandle != m_pScratchMemoryBuffer[count]->hHandle);
                CAMX_ASSERT(NULL != m_pScratchMemoryBuffer[count]->pVirtualAddr);
            }
            else
            {
                // NOTE(review): on CALLOC failure the loop continues and a later successful
                // CSLAlloc overwrites result, losing this ENoMemory error — confirm intended.
                result = CamxResultENoMemory;
            }
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::CheckDimensionRequirementsForIPEDownscaler
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
BOOL IPENode::CheckDimensionRequirementsForIPEDownscaler(
    UINT32 width,
    UINT32 height,
    UINT   downScalarMode)
{
    BOOL    result          = TRUE;
    UINT32  referenceWidth  = 0;
    UINT32  referenceHeight = 0;
    FLOAT   aspectRatio     = static_cast<FLOAT>(width) / height;

    // The downscalar mode, when set, overrides the aspect-ratio based reference dimensions
    if (IPEMidDownScalarMode == downScalarMode)
    {
        referenceWidth  = IPEMidDownScalarWidth;
        referenceHeight = IPEMidDownScalarHeight;
    }
    else if (IPECustomDownScalarMode == downScalarMode)
    {
        referenceWidth  = m_instanceProperty.ipeDownscalerWidth;
        referenceHeight = m_instanceProperty.ipeDownscalerHeight;
    }
    else if (aspectRatio <= 1.0f)
    {
        referenceWidth  = 1440;
        referenceHeight = 1440;
    }
    else if (aspectRatio <= 4.0f / 3.0f)
    {
        referenceWidth  = 1920;
        referenceHeight = 1440;
    }
    else if (aspectRatio <= 16.0f / 9.0f)
    {
        referenceWidth  = 1920;
        referenceHeight = 1080;
    }
    else
    {
        // aspectRatio > 16:9
        referenceWidth  = 1920;
        referenceHeight = 1440;
    }

    // Dimensions at or above the reference do not qualify for the IPE-only downscaler
    if ((width >= referenceWidth) && (height >= referenceHeight))
    {
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Streams dim %dx%d bigger than ref dims %dx%d",
            width, height, referenceWidth, referenceHeight);
        result = FALSE;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::IsStandardAspectRatio
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
BOOL IPENode::IsStandardAspectRatio(
    FLOAT aspectRatio)
{
    // Use a reduced precision for comparing aspect ratios as updating dimensions should not be very
    // sensitive to small differences in aspect ratios
    const FLOAT standardRatios[] =
    {
        1.0f,
        4.0f / 3.0f,
        16.0f / 9.0f,
        18.5f / 9.0f,
        18.0f / 9.0f,
    };

    BOOL result = FALSE;

    for (UINT i = 0; i < (sizeof(standardRatios) / sizeof(standardRatios[0])); i++)
    {
        if (TRUE == Utils::FEqualCoarse(aspectRatio, standardRatios[i]))
        {
            result = TRUE;
            break;
        }
    }

    return result;
}


////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetIPEDownscalerOnlyDimensions
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Computes the maximum output dimensions for the IPE-only downscaler based on the input
// aspect-ratio bucket (or the configured downscalar mode), clamped to the scaling limit.
// @param width           Input width in pixels
// @param height          Input height in pixels
// @param pMaxWidth       Out: selected maximum downscaler width
// @param pMaxHeight      Out: selected maximum downscaler height
// @param downscaleLimit  Maximum allowed (maxDim / inputDim) ratio
// @param downScalarMode  IPEMidDownScalarMode / IPECustomDownScalarMode override, else aspect-based
VOID IPENode::GetIPEDownscalerOnlyDimensions(
    UINT32  width,
    UINT32  height,
    UINT32* pMaxWidth,
    UINT32* pMaxHeight,
    FLOAT   downscaleLimit,
    UINT    downScalarMode)
{
    FLOAT heightRatio   = 0.0f;
    FLOAT widthRatio    = 0.0f;
    FLOAT aspectRatio   = static_cast<FLOAT>(width) / height;

    if (aspectRatio <= 1.0f)
    {
        *pMaxWidth  = 1440;
        *pMaxHeight = 1440;
    }
    else if ((aspectRatio > 1.0f) && (aspectRatio <= 4.0f/3.0f))
    {
        *pMaxWidth  = 1920;
        *pMaxHeight = 1440;
    }
    else if ((aspectRatio > 4.0f/3.0f) && (aspectRatio <= 16.0f/9.0f))
    {
        *pMaxWidth  = 1920;
        *pMaxHeight = 1080;
    }
    else
    {   // (aspectRatio > 16.0f/9.0f)
        *pMaxWidth  = 1920;
        *pMaxHeight = 1440;
    }

    // Mode override takes precedence over the aspect-ratio based selection above
    if (IPEMidDownScalarMode == downScalarMode)
    {
        *pMaxWidth  = IPEMidDownScalarWidth;
        *pMaxHeight = IPEMidDownScalarHeight;
    }
    else if (IPECustomDownScalarMode == downScalarMode)
    {
        *pMaxWidth  = m_instanceProperty.ipeDownscalerWidth;
        *pMaxHeight = m_instanceProperty.ipeDownscalerHeight;
    }

    // Bug fix: convert the numeric values with static_cast. The previous code used
    // *reinterpret_cast<FLOAT*>(pMaxWidth/pMaxHeight), which reads the UINT32 bit pattern
    // as a float (undefined behavior) and produces garbage ratios.
    widthRatio  = static_cast<FLOAT>(*pMaxWidth) / width;
    heightRatio = static_cast<FLOAT>(*pMaxHeight) / height;

    if ((widthRatio > downscaleLimit) ||
        (heightRatio > downscaleLimit))
    {
        // Clamp to the limit; note downscaleLimit is truncated to an integer multiplier here —
        // presumably acceptable since the limit is an integral factor; TODO confirm
        *pMaxWidth  = width * static_cast<UINT32>(downscaleLimit);
        *pMaxHeight = height * static_cast<UINT32>(downscaleLimit);
    }

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "IPE Downscaler resolution selected: %d X %d", *pMaxWidth, *pMaxHeight);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::IsIPEOnlyDownscalerEnabled
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Returns TRUE if the IPE-only downscaler should be enabled for this instance, i.e. the feature
// is requested (instance property or static setting) and the notified port dimensions qualify.
BOOL IPENode::IsIPEOnlyDownscalerEnabled(
    BufferNegotiationData* pBufferNegotiationData)
{
    BOOL    isDimensionRequirementValid = FALSE;

    // Feature is requested either per-instance or globally via the static settings
    if ((0 != m_instanceProperty.ipeOnlyDownscalerMode) ||
        (TRUE == GetStaticSettings()->enableIPEOnlyDownscale))
    {
        for (UINT index = 0; index < pBufferNegotiationData->numOutputPortsNotified; index++)
        {
            OutputPortNegotiationData* pOutputPortNegotiationData = &pBufferNegotiationData->pOutputPortNegotiationData[index];
            for (UINT inputIndex = 0; inputIndex < pOutputPortNegotiationData->numInputPortsNotification; inputIndex++)
            {
                BufferRequirement* pInputPortRequirement = &pOutputPortNegotiationData->inputPortRequirement[inputIndex];
                // NOTE(review): the flag is overwritten on every iteration, so only the LAST
                // notified input port's dimension check decides the return value — confirm this
                // is intended rather than an AND/OR across all ports.
                isDimensionRequirementValid =
                    CheckDimensionRequirementsForIPEDownscaler(pInputPortRequirement->optimalWidth,
                                                               pInputPortRequirement->optimalHeight,
                                                               m_instanceProperty.ipeOnlyDownscalerMode);
            }
        }
    }
    else
    {
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "IPE Downscaler only not enabled");
    }

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "IPE Downscaler enabled: %d", isDimensionRequirementValid);

    return isDimensionRequirementValid;
}


////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::ProcessingNodeFinalizeInputRequirement
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::ProcessingNodeFinalizeInputRequirement(
    BufferNegotiationData* pBufferNegotiationData)
{
    CAMX_ASSERT(NULL != pBufferNegotiationData);

    CamxResult          result                         = CamxResultSuccess;
    UINT32              optimalInputWidth              = 0;
    UINT32              optimalInputHeight             = 0;
    UINT32              maxInputWidth                  = 0xffff;
    UINT32              maxInputHeight                 = 0xffff;
    UINT32              minInputWidth                  = 0;
    UINT32              minInputHeight                 = 0;
    UINT32              perOutputPortOptimalWidth      = 0;
    UINT32              perOutputPortOptimalHeight     = 0;
    UINT32              perOutputPortMinWidth          = 0;
    UINT32              perOutputPortMinHeight         = 0;
    UINT32              perOutputPortMaxWidth          = 0xffff;
    UINT32              perOutputPortMaxHeight         = 0xffff;
    FLOAT               perOutputPortAspectRatio       = 0.0f;
    FLOAT               inputAspectRatio               = 0.0f;
    FLOAT               optimalAspectRatio             = 0.0f;
    const ImageFormat*  pFormat                        = NULL;
    FLOAT               upscaleLimit                   = 1.0;
    FLOAT               downscaleLimit                 = 1.0;
    UINT32              IPEMaxInputWidth               = 0;
    UINT32              IPEMaxInputHeight              = 0;
    UINT32              IPEMinInputWidthLimit          = 0;
    UINT32              IPEMinInputHeightLimit         = 0;
    BOOL                isIPEDownscalerEnabled         = FALSE;
    const FLOAT         FFOV_PER                       = 0.06f;
    AlignmentInfo       alignmentLCM[FormatsMaxPlanes] = { {0} };

    CAMX_ASSERT(NULL != pBufferNegotiationData);

    pFormat = static_cast<const ImageFormat *>
        (&pBufferNegotiationData->pOutputPortNegotiationData->pFinalOutputBufferProperties->imageFormat);

    upscaleLimit   = m_capability.maxUpscale[ImageFormatUtils::IsUBWC(pFormat->format)];
    downscaleLimit = m_capability.maxDownscale[ImageFormatUtils::IsUBWC(pFormat->format)];

   // Update IPE IO capability info based on ports enabled
    result = UpdateIPEIOLimits(pBufferNegotiationData);
    if (result != CamxResultSuccess)
    {
        CAMX_LOG_INFO(CamxLogGroupPProc, "Unable to update the capability");
    }

    /// @todo (CAMX-2013) Read ICA if enabled and take respective IO limits
    IPEMaxInputWidth       = m_capability.maxInputWidth[ICA_MODE_DISABLED];
    IPEMaxInputHeight      = m_capability.maxInputHeight[ICA_MODE_DISABLED];
    IPEMinInputWidthLimit  = m_capability.minInputWidth;
    IPEMinInputHeightLimit = m_capability.minInputHeight;

    // The IPE node will have to loop through all the output ports which are connected to a child node or a HAL target.
    // The input buffer requirement will be the super resolution after looping through all the output ports.
    // The super resolution may have different aspect ratio compared to the original output port aspect ratio, but
    // this will be taken care of by the crop hardware associated with the output port.
    UINT isUBWCFormat = 0;
    for (UINT index = 0; index < pBufferNegotiationData->numOutputPortsNotified; index++)
    {
        OutputPortNegotiationData* pOutputPortNegotiationData = &pBufferNegotiationData->pOutputPortNegotiationData[index];
        UINT                       outputPortId = GetOutputPortId(pOutputPortNegotiationData->outputPortIndex);

        if (((IPEProcessingType::IPEProcessingTypeDefault != m_instanceProperty.processingType)  &&
            (IPEProcessingType::IPEProcessingPreview != m_instanceProperty.processingType)) ||
            (FALSE == IsReferenceOutputPort(outputPortId)))
        {
            perOutputPortOptimalWidth = 0;
            perOutputPortOptimalHeight = 0;
            perOutputPortMinWidth = 0;
            perOutputPortMinHeight = 0;
            perOutputPortMaxWidth = 0xffff;
            perOutputPortMaxHeight = 0xffff;
            perOutputPortAspectRatio = 0.0f;

            pFormat = static_cast<const ImageFormat *>
                (&pBufferNegotiationData->pOutputPortNegotiationData[index].pFinalOutputBufferProperties->imageFormat);
            isUBWCFormat = ImageFormatUtils::IsUBWC(pFormat->format);

            upscaleLimit = m_capability.maxUpscale[isUBWCFormat];
            downscaleLimit = m_capability.maxDownscale[isUBWCFormat];

            Utils::Memset(&pOutputPortNegotiationData->outputBufferRequirementOptions, 0, sizeof(BufferRequirement));

            // Go through the requirements of the input ports connected to the output port
            for (UINT inputIndex = 0; inputIndex < pOutputPortNegotiationData->numInputPortsNotification; inputIndex++)
            {
                BufferRequirement* pInputPortRequirement = &pOutputPortNegotiationData->inputPortRequirement[inputIndex];
                /// @todo (CAMX-2013) take into account aspect ratio and format as well during negotiation.
                // Take the max of the min dimensions, min of the max dimensions and the
                // max of the optimal dimensions
                perOutputPortOptimalWidth = Utils::MaxUINT32(perOutputPortOptimalWidth, pInputPortRequirement->optimalWidth);
                perOutputPortOptimalHeight = Utils::MaxUINT32(perOutputPortOptimalHeight, pInputPortRequirement->optimalHeight);

                perOutputPortMinWidth = Utils::MaxUINT32(perOutputPortMinWidth, pInputPortRequirement->minWidth);
                perOutputPortMinHeight = Utils::MaxUINT32(perOutputPortMinHeight, pInputPortRequirement->minHeight);

                perOutputPortMaxWidth = Utils::MinUINT32(perOutputPortMaxWidth, pInputPortRequirement->maxWidth);
                perOutputPortMaxHeight = Utils::MinUINT32(perOutputPortMaxHeight, pInputPortRequirement->maxHeight);

                inputAspectRatio = static_cast<FLOAT>(pInputPortRequirement->optimalWidth) /
                                       pInputPortRequirement->optimalHeight;
                perOutputPortAspectRatio = Utils::MaxFLOAT(perOutputPortAspectRatio, inputAspectRatio);

                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Idx:%d In=%dx%d Opt:%dx%d inAR:%f peroutAR:%f",
                    inputIndex, pInputPortRequirement->optimalWidth, pInputPortRequirement->optimalHeight,
                    perOutputPortOptimalWidth, perOutputPortOptimalHeight, inputAspectRatio,
                    perOutputPortAspectRatio);

                for (UINT planeIdx = 0; planeIdx < FormatsMaxPlanes; planeIdx++)
                {
                    alignmentLCM[planeIdx].strideAlignment   =
                        Utils::CalculateLCM(
                            static_cast<INT32>(alignmentLCM[planeIdx].strideAlignment),
                            static_cast<INT32>(pInputPortRequirement->planeAlignment[planeIdx].strideAlignment));
                    alignmentLCM[planeIdx].scanlineAlignment =
                        Utils::CalculateLCM(
                            static_cast<INT32>(alignmentLCM[planeIdx].scanlineAlignment),
                            static_cast<INT32>(pInputPortRequirement->planeAlignment[planeIdx].scanlineAlignment));
                }
            }

            // Store the buffer requirements for this output port which will be reused to set, during forward walk.
            // The values stored here could be final output dimensions unless it is overridden by forward walk.

            // Optimal dimension should lie between the min and max, ensure the same.
            // There is a chance of the Optimal dimension going over the max dimension.
            // Correct the same.
            perOutputPortOptimalWidth =
                Utils::ClampUINT32(perOutputPortOptimalWidth, perOutputPortMinWidth, perOutputPortMaxWidth);
            perOutputPortOptimalHeight =
                Utils::ClampUINT32(perOutputPortOptimalHeight, perOutputPortMinHeight, perOutputPortMaxHeight);

            // This current output port requires resolution that IPE cannot handle for UBWC format
            if ((0 != isUBWCFormat) &&
                ((perOutputPortOptimalWidth < m_capability.minOutputWidthUBWC) ||
                (perOutputPortOptimalHeight < m_capability.minOutputHeightUBWC)))
            {
                CAMX_LOG_WARN(CamxLogGroupPProc,
                              "IPE unabled to handle resolution %dx%d with current format %d for output port %d ",
                              perOutputPortOptimalWidth,
                              perOutputPortOptimalHeight,
                              pFormat->format,
                              GetOutputPortId(pOutputPortNegotiationData->outputPortIndex));
                result = CamxResultEFailed;
                break; // break out of loop as IPE fails to work with current resolution and format
            }

            pOutputPortNegotiationData->outputBufferRequirementOptions.optimalWidth = perOutputPortOptimalWidth;
            pOutputPortNegotiationData->outputBufferRequirementOptions.optimalHeight = perOutputPortOptimalHeight;
            pOutputPortNegotiationData->outputBufferRequirementOptions.minWidth = perOutputPortMinWidth;
            pOutputPortNegotiationData->outputBufferRequirementOptions.minHeight = perOutputPortMinHeight;
            pOutputPortNegotiationData->outputBufferRequirementOptions.maxWidth = perOutputPortMaxWidth;
            pOutputPortNegotiationData->outputBufferRequirementOptions.maxHeight = perOutputPortMaxHeight;
            Utils::Memcpy(&pOutputPortNegotiationData->outputBufferRequirementOptions.planeAlignment[0],
                          &alignmentLCM[0],
                          sizeof(AlignmentInfo) * FormatsMaxPlanes);

            Utils::Memset(&alignmentLCM[0], 0, sizeof(AlignmentInfo) * FormatsMaxPlanes);

            optimalInputWidth = Utils::MaxUINT32(optimalInputWidth, perOutputPortOptimalWidth);
            optimalInputHeight = Utils::MaxUINT32(optimalInputHeight, perOutputPortOptimalHeight);

            optimalAspectRatio = static_cast<FLOAT>(optimalInputWidth) / optimalInputHeight;

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "OptimalIn:%dx%d OptAR:%f perOutAR:%f",
                optimalInputWidth, optimalInputHeight, optimalAspectRatio, perOutputPortAspectRatio);

            // Based on the various negotiations above it is possible that the optimal dimensions as input
            // to IPE could end up with an arbitrary aspect ratio. Hence make sure that the dimensions conform
            // to the maximum of the aspect ratio from the output dimensions. Assumption here is that the
            // output dimensions requested from IPE are proper. The dimensions are only adapted for the IPE input.
            if (TRUE == IsStandardAspectRatio(optimalAspectRatio) ||
                TRUE == Utils::FEqualCoarse(optimalAspectRatio, perOutputPortAspectRatio))
            {
                // The dimensions are fine. Do nothing
            }
            else if (optimalAspectRatio > perOutputPortAspectRatio)
            {
                optimalInputHeight = Utils::EvenFloorUINT32(static_cast<UINT32>(optimalInputWidth / perOutputPortAspectRatio));
                CAMX_LOG_ERROR(CamxLogGroupPProc, "NonConformant AspectRatio:%f Change Height %d using AR:%f",
                    optimalAspectRatio, optimalInputHeight, perOutputPortAspectRatio);
            }
            else
            {
                optimalInputWidth = Utils::EvenFloorUINT32(static_cast<UINT32>(optimalInputHeight * perOutputPortAspectRatio));
                CAMX_LOG_ERROR(CamxLogGroupPProc, "NonConformant AspectRatio:%f Change Width %d using AR:%f",
                    optimalAspectRatio, optimalInputWidth, perOutputPortAspectRatio);
            }

            // Minimum IPE input dimension should be big enough to give the
            // max output required for a connected to one of IPE destination ports,
            // considering the upscale limitations.
            if ((FALSE == GetStaticSettings()->enableIPEUpscale) &&
                (GetOutputPortId(pOutputPortNegotiationData->outputPortIndex) == IPEOutputPortVideo ||
                 GetOutputPortId(pOutputPortNegotiationData->outputPortIndex) == IPEOutputPortDisplay))
            {
                minInputHeight = Utils::MaxUINT32(minInputHeight,
                                                  static_cast<UINT32>(perOutputPortMaxHeight / 1.0f));
                minInputWidth = Utils::MaxUINT32(minInputWidth,
                                                 static_cast<UINT32>(perOutputPortMaxWidth / 1.0f));
            }
            else
            {
                minInputHeight = Utils::MaxUINT32(minInputHeight,
                                                  static_cast<UINT32>(perOutputPortMaxHeight / upscaleLimit));
                minInputWidth = Utils::MaxUINT32(minInputWidth,
                                                 static_cast<UINT32>(perOutputPortMaxWidth / upscaleLimit));
            }
            // Set the value at the minInputLimit of IPE if the current value is smaller than required.
            minInputWidth = Utils::MaxUINT32(IPEMinInputWidthLimit, minInputWidth);
            minInputHeight = Utils::MaxUINT32(IPEMinInputHeightLimit, minInputHeight);

            // Maximum input dimension should be small enough to give the
            // min output required for a connected IPE destination port,
            // considering the downscale limitations.
            maxInputHeight = Utils::MinUINT32(maxInputHeight,
                                              static_cast<UINT32>(perOutputPortMinHeight * downscaleLimit));
            maxInputWidth = Utils::MinUINT32(maxInputWidth,
                                             static_cast<UINT32>(perOutputPortMinWidth * downscaleLimit));
            // Cap the value at the IPE limitations if the current value is bigger than required.
            maxInputWidth = Utils::MinUINT32(IPEMaxInputWidth, maxInputWidth);
            maxInputHeight = Utils::MinUINT32(IPEMaxInputHeight, maxInputHeight);
        }
    }
    if (CamxResultSuccess == result)
    {
        result = GetEISMargin();
        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Unable to determine EIS margins")
        }

        // Account for additional margin need in EIS usecases
        if (0 != (m_instanceProperty.stabilizationType & IPEStabilizationTypeEIS2))
        {
            optimalInputWidth  += static_cast<UINT32>(optimalInputWidth * m_EISMarginRequest.widthMargin);
            optimalInputHeight += static_cast<UINT32>(optimalInputHeight * m_EISMarginRequest.heightMargin);
        }
        else if (0 != (m_instanceProperty.stabilizationType & IPEStabilizationTypeEIS3))
        {
            optimalInputWidth  += static_cast<UINT32>(optimalInputWidth * m_EISMarginRequest.widthMargin);
            optimalInputHeight += static_cast<UINT32>(optimalInputHeight * m_EISMarginRequest.heightMargin);
        }
        else
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Not EIS 2 or 3 for node %d insatnce %d, stabType %d",
                             Type(), InstanceID(), m_instanceProperty.stabilizationType);
        }

        // Add extra 3% for left and 3% for right(total 6%) on optimal input dim
        if ((TRUE == GetStaticSettings()->fovcEnable) &&
            (optimalInputWidth < maxInputWidth) &&
            (optimalInputHeight < maxInputHeight) && IsRealTime())
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "bfr Add extra margin for fixed FOV width %d height %d",
                optimalInputWidth, optimalInputHeight);

            optimalInputWidth += static_cast<UINT32>(optimalInputWidth * (FFOV_PER));
            optimalInputHeight += static_cast<UINT32>(optimalInputHeight * (FFOV_PER));

            minInputWidth += static_cast<UINT32>(minInputWidth * (FFOV_PER));
            minInputHeight += static_cast<UINT32>(minInputHeight * (FFOV_PER));

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Afr Add extra margin for fixed FOV width %d height %d",
                optimalInputWidth, optimalInputHeight);
            // if AF's fov factor is 0 then IPE should cropout 6%
            m_prevFOVC = FFOV_PER;
        }

        optimalInputWidth  = Utils::AlignGeneric32(optimalInputWidth, 4);
        optimalInputHeight = Utils::AlignGeneric32(optimalInputHeight, 4);

        minInputWidth  = Utils::AlignGeneric32(minInputWidth, 4);
        minInputHeight = Utils::AlignGeneric32(minInputHeight, 4);

        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "optimal input dimension after alignment width %d height %d",
                         optimalInputWidth, optimalInputHeight);
    }

    if (CamxResultSuccess == result)
    {
        if ((optimalInputWidth == 0) || (optimalInputHeight == 0))
        {
            result = CamxResultEFailed;

            CAMX_LOG_ERROR(CamxLogGroupPProc,
                           "Buffer Negotiation Failed, W:%d x H:%d!\n",
                           optimalInputWidth,
                           optimalInputHeight);
        }
        else
        {
            if ((minInputWidth > maxInputWidth) ||
                (minInputHeight > maxInputHeight))
            {
                CAMX_LOG_WARN(CamxLogGroupPProc, "Min > Max, unable to use current format");
                result = CamxResultEFailed;
            }
            // Ensure optimal dimension is within min and max dimension,
            // There are chances that the optmial dimension is more than max dimension.
            // Correct for the same.
            UINT32              tempOptimalInputWidth              = 0;
            UINT32              tempOptimalInputHeight             = 0;

            tempOptimalInputWidth  =
                Utils::ClampUINT32(optimalInputWidth, minInputWidth, maxInputWidth);
            tempOptimalInputHeight =
                Utils::ClampUINT32(optimalInputHeight, minInputHeight, maxInputHeight);

            if ((tempOptimalInputWidth != optimalInputWidth) ||
                (tempOptimalInputHeight != optimalInputHeight))
            {
                optimalAspectRatio = static_cast<FLOAT>(tempOptimalInputWidth) / tempOptimalInputHeight;
                if (TRUE == Utils::FEqualCoarse(optimalAspectRatio, perOutputPortAspectRatio))
                {
                    // The dimensions are fine. Do nothing
                }
                else if (optimalAspectRatio > perOutputPortAspectRatio)
                {
                    tempOptimalInputHeight =
                        Utils::EvenFloorUINT32(static_cast<UINT32>(tempOptimalInputWidth / perOutputPortAspectRatio));
                    // ensure that we dont exceed max
                    optimalInputHeight =
                        Utils::ClampUINT32(tempOptimalInputHeight, minInputHeight, maxInputHeight);
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "NonConformant AspectRatio:%f Change Height %d using AR:%f",
                        optimalAspectRatio, optimalInputHeight, perOutputPortAspectRatio);
                }
                else
                {
                    tempOptimalInputWidth =
                        Utils::EvenFloorUINT32(static_cast<UINT32>(tempOptimalInputHeight * perOutputPortAspectRatio));
                    // ensure that we dont exceed max
                    optimalInputWidth =
                        Utils::ClampUINT32(tempOptimalInputWidth, minInputWidth, maxInputWidth);
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "NonConformant AspectRatio:%f Change Width %d using AR:%f",
                        optimalAspectRatio, optimalInputWidth, perOutputPortAspectRatio);
                }
            }
            UINT32 numInputPorts = 0;
            UINT32 inputPortId[IPEMaxInput];

            // Get Input Port List
            GetAllInputPortIds(&numInputPorts, &inputPortId[0]);

            pBufferNegotiationData->numInputPorts = numInputPorts;

            isIPEDownscalerEnabled = IsIPEOnlyDownscalerEnabled(pBufferNegotiationData);
            if (TRUE == isIPEDownscalerEnabled)
            {
                UINT32  IPEDownscalerInputWidth = 0;
                UINT32  IPEDownscalerInputHeight = 0;

                GetIPEDownscalerOnlyDimensions(optimalInputWidth,
                    optimalInputHeight,
                    &IPEDownscalerInputWidth,
                    &IPEDownscalerInputHeight,
                    downscaleLimit,
                    m_instanceProperty.ipeOnlyDownscalerMode);

                optimalInputWidth = IPEDownscalerInputWidth;
                optimalInputHeight = IPEDownscalerInputHeight;

                minInputWidth = IPEDownscalerInputWidth;
                minInputHeight = IPEDownscalerInputHeight;
            }

            for (UINT input = 0; input < numInputPorts; input++)
            {
                pBufferNegotiationData->inputBufferOptions[input].nodeId     = Type();
                pBufferNegotiationData->inputBufferOptions[input].instanceId = InstanceID();
                pBufferNegotiationData->inputBufferOptions[input].portId     = inputPortId[input];

                BufferRequirement* pInputBufferRequirement =
                    &pBufferNegotiationData->inputBufferOptions[input].bufferRequirement;

                pInputBufferRequirement->optimalWidth  = optimalInputWidth;
                pInputBufferRequirement->optimalHeight = optimalInputHeight;
                // If IPE is enabling SIMO and if one of the output is smaller than the other,
                // then the scale capabilities (min,max) needs to be adjusted after accounting for
                // the scaling needed on the smaller output port.
                pInputBufferRequirement->minWidth      = minInputWidth;
                pInputBufferRequirement->minHeight     = minInputHeight;

                pInputBufferRequirement->maxWidth      = maxInputWidth;
                pInputBufferRequirement->maxHeight     = maxInputHeight;

                CAMX_LOG_INFO(CamxLogGroupPProc,
                              "Buffer Negotiation dims IPE: %d, Port %d Optimal %d x %d, Min %d x %d, Max %d x %d\n",
                              InstanceID(),
                              inputPortId[input],
                              optimalInputWidth,
                              optimalInputHeight,
                              minInputWidth,
                              minInputHeight,
                              maxInputWidth,
                              maxInputHeight);
            }
        }
    }

    if ((FALSE == GetPipeline()->HasStatsNode()) || (TRUE == GetStaticSettings()->disableStatsNode))
    {
        m_isStatsNodeAvailable = FALSE;
    }
    else
    {
        m_isStatsNodeAvailable = TRUE;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::CalculateInputWithoutMargins
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::CalculateInputWithoutMargins(
    UINT* pWidth,
    UINT* pHeight)
{
    // Strips the stabilization margin from the negotiated IPE input dimension by comparing it
    // against the published preview/video stream dimensions (the larger of the two streams is
    // taken as the margin-free size).
    //
    // pWidth / pHeight: in-out. On entry, the input dimension including margins; on success,
    //                   the dimension with the margin removed. Unchanged on failure.
    // Returns CamxResultSuccess on success, CamxResultEFailed if a sane margin cannot be derived.
    CamxResult result                = CamxResultSuccess;
    UINT32     marginPreviewTag      = 0;
    UINT32     marginVideoTag        = 0;
    UINT32     marginWidth           = 0;
    UINT32     marginHeight          = 0;
    UINT32     previewWidth          = 0;
    UINT32     previewHeight         = 0;
    UINT32     videoWidth            = 0;
    UINT32     videoHeight           = 0;
    UINT32     receivedDimensions[2] = { 0 };

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.streamDimension", "preview", &marginPreviewTag);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: marginPreviewTag");

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.streamDimension", "video", &marginVideoTag);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: marginVideoTag");

    // NOTE: deliberately not "static" - a static array would latch the tag values computed on the
    // very first invocation; if that first query failed, zeroed tags would be cached for every
    // subsequent call even though later queries could succeed.
    const UINT marginTags[] =
    {
        marginPreviewTag | UsecaseMetadataSectionMask,
        marginVideoTag   | UsecaseMetadataSectionMask,
    };

    const static UINT length         = CAMX_ARRAY_SIZE(marginTags);
    VOID*             pData[length]  = { 0 };
    UINT64            offset[length] = { 0 };

    result = GetDataList(marginTags, pData, offset, length);

    if (CamxResultSuccess == result)
    {
        // Each tag publishes a (width, height) pair; force even values for YUV alignment.
        if (NULL != pData[0])
        {
            receivedDimensions[0] = static_cast<UINT32*>(pData[0])[0];
            receivedDimensions[1] = static_cast<UINT32*>(pData[0])[1];
            previewWidth          = Utils::EvenFloorUINT32(receivedDimensions[0]);
            previewHeight         = Utils::EvenFloorUINT32(receivedDimensions[1]);
        }

        if (NULL != pData[1])
        {
            receivedDimensions[0] = static_cast<UINT32*>(pData[1])[0];
            receivedDimensions[1] = static_cast<UINT32*>(pData[1])[1];
            videoWidth            = Utils::EvenFloorUINT32(receivedDimensions[0]);
            videoHeight           = Utils::EvenFloorUINT32(receivedDimensions[1]);
        }
    }

    // Use whichever stream is larger (by area) as the margin-free reference dimension.
    if ((videoWidth * videoHeight) < (previewWidth * previewHeight))
    {
        videoWidth  = previewWidth;
        videoHeight = previewHeight;
    }

    if ((videoWidth != 0) && (videoHeight != 0))
    {
        // Guard against unsigned underflow when the published stream dimension exceeds the
        // negotiated input dimension; fail explicitly instead of wrapping around.
        if ((*pWidth >= videoWidth) && (*pHeight >= videoHeight))
        {
            marginWidth  = *pWidth - videoWidth;
            marginHeight = *pHeight - videoHeight;
        }
        else
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Stream dimension %dx%d exceeds input %dx%d",
                           videoWidth, videoHeight, *pWidth, *pHeight);
            result = CamxResultEFailed;
        }
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Invalid stream dimension %dx%d", videoWidth, videoHeight);
    }

    if ((CamxResultSuccess == result) && (*pWidth > marginWidth) && (*pHeight > marginHeight))
    {
        *pWidth  = *pWidth - marginWidth;
        *pHeight = *pHeight - marginHeight;
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Unexpected margin calculated %dx%d", marginWidth, marginHeight);
        result = CamxResultEFailed;
    }

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "calculated margin %d x %d, Input Dimension %d x %d",
                     marginWidth, marginHeight, *pWidth, *pHeight);

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FinalizeBufferProperties
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::FinalizeBufferProperties(
    BufferNegotiationData* pBufferNegotiationData)
{
    // Forward-walk finalization: fixes the final output buffer dimensions for every notified
    // output port, derived from the negotiated full-input dimension (reference/DS ports) or from
    // the per-port optimal dimensions stored during the backward walk.
    CamxResult result  = CamxResultSuccess;
    UINT               numInputPort;
    UINT               inputPortId[IPEMaxInput];
    const ImageFormat* pImageFormat = NULL;

    CAMX_ASSERT(NULL != pBufferNegotiationData);

    // Get Input Port List
    GetAllInputPortIds(&numInputPort, &inputPortId[0]);

    // Loop through input ports to get IPEInputPortFull
    for (UINT index = 0; index < numInputPort; index++)
    {
        if (pBufferNegotiationData->pInputPortNegotiationData[index].inputPortId == IPEInputPortFull)
        {
            pImageFormat = pBufferNegotiationData->pInputPortNegotiationData[index].pImageFormat;
            break;
        }
    }

    CAMX_ASSERT(NULL != pImageFormat);

    // Full-input dimension; basis for the reference and DS4/DS16/DS64 port sizes below.
    UINT width = 0;
    UINT height = 0;
    if (NULL != pImageFormat)
    {
        width  = pImageFormat->width;
        height = pImageFormat->height;
    }

    // Calculating the margins as FinalizeBufferProperties is called before the margins are published.
    // Only applies when EIS (2 or 3) is combined with MCTF: the reference ports must be sized
    // without the stabilization margin.
    if (((0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType)) ||
        (0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType))) &&
        (0 != (IPEStabilizationType::IPEStabilizationMCTF & m_instanceProperty.stabilizationType)))
    {
        result = CalculateInputWithoutMargins(&width, &height);
    }
    CAMX_ASSERT(CamxResultSuccess == result);

    for (UINT index = 0; index < pBufferNegotiationData->numOutputPortsNotified; index++)
    {
        OutputPortNegotiationData* pOutputPortNegotiationData   = &pBufferNegotiationData->pOutputPortNegotiationData[index];
        InputPortNegotiationData*  pInputPortNegotiationData    = &pBufferNegotiationData->pInputPortNegotiationData[0];
        BufferProperties*          pFinalOutputBufferProperties = pOutputPortNegotiationData->pFinalOutputBufferProperties;
        UINT outputPortId = GetOutputPortId(pOutputPortNegotiationData->outputPortIndex);

        // Sink ports with HAL buffers already have their dimensions fixed by the framework;
        // only non-sink, non-HAL ports are finalized here.
        if ((NULL != pImageFormat) &&
            (FALSE == IsSinkPortWithBuffer(pOutputPortNegotiationData->outputPortIndex)) &&
            (FALSE == IsNonSinkHALBufferOutput(pOutputPortNegotiationData->outputPortIndex)))
        {
            switch (outputPortId)
            {
                case IPEOutputPortDisplay:
                    if (FALSE == m_nodePropDisableZoomCrop)
                    {
                        // Normal path: use the optimal size negotiated during the backward walk.
                        pFinalOutputBufferProperties->imageFormat.width  =
                        pOutputPortNegotiationData->outputBufferRequirementOptions.optimalWidth;
                        pFinalOutputBufferProperties->imageFormat.height =
                        pOutputPortNegotiationData->outputBufferRequirementOptions.optimalHeight;
                    }
                    else
                    {
                        // Scale-disabled profile: pass the input dimension through unchanged.
                        CAMX_LOG_INFO(CamxLogGroupPProc, "IPE Profile ID is IPEProfileWithoutScale so no zoom");
                        pFinalOutputBufferProperties->imageFormat.width  =
                                pInputPortNegotiationData->pImageFormat->width;
                        pFinalOutputBufferProperties->imageFormat.height =
                                pInputPortNegotiationData->pImageFormat->height;
                    }
                    break;
                case IPEOutputPortVideo:
                    pFinalOutputBufferProperties->imageFormat.width  =
                        pOutputPortNegotiationData->outputBufferRequirementOptions.optimalWidth;
                    pFinalOutputBufferProperties->imageFormat.height =
                        pOutputPortNegotiationData->outputBufferRequirementOptions.optimalHeight;
                    break;
                case IPEOutputPortFullRef:
                    // Full reference port mirrors the (margin-adjusted) full input dimension.
                    pFinalOutputBufferProperties->imageFormat.width  = width;
                    pFinalOutputBufferProperties->imageFormat.height = height;
                    break;
                case IPEOutputPortDS4Ref:
                    // MFNR postfilter (non-scale profile) keeps the DS4 ref at the full input
                    // size of input port 0; otherwise it is the aligned 1/4 downscale.
                    if ((m_instanceProperty.processingType == IPEProcessingType::IPEMFNRPostfilter) &&
                        (m_instanceProperty.profileId != IPEProfileId::IPEProfileIdScale))
                    {
                        pFinalOutputBufferProperties->imageFormat.width =
                            pBufferNegotiationData->pInputPortNegotiationData[0].pImageFormat->width;

                        pFinalOutputBufferProperties->imageFormat.height =
                            pBufferNegotiationData->pInputPortNegotiationData[0].pImageFormat->height;
                        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "DS4 port dim %d x %d",
                            pFinalOutputBufferProperties->imageFormat.width,
                            pFinalOutputBufferProperties->imageFormat.height);
                    }
                    else
                    {
                        // Align up to the DS factor before dividing so the result covers the input.
                        pFinalOutputBufferProperties->imageFormat.width  =
                            Utils::EvenCeilingUINT32(Utils::AlignGeneric32(width, 4) / DS4Factor);
                        pFinalOutputBufferProperties->imageFormat.height =
                            Utils::EvenCeilingUINT32(Utils::AlignGeneric32(height, 4) / DS4Factor);
                    }
                    break;
                case IPEOutputPortDS16Ref:
                    pFinalOutputBufferProperties->imageFormat.width  =
                        Utils::EvenCeilingUINT32(Utils::AlignGeneric32(width, 16) / DS16Factor);
                    pFinalOutputBufferProperties->imageFormat.height =
                        Utils::EvenCeilingUINT32(Utils::AlignGeneric32(height, 16) / DS16Factor);
                    break;
                case IPEOutputPortDS64Ref:
                    pFinalOutputBufferProperties->imageFormat.width  =
                        Utils::EvenCeilingUINT32(Utils::AlignGeneric32(width, 64) / DS64Factor);
                    pFinalOutputBufferProperties->imageFormat.height =
                        Utils::EvenCeilingUINT32(Utils::AlignGeneric32(height, 64) / DS64Factor);
                    break;
                default:
                    break;
            }
            // Propagate the stride/scanline alignment LCMs accumulated during negotiation.
            Utils::Memcpy(&pFinalOutputBufferProperties->imageFormat.planeAlignment[0],
                          &pOutputPortNegotiationData->outputBufferRequirementOptions.planeAlignment[0],
                          sizeof(AlignmentInfo) * FormatsMaxPlanes);
        }

        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "output port %d, Final dim %d x %d",
                         outputPortId,
                         pFinalOutputBufferProperties->imageFormat.width,
                         pFinalOutputBufferProperties->imageFormat.height);
    }

    return;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::CommitAllCommandBuffers
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::CommitAllCommandBuffers(
    CmdBuffer**  ppIPECmdBuffer)
{
    // Commits the mandatory frame-process and IQ-settings command buffers, then the optional
    // striping buffer. Stops at the first failure and returns its result code.
    CamxResult  result = CamxResultSuccess;

    CAMX_ASSERT(NULL != ppIPECmdBuffer[CmdBufferFrameProcess]);
    CAMX_ASSERT(NULL != ppIPECmdBuffer[CmdBufferIQSettings]);

    // Frame-process buffer goes first; later buffers are skipped if it fails.
    result = ppIPECmdBuffer[CmdBufferFrameProcess]->CommitCommands();
    if (CamxResultSuccess != result)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "failed to commit CmdBufferFrameProcess");
    }

    if (CamxResultSuccess == result)
    {
        result = ppIPECmdBuffer[CmdBufferIQSettings]->CommitCommands();
        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "failed to commit CmdBufferIQSettings");
        }
    }

    // The striping buffer is optional; commit only when present and all else succeeded.
    if ((CamxResultSuccess == result) && (NULL != ppIPECmdBuffer[CmdBufferStriping]))
    {
        result = ppIPECmdBuffer[CmdBufferStriping]->CommitCommands();
        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "failed to commit CmdBufferStriping");
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetCDMProgramArrayOffsetFromBase
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
INT IPENode::GetCDMProgramArrayOffsetFromBase(
    CDMProgramArrayOrder    arrayIndex)
{
    // Returns the byte offset of the given CDM program array relative to the CDM payload base,
    // or -1 for an unrecognized index.
    /// @todo (CAMX-1033) Remove this function and make static variable holding offsets.
    CAMX_ASSERT(arrayIndex <= ProgramArrayICA2);

    INT offset = -1;

    // Combined size of the per-array program entries that precede the later arrays.
    const INT preLTMProgramsSize  = sizeof(CdmProgram) * ProgramIndexMaxPreLTM;
    const INT postLTMProgramsSize = sizeof(CdmProgram) * ProgramIndexMaxPostLTM;

    if (arrayIndex <= ProgramArrayPreLTM)
    {
        // Arrays up to and including PreLTM are laid out back to back.
        offset = sizeof(CDMProgramArray) * arrayIndex;
    }
    else
    {
        switch (arrayIndex)
        {
            case ProgramArrayPostLTM:
                offset = (sizeof(CDMProgramArray) * ProgramArrayPreLTM) + preLTMProgramsSize;
                break;
            case ProgramArrayICA1:
                // Skip the postLTM array headers plus all preceding CDM program entries.
                offset = (sizeof(CDMProgramArray) * ProgramArrayPostLTM) + postLTMProgramsSize + preLTMProgramsSize;
                break;
            case ProgramArrayICA2:
                offset = (sizeof(CDMProgramArray) * ProgramArrayICA1) + postLTMProgramsSize + preLTMProgramsSize;
                break;
            default:
                // Leave offset at -1 for anything beyond ICA2.
                break;
        }
    }

    return offset;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetCDMProgramArrayOffsetFromTop
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
INT IPENode::GetCDMProgramArrayOffsetFromTop(
    CDMProgramArrayOrder    arrayIndex)
{
    // Converts a base-relative CDM program array offset into a top-of-buffer offset by
    // skipping past the IpeFrameProcess header. Negative (invalid) offsets pass through.
    const INT baseOffset = GetCDMProgramArrayOffsetFromBase(arrayIndex);

    return (baseOffset < 0) ? baseOffset : static_cast<INT>(baseOffset + sizeof(IpeFrameProcess));
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetCDMProgramOffset
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
INT IPENode::GetCDMProgramOffset(
    CDMProgramArrayOrder    arrayIndex,
    UINT                    CDMProgramIndex)
{
    // Returns the top-relative byte offset of the base/length bitfields of the CDMProgramIndex-th
    // program inside the given program array; negative if the array index is invalid.
    const UINT programFieldOffset = offsetof(CDMProgramArray, programs) +
                                    offsetof(CdmProgram, cdmBaseAndLength) +
                                    offsetof(CDM_BASE_LENGHT, bitfields);

    INT offset = GetCDMProgramArrayOffsetFromTop(arrayIndex);

    if (0 <= offset)
    {
        // Step over the preceding program entries, then into the bitfields of the target entry.
        offset += (sizeof(CdmProgram) * CDMProgramIndex) + programFieldOffset;
    }

    return offset;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillPreLTMCDMProgram
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillPreLTMCDMProgram(
    CmdBuffer**             ppIPECmdBuffer,
    CDMProgramArray*        pCDMProgramArray,
    CdmProgram*             pCDMProgram,
    ProgramType             programType,
    PreLTMCDMProgramOrder   programIndex)
{
    // Appends a DMI CDM program for the given pre-LTM module to the program array and patches
    // its LUT payload (from the DMI-header buffer) into the frame-process command buffer.
    CamxResult result = CamxResultSuccess;

    // Nothing to append when this module published no LUTs for the frame.
    if (0 < m_preLTMLUTCount[programIndex])
    {
        // Append a fresh program entry at the current tail of the array.
        pCDMProgram = &pCDMProgramArray->programs[pCDMProgramArray->numPrograms];

        pCDMProgram->hasSingleReg                        = 0;
        pCDMProgram->programType                         = programType;
        pCDMProgram->uID                                 = 0;
        // Total DMI command bytes for all of this module's LUTs, minus one per CDM LEN convention.
        pCDMProgram->cdmBaseAndLength.bitfields.LEN      = ((cdm_get_cmd_header_size(CDMCmdDMI) * RegisterWidthInBytes)
                                                            * m_preLTMLUTCount[programIndex]) - 1;
        pCDMProgram->cdmBaseAndLength.bitfields.RESERVED = 0;
        pCDMProgram->cdmBaseAndLength.bitfields.BASE     = 0;
        pCDMProgram->bufferAllocatedInternally           = 0;

        /// @todo (CAMX-1033) Change below numPrograms to ProgramIndex once firmware support of program skip is available.
        const INT offset = GetCDMProgramOffset(ProgramArrayPreLTM, pCDMProgramArray->numPrograms);
        CAMX_ASSERT(offset >= 0);

        // Nest the DMI-header sub-buffer so firmware resolves the program's BASE at submit time.
        result = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset,
                                                                               ppIPECmdBuffer[CmdBufferDMIHeader],
                                                                               m_preLTMLUTOffset[programIndex]);
        (pCDMProgramArray->numPrograms)++;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillPostLTMCDMProgram
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillPostLTMCDMProgram(
    CmdBuffer**             ppIPECmdBuffer,
    CDMProgramArray*        pCDMProgramArray,
    CdmProgram*             pCDMProgram,
    ProgramType             programType,
    PostLTMCDMProgramOrder  programIndex)
{
    CamxResult result = CamxResultSuccess;

    // Emit a CDM program for this post-LTM module only when it published LUT entries.
    if (m_postLTMLUTCount[programIndex] > 0)
    {
        UINT numPrograms                                    = pCDMProgramArray->numPrograms;
        pCDMProgram                                         = &pCDMProgramArray->programs[numPrograms];
        pCDMProgram->hasSingleReg                           = 0;
        pCDMProgram->programType                            = programType;
        pCDMProgram->uID                                    = 0;
        pCDMProgram->cdmBaseAndLength.bitfields.LEN         = ((cdm_get_cmd_header_size(CDMCmdDMI) * RegisterWidthInBytes)
                                                               * m_postLTMLUTCount[programIndex]) - 1;
        pCDMProgram->cdmBaseAndLength.bitfields.RESERVED    = 0;
        pCDMProgram->cdmBaseAndLength.bitfields.BASE        = 0;
        // Fix: initialize bufferAllocatedInternally, matching FillPreLTMCDMProgram; it was
        // previously left uninitialized in this firmware-visible CdmProgram structure.
        pCDMProgram->bufferAllocatedInternally              = 0;

        /// @todo (CAMX-1033) Change below numPrograms to ProgramIndex once firmware support of program skip is available.
        INT offset  = GetCDMProgramOffset(ProgramArrayPostLTM, pCDMProgramArray->numPrograms);
        CAMX_ASSERT(offset >= 0);
        // Patch the DMI header command buffer (at this module's LUT offset) into the
        // frame process buffer; BASE is filled in by the patch.
        result      = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset,
                                                                                    ppIPECmdBuffer[CmdBufferDMIHeader],
                                                                                    m_postLTMLUTOffset[programIndex]);
        (pCDMProgramArray->numPrograms)++;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillNPSCDMProgram
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillNPSCDMProgram(
    CmdBuffer**             ppIPECmdBuffer,
    CDMProgramArray*        pCDMProgramArray,
    CdmProgram*             pCDMProgram,
    ProgramType             programType,
    CDMProgramArrayOrder    arrayIndex,
    UINT32                  passCmdBufferSize,
    UINT32                  passOffset)
{
    // Fills one NPS (ANR/TF) CDM program descriptor and patches the NPS command buffer,
    // at the given pass offset, into the frame process buffer at this program's slot.
    CamxResult result = CamxResultSuccess;

    UINT numPrograms                                 = pCDMProgramArray->numPrograms;
    pCDMProgram                                      = &pCDMProgramArray->programs[numPrograms];
    pCDMProgram->hasSingleReg                        = 0;
    pCDMProgram->programType                         = programType;
    pCDMProgram->uID                                 = 0;
    pCDMProgram->cdmBaseAndLength.bitfields.LEN      = passCmdBufferSize - 1;
    pCDMProgram->cdmBaseAndLength.bitfields.RESERVED = 0;
    // Fix: initialize BASE before patching, matching FillPreLTMCDMProgram and
    // FillICACDMprograms; it was previously left uninitialized in this
    // firmware-visible CdmProgram structure.
    pCDMProgram->cdmBaseAndLength.bitfields.BASE     = 0;
    pCDMProgram->bufferAllocatedInternally           = 0;

    /// @todo (CAMX-1033) Change below numPrograms to ProgramIndex once firmware support of program skip is available.
    INT offset = GetCDMProgramOffset(arrayIndex, pCDMProgramArray->numPrograms);
    CAMX_ASSERT(offset >= 0);
    result = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset,
                                                                           ppIPECmdBuffer[CmdBufferNPS],
                                                                           passOffset);
    (pCDMProgramArray->numPrograms)++;

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillCDMProgramArrays
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillCDMProgramArrays(
    IpeFrameProcessData*    pFrameProcessData,
    IpeIQSettings*          pIpeIQSettings,
    CmdBuffer**             ppIPECmdBuffer,
    UINT                    batchFrames)
{
    // Builds every CDM program array embedded in the IPE frame-process payload:
    // patches the IQSettings and CDM-program-array base addresses, publishes the
    // per-array offsets into pFrameProcessData, then fills the pre-LTM, post-LTM,
    // ICA1/ICA2 and (when an NPS buffer exists) the ANR/TF per-pass program
    // descriptors. Returns the first patching failure encountered, if any.
    INT                 offset;
    CdmProgram*         pCDMProgram;
    CDMProgramArray*    pCDMProgramArray;
    UINT8*              pCDMPayload;
    UINT                numPrograms     = 0;
    CamxResult          result          = CamxResultSuccess;
    ProgramType         type;

    // Patch IQSettings buffer in IpeFrameProcessData
    offset = static_cast <UINT32>(offsetof(IpeFrameProcessData, iqSettingsAddr));
    result = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset, ppIPECmdBuffer[CmdBufferIQSettings], 0);
    if (CamxResultSuccess != result)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "%s: Patching failed for IQSettings", __FUNCTION__);
    }
    else
    {
        // Patch cdmProgramArrayBase, which is allocated contiguously below IpeFrameProcessData
        offset = static_cast <UINT32>(offsetof(IpeFrameProcessData, cdmProgramArrayBase));
        result = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset,
                                                                               ppIPECmdBuffer[CmdBufferFrameProcess],
                                                                               sizeof(IpeFrameProcess));
        if (CamxResultSuccess != result)
        {
            // NOTE(review): this log message looks copy/pasted from the IQSettings patch
            // above; the patch that failed here is cdmProgramArrayBase.
            CAMX_LOG_ERROR(CamxLogGroupPProc, "%s: Patching failed for IQSettings", __FUNCTION__);
        }
    }

    // Populate offsets of all cdmProgramArrays in IpeFrameProcessData with respect to Base
    pFrameProcessData->cdmProgramArrayAnrFullPassAddr   =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayANRFullPass);
    pFrameProcessData->cdmProgramArrayAnrDc4Addr        =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayANRDS4);
    pFrameProcessData->cdmProgramArrayAnrDc16Addr       =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayANRDS16);
    pFrameProcessData->cdmProgramArrayAnrDc64Addr       =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayANRDS64);
    pFrameProcessData->cdmProgramArrayTfFullPassAddr    =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayTFFullPass);
    pFrameProcessData->cdmProgramArrayTfDc4Addr         =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayTFDS4);
    pFrameProcessData->cdmProgramArrayTfDc16Addr        =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayTFDS16);
    pFrameProcessData->cdmProgramArrayTfDc64Addr        =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayTFDS64);
    pFrameProcessData->cdmProgramArrayPreLtmAddr        =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayPreLTM);
    pFrameProcessData->cdmProgramArrayPostLtmAddr       =
        GetCDMProgramArrayOffsetFromBase(ProgramArrayPostLTM);

    // ICA program arrays are per frame-set; publish the same offsets for every batched frame.
    for (UINT i = 0; i < batchFrames; i++)
    {
        pFrameProcessData->frameSets[i].cdmProgramArrayIca1Addr =
            GetCDMProgramArrayOffsetFromBase(ProgramArrayICA1);
        pFrameProcessData->frameSets[i].cdmProgramArrayIca2Addr =
            GetCDMProgramArrayOffsetFromBase(ProgramArrayICA2);

    }

    // The program arrays live in the same allocation as pFrameProcessData, at fixed
    // offsets from its start.
    pCDMPayload         = reinterpret_cast<UINT8*>(pFrameProcessData);
    pCDMProgramArray    =
        reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayPreLTM));
    pCDMProgramArray->allocator     = 0;
    pCDMProgramArray->numPrograms   = 0;

    // CDMProgramArray :: Pre LTM Section
    numPrograms                            = pCDMProgramArray->numPrograms;
    pCDMProgram                            = &pCDMProgramArray->programs[numPrograms];
    pCDMProgram->hasSingleReg              = 0;
    pCDMProgram->programType               = PROGRAM_TYPE_GENERIC;
    pCDMProgram->uID                       = 0;
    pCDMProgram->bufferAllocatedInternally = 0;

    if (NULL != ppIPECmdBuffer[CmdBufferPreLTM])
    {
        UINT length = (ppIPECmdBuffer[CmdBufferPreLTM]->GetResourceUsedDwords() * RegisterWidthInBytes);
        if (length > 0)
        {
            pCDMProgram->cdmBaseAndLength.bitfields.LEN         = length - 1;
            pCDMProgram->cdmBaseAndLength.bitfields.RESERVED    = 0;

            // CDMProgram :: Pre LTM :: GENERIC Cmd buffer
            offset = GetCDMProgramOffset(ProgramArrayPreLTM, ProgramIndexPreLTMGeneric);
            CAMX_ASSERT(offset >= 0);

            // NOTE(review): the return value of AddNestedCmdBufferInfo is not checked here,
            // unlike the patches above -- confirm this is intentional.
            ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset, ppIPECmdBuffer[CmdBufferPreLTM], 0);
            (pCDMProgramArray->numPrograms)++;
        }
    }

    if (NULL != ppIPECmdBuffer[CmdBufferDMIHeader] && (CamxResultSuccess == result))
    {
        // Skip the LTM LUT program in firmware when the LTM module is disabled.
        type = ((TRUE == pIpeIQSettings->ltmParameters.moduleCfg.EN) ? IPE_LTM_LUT_PROGRAM : PROGRAM_TYPE_SKIP);
        result = FillPreLTMCDMProgram(ppIPECmdBuffer, pCDMProgramArray, pCDMProgram, type, ProgramIndexLTM);
    }

    // CDMProgramArray :: Post LTM Section
    pCDMProgramArray =
        reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayPostLTM));
    pCDMProgramArray->allocator     = 0;
    pCDMProgramArray->numPrograms   = 0;

    // CDMProgram :: Generic
    numPrograms                            = pCDMProgramArray->numPrograms;
    pCDMProgram                            = &pCDMProgramArray->programs[numPrograms];
    pCDMProgram->hasSingleReg              = 0;
    pCDMProgram->programType               = PROGRAM_TYPE_GENERIC;
    pCDMProgram->uID                       = 0;
    pCDMProgram->bufferAllocatedInternally = 0;

    if (NULL != ppIPECmdBuffer[CmdBufferPostLTM])
    {
        UINT length = (ppIPECmdBuffer[CmdBufferPostLTM]->GetResourceUsedDwords() * RegisterWidthInBytes);
        if (length > 0)
        {
            pCDMProgram->cdmBaseAndLength.bitfields.LEN         = length - 1;
            pCDMProgram->cdmBaseAndLength.bitfields.RESERVED    = 0;

            // Generic Reg Random CDM from pre LTM Modules
            offset = GetCDMProgramOffset(ProgramArrayPostLTM, ProgramIndexPostLTMGeneric);
            CAMX_ASSERT(offset >= 0);

            // NOTE(review): return value of AddNestedCmdBufferInfo not checked (see pre-LTM note).
            ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset, ppIPECmdBuffer[CmdBufferPostLTM], 0);
            (pCDMProgramArray->numPrograms)++;
        }
    }

    if (NULL != ppIPECmdBuffer[CmdBufferDMIHeader])
    {
        // CDMProgram :: Gamma LUT
        if (CamxResultSuccess == result)
        {
            type = ((TRUE == pIpeIQSettings->glutParameters.moduleCfg.EN) ?
                IPE_GAMMA_GLUT_LUT_PROGRAM : PROGRAM_TYPE_SKIP);
            result = FillPostLTMCDMProgram(ppIPECmdBuffer,
                                           pCDMProgramArray,
                                           pCDMProgram,
                                           type,
                                           ProgramIndexGLUT);
        }
        // CDMProgram :: 2D LUT
        if (CamxResultSuccess == result)
        {
            type = ((TRUE == pIpeIQSettings->lut2dParameters.moduleCfg.EN) ?
                 IPE_2D_LUT_LUT_PROGRAM : PROGRAM_TYPE_SKIP);
            result = FillPostLTMCDMProgram(ppIPECmdBuffer,
                                           pCDMProgramArray,
                                           pCDMProgram,
                                           type,
                                           ProgramIndex2DLUT);
        }
        // CDMProgram :: ASF LUT
        if (CamxResultSuccess == result)
        {
            type = ((TRUE == pIpeIQSettings->asfParameters.moduleCfg.EN) ?
                IPE_ASF_LUT_PROGRAM : PROGRAM_TYPE_SKIP);
            result = FillPostLTMCDMProgram(ppIPECmdBuffer,
                                           pCDMProgramArray,
                                           pCDMProgram,
                                           type,
                                           ProgramIndexASF);
        }
        // CDMProgram :: GRA LUT
        type = ((TRUE == pIpeIQSettings->graParameters.moduleCfg.EN) ?
            IPE_GRA_LUT_PROGRAM : PROGRAM_TYPE_SKIP);
        if (CamxResultSuccess == result)
        {
            result = FillPostLTMCDMProgram(ppIPECmdBuffer,
                                           pCDMProgramArray,
                                           pCDMProgram,
                                           type,
                                           ProgramIndexGRA);
        }

        if (CamxResultSuccess == result)
        {
            // CDMProgram :: ICA1 LUT
            // if module is disabled dynamically skip the CDM program
            type = ((TRUE == pIpeIQSettings->ica1Parameters.isGridEnable) ||
                    (TRUE == pIpeIQSettings->ica1Parameters.isPerspectiveEnable)) ?
                    IPE_ICA1_LUT_PROGRAM : PROGRAM_TYPE_SKIP;
            result = FillICACDMprograms(pFrameProcessData,
                                        ppIPECmdBuffer,
                                        type,
                                        ProgramArrayICA1,
                                        ProgramIndexICA1);
        }

        if (CamxResultSuccess == result)
        {
            // CDMProgram :: ICA2 LUT
            type = ((TRUE == pIpeIQSettings->ica2Parameters.isGridEnable) ||
                    (TRUE == pIpeIQSettings->ica2Parameters.isPerspectiveEnable)) ?
                    IPE_ICA2_LUT_PROGRAM : PROGRAM_TYPE_SKIP;
            result = FillICACDMprograms(pFrameProcessData,
                                        ppIPECmdBuffer,
                                        type,
                                        ProgramArrayICA2,
                                        ProgramIndexICA2);
        }
    }

    // NPS (ANR + TF) section: anrParameters.parameters[0..3] / tfParameters.parameters[0..3]
    // correspond to the FULL / DC4 / DC16 / DC64 passes respectively.
    // NOTE(review): each FillNPSCDMProgram call below overwrites result without checking the
    // previous one, so a failure in an earlier pass is silently dropped -- confirm intended.
    if (NULL != ppIPECmdBuffer[CmdBufferNPS])
    {
        // CDMProgramArray :: NPS : ANR Full Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload +
                GetCDMProgramArrayOffsetFromTop(ProgramArrayANRFullPass));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        // numPrograms/pCDMProgram computed here are recomputed inside FillNPSCDMProgram
        // (the pointer is passed by value), so these assignments do not affect the callee.
        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];

        type = (TRUE == pIpeIQSettings->anrParameters.parameters[0].moduleCfg.EN) ?
            IPE_ANR_CYLPF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayANRFullPass,
                                   m_ANRSinglePassCmdBufferSize,
                                   m_ANRPassOffset[PASS_NAME_FULL]);

        // CDMProgramArray :: NPS : ANR DS4 Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayANRDS4));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];

        type = (TRUE == pIpeIQSettings->anrParameters.parameters[1].moduleCfg.EN) ?
            IPE_ANR_CYLPF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayANRDS4,
                                   m_ANRSinglePassCmdBufferSize,
                                   m_ANRPassOffset[PASS_NAME_DC_4]);

        // CDMProgramArray :: NPS : ANR DS16 Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayANRDS16));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];

        type = (TRUE == pIpeIQSettings->anrParameters.parameters[2].moduleCfg.EN) ?
            IPE_ANR_CYLPF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayANRDS16,
                                   m_ANRSinglePassCmdBufferSize,
                                   m_ANRPassOffset[PASS_NAME_DC_16]);


        // CDMProgramArray :: NPS : ANR DS64 Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayANRDS64));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];

        type = (TRUE == pIpeIQSettings->anrParameters.parameters[3].moduleCfg.EN) ?
            IPE_ANR_CYLPF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayANRDS64,
                                   m_ANRSinglePassCmdBufferSize,
                                   m_ANRPassOffset[PASS_NAME_DC_64]);

        // CDMProgramArray :: NPS : TF Full Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayTFFullPass));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];

        type = (TRUE == pIpeIQSettings->tfParameters.parameters[0].moduleCfg.EN) ?
            IPE_TF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayTFFullPass,
                                   m_TFSinglePassCmdBufferSize,
                                   m_TFPassOffset[PASS_NAME_FULL]);
        // CDMProgramArray :: NPS : TF DS4 Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayTFDS4));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];


        type = (TRUE == pIpeIQSettings->tfParameters.parameters[1].moduleCfg.EN) ?
            IPE_TF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayTFDS4,
                                   m_TFSinglePassCmdBufferSize,
                                   m_TFPassOffset[PASS_NAME_DC_4]);

        // CDMProgramArray :: NPS : TF DS16 Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayTFDS16));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];

        type = (TRUE == pIpeIQSettings->tfParameters.parameters[2].moduleCfg.EN) ?
            IPE_TF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayTFDS16,
                                   m_TFSinglePassCmdBufferSize,
                                   m_TFPassOffset[PASS_NAME_DC_16]);

        // CDMProgramArray :: NPS : TF DS64 Pass
        pCDMProgramArray =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(ProgramArrayTFDS64));

        pCDMProgramArray->allocator   = 0;
        pCDMProgramArray->numPrograms = 0;

        numPrograms = pCDMProgramArray->numPrograms;
        pCDMProgram = &pCDMProgramArray->programs[numPrograms];

        type = (TRUE == pIpeIQSettings->tfParameters.parameters[3].moduleCfg.EN) ?
            IPE_TF_PROGRAM : PROGRAM_TYPE_SKIP;
        result = FillNPSCDMProgram(ppIPECmdBuffer,
                                   pCDMProgramArray,
                                   pCDMProgram,
                                   type,
                                   ProgramArrayTFDS64,
                                   m_TFSinglePassCmdBufferSize,
                                   m_TFPassOffset[PASS_NAME_DC_64]);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillICACDMprograms
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillICACDMprograms(
    IpeFrameProcessData*    pFrameProcessData,
    CmdBuffer**             ppIPECmdBuffer,
    ProgramType             programType,
    CDMProgramArrayOrder    programArrayOrder,
    ICAProgramOrder         programIndex)
{
    // Fills the single-program ICA (ICA1/ICA2) CDM program array inside the frame-process
    // payload and patches the DMI header buffer, at the ICA LUT offset, into the frame
    // process buffer. No program is emitted when the ICA module published no LUT entries.
    CamxResult          result           = CamxResultSuccess;
    UINT8*              pCDMPayload      = NULL;
    CDMProgramArray*    pCDMProgramArray = NULL;
    CdmProgram*         pCDMProgram      = NULL;

    if (m_ICALUTCount[programIndex] > 0)
    {
        // The ICA program array lives in the same allocation as pFrameProcessData.
        pCDMPayload                                      = reinterpret_cast<UINT8*>(pFrameProcessData);
        pCDMProgramArray                                 =
            reinterpret_cast<CDMProgramArray*>(pCDMPayload + GetCDMProgramArrayOffsetFromTop(programArrayOrder));

        pCDMProgramArray->allocator                      = 0;
        pCDMProgramArray->numPrograms                    = 0;
        pCDMProgram                                      = &pCDMProgramArray->programs[pCDMProgramArray->numPrograms];
        pCDMProgram->hasSingleReg                        = 0;
        pCDMProgram->programType                         = programType;
        pCDMProgram->uID                                 = 0;
        pCDMProgram->cdmBaseAndLength.bitfields.LEN      = ((cdm_get_cmd_header_size(CDMCmdDMI) * RegisterWidthInBytes)
            * m_ICALUTCount[programIndex]) - 1;
        pCDMProgram->cdmBaseAndLength.bitfields.RESERVED = 0;
        pCDMProgram->cdmBaseAndLength.bitfields.BASE     = 0;
        // Fix: initialize bufferAllocatedInternally, matching FillPreLTMCDMProgram; it was
        // previously left uninitialized in this firmware-visible CdmProgram structure.
        pCDMProgram->bufferAllocatedInternally           = 0;

        /// @todo (CAMX-1033) Change below numPrograms to ProgramIndex once firmware support of program skip is available.
        INT offset = GetCDMProgramOffset(programArrayOrder, pCDMProgramArray->numPrograms);
        CAMX_ASSERT(offset >= 0);
        result = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(offset,
                                                                               ppIPECmdBuffer[CmdBufferDMIHeader],
                                                                               m_ICALUTOffset[programIndex]);
        (pCDMProgramArray->numPrograms)++;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetMetadataTags
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetMetadataTags(
    ISPInputData* pModuleInput)
{
    // Reads the HAL and vendor metadata tags consumed by the IPE IQ modules for this
    // request and populates pModuleInput->pHALTagsData (saturation, sharpness, noise
    // reduction, tonemap curves, color correction, AE/AWB state, ...). For non-default
    // processing types it also fetches the MFNR total frame count (clamped to [3, 8]).
    CamxResult    result                 = CamxResultSuccess;
    CamxResult    resultContrast         = CamxResultSuccess;
    UINT32        metaTag                = 0;
    UINT32        metaTagSharpness       = 0;
    UINT32        metaTagContrast        = 0;
    UINT32        metaTagMFNRTotalFrames = 0;
    ISPTonemapPoint*    pBlueTonemapCurve   = NULL;
    ISPTonemapPoint*    pGreenTonemapCurve  = NULL;
    ISPTonemapPoint*    pRedTonemapCurve    = NULL;
    const PlatformStaticCaps*   pStaticCaps = HwEnvironment::GetInstance()->GetPlatformStaticCaps();
    // Populate default value
    pModuleInput->pHALTagsData->saturation         = 5;
    pModuleInput->pHALTagsData->sharpness          = 1.0f;
    pModuleInput->pHALTagsData->noiseReductionMode = NoiseReductionModeFast;

    // NOTE(review): each query below overwrites result; a failed lookup leaves the tag ID
    // as 0 and is not reported to the caller -- confirm these tags are always registered.
    result = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.saturation",
                                                      "use_saturation",
                                                      &metaTag);
    result = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.sharpness",
                                                      "strength",
                                                      &metaTagSharpness);
    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.mfnrconfigs",
                                                      "MFNRTotalNumFrames",
                                                      &metaTagMFNRTotalFrames);

    // Fix: this array must not be function-local static. Its initializers are runtime
    // values (the vendor tag IDs queried above); a static array latches the values from
    // the first invocation forever, which is wrong if any first-call query failed.
    const UINT VendorTagsIPE[] =
    {
        metaTag | InputMetadataSectionMask,
        InputEdgeMode,
        InputControlVideoStabilizationMode,
        metaTagSharpness| InputMetadataSectionMask,
        metaTagMFNRTotalFrames | InputMetadataSectionMask,
        InputColorCorrectionAberrationMode,
        InputNoiseReductionMode,
        InputTonemapMode,
        InputColorCorrectionMode,
        InputControlMode,
        InputTonemapCurveBlue,
        InputTonemapCurveGreen,
        InputTonemapCurveRed,
        InputColorCorrectionGains,
        InputColorCorrectionMode,
        InputColorCorrectionTransform,
        InputControlAEMode,
        InputControlAWBMode,
        InputControlAWBLock,
    };

    const static UINT length = CAMX_ARRAY_SIZE(VendorTagsIPE);
    VOID* pData[length] = { 0 };
    UINT64 vendorTagsIPEDataIPEOffset[length] = { 0 };

    GetDataList(VendorTagsIPE, pData, vendorTagsIPEDataIPEOffset, length);

    // Fix: the copy sizes below used sizeof(&member) -- the size of a POINTER (8 bytes on
    // 64-bit), not of the destination field -- which over-copied into adjacent members of
    // pHALTagsData. Use sizeof(member) so exactly the destination field is written.
    Utils::Memcpy(&pModuleInput->pHALTagsData->saturation, pData[0], sizeof(pModuleInput->pHALTagsData->saturation));
    Utils::Memcpy(&pModuleInput->pHALTagsData->edgeMode,   pData[1], sizeof(pModuleInput->pHALTagsData->edgeMode));

    Utils::Memcpy(&pModuleInput->pHALTagsData->controlVideoStabilizationMode,
        pData[2], sizeof(pModuleInput->pHALTagsData->controlVideoStabilizationMode));

    if (IPEProcessingType::IPEProcessingTypeDefault != m_instanceProperty.processingType)
    {
        // MFNR frame count, clamped to the supported [3, 8] range.
        pModuleInput->pipelineIPEData.numOfFrames = *(static_cast<UINT *>(pData[4]));
        CAMX_LOG_INFO(CamxLogGroupPProc, "Total number of MFNR Frames = %d", pModuleInput->pipelineIPEData.numOfFrames);
        if (pModuleInput->pipelineIPEData.numOfFrames < 3)
        {
            pModuleInput->pipelineIPEData.numOfFrames = 3;
            CAMX_LOG_WARN(CamxLogGroupPProc, "hardcoded Total number of MFNR frames to 3");
        }
        else if (pModuleInput->pipelineIPEData.numOfFrames > 8)
        {
            pModuleInput->pipelineIPEData.numOfFrames = 8;
        }
    }
    if (NULL != pData[3])
    {
        // Normalize the vendor sharpness strength against the platform default.
        pModuleInput->pHALTagsData->sharpness =
        static_cast<FLOAT> (*(static_cast<UINT *>(pData[3]))) / pStaticCaps->sharpnessRange.defValue;
    }
    // NOTE(review): pData[5]..pData[18] are dereferenced without NULL checks; presumably
    // these framework tags are always published by the pipeline -- confirm. Also note
    // InputColorCorrectionMode appears twice (indices 8 and 14), so pData[14] is redundant.
    pModuleInput->pHALTagsData->colorCorrectionAberrationMode   = *(static_cast<UINT8*>(pData[5]));
    pModuleInput->pHALTagsData->noiseReductionMode              = *(static_cast<UINT8*>(pData[6]));
    pModuleInput->pHALTagsData->tonemapCurves.tonemapMode       = *(static_cast<UINT8*>(pData[7]));
    pModuleInput->pHALTagsData->colorCorrectionMode             = *(static_cast<UINT8*>(pData[8]));
    pModuleInput->pHALTagsData->controlMode                     = *(static_cast<UINT8*>(pData[9]));
    pBlueTonemapCurve                                           = static_cast<ISPTonemapPoint*>(pData[10]);
    pGreenTonemapCurve                                          = static_cast<ISPTonemapPoint*>(pData[11]);
    pRedTonemapCurve                                            = static_cast<ISPTonemapPoint*>(pData[12]);
    pModuleInput->pHALTagsData->colorCorrectionGains            = *(static_cast<ColorCorrectionGain*>(pData[13]));
    pModuleInput->pHALTagsData->colorCorrectionMode             = *(static_cast<UINT8*>(pData[14]));
    pModuleInput->pHALTagsData->colorCorrectionTransform        =
        *(static_cast<ISPColorCorrectionTransform*>(pData[15]));
    pModuleInput->pHALTagsData->controlAEMode                   = *(static_cast<UINT8*>(pData[16]));
    pModuleInput->pHALTagsData->controlAWBMode                  = *(static_cast<UINT8*>(pData[17]));
    pModuleInput->pHALTagsData->controlAWBLock                  = *(static_cast<UINT8*>(pData[18]));

    // Contrast level is optional: app value N maps to level N-1; 0 or absent maps to 5 (normal).
    resultContrast = VendorTagManager::QueryVendorTagLocation("org.codeaurora.qcamera3.contrast",
                                                              "level",
                                                              &metaTagContrast);
    if (CamxResultSuccess == resultContrast)
    {
        const UINT VendorTagContrast[] =
        {
            metaTagContrast | InputMetadataSectionMask,
        };

        const static UINT lengthContrast    = CAMX_ARRAY_SIZE(VendorTagContrast);
        VOID* pDataContrast[lengthContrast] = { 0 };
        UINT64 vendorTagsContrastIPEOffset[lengthContrast] = { 0 };

        GetDataList(VendorTagContrast, pDataContrast, vendorTagsContrastIPEOffset, lengthContrast);
        if (NULL != pDataContrast[0])
        {
            UINT8 appLevel = *(static_cast<UINT8*>(pDataContrast[0]));
            if (appLevel > 0)
            {
                pModuleInput->pHALTagsData->contrastLevel = appLevel - 1;
            }
            else
            {
                pModuleInput->pHALTagsData->contrastLevel = 5;
            }
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Manual Contrast Level = %d", pModuleInput->pHALTagsData->contrastLevel);
        }
        else
        {
            CAMX_LOG_WARN(CamxLogGroupPProc, "Cannot obtain Contrast Level. Set default to 5");
            pModuleInput->pHALTagsData->contrastLevel = 5;
        }
    }
    else
    {
        CAMX_LOG_WARN(CamxLogGroupPProc, "No Contrast Level available. Set default to 5");
        pModuleInput->pHALTagsData->contrastLevel = 5; // normal without contrast change
    }

    // Deep copy tone map curves, only when the tone map is contrast curve
    if (TonemapModeContrastCurve == pModuleInput->pHALTagsData->tonemapCurves.tonemapMode)
    {
        pModuleInput->pHALTagsData->tonemapCurves.curvePoints = static_cast<INT32>(
            GetDataCountFromPipeline(InputTonemapCurveBlue, 0, GetPipeline()->GetPipelineId(), TRUE));

        CAMX_ASSERT(NULL != pBlueTonemapCurve);
        CAMX_ASSERT(NULL != pGreenTonemapCurve);
        CAMX_ASSERT(NULL != pRedTonemapCurve);

        if (pModuleInput->pHALTagsData->tonemapCurves.curvePoints > 0)
        {
            // Red tone map curve
            Utils::Memcpy(pModuleInput->pHALTagsData->tonemapCurves.tonemapCurveRed,
                          pRedTonemapCurve,
                          (sizeof(ISPTonemapPoint) * pModuleInput->pHALTagsData->tonemapCurves.curvePoints));


            // Blue tone map curve
            Utils::Memcpy(pModuleInput->pHALTagsData->tonemapCurves.tonemapCurveBlue,
                          pBlueTonemapCurve,
                          (sizeof(ISPTonemapPoint) * pModuleInput->pHALTagsData->tonemapCurves.curvePoints));


            // Green tone map curve
            Utils::Memcpy(pModuleInput->pHALTagsData->tonemapCurves.tonemapCurveGreen,
                          pGreenTonemapCurve,
                          (sizeof(ISPTonemapPoint) * pModuleInput->pHALTagsData->tonemapCurves.curvePoints));
        }
    }

    return result;
}


////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetEISMargin
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetEISMargin()
{
    // Reads the EIS margin requested by the EIS2/EIS3 algorithm node (published as usecase metadata
    // through a vendor tag) into m_EISMarginRequest. Which tag is read depends on the stabilization
    // type configured for this IPE instance. Returns the result of the last query/fetch operation.
    CamxResult    result          = CamxResultSuccess;
    UINT32        marginEISV2Tag  = 0;
    UINT32        marginEISV3Tag  = 0;

    if (0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType))
    {
        result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.eisrealtime", "RequestedMargin", &marginEISV2Tag);
        CAMX_ASSERT(CamxResultSuccess == result);

        // Only fetch metadata when the tag location was resolved; otherwise we would query tag 0.
        if (CamxResultSuccess == result)
        {
            UINT   marginTags[1] = { marginEISV2Tag | UsecaseMetadataSectionMask };
            VOID*  pData[1]      = { 0 };
            UINT64 offset[1]     = { 0 };

            result = GetDataList(marginTags, pData, offset, CAMX_ARRAY_SIZE(marginTags));
            if ((CamxResultSuccess == result) && (NULL != pData[0]))
            {
                m_EISMarginRequest = *static_cast<MarginRequest*>(pData[0]);
            }
        }
        else
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to query EISv2 RequestedMargin vendor tag location");
        }

        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "EISv2 margin requested: W %f, H %f", m_EISMarginRequest.widthMargin,
                         m_EISMarginRequest.heightMargin);
    }
    else if (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType))
    {
        result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.eislookahead", "RequestedMargin", &marginEISV3Tag);
        CAMX_ASSERT(CamxResultSuccess == result);

        // Only fetch metadata when the tag location was resolved; otherwise we would query tag 0.
        if (CamxResultSuccess == result)
        {
            UINT   marginTags[1] = { marginEISV3Tag | UsecaseMetadataSectionMask };
            VOID*  pData[1]      = { 0 };
            UINT64 offset[1]     = { 0 };

            result = GetDataList(marginTags, pData, offset, CAMX_ARRAY_SIZE(marginTags));
            if ((CamxResultSuccess == result) && (NULL != pData[0]))
            {
                m_EISMarginRequest = *static_cast<MarginRequest*>(pData[0]);
            }
        }
        else
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to query EISv3 RequestedMargin vendor tag location");
        }

        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "EISv3 margin requested:  W %f, H %f", m_EISMarginRequest.widthMargin,
                         m_EISMarginRequest.heightMargin);
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "No margin for stabilization type %d", m_instanceProperty.stabilizationType);
    }

    // Downstream code assumes a single symmetric margin; flag asymmetric requests in debug builds.
    CAMX_ASSERT(m_EISMarginRequest.widthMargin == m_EISMarginRequest.heightMargin);
    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::UpdateClock
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::UpdateClock(
    CSLICPClockBandwidthRequest* pICPClockBandwidthRequest)
{
    // Populates the clock portion of the ICP request: scales the requested frame cycles by an
    // overhead/efficiency factor and derives the per-frame processing time budget from the FPS.
    const UINT  fps        = (0 != m_FPS) ? m_FPS : DefaultFPS;
    const FLOAT efficiency = IPEClockEfficiency;
    FLOAT       overhead   = IPEClockOverhead;

    // 4K @ 60fps needs a higher IPE clock, so use the larger overhead factor for that case.
    if ((60 == fps) && (3840 == m_fullInputWidth) && (2160 == m_fullInputHeight))
    {
        overhead = IPEClockOverheadFor4K60;
    }

    // Frame cycles account for the pixels processed this frame, adjusted by overhead and efficiency.
    const UINT adjustedCycles = static_cast<UINT>((pICPClockBandwidthRequest->frameCycles * overhead) / efficiency);

    // Budget: maximum wall-clock duration allowed to process the current frame, in nanoseconds.
    const FLOAT  budget   = 1.0f / fps;
    const UINT64 budgetNS = static_cast<UINT64>(budget * NanoSecondMult);

    pICPClockBandwidthRequest->budgetNS     = budgetNS;
    pICPClockBandwidthRequest->frameCycles  = adjustedCycles;
    pICPClockBandwidthRequest->realtimeFlag = IsRealTime();

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "FPS = %d budget = %lf budgetNS = %lld fc = %d", fps, budget, budgetNS,
                     adjustedCycles);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::CalculateIPERdBandwidth
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::CalculateIPERdBandwidth(
    PerRequestActivePorts*  pPerRequestPorts,
    IPEBandwidth*           pBandwidth)
{
    // Computes the IPE read bandwidth (uncompressed and, when UBWC is enabled, compressed) for the
    // current request and stores it in pBandwidth->readBW. Also publishes the partial-read
    // bandwidth in pBandwidth->partialBW for reuse by the write-side calculation. The snapshot
    // path (BPS in pipeline) and the preview/video path use different per-pass formulas.
    UINT   srcWidth               = 0;
    UINT   srcHeight              = 0;
    FLOAT  bppSrc                 = IPEBpp8Bit;
    FLOAT  overhead               = IPEBandwidthOverhead;
    FLOAT  EISOverhead            = IPEEISOverhead;
    FLOAT  IPEUbwcRdCr            = IPEUBWCRdCompressionRatio;
    FLOAT  IPEUbwcMCTFr           = IPEUBWCMctfReadCompressionRatio;
    DOUBLE swMargin               = IPESwMargin;
    BOOL   UBWCEnable             = FALSE;
    UINT   FPS                    = pBandwidth->FPS;
    FLOAT  readBandwidthPartial   = 0;
    UINT64 readBandwidthPass0;
    UINT64 readBandwidthPass1;
    UINT64 readBandwidthPass2;
    UINT64 readBandwidthPass3;
    UINT   parentNodeID           = IFE;
    BOOL   isSnapshotStream       = FALSE;

    pBandwidth->readBW.unCompressedBW = 0;
    pBandwidth->readBW.compressedBW   = 0;

    // Locate the full-resolution input port to get the source dimensions, bpp and UBWC usage.
    for (UINT i = 0; i < pPerRequestPorts->numInputPorts; i++)
    {
        PerRequestInputPortInfo* pInputPort = &pPerRequestPorts->pInputPorts[i];
        if (pInputPort->portId == CSLIPEInputPortIdFull)
        {
            const ImageFormat* pImageFormat = GetInputPortImageFormat(i);
            srcWidth                        = m_fullInputWidth;
            srcHeight                       = m_fullInputHeight;
            parentNodeID = GetParentNodeType(pInputPort->portId);

            if (IsNodeInPipeline(BPS))
            {
                isSnapshotStream = TRUE;
                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Snapshot stream : Src W = %d Src H = %d", srcWidth, srcHeight);
            }
            // Partial-read bandwidth scales down linearly for sources shorter than the reference height.
            if (srcHeight < IPEPartialRdSourceHeight)
            {
                readBandwidthPartial = IPEPartialRdMultiplication * FPS;
                readBandwidthPartial = (readBandwidthPartial * srcHeight) / IPEPartialRdSourceHeight;
            }
            else
            {
                readBandwidthPartial = IPEPartialRdMultiplication * FPS;
            }
            pBandwidth->partialBW = readBandwidthPartial;
            if (NULL != pImageFormat)
            {
                if ((TRUE == ImageFormatUtils::Is10BitFormat(pImageFormat->format)))
                {
                    bppSrc   = IPEBpp10Bit;
                }
                UBWCEnable   = ImageFormatUtils::IsUBWC(pImageFormat->format);
            }
            break;
        }
    }
    if (isSnapshotStream == TRUE)
    {

        // Pass0_RdAB = (((src_ImgW/64/2 * src_ImgH/64/2) * 8 * jpegOvhd ) * fps

        readBandwidthPass0 = static_cast<UINT64>((((srcWidth/64.0/2) * (srcHeight/64.0/2)) * 8 * IPESnapshotOverhead) * FPS);

        // Pass1_RdAB = ((src_ImgW/16/2 * src_ImgH/16/2 * 8 * jpegOvhd ) +
        // ((src_ImgW/64/2 * src_ImgH/64/2 * 102)/8 * jpegOvhd) ) * fps
        // NOTE: FPS multiplies the whole sum, matching the formula above (previously it only
        // scaled the second term).

        readBandwidthPass1 = static_cast<UINT64>(
            ((((srcWidth/16.0/2) * (srcHeight/16.0/2)) * 8 * IPESnapshotOverhead) +
             (((((srcWidth/64.0/2) * (srcHeight/64.0/2)) * 102) / 8.0) * IPESnapshotOverhead)) * FPS);

        // Pass2_RdAB = ((src_ImgW/4/2 * src_ImgH/4/2 * 8 * jpegOvhd ) +
        // ((src_ImgW/16/2 * src_ImgH/16/2 * 102)/8 * jpegOvhd) ) * fps

        readBandwidthPass2 = static_cast<UINT64>(
            ((((srcWidth/4.0/2) * (srcHeight/4.0/2)) * 8 * IPESnapshotOverhead) +
             (((((srcWidth/16.0/2) * (srcHeight/16.0/2)) * 102) / 8.0) * IPESnapshotOverhead)) * FPS);

        // Pass3_RdAB = ((src_ImgW  *  src_ImgH  *   jpegRdBPP * Ovhd/IPE_UBWC_RdCr )  +
        // ((src_ImgW/4/2 * src_imgH/4/2 * 102)/8) ) * fps

        readBandwidthPass3 = static_cast<UINT64>(
            (((srcWidth * srcHeight) * IPESnapshotRdBPP10bit * IPESnapshotOverhead / IPEUbwcRdCr) +
             ((((srcWidth/4.0/2) * (srcHeight/4.0/2)) * 102) / 8.0)) * FPS);

        // IPE_RdAB_Frame   =  (Pass0_RdAB (DS64) + Pass1_RdAB (DS16) + Pass2_RdAB (DS4) + Pass3_RdAB (1:1))*SW_Margin

        pBandwidth->readBW.unCompressedBW = static_cast<UINT64>(
            (readBandwidthPass0 + readBandwidthPass1 + readBandwidthPass2 + readBandwidthPass3) * swMargin);
        CAMX_LOG_VERBOSE(CamxLogGroupPProc,
            "Snapshot bw: sw = %d sh = %d Pass0: %llu Pass1:%llu Pass2: %llu Pass3: %llu BW = %llu",
            srcWidth, srcHeight, readBandwidthPass0, readBandwidthPass1, readBandwidthPass2, readBandwidthPass3,
            pBandwidth->readBW.unCompressedBW);

        if (TRUE == UBWCEnable)
        {
            // Recompute the full-resolution pass with the 10-bit compression ratio.
            IPEUbwcRdCr  = IPEUBWCRdCompressionRatio10Bit; // As BPS output is always 10bit
            readBandwidthPass3 = static_cast<UINT64>(
                (((srcWidth * srcHeight) * IPESnapshotRdBPP10bit * IPESnapshotOverhead / IPEUbwcRdCr) +
                 ((((srcWidth/4.0/2) * (srcHeight/4.0/2)) * 102) / 8.0)) * FPS);
            pBandwidth->readBW.compressedBW = static_cast<UINT64>
                ((readBandwidthPass0 + readBandwidthPass1 + readBandwidthPass2 + readBandwidthPass3)* swMargin);
            CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                "Snapshot cbw: sw = %d sh = %d Pass0: %llu Pass1:%llu Pass2:%llu Pass3:%llu cbw = %llu",
                srcWidth, srcHeight, readBandwidthPass0, readBandwidthPass1, readBandwidthPass2, readBandwidthPass3,
                pBandwidth->readBW.compressedBW);
        }
        else
        {
            // No compression: compressed BW equals uncompressed BW. (Previously this wrote
            // writeBW, leaving readBW.compressedBW zero and copying an uninitialized value.)
            pBandwidth->readBW.compressedBW = pBandwidth->readBW.unCompressedBW;
        }
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Snapshot Rd: cbw = %llu bw = %llu", pBandwidth->readBW.compressedBW,
            pBandwidth->readBW.unCompressedBW);
    }
    else
    {
        // Calculate Uncompressed Bandwidth
        // Pass0_RdAB(DS16) = ( ((src_Img_w/DS16/N_PDT * src_Img_H/DS16/N_PDT) * PD_TS * Overhead * NS  ) * fps
        readBandwidthPass0 = static_cast<UINT64>((((srcWidth/16.0/2) * (srcHeight/16.0/2)) * 8 * overhead * 2) * FPS);

        // Pass1_RdAB(DS4) = ( ((src_Img_w/DS4/N_PDT * src_Img_H/DS4/N_PDT) * PD_TS * Overhead * NS  ) +
        //                  ((src_Img_W/DS16/N_PDT * src_Img_H/DS16/N_PDT * PDI_bits)/8 * Oveahead) +
        //                  ((src_Img_W/DS16 * src_Img_H/DS16 * TFI_bits)/8  * Overhead)   ) * fps
        readBandwidthPass1 = static_cast<UINT64>(
            (((srcWidth/4.0/2) * (srcHeight/4.0/2)) * 8 * overhead * 2) +
            (((((srcWidth/16.0/2) * (srcHeight/16.0/2)) * 102) / 8.0) * overhead) +
            (((((srcWidth/16.0) * (srcHeight/16.0)) * 4) / 8.0) * overhead)) * FPS;

        // Pass2_RdAB(1:1) = ( (src_Img_W * src_Img_H * Bytes_per_pix * Overhead /  UBWC_Comp * fmt)  +
        //                  ((src_Img_W/DS4/N_PDT * src_img_H/DS4/N_PDT * PDI_bits)/8 * Overhead) +
        //                  ((src_ImgW/DS4 * src_imgH/DS4 * TFI_bits)/8) *Ovearhead ) * fps
        readBandwidthPass2 = static_cast<UINT64>(
            ((srcWidth * srcHeight * bppSrc * overhead * EISOverhead) / IPEUbwcRdCr) +
            ((((srcWidth/4.0/2) * (srcHeight/4.0/2)) * 102) / 8.0) +
            ((((srcWidth/4.0) * (srcHeight/4.0)) * 4) / 8.0));

        if (0 != (IPEStabilizationType::IPEStabilizationMCTF & m_instanceProperty.stabilizationType))
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Including MCTF BW");
            readBandwidthPass2 += static_cast<UINT64>((srcWidth * srcHeight * bppSrc * overhead) / IPEUbwcMCTFr);
        }

        readBandwidthPass2 *= FPS;

        pBandwidth->readBW.unCompressedBW = static_cast<UINT64>(
            (readBandwidthPass0 + readBandwidthPass1 + readBandwidthPass2 + readBandwidthPartial)* swMargin);
        CAMX_LOG_VERBOSE(CamxLogGroupPProc,
            "Preview/Video bw: sw = %d sh = %d Pass0:%llu Pass1:%llu Pass2:%llu pr: %llu BW = %llu",
            srcWidth, srcHeight, readBandwidthPass0, readBandwidthPass1, readBandwidthPass2, readBandwidthPartial,
            pBandwidth->readBW.unCompressedBW);

        // Calculate Compressed Bandwidth
        if (TRUE == UBWCEnable)
        {
            // Compression ratios depend on the source bit depth.
            if (IPEBpp10Bit == bppSrc)
            {
                IPEUbwcRdCr  = IPEUBWCRdCompressionRatio10Bit;
                IPEUbwcMCTFr = IPEUBWCMctfReadCompressionRatio10Bit;
            }
            else
            {
                IPEUbwcRdCr  = IPEUBWCRdCompressionRatio8Bit;
                IPEUbwcMCTFr = IPEUBWCMctfReadCompressionRatio8Bit;
            }
            readBandwidthPass2 = static_cast<UINT64>(
                ((srcWidth * srcHeight * bppSrc * overhead * EISOverhead) / IPEUbwcRdCr) +
                ((((srcWidth/4.0/2) * (srcHeight/4.0/2)) * 102) / 8.0) +
                ((((srcWidth/4.0) * (srcHeight/4.0)) * 4) / 8.0));

            if (0 != (IPEStabilizationType::IPEStabilizationMCTF & m_instanceProperty.stabilizationType))
            {
                readBandwidthPass2 += static_cast<UINT64>((srcWidth * srcHeight * bppSrc * overhead) / IPEUbwcMCTFr);
            }

            readBandwidthPass2 *= FPS;

            pBandwidth->readBW.compressedBW = static_cast<UINT64>(
                (readBandwidthPass0 + readBandwidthPass1 + readBandwidthPass2 + readBandwidthPartial) * swMargin);
            CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                "Preview/Video cbw: sw = %d sh = %d Pass0:%llu Pass1:%llu Pass2:%llu pr: %llu cbw = %llu",
                srcWidth, srcHeight, readBandwidthPass0, readBandwidthPass1, readBandwidthPass2, readBandwidthPartial,
                pBandwidth->readBW.compressedBW);
        }
        else
        {
            pBandwidth->readBW.compressedBW = pBandwidth->readBW.unCompressedBW;
        }
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Preview/Video Rd: cbw = %llu bw = %llu", pBandwidth->readBW.compressedBW,
            pBandwidth->readBW.unCompressedBW);
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::CalculateIPEWrBandwidth
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::CalculateIPEWrBandwidth(
    PerRequestActivePorts*  pPerRequestPorts,
    IPEBandwidth*           pBandwidth)
{
    // Computes the IPE write bandwidth (uncompressed and, when any output/source uses UBWC,
    // compressed) for the current request and stores it in pBandwidth->writeBW. Must run after
    // CalculateIPERdBandwidth, which publishes pBandwidth->partialBW consumed here.
    IPEImageInfo        source;
    IPEImageInfo        video;
    IPEImageInfo        preview;
    BOOL                videoEnable;
    BOOL                previewEnable;
    DOUBLE              swMargin             = IPESwMargin;
    FLOAT               IPEUbwcPreviewCr      = IPEUBWCWrPreviewCompressionRatio;
    FLOAT               IPEUbwcVideoCr        = IPEUBWCWrVideoCompressionRatio;
    DOUBLE              IPEUbwcMCTFCr         = IPEUBWCWrMctfCompressionRatio;
    UINT                FPS                   = pBandwidth->FPS;
    UINT64              writeBandwidthPass0;
    UINT64              writeBandwidthPass1;
    UINT64              writeBandwidthPass2;
    UINT64              writeBandwidthPass3;
    UINT64              writeBandwidthPartial;
    UINT  parentNodeID           = IFE;
    BOOL  isSnapshotStream       = FALSE;

    pBandwidth->writeBW.unCompressedBW = 0;
    pBandwidth->writeBW.compressedBW   = 0;

    source.width                              = 0;
    source.height                             = 0;
    source.bpp                                = IPEBpp8Bit;
    source.UBWCEnable                         = FALSE;
    // Check UBWC and BPP Info for Input Ports and get Dimensions
    for (UINT i = 0; i < pPerRequestPorts->numInputPorts; i++)
    {
        PerRequestInputPortInfo* pInputPort = &pPerRequestPorts->pInputPorts[i];
        if (pInputPort->portId == CSLIPEInputPortIdFull)
        {
            const ImageFormat* pImageFormat = GetInputPortImageFormat(i);
            source.width                    = m_fullInputWidth;
            source.height                   = m_fullInputHeight;
            parentNodeID = GetParentNodeType(pInputPort->portId);

            if (IsNodeInPipeline(BPS))
            {
                isSnapshotStream = TRUE;
                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Snapshot stream : Src W = %u Src H = %u", source.width, source.height);
            }
            if (NULL != pImageFormat)
            {
                if ((TRUE == ImageFormatUtils::Is10BitFormat(pImageFormat->format)))
                {
                    source.bpp                  = IPEBpp10Bit;
                }
                source.UBWCEnable               = ImageFormatUtils::IsUBWC(pImageFormat->format);
            }
            break;
        }
    }
    if (isSnapshotStream == TRUE)
    {
        // Pass0_WrAB = ( ((src_ImgW/64/2 * src_ImgH/64/2 * 102)/8) ) * fps

        writeBandwidthPass0 = static_cast<UINT64>(((((source.width/64.0/2) * (source.height/64.0/2)) * 102) / 8.0) * FPS);

        // Pass1_WrAB = (((src_ImgW/16/2 * src_ImgH/16/2 * 102)/8) )  * fps

        writeBandwidthPass1 = static_cast<UINT64>(((((source.width/16.0/2) * (source.height/16.0/2)) * 102) / 8.0) * FPS);

        // Pass2_WrAB = ( ((src_ImgW/4/2 * src_ImgH/4/2 * 102)/8) )  *  fps

        writeBandwidthPass2 = static_cast<UINT64>(((((source.width/4.0/2) * (source.height/4.0/2)) * 102) / 8.0) * FPS);

        // Pass3_WrAB = ((src_ImgW  *  src_ImgH     *  jpegWrBPP) )  * fps

        writeBandwidthPass3 = static_cast<UINT64>((((source.width) * (source.height)) * IPESnapshotWrBPP8bit) * FPS);

        // IPE_WrAB_Frame   =  (Pass0_WrAB (DS64) + Pass1_WrAB (DS16) + Pass2_WrAB (DS4) + Pass3_WrAB (1:1))*SW_Margin

        pBandwidth->writeBW.unCompressedBW = static_cast<UINT64>(
            (writeBandwidthPass0 + writeBandwidthPass1 + writeBandwidthPass2 + writeBandwidthPass3) * swMargin);

        // Snapshot outputs are not UBWC-compressed; compressed BW equals uncompressed BW.
        pBandwidth->writeBW.compressedBW = pBandwidth->writeBW.unCompressedBW;

        // Log Pass3 with the correct variable (was logging Pass2 twice).
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Snapshot bw: pass0:%llu pass1:%llu pass2:%llu pass3 = %llu BW = %llu",
                writeBandwidthPass0, writeBandwidthPass1, writeBandwidthPass2, writeBandwidthPass3,
                pBandwidth->writeBW.compressedBW);
    }
    else
    {
        videoEnable                               = FALSE;
        video.width                               = 0;
        video.height                              = 0;
        video.bpp                                 = IPEBpp8Bit;
        video.UBWCEnable                          = FALSE;

        previewEnable                             = FALSE;
        preview.width                             = 0;
        preview.height                            = 0;
        preview.bpp                               = IPEBpp8Bit;
        preview.UBWCEnable                        = FALSE;
        // Check UBWC and BPP Info for Output Ports and get Dimensions
        for (UINT i = 0; i < pPerRequestPorts->numOutputPorts; i++)
        {
            PerRequestOutputPortInfo* pOutputPort   = &pPerRequestPorts->pOutputPorts[i];
            if (IPEOutputPortVideo == pOutputPort->portId)
            {
                const ImageFormat* pImageFormatVideo = GetOutputPortImageFormat(i);
                video.width                          = pImageFormatVideo->width;
                video.height                         = pImageFormatVideo->height;
                videoEnable                          = TRUE;
                if (TRUE == ImageFormatUtils::Is10BitFormat(pImageFormatVideo->format))
                {
                    video.bpp      = IPEBpp10Bit;
                }
                video.UBWCEnable   = ImageFormatUtils::IsUBWC(pImageFormatVideo->format);
            }
            if (IPEOutputPortDisplay == pOutputPort->portId)
            {
                const ImageFormat* pImageFormatPreview = GetOutputPortImageFormat(i);
                preview.width                          = pImageFormatPreview->width;
                preview.height                         = pImageFormatPreview->height;
                previewEnable                          = TRUE;
                if (TRUE == ImageFormatUtils::Is10BitFormat(pImageFormatPreview->format))
                {
                    preview.bpp      = IPEBpp10Bit;
                }
                preview.UBWCEnable     = ImageFormatUtils::IsUBWC(pImageFormatPreview->format);
            }
        }

        // Calculate uncompressed bandwidth
        // Pass0_WrAB(DS16) = ( ((src_Img_W/DS16/N_PDT * src_Img_H/DS16/N_PDT * PDI_bits)/8) +
        //                       ((src_Img_W/DS16 * src_Img_H/DS16 * TFI_bits)/8 ) ) * FPS
        writeBandwidthPass0 = static_cast<UINT64>(
            ((((source.width/16.0/2) * (source.height/16.0/2)) * 102) / 8.0) +
            ((((source.width/16.0) * (source.height/16.0)) * 4) / 8.0)) * FPS;

        // Pass1_WrAB = ( (src_Img_W/DS16/N_PDT  *  src_Img_H/DS16/N_PDT* PD_TS)
        //                ((src_Img_W/DS4/N_PDT * src_Img_H/DS4/N_PDT * PDI_bits)/8)  +
        //                ((src_Img_W/DS4 * src_Img_H/DS4 * TFI_bits)/8)   )  *  FPS
        writeBandwidthPass1 = static_cast<UINT64>(
            (((source.width/16.0/2) * (source.height/16.0/2)) * 8) +
            ((((source.width/4.0/2) * (source.height/4.0/2)) * 102) / 8.0) +
            ((((source.width/4.0) * (source.height/4.0)) * 4) / 8.0)) * FPS;

        // Pass2_WrAB = ((src_Img_W  *  src_Img_H  *  Bytes_per_pix  *  UBWC_CompMCTF )   +
        //               (vid_Img_W  *  vid_img_H  *  Bytes_per_pix  /  UBWC_CompVideo )  *  vid_enable  +
        //               (prev_Img_W  *  pre_img_H  *  Bytes_per_pix  / UBWC_CompPrev )  * prev_enable  +
        //               (src_Img_W/DS4/N_PDT  *  src_Img_H/DS4/N_PDT * PD_TS)  )    *  FPS
        writeBandwidthPass2 = static_cast<UINT64>(
            (((video.width * video.height * video.bpp) / IPEUbwcVideoCr) * videoEnable) +
            (((preview.width * preview.height * preview.bpp) / IPEUbwcPreviewCr) * previewEnable) +
            (((source.width/4.0/2)  * (source.height/4.0/2)) * 8));

        if (0 != (IPEStabilizationType::IPEStabilizationMCTF & m_instanceProperty.stabilizationType))
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Including MCTF BW");
            writeBandwidthPass2 += static_cast<UINT64>((source.width * source.height * source.bpp) / IPEUbwcMCTFCr);
        }

        writeBandwidthPass2 *= FPS;

        // Partial-read bandwidth was computed by CalculateIPERdBandwidth.
        writeBandwidthPartial = static_cast<UINT64>(pBandwidth->partialBW);

        pBandwidth->writeBW.unCompressedBW = static_cast<UINT64>(
            (writeBandwidthPass0 + writeBandwidthPass1 + writeBandwidthPass2 + writeBandwidthPartial) * swMargin);

        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Preview/Video bw: srcw=%u srch=%u vidw=%u vidh=%u prevw=%u prevh=%u",
            source.width, source.height, video.width, video.height, preview.width, preview.height);

        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Preview/Video bw  pass0:%llu pass1:%llu pass2:%llu pw = %llu uBW = %llu",
            writeBandwidthPass0, writeBandwidthPass1, writeBandwidthPass2, writeBandwidthPartial,
            pBandwidth->writeBW.unCompressedBW);

        // Calculate Compressed bandwidth
        if ((TRUE == preview.UBWCEnable) || (TRUE == video.UBWCEnable) || (TRUE == source.UBWCEnable))
        {
            // Pick per-stream compression ratios based on bit depth.
            if (TRUE == preview.UBWCEnable)
            {
                if (IPEBpp10Bit == preview.bpp)
                {
                    IPEUbwcPreviewCr = IPEUBWCWrPreviewCompressionRatio10Bit;
                }
                else
                {
                    IPEUbwcPreviewCr = IPEUBWCWrPreviewCompressionRatio8Bit;
                }
            }
            if (TRUE == video.UBWCEnable)
            {
                if (IPEBpp10Bit == video.bpp)
                {
                    IPEUbwcVideoCr = IPEUBWCWrVideoCompressionRatio10Bit;
                }
                else
                {
                    IPEUbwcVideoCr = IPEUBWCWrVideoCompressionRatio8Bit;
                }
            }
            if (TRUE == source.UBWCEnable)
            {
                if (IPEBpp10Bit == source.bpp)
                {
                    IPEUbwcMCTFCr = IPEUBWCWrMctfCompressionRatio10Bit;
                }
                else
                {
                    IPEUbwcMCTFCr = IPEUBWCWrMctfCompressionRatio8Bit;
                }
            }

            writeBandwidthPass2 = static_cast<UINT64>(
                (((video.width * video.height * video.bpp) / IPEUbwcVideoCr) * videoEnable) +
                (((preview.width * preview.height * preview.bpp) / IPEUbwcPreviewCr) * previewEnable) +
                (((source.width/4.0/2)  * (source.height/4.0/2)) * 8));

            if (0 != (IPEStabilizationType::IPEStabilizationMCTF & m_instanceProperty.stabilizationType))
            {
                writeBandwidthPass2 += static_cast<UINT64>((source.width * source.height * source.bpp) / IPEUbwcMCTFCr);
            }

            writeBandwidthPass2 *= FPS;

            pBandwidth->writeBW.compressedBW = static_cast<UINT64>(
                (writeBandwidthPass0 + writeBandwidthPass1 + writeBandwidthPass2 + writeBandwidthPartial) * swMargin);

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Preview/Video cbw: srcw=%u srch=%u vidw = %u vidh = %u prevw = %u prevh = %u",
                source.width, source.height, video.width, video.height, preview.width, preview.height);

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Preview/Video cbw: pass0:%llu pass1:%llu pass2:%llu pw = %llu cBW = %llu",
                writeBandwidthPass0, writeBandwidthPass1, writeBandwidthPass2, writeBandwidthPartial,
                pBandwidth->writeBW.compressedBW);
        }
        else
        {
            pBandwidth->writeBW.compressedBW = pBandwidth->writeBW.unCompressedBW;
        }
    }

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Wr: cbw = %llu bw = %llu", pBandwidth->writeBW.compressedBW,
            pBandwidth->writeBW.unCompressedBW);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::UpdateBandwidth
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::UpdateBandwidth(
    ExecuteProcessRequestData*   pExecuteProcessRequestData,
    CSLICPClockBandwidthRequest* pICPClockBandwidthRequest)
{
    // Aggregates IPE read + write bandwidth for the current request into the ICP
    // clock/bandwidth request structure.
    PerRequestActivePorts*  pPerRequestPorts   = pExecuteProcessRequestData->pEnabledPortsInfo;
    // Zero-initialize: CalculateIPEWrBandwidth reads bandwidth.partialBW, which is only
    // written by CalculateIPERdBandwidth when the full input port is present in the request.
    struct IPEBandwidth     bandwidth          = {};
    UINT                    FPS                = DefaultFPS;

    if (0 != m_FPS)
    {
        FPS = m_FPS;
    }

    bandwidth.FPS = FPS;
    // Read-side must run first: it publishes partialBW consumed by the write-side calculation.
    CalculateIPERdBandwidth(pPerRequestPorts, &bandwidth);
    CalculateIPEWrBandwidth(pPerRequestPorts, &bandwidth);

    pICPClockBandwidthRequest->unCompressedBW =
        bandwidth.readBW.unCompressedBW + bandwidth.writeBW.unCompressedBW;
    pICPClockBandwidthRequest->compressedBW = bandwidth.readBW.compressedBW + bandwidth.writeBW.compressedBW;

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "bandWidth = %lld compressedBW = %lld",
        pICPClockBandwidthRequest->unCompressedBW, pICPClockBandwidthRequest->compressedBW);
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::CheckAndUpdateClockBW
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::CheckAndUpdateClockBW(
    CmdBuffer*                   pCmdBuffer,
    ExecuteProcessRequestData*   pExecuteProcessRequestData,
    CSLICPClockBandwidthRequest* pICPClockBandwidthRequest)
{
    // Fills in the clock and bandwidth votes for this request and writes them into the
    // command buffer as a generic blob for the ICP firmware.
    UpdateClock(pICPClockBandwidthRequest);
    UpdateBandwidth(pExecuteProcessRequestData, pICPClockBandwidthRequest);

    // Don't silently drop a blob-write failure; the vote would never reach the firmware.
    CamxResult result = PacketBuilder::WriteGenericBlobData(pCmdBuffer, CSLICPGenericBlobCmdBufferClk,
        sizeof(CSLICPClockBandwidthRequest), reinterpret_cast<BYTE*>(pICPClockBandwidthRequest));
    if (CamxResultSuccess != result)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to write clock/bandwidth blob, result %d", result);
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::ExecuteProcessRequest
///
/// @brief Per-request processing entry point for the IPE node: resolves camera/tuning context, programs the IQ
///        modules into firmware command buffers, attaches input/output IO configs, and submits the packet to CSL.
///        Sequence 0 (with dependencies enabled) only sets dependencies; sequence 1 does the actual build + submit.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::ExecuteProcessRequest(
    ExecuteProcessRequestData* pExecuteProcessRequestData)
{
    CAMX_ASSERT(NULL != pExecuteProcessRequestData);
    CAMX_ASSERT(NULL != pExecuteProcessRequestData->pNodeProcessRequestData);
    CAMX_ASSERT(NULL != pExecuteProcessRequestData->pNodeProcessRequestData->pCaptureRequest);
    CAMX_ASSERT(NULL != pExecuteProcessRequestData->pTuningModeData);

    CamxResult  result    = CamxResultSuccess;
    Packet*     pIQPacket = NULL;
    CmdBuffer*  pIPECmdBuffer[IPECmdBufferMaxIds] = { NULL };

    AECFrameControl         AECUpdateData    = {};
    AWBFrameControl         AWBUpdateData    = {};
    AECStatsControl         AECStatsUpdate   = {};
    AWBStatsControl         AWBStatsUpdate   = {};
    AFStatsControl          AFStatsUpdate    = {};
    ISPInputData            moduleInput      = {};
    NodeProcessRequestData* pNodeRequestData = pExecuteProcessRequestData->pNodeProcessRequestData;
    UINT64                  requestId        = pNodeRequestData->pCaptureRequest->requestId;
    PerRequestActivePorts*  pPerRequestPorts = pExecuteProcessRequestData->pEnabledPortsInfo;
    BOOL                    useDependencies  = GetHwContext()->GetStaticSettings()->enableIPEDependencies;
    UINT                    parentNodeID     = IFE;
    INT                     sequenceNumber   = 0;
    UINT32                  cameraId         = 0;
    BOOL                    isMasterCamera   = TRUE;
    BOOL                    isPendingBuffer  = FALSE;

    // Initialize ICA parameters
    moduleInput.ICAConfigData.ICAInGridParams.gridTransformEnable                  = 0;
    moduleInput.ICAConfigData.ICAInInterpolationParams.customInterpolationEnabled  = 0;
    moduleInput.ICAConfigData.ICAInPerspectiveParams.perspectiveTransformEnable    = 0;
    moduleInput.ICAConfigData.ICARefGridParams.gridTransformEnable                 = 0;
    moduleInput.ICAConfigData.ICARefPerspectiveParams.perspectiveTransformEnable   = 0;
    moduleInput.ICAConfigData.ICARefInterpolationParams.customInterpolationEnabled = 0;
    moduleInput.ICAConfigData.ICAReferenceParams.perspectiveTransformEnable        = 0;

    // PublishICADependencies(pNodeRequestData);
    // This check takes care of overridesettings & operation mode together for FOVC.
    if (TRUE == m_instanceProperty.enableFOVC)
    {
        m_FOVCEnabled = TRUE;
    }
    else
    {
        m_FOVCEnabled = FALSE;
    }

    // Scan input ports: note any pending buffers and resolve the parent node of the full-resolution input
    for (UINT i = 0; i < pPerRequestPorts->numInputPorts; i++)
    {
        PerRequestInputPortInfo* pInputPort = &pPerRequestPorts->pInputPorts[i];

        if ((NULL != pInputPort) && (TRUE == pInputPort->flags.isPendingBuffer))
        {
            isPendingBuffer = TRUE;
        }
        if (NULL != pInputPort && IPEInputPortFull == pInputPort->portId)
        {
            parentNodeID = GetParentNodeType(pInputPort->portId);
        }
    }

    // Derive the processing sequence: with a pending buffer (or dependencies enabled) follow the
    // framework-driven processSequenceId; otherwise process immediately as sequence 1.
    if (TRUE == isPendingBuffer)
    {
        sequenceNumber = pNodeRequestData->processSequenceId;
    }
    else
    {
        sequenceNumber = 1;
    }

    if (TRUE == useDependencies)
    {
        sequenceNumber = pNodeRequestData->processSequenceId;
    }

    // Sequence 0: only publish dependencies; actual packet build happens on sequence 1
    if (0 == sequenceNumber)
    {
        if (TRUE == useDependencies)
        {
            BOOL allOutputPortsAreLoopback = TRUE;

            for (UINT portIndex = 0; portIndex < pPerRequestPorts->numOutputPorts; portIndex++)
            {
                PerRequestOutputPortInfo* pOutputPort = &pPerRequestPorts->pOutputPorts[portIndex];
                if (FALSE == pOutputPort->flags.isLoopback)
                {
                    allOutputPortsAreLoopback = FALSE;
                    break;
                }
            }

            SetDependencies(pNodeRequestData, parentNodeID, allOutputPortsAreLoopback);

            // Need to determine if this IPE instance is for EISv3
            // If yes, then we need to publish metadata early to not delay preview
            if (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType))
            {
                ProcessMetadataDone(requestId);
            }
        }
    }

    if (1 == sequenceNumber)
    {
        // Resolve the camera id and master/slave role; in multi-camera use cases this comes from the
        // MultiCameraIdRole input vendor tag, otherwise from the pipeline itself.
        if (TRUE == IsMultiCameraUsecase())
        {
            UINT              tag[]                    = { 0 };
            static const UINT Length                   = CAMX_ARRAY_SIZE(tag);
            VOID*             pData[Length]            = { 0 };
            UINT64            configDataOffset[Length] = { 0 };

            VendorTagManager::QueryVendorTagLocation("com.qti.chi.multicamerainfo", "MultiCameraIdRole", &tag[0]);
            tag[0] |= InputMetadataSectionMask;
            GetDataList(tag, pData, configDataOffset, Length);
            if (NULL != pData[0])
            {
                MultiCameraIdRole* pInputMetadata = NULL;
                pInputMetadata = (static_cast<MultiCameraIdRole*>(pData[0]));
                cameraId = pInputMetadata->currentCameraId;

                // Default and wide roles are treated as master for tuning/debug-data purposes
                if ((CameraRoleTypeDefault == pInputMetadata->currentCameraRole) ||
                    (CameraRoleTypeWide    == pInputMetadata->currentCameraRole))
                {
                    isMasterCamera = TRUE;
                }
                else
                {
                    isMasterCamera = FALSE;
                }

                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "#1: cameraId = %d, isMasterCamera = %d", cameraId, isMasterCamera);
            }
            else
            {
                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "cannot get Input Metadata for Optical Zoom");
            }
        }
        else
        {
            cameraId = GetPipeline()->GetCameraId();
            isMasterCamera  = TRUE;
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "#2: cameraId=%d, isMasterCamera=%d", cameraId, isMasterCamera);
        }

        moduleInput.pTuningDataManager = GetTuningDataManagerWithCameraId(cameraId);
        moduleInput.pHwContext         = GetHwContext();

        UpdateICADependencies(&moduleInput);

        // Get CmdBuffer for request
        CAMX_ASSERT(NULL != m_pIQPacketManager);
        CAMX_ASSERT(NULL != m_pIPECmdBufferManager[CmdBufferFrameProcess]);
        CAMX_ASSERT(NULL != m_pIPECmdBufferManager[CmdBufferIQSettings]);

        pIQPacket                            = GetPacketForRequest(requestId, m_pIQPacketManager);
        pIPECmdBuffer[CmdBufferFrameProcess] =
            GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferFrameProcess]);
        pIPECmdBuffer[CmdBufferIQSettings]   =
            GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferIQSettings]);
        pIPECmdBuffer[CmdBufferGenericBlob]  =
            GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferGenericBlob]);
        CSLICPClockBandwidthRequest        ICPClockBandwidthRequest;

        // Optional command buffers: only fetched when the corresponding manager was created
        if (NULL != m_pIPECmdBufferManager[CmdBufferPreLTM])
        {
            pIPECmdBuffer[CmdBufferPreLTM] = GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferPreLTM]);
        }

        if (NULL != m_pIPECmdBufferManager[CmdBufferPostLTM])
        {
            pIPECmdBuffer[CmdBufferPostLTM] = GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferPostLTM]);
        }

        if (NULL != m_pIPECmdBufferManager[CmdBufferDMIHeader])
        {
            pIPECmdBuffer[CmdBufferDMIHeader] = GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferDMIHeader]);
        }

        if (m_pIPECmdBufferManager[CmdBufferNPS] != NULL)
        {
            pIPECmdBuffer[CmdBufferNPS] = GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferNPS]);
        }

        if (NULL != m_pIPECmdBufferManager[CmdBufferStriping])
        {
            pIPECmdBuffer[CmdBufferStriping] = GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferStriping]);
        }

        if (NULL != m_pIPECmdBufferManager[CmdBufferBLMemory])
        {
            pIPECmdBuffer[CmdBufferBLMemory] = GetCmdBufferForRequest(requestId, m_pIPECmdBufferManager[CmdBufferBLMemory]);
        }

        IpeFrameProcess*        pFrameProcess;
        IpeFrameProcessData*    pFrameProcessData;
        IpeIQSettings*          pIPEIQsettings;

        // NOTE(review): pIPECmdBuffer[CmdBufferFrameProcess] and [CmdBufferIQSettings] are dereferenced
        // here via BeginCommands(), but the NULL check for these buffers is performed only later in the
        // "mandatory buffers" block below — consider moving that check before these dereferences.
        pFrameProcess = reinterpret_cast<IpeFrameProcess*>(
            pIPECmdBuffer[CmdBufferFrameProcess]->BeginCommands(CmdBufferFrameProcessSizeBytes / 4));
        CAMX_ASSERT(NULL != pFrameProcess);

        pFrameProcess->userArg = m_hDevice;
        pFrameProcessData      = &pFrameProcess->cmdData;
        pIPEIQsettings         =
            reinterpret_cast<IpeIQSettings*>(pIPECmdBuffer[CmdBufferIQSettings]->BeginCommands(sizeof(IpeIQSettings) / 4));

        // Setup the Input data for IQ Parameter
        moduleInput.frameNum                                   = requestId;
        moduleInput.resetReferenceInput                        = m_resetReferenceInput;
        // Reset the flag after populating the moduleInput structure
        m_resetReferenceInput                                  = FALSE;
        moduleInput.pHwContext                                 = GetHwContext();
        moduleInput.pAECUpdateData                             = &AECUpdateData;
        moduleInput.pAWBUpdateData                             = &AWBUpdateData;
        moduleInput.pAECStatsUpdateData                        = &AECStatsUpdate;
        moduleInput.pAWBStatsUpdateData                        = &AWBStatsUpdate;
        moduleInput.pAFStatsUpdateData                         = &AFStatsUpdate;
        moduleInput.pIPETuningMetadata                         = m_pTuningMetadata;
        moduleInput.pipelineIPEData.pFrameProcessData          = pFrameProcessData;
        moduleInput.pipelineIPEData.pIPEIQSettings             = pIPEIQsettings;
        moduleInput.pipelineIPEData.ppIPECmdBuffer             = pIPECmdBuffer;
        moduleInput.pipelineIPEData.batchFrameNum              = pNodeRequestData->pCaptureRequest->numBatchedFrames;
        moduleInput.pipelineIPEData.numOutputRefPorts          = m_numOutputRefPorts;
        moduleInput.pipelineIPEData.realtimeFlag               = m_realTimeIPE;
        moduleInput.pHALTagsData                               = &m_HALTagsData;
        moduleInput.pipelineIPEData.instanceProperty           = m_instanceProperty;
        moduleInput.pipelineIPEData.inputDimension.widthPixels = m_fullInputWidth;
        moduleInput.pipelineIPEData.inputDimension.heightLines = m_fullInputHeight;
        moduleInput.pipelineIPEData.numPasses                  = m_numPasses;
        moduleInput.sensorID                                   = cameraId;
        moduleInput.pCalculatedData                            = &m_ISPData;
        moduleInput.opticalCenterX                             = m_fullInputWidth / 2;
        moduleInput.opticalCenterY                             = m_fullInputHeight / 2;
        moduleInput.fDData.numberOfFace                        = 0;
        moduleInput.pipelineIPEData.pWarpGeometryData          = NULL;
        moduleInput.pipelineIPEData.compressiononOutput        = m_compressiononOutput;
        Utils::Memset(moduleInput.fDData.faceCenterX, 0x0, sizeof(moduleInput.fDData.faceCenterX));
        Utils::Memset(moduleInput.fDData.faceCenterY, 0x0, sizeof(moduleInput.fDData.faceCenterY));
        Utils::Memset(moduleInput.fDData.faceRadius, 0x0, sizeof(moduleInput.fDData.faceRadius));

        // @note: need to set it here now before getting the data
        moduleInput.pipelineIPEData.upscalingFactorMFSR        = 1.0f;
        moduleInput.mfnrFrameNum                               = 0;
        moduleInput.pipelineIPEData.numOfFrames                = 5;  // m_mfnrTotalNumFrames
        moduleInput.pipelineIPEData.isDigitalZoomEnabled       = 0;
        moduleInput.pipelineIPEData.digitalZoomStartX          = 0;

        // Get HAL tags
        result = GetMetadataTags(&moduleInput);

        // Derive the MFNR frame number from the processing stage.
        // NOTE(review): the blend formula cycles requestId over (numOfFrames - 2) slots starting at 1 —
        // presumably matching the MFNR anchor/blend frame scheme; confirm against the MFNR feature design.
        if ((IPEProcessingType::IPEMFNRBlend == m_instanceProperty.processingType) &&
            (IPEProfileId::IPEProfileIdNPS == m_instanceProperty.profileId))
        {
            moduleInput.mfnrFrameNum = (requestId - 1) % (moduleInput.pipelineIPEData.numOfFrames-2) + 1;
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "MFNR Blend Stage FrameNum = %d", moduleInput.mfnrFrameNum);
        }
        else if ((IPEProcessingType::IPEMFNRPostfilter == m_instanceProperty.processingType) &&
                 (IPEProfileId::IPEProfileIdNPS        == m_instanceProperty.profileId))
        {
            moduleInput.mfnrFrameNum = moduleInput.pipelineIPEData.numOfFrames-1;
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "MFNR Postfilter Stage FrameNum = %d", moduleInput.mfnrFrameNum);
        }
        else if (IPEProcessingType::IPEMFNRPrefilter == m_instanceProperty.processingType)
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "MFNR Prefilter Stage FrameNum = %d", moduleInput.mfnrFrameNum);
        }

        moduleInput.tuningModeChanged = ISPIQModule::IsTuningModeDataChanged(
                                            pExecuteProcessRequestData->pTuningModeData,
                                            &m_tuningData);

        // Needed to have different tuning data for different instances of a node within same pipeline
        //
        // Also, cache tuning mode selector data for comparison for next frame, to help
        // optimize tuning data (tree) search in the IQ modules
        if (TRUE == moduleInput.tuningModeChanged)
        {
            Utils::Memcpy(&m_tuningData, pExecuteProcessRequestData->pTuningModeData, sizeof(ChiTuningModeParameter));

            if ((IPEProfileId::IPEProfileIdDefault    != m_instanceProperty.profileId)    &&
                (IPEProcessingType::IPEMFNRPostfilter == m_instanceProperty.processingType))
            {
                m_tuningData.TuningMode[static_cast<UINT32>(ModeType::Feature2)].subMode.feature2 =
                    ChiModeFeature2SubModeType::MFNRBlend;
            }

            if (IPEProcessingType::IPEProcessingPreview == m_instanceProperty.processingType)
            {
                // This node instnace is preview instance.. Overwrite the tuning param to preview
                m_tuningData.TuningMode[static_cast<UINT32>(ModeType::Usecase)].subMode.usecase =
                    ChiModeUsecaseSubModeType::Preview;
            }
        }

        // Now refer to the updated tuning mode selector data
        moduleInput.pTuningData = &m_tuningData;

        // 3A input: real stats when dependencies are enabled, hardcoded values otherwise
        if (TRUE == useDependencies)
        {
            SetAAAInputData(&moduleInput, parentNodeID);
        }
        else
        {
            HardcodeAAAInputData(&moduleInput, parentNodeID);
        }

        if (TRUE == m_OEMStatsSettingEnable)
        {
            GetOEMStatsConfig(&moduleInput, parentNodeID);
        }

        if (TRUE == m_OEMIQSettingEnable)
        {
            GetOEMIQConfig(&moduleInput, parentNodeID);
        }

        if (CamxResultSuccess == result)
        {
            result = GetFaceROI(&moduleInput, parentNodeID);
        }

        // Check for mandatory buffers (even for bypass test)
        // NOTE(review): this check runs after the buffers were already dereferenced via BeginCommands()
        // above; also the third log argument passes pIPEIQsettings where pIPECmdBuffer[CmdBufferIQSettings]
        // appears intended, and "%x" is used for pointer values (pointers conventionally use "%p").
        if ((NULL == pIQPacket)                            ||
            (NULL == pIPECmdBuffer[CmdBufferFrameProcess]) ||
            (NULL == pIPECmdBuffer[CmdBufferIQSettings]))
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc,
                           "%s: Null IQPacket or CmdBuffer %x, %x %x",
                           __FUNCTION__,
                           pIQPacket,
                           pIPECmdBuffer[CmdBufferFrameProcess],
                           pIPEIQsettings);

            result = CamxResultENoMemory;
        }

        if (CamxResultSuccess == result)
        {
            GetGammaOutput(moduleInput.pCalculatedData, parentNodeID);
        }

        if (CamxResultSuccess == result)
        {
            GetADRCInfoOutput();
        }

        if (CamxResultSuccess == result)
        {
            result = FillIQSetting(&moduleInput, pIPEIQsettings, pPerRequestPorts);
        }
        // Attach IO configs and frame-set data for every enabled input port
        if (CamxResultSuccess == result)
        {
            for (UINT i = 0; i < pPerRequestPorts->numInputPorts; i++)
            {
                PerRequestInputPortInfo* pInputPort = &pPerRequestPorts->pInputPorts[i];

                // When the reference input is being reset, skip the loopback reference ports entirely
                if (TRUE == moduleInput.resetReferenceInput)
                {
                    if ((IPEInputPortFullRef == pInputPort->portId)  ||
                        (IPEInputPortDS4Ref == pInputPort->portId)  ||
                        (IPEInputPortDS16Ref == pInputPort->portId))
                    {
                        CAMX_LOG_INFO(CamxLogGroupPProc, "Skipping input port %d due to reset reference", pInputPort->portId);
                        continue;
                    }

                }
                if ((NULL != pInputPort) && (NULL != pInputPort->pImageBuffer))
                {
                    result = pIQPacket->AddIOConfig(pInputPort->pImageBuffer,
                                                    pInputPort->portId,
                                                    CSLIODirection::CSLIODirectionInput,
                                                    pInputPort->phFence,
                                                    1,
                                                    NULL,
                                                    NULL);
                    CAMX_ASSERT(NULL != pInputPort->phFence);
                    CAMX_ASSERT(NULL != pNodeRequestData->pCaptureRequest);

                    if (CamxResultSuccess == result)
                    {
                        CAMX_LOG_INFO(CamxLogGroupPProc,
                                      "IPE:%d reporting Input config, portId=%d, imgBuf=0x%x, hFence=%d, request=%llu",
                                      InstanceID(),
                                      pInputPort->portId,
                                      pInputPort->pImageBuffer,
                                      *(pInputPort->phFence),
                                      pNodeRequestData->pCaptureRequest->requestId);
                    }
                    else
                    {
                        CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to create IO config packet result = %d", result);
                    }
                    if (CamxResultSuccess == result)
                    {
                        // Batched reference ports pair the input with its corresponding output reference buffer
                        if ((1 < pNodeRequestData->pCaptureRequest->numBatchedFrames) &&
                           (TRUE == IsReferenceInputPort(pInputPort->portId)))
                        {
                            PerRequestOutputPortInfo* pCorrespondingOutputPort = NULL;
                            pCorrespondingOutputPort = GetCorrespondingOutputReferencePort(pPerRequestPorts,
                                                                                           pInputPort->portId);

                            // NOTE(review): GetCorrespondingOutputReferencePort() can return NULL (no matching
                            // output port in the active list); pCorrespondingOutputPort is dereferenced here
                            // without a NULL check.
                            result = FillInputFrameSetDataForBatchReferencePorts(
                                        pIPECmdBuffer[CmdBufferFrameProcess],
                                        pInputPort->portId,
                                        pInputPort->pImageBuffer,
                                        pCorrespondingOutputPort->pImageBuffer[0],
                                        pNodeRequestData->pCaptureRequest->numBatchedFrames);
                        }
                        else
                        {
                            result = FillInputFrameSetData(pIPECmdBuffer[CmdBufferFrameProcess],
                                                           pInputPort->portId,
                                                           pInputPort->pImageBuffer,
                                                           pNodeRequestData->pCaptureRequest->numBatchedFrames);
                        }
                    }

                    // Record the full-input format; IQ modules need it for format-dependent programming
                    if ((CamxResultSuccess == result) &&
                        (IPEInputPortFull == pInputPort->portId))
                    {
                        parentNodeID = GetParentNodeType(pInputPort->portId);
                        const ImageFormat* pImageFormat = pInputPort->pImageBuffer->GetFormat();
                        if (NULL != pImageFormat)
                        {
                            moduleInput.pipelineIPEData.fullInputFormat = pImageFormat->format;
                        }
                    }
                }
                else
                {
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "%s: Input Port/Image Buffer is Null ", __FUNCTION__);

                    result = CamxResultEInvalidArg;
                }

                if (CamxResultSuccess != result)
                {
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "Input Port: Add IO config failed i %d", i);
                    break;
                }
            }
        }

        UINT32 metaTag = 0;
        INT32  disableZoomCrop = FALSE;

        // Optional vendor tag allows a client to disable IPE-side zoom cropping
        if (CamxResultSuccess == result)
        {
            result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.ref.cropsize",
                                                              "DisableZoomCrop",
                                                              &metaTag);
        }

        if (CamxResultSuccess == result)
        {
            metaTag |= InputMetadataSectionMask;

            static const UINT32 PropertiesIPE[] =
            {
                metaTag
            };
            UINT length = CAMX_ARRAY_SIZE(PropertiesIPE);
            VOID* pData[1] = { 0 };
            UINT64 propertyDataIPEOffset[1] = { 0 };

            result = GetDataList(PropertiesIPE, pData, propertyDataIPEOffset, length);
            if (CamxResultSuccess == result)
            {
                disableZoomCrop = *reinterpret_cast<INT32*>(pData[0]);
            }
        }

        if ((CamxResultSuccess == result) && (FALSE == disableZoomCrop))
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "%s: zoom operaton from IPE", __FUNCTION__);
            result = FillFrameZoomWindow(&moduleInput, parentNodeID);
        }

        if (CamxResultSuccess == result)
        {
            result = ProgramIQConfig(&moduleInput);
        }
        if (CamxResultSuccess == result)
        {
            pIQPacket->SetRequestId(GetCSLSyncId(requestId));
            pIQPacket->SetOpcode(CSLDeviceType::CSLDeviceTypeICP, CSLPacketOpcodesIPEUpdate);
        }
        if (CamxResultSuccess == result)
        {
            result = FillCDMProgramArrays(pFrameProcessData, pIPEIQsettings, pIPECmdBuffer,
                pNodeRequestData->pCaptureRequest->numBatchedFrames);
        }

        // Attach IO configs and frame-set data for every enabled output port
        if (CamxResultSuccess == result)
        {
            for (UINT portIndex = 0; portIndex < pPerRequestPorts->numOutputPorts; portIndex++)
            {
                // NOTE(review): pOutputPort is dereferenced on the next line before the
                // CAMX_ASSERT NULL check below it.
                PerRequestOutputPortInfo* pOutputPort   = &pPerRequestPorts->pOutputPorts[portIndex];
                ImageBuffer*              pImageBuffer  = pOutputPort->pImageBuffer[0];

                CAMX_ASSERT(NULL != pOutputPort);

                // Even though there are 4 buffers on same port, AddIOConfig shall be called only once.
                if (NULL != pImageBuffer)
                {
                    result = pIQPacket->AddIOConfig(pImageBuffer,
                        pOutputPort->portId,
                        CSLIODirection::CSLIODirectionOutput,
                        pOutputPort->phFence,
                        1,
                        NULL,
                        NULL);
                }

                pFrameProcessData->numFrameSetsInBatch = pNodeRequestData->pCaptureRequest->numBatchedFrames;

                // Display/video ports iterate over their per-frame buffers; reference ports reuse
                // the single loopback buffer for every batched frame.
                UINT32 numBuffers = 1;
                BOOL   isBatchBuffer = FALSE;
                switch( pOutputPort->portId)
                {
                    case IPEOutputPortDisplay:
                    case IPEOutputPortVideo:
                        numBuffers = pOutputPort->numOutputBuffers;
                        isBatchBuffer = FALSE;
                        break;
                    case IPEOutputPortFullRef:
                    case IPEOutputPortDS4Ref:
                    case IPEOutputPortDS16Ref:
                        numBuffers = pNodeRequestData->pCaptureRequest->numBatchedFrames;
                        isBatchBuffer = TRUE;
                        break;
                    default:
                        break;
                }

                for (UINT bufferIndex = 0; bufferIndex < numBuffers; bufferIndex++)
                {
                    if (TRUE == isBatchBuffer)
                    {
                        pImageBuffer = pOutputPort->pImageBuffer[0];
                    }
                    else
                    {
                        pImageBuffer = pOutputPort->pImageBuffer[bufferIndex];
                    }

                    if (NULL != pImageBuffer)
                    {
                        if (CamxResultSuccess == result)
                        {
                            // IPE will always output to non batched image buffer
                            result = FillOutputFrameSetData(pIPECmdBuffer[CmdBufferFrameProcess],
                                                            pOutputPort->portId,
                                                            pImageBuffer,
                                                            bufferIndex);
                        }

                        CAMX_LOG_INFO(CamxLogGroupPProc,
                                      "IPE:%d reporting I/O config, portId=%d, imgBuf=0x%x, hFence=%d, request=%llu",
                                      InstanceID(),
                                      pOutputPort->portId,
                                      pImageBuffer,
                                      *(pOutputPort->phFence),
                                      pNodeRequestData->pCaptureRequest->requestId);
                    }
                    else
                    {
                        CAMX_LOG_ERROR(CamxLogGroupPProc, "%s: Output Port/Image is Null ", __FUNCTION__);
                        result = CamxResultEInvalidArg;
                    }

                    if (CamxResultSuccess != result)
                    {
                        CAMX_LOG_ERROR(CamxLogGroupPProc, "Output Port: Add IO config failed");
                        break;
                    }
                }

                if (CamxResultSuccess != result)
                {
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "Output Port: Add IO config failed");
                    break;
                }
            }

            if (CamxResultSuccess == result)
            {
                /// @todo (CAMX-732) Get Scratch buffer from topology from loopback port
                if (NULL != m_pScratchMemoryBuffer[0])
                {
                    pFrameProcessData->scratchBufferSize = m_firmwareScratchMemSize;
                    // Patch scratch buffer: pFrameProcessData->scratchBufferAddress
                    UINT32 scratchBufferOffset =
                        static_cast <UINT32>(offsetof(IpeFrameProcessData, scratchBufferAddress));
                    result = pIPECmdBuffer[CmdBufferFrameProcess]->AddNestedBufferInfo(scratchBufferOffset,
                                                                                       m_pScratchMemoryBuffer[0]->hHandle,
                                                                                       0);
                }
                else
                {
                    pFrameProcessData->scratchBufferAddress = 0;
                    pFrameProcessData->scratchBufferSize = 0;
                }

                if (CamxResultSuccess == result)
                {
                    result = FillFrameUBWCParams(pFrameProcessData);
                }

                if (CamxResultSuccess == result)
                {
                    result = FillFramePerfParams(pFrameProcessData);
                }

                if ((CamxResultSuccess == result) && (TRUE == m_capability.swStriping))
                {
                    result = FillStripingParams(pFrameProcessData, pIPEIQsettings, pIPECmdBuffer, &ICPClockBandwidthRequest);
                }

                if (CamxResultSuccess == result)
                {
                    result = PatchBLMemoryBuffer(pFrameProcessData, pIPECmdBuffer);
                }

                if (CamxResultSuccess == result)
                {
                    CheckAndUpdateClockBW(pIPECmdBuffer[CmdBufferGenericBlob], pExecuteProcessRequestData,
                        &ICPClockBandwidthRequest);
                }
                // Finalize: commit all command buffers, commit the packet, then submit to the hardware
                if (CamxResultSuccess == result)
                {
                    result = CommitAllCommandBuffers(pIPECmdBuffer);
                }

                if (CamxResultSuccess == result)
                {
                    result = pIQPacket->CommitPacket();
                }

                if (CamxResultSuccess == result)
                {
                    result = pIQPacket->AddCmdBufferReference(pIPECmdBuffer[CmdBufferFrameProcess], NULL);
                }
                if (TRUE == HwEnvironment::GetInstance()->GetStaticSettings()->dumpIPEFirmwarePayload)
                {
                    // Dump all firmware payload components for debugging purpose only
                    DumpPayload(CmdBufferFrameProcess, pIPECmdBuffer[CmdBufferFrameProcess], requestId);
                    DumpPayload(CmdBufferStriping, pIPECmdBuffer[CmdBufferStriping], requestId);
                    DumpPayload(CmdBufferIQSettings, pIPECmdBuffer[CmdBufferIQSettings], requestId);
                    DumpPayload(CmdBufferPreLTM, pIPECmdBuffer[CmdBufferPreLTM], requestId);
                    DumpPayload(CmdBufferPostLTM, pIPECmdBuffer[CmdBufferPostLTM], requestId);
                    DumpPayload(CmdBufferDMIHeader, pIPECmdBuffer[CmdBufferDMIHeader], requestId);
                    DumpPayload(CmdBufferNPS, pIPECmdBuffer[CmdBufferNPS], requestId);
                }

                if (CamxResultSuccess == result)
                {
                    // The generic blob buffer is only referenced when something was actually written to it
                    if (pIPECmdBuffer[CmdBufferGenericBlob]->GetResourceUsedDwords() > 0)
                    {
                        pIPECmdBuffer[CmdBufferGenericBlob]->SetMetadata(static_cast<UINT32>(CSLICPCmdBufferIdGenericBlob));
                        result = pIQPacket->AddCmdBufferReference(pIPECmdBuffer[CmdBufferGenericBlob], NULL);
                    }
                }
                if (CamxResultSuccess == result)
                {
                    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Submit packets for instance %d request %llu",
                                     InstanceID(), requestId);
                    result = GetHwContext()->Submit(m_hDevice, pIQPacket);
                    if (CamxResultSuccess == result)
                    {
                        CAMX_LOG_INFO(CamxLogGroupPProc, "IPE:%d Submitted packets with requestId = %llu",
                            InstanceID(), requestId);
                    }
                }
                if ((NULL != m_pTuningMetadata) && (TRUE == isMasterCamera) && (CamxResultSuccess == result))
                {
                    // Only use debug data on the master camera
                    DumpTuningMetadata(&moduleInput);
                }
            }
        }

        // Post metadata from IQ modules to metadata
        if (CamxResultSuccess == result)
        {
            result = PostMetadata(&moduleInput);
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetCorrespondingOutputReferencePort
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
PerRequestOutputPortInfo* IPENode::GetCorrespondingOutputReferencePort(
    PerRequestActivePorts*   pPerRequestPorts,
    UINT inputPortId)
{
    PerRequestOutputPortInfo* pCorrespondingOutputPort = NULL;
    UINT                      outputPortId             = IPEOutputPortFullRef;
    BOOL                      validMapping             = TRUE;

    // Guard against a NULL port list; the loop below dereferences it unconditionally.
    if (NULL == pPerRequestPorts)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "pPerRequestPorts is NULL");
        return NULL;
    }

    // Map the reference input port to its matching reference output port.
    switch (inputPortId)
    {
        case IPEInputPortFullRef:
            outputPortId = IPEOutputPortFullRef;
            break;
        case IPEInputPortDS4Ref:
            outputPortId = IPEOutputPortDS4Ref;
            break;
        case IPEInputPortDS16Ref:
            outputPortId = IPEOutputPortDS16Ref;
            break;
        case IPEInputPortDS64Ref:
        default:
            // NOTE(review): DS64Ref is deliberately routed to the error path today.
            // Previously an unhandled port still searched for the FullRef output and
            // could return an unrelated port; return NULL instead for unhandled ids.
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Unhandled port type %d", inputPortId);
            validMapping = FALSE;
            break;
    }

    if (TRUE == validMapping)
    {
        // Linear scan over the active output ports for the mapped id.
        for (UINT portIndex = 0; portIndex < pPerRequestPorts->numOutputPorts; portIndex++)
        {
            PerRequestOutputPortInfo* pOutputPort = &pPerRequestPorts->pOutputPorts[portIndex];
            if (pOutputPort->portId == outputPortId)
            {
                pCorrespondingOutputPort = pOutputPort;
                break;
            }
        }
    }

    return pCorrespondingOutputPort;
}


////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::SetupDeviceResource
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::SetupDeviceResource(
    CSLBufferInfo*     pConfigIOMem,
    CSLDeviceResource* pResource)
{
    CamxResult                  result          = CamxResultSuccess;
    CSLICPAcquireDeviceInfo*    pIcpResource    = NULL;
    CSLICPResourceInfo*         pIcpOutResource = NULL;
    UINT                        countResource   = 0;
    SIZE_T                      resourceSize    = 0;
    UINT                        numOutputPort   = 0;
    UINT                        numInputPort    = 0;
    const ImageFormat*          pImageFormat    = NULL;
    UINT                        inputPortId[IPEMaxInput];
    UINT                        outputPortId[IPEMaxOutput];
    IPE_IO_IMAGES               firmwarePortId;
    UINT                        parentNodeID = IFE;

    // Guard: both outputs of this function are written through these pointers.
    if ((NULL == pConfigIOMem) || (NULL == pResource))
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Invalid pointer pConfigIOMem %p, pResource %p", pConfigIOMem, pResource);
        return CamxResultEInvalidPointer;
    }

    // Get Input Port List
    GetAllInputPortIds(&numInputPort, &inputPortId[0]);

    // Get Output Port List
    GetAllOutputPortIds(&numOutputPort, &outputPortId[0]);

    // numInputPort/numOutputPort are unsigned, so only the zero and upper-bound checks are meaningful.
    if ((0 == numInputPort) || (numInputPort > IPEMaxInput) ||
        (0 == numOutputPort) || (numOutputPort > IPEMaxOutput))
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "invalid input (%u) or output port (%u)", numInputPort, numOutputPort);
        result = CamxResultEUnsupported;
    }

    if (CamxResultSuccess == result)
    {
        static const UINT UsecasePropertiesIPE[]               = { PropertyIDUsecaseFPS, PropertyIDUsecaseBatch };
        const UINT        length                               = CAMX_ARRAY_SIZE(UsecasePropertiesIPE);
        VOID*             pData[length]                        = { 0 };
        UINT64            usecasePropertyDataIPEOffset[length] = { 0 };

        GetDataList(UsecasePropertiesIPE, pData, usecasePropertyDataIPEOffset, length);

        // This is a soft dependency: fall back to 30fps / batch size 1 if the usecase
        // properties have not been published.
        if ((NULL != pData[0]) && (NULL != pData[1]))
        {
            m_FPS          = *reinterpret_cast<UINT*>(pData[0]);
            m_maxBatchSize = *reinterpret_cast<UINT*>(pData[1]);
        }
        else
        {
            m_FPS          = 30;
            m_maxBatchSize = 1;
        }

        // CSLICPAcquireDeviceInfo embeds one CSLICPResourceInfo; allocate space for the rest.
        resourceSize = sizeof(CSLICPAcquireDeviceInfo) + (sizeof(CSLICPResourceInfo) * (numOutputPort - 1));
        pIcpResource = static_cast<CSLICPAcquireDeviceInfo*>(CAMX_CALLOC(resourceSize));

        if (NULL == pIcpResource)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "pIcpResource is NULL");
            result = CamxResultENoMemory;
        }

        if (CamxResultSuccess == result)
        {
            pIcpResource->numOutputResource = numOutputPort;
            pIcpResource->secureMode        = IsSecureMode();

            IpeConfigIo*     pConfigIO;
            IpeConfigIoData* pConfigIOData;

            // The firmware config IO command lives in the shared CSL buffer.
            pConfigIO          = reinterpret_cast<IpeConfigIo*>(pConfigIOMem->pVirtualAddr);
            pConfigIO->userArg = 0;
            pConfigIOData      = &pConfigIO->cmdData;
            CamX::Utils::Memset(pConfigIOData, 0, sizeof(*pConfigIOData));
            pConfigIOData->secureMode = IsSecureMode();

            // Describe every input port to firmware (format, dimensions, per-plane layout).
            for (UINT inputPortIndex = 0; inputPortIndex < numInputPort; inputPortIndex++)
            {
                TranslateToFirmwarePortId(inputPortId[inputPortIndex], &firmwarePortId);

                pImageFormat = GetInputPortImageFormat(inputPortIndex);

                if (NULL == pImageFormat)
                {
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "pImageFormat is NULL");
                    result = CamxResultENoMemory;
                    break;
                }

                pConfigIOData->images[firmwarePortId].info.format                 =
                    TranslateFormatToFirmwareImageFormat(pImageFormat->format);
                pConfigIOData->images[firmwarePortId].info.dimensions.widthPixels = pImageFormat->width;
                pConfigIOData->images[firmwarePortId].info.dimensions.heightLines = pImageFormat->height;
                // NV21-style formats carry swapped chroma; tell firmware to byte-swap.
                pConfigIOData->images[firmwarePortId].info.enableByteSwap         =
                    ((pImageFormat->format == CamX::Format::YUV420NV21) ||
                    (pImageFormat->format == CamX::Format::YUV420NV21TP10)) ? 1 : 0;

                CAMX_LOG_INFO(CamxLogGroupPProc,
                              "IPE: %d, Input : firmwarePortId %d format %d, width %d, height %d",
                              InstanceID(), firmwarePortId,
                              pImageFormat->format, pImageFormat->width, pImageFormat->height);
                const YUVFormat* pPlane = &pImageFormat->formatParams.yuvFormat[0];
                for (UINT plane = 0; plane < ImageFormatUtils::GetNumberOfPlanes(pImageFormat); plane++)
                {
                    // plane indexes bufferLayout[]; it must stay strictly below the array size.
                    CAMX_ASSERT(plane < MAX_NUM_OF_IMAGE_PLANES);
                    pConfigIOData->images[firmwarePortId].bufferLayout[plane].bufferStride         =
                        pPlane[plane].planeStride;
                    pConfigIOData->images[firmwarePortId].bufferLayout[plane].bufferHeight         =
                        pPlane[plane].sliceHeight;
                    pConfigIOData->images[firmwarePortId].metadataBufferLayout[plane].bufferStride =
                        pPlane[plane].metadataStride;
                    pConfigIOData->images[firmwarePortId].metadataBufferLayout[plane].bufferHeight =
                        pPlane[plane].metadataHeight;
                    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Input: plane %d, stride %d, scanline %d, metastride %d, metascanline %d",
                                     plane,
                                     pPlane[plane].planeStride,
                                     pPlane[plane].sliceHeight,
                                     pPlane[plane].metadataStride,
                                     pPlane[plane].metadataHeight);
                }

                // The full-resolution input defines the acquire resource and one processing pass.
                if (inputPortId[inputPortIndex] == IPEInputPortFull)
                {
                    pIcpResource->inputResource.format = static_cast <UINT32>(pImageFormat->format);
                    pIcpResource->inputResource.width  = pImageFormat->width;
                    pIcpResource->inputResource.height = pImageFormat->height;
                    pIcpResource->inputResource.FPS    = m_FPS;

                    m_fullInputWidth  = pImageFormat->width;
                    m_fullInputHeight = pImageFormat->height;
                    m_numPasses++;
                }

                // Each enabled downscaled input adds another pass.
                if ((IPEInputPortDS4  == inputPortId[inputPortIndex])  ||
                    (IPEInputPortDS16 == inputPortId[inputPortIndex]) ||
                    (IPEInputPortDS64 == inputPortId[inputPortIndex]))
                {
                    m_numPasses++;
                    CAMX_LOG_INFO(CamxLogGroupPProc, "IPE number of passes %d", m_numPasses);
                }

                parentNodeID = GetParentNodeType(inputPortId[inputPortIndex]);
            }

            if (CamxResultSuccess == result)
            {
                pIcpOutResource    = pIcpResource->outputResource;
                m_fullOutputHeight = 0;
                m_fullOutputWidth  = 0;

                // Describe every output port to firmware and build the per-port acquire resources.
                for (UINT outputPortIndex = 0; outputPortIndex < numOutputPort; outputPortIndex++)
                {
                    TranslateToFirmwarePortId(outputPortId[outputPortIndex], &firmwarePortId);

                    pImageFormat = GetOutputPortImageFormat(OutputPortIndex(firmwarePortId));

                    if (NULL == pImageFormat)
                    {
                        CAMX_LOG_ERROR(CamxLogGroupPProc, "pImageFormat is NULL");
                        result = CamxResultENoMemory;
                        break;
                    }

                    pConfigIOData->images[firmwarePortId].info.format                 =
                        TranslateFormatToFirmwareImageFormat(pImageFormat->format);
                    pConfigIOData->images[firmwarePortId].info.dimensions.widthPixels = pImageFormat->width;
                    pConfigIOData->images[firmwarePortId].info.dimensions.heightLines = pImageFormat->height;
                    pConfigIOData->images[firmwarePortId].info.enableByteSwap         =
                        ((pImageFormat->format == CamX::Format::YUV420NV21) ||
                        (pImageFormat->format == CamX::Format::YUV420NV21TP10)) ? 1 : 0;

                    CAMX_LOG_INFO(CamxLogGroupPProc,
                                  "IPE: %d, Output : firmwarePortId %d format %d, width %d, height %d",
                                  InstanceID(), firmwarePortId,
                                  pImageFormat->format, pImageFormat->width, pImageFormat->height);
                    const YUVFormat* pPlane = &pImageFormat->formatParams.yuvFormat[0];
                    for (UINT plane = 0; plane < ImageFormatUtils::GetNumberOfPlanes(pImageFormat); plane++)
                    {
                        // plane indexes bufferLayout[]; it must stay strictly below the array size.
                        CAMX_ASSERT(plane < MAX_NUM_OF_IMAGE_PLANES);
                        pConfigIOData->images[firmwarePortId].bufferLayout[plane].bufferStride         =
                            pPlane[plane].planeStride;
                        pConfigIOData->images[firmwarePortId].bufferLayout[plane].bufferHeight         =
                            pPlane[plane].sliceHeight;
                        pConfigIOData->images[firmwarePortId].metadataBufferLayout[plane].bufferStride =
                            pPlane[plane].metadataStride;
                        pConfigIOData->images[firmwarePortId].metadataBufferLayout[plane].bufferHeight =
                            pPlane[plane].metadataHeight;
                        CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                                         "Output: plane %d stride %d, scanline %d, metastride %d, metascanline %d",
                                         plane,
                                         pPlane[plane].planeStride,
                                         pPlane[plane].sliceHeight,
                                         pPlane[plane].metadataStride,
                                         pPlane[plane].metadataHeight);
                    }

                    pIcpOutResource->format = static_cast <UINT32>(pImageFormat->format);
                    pIcpOutResource->width  = pImageFormat->width;
                    pIcpOutResource->height = pImageFormat->height;
                    pIcpOutResource->FPS    = m_FPS;
                    pIcpOutResource++;

                    // Track the largest display/video output for clock/BW and UBWC state.
                    if ((outputPortId[outputPortIndex] == IPEOutputPortDisplay) ||
                        (outputPortId[outputPortIndex] == IPEOutputPortVideo))
                    {
                        if (m_fullOutputHeight < static_cast<INT32>(pImageFormat->height))
                        {
                            m_fullOutputHeight = pImageFormat->height;
                        }
                        if (m_fullOutputWidth < static_cast<INT32>(pImageFormat->width))
                        {
                            m_fullOutputWidth = pImageFormat->width;
                        }
                        if (TRUE == ImageFormatUtils::IsUBWC(pImageFormat->format))
                        {
                            m_compressiononOutput = TRUE;
                        }
                    }

                    // Count reference outputs (FullRef..DS64Ref are contiguous ids).
                    if (outputPortId[outputPortIndex] >= IPEOutputPortFullRef &&
                        outputPortId[outputPortIndex] <= IPEOutputPortDS64Ref)
                    {
                        CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                                         "Reference port enabled = %d", outputPortId[outputPortIndex]);
                        m_numOutputRefPorts++;
                    }
                }

                if (TRUE == CheckIsIPERealtime())
                {
                    m_realTimeIPE = TRUE;
                    pIcpResource->resourceType = CSLICPResourceIDIPERealTime;
                }
                else
                {
                    m_realTimeIPE = FALSE;
                    pIcpResource->resourceType = CSLICPResourceIDIPENonRealTime;
                }

                UpdateNumberofPassesonDimension(parentNodeID);

                pConfigIOData->maxBatchSize = m_maxBatchSize;

                pIcpResource->hIOConfigCmd = pConfigIOMem->hHandle;
                pIcpResource->IOConfigLen  = sizeof(IpeConfigIo);
                countResource              = 1;

                // Add to the resource list; ownership of pIcpResource transfers to the caller
                // via pDeviceResourceParam (freed in AcquireDevice).
                pResource->resourceID              = pIcpResource->resourceType;
                pResource->pDeviceResourceParam    = static_cast<VOID*>(pIcpResource);
                pResource->deviceResourceParamSize = resourceSize;

                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "numResources %d", countResource);

                result = GetStabilizationMargins();
                if (CamxResultSuccess != result)
                {
                    CAMX_LOG_ERROR(CamxLogGroupPProc, "Unable to get stabilization margins %d", result);
                }

                if (CamxResultSuccess == result)
                {
                    pConfigIOData->stabilizationMargins.widthPixels = m_stabilizationMargin.widthPixels;
                    pConfigIOData->stabilizationMargins.heightLines = m_stabilizationMargin.heightLines;
                    result = InitializeStripingParams(pConfigIOData);
                    if (CamxResultSuccess != result)
                    {
                        CAMX_LOG_ERROR(CamxLogGroupPProc, "Initialize Striping params failed %d", result);
                    }
                }
            }
        }
    }

    // On any failure after allocation, free the acquire info here; the caller only
    // releases pDeviceResourceParam on the success path, so it would otherwise leak.
    if ((CamxResultSuccess != result) && (NULL != pIcpResource))
    {
        CAMX_FREE(pIcpResource);
        pIcpResource                       = NULL;
        pResource->pDeviceResourceParam    = NULL;
        pResource->deviceResourceParamSize = 0;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::AcquireDevice
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::AcquireDevice()
{
    CSLICPAcquireDeviceInfo*    pDevInfo;
    CSLDeviceResource           deviceResourceRequest   = { 0 };
    CamxResult                  result                  = CamxResultSuccess;
    CSLBufferInfo               configIOMem             = { 0 };

    /// @todo (CAMX-886) Add CSLMemFlagSharedAccess once available from memory team
    result = CSLAlloc(NameAndInstanceId(),
                      &configIOMem,
                      sizeof(IpeConfigIo),
                      1,
                      (CSLMemFlagUMDAccess | CSLMemFlagSharedAccess | CSLMemFlagHw),
                      &DeviceIndices()[0],
                      1);
    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "CSLAlloc returned configIOMem.fd=%d", configIOMem.fd);

    if (CamxResultSuccess == result)
    {
        CAMX_ASSERT(CSLInvalidHandle != configIOMem.hHandle);
        CAMX_ASSERT(NULL != configIOMem.pVirtualAddr);

        if ((NULL != configIOMem.pVirtualAddr) && (CSLInvalidHandle != configIOMem.hHandle))
        {
            result = SetupDeviceResource(&configIOMem, &deviceResourceRequest);
        }
        else
        {
            // Previously this fell through and acquired the device with an empty
            // resource request; fail explicitly instead.
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Invalid config IO buffer mapping");
            result = CamxResultEInvalidPointer;
        }

        if (CamxResultSuccess == result)
        {
            // During acquire device, KMD will create firmware handle and also call config IO
            result = CSLAcquireDevice(GetHwContext()->GetCSLSession(),
                                      &m_hDevice,
                                      DeviceIndices()[0],
                                      &deviceResourceRequest,
                                      1,
                                      NULL,
                                      0,
                                      NameAndInstanceId());

            if (CamxResultSuccess == result)
            {
                // Firmware provides scratch buffer requirements during configIO; only
                // read them back once the acquire has actually succeeded.
                pDevInfo = reinterpret_cast<CSLICPAcquireDeviceInfo*>(deviceResourceRequest.pDeviceResourceParam);
                if (NULL != pDevInfo)
                {
                    m_firmwareScratchMemSize = pDevInfo->scratchMemSize;
                }

                SetDeviceAcquired(TRUE);
                AddCSLDeviceHandle(m_hDevice);
            }
            else
            {
                CAMX_LOG_ERROR(CamxLogGroupPProc, "Acquire IPE Device Failed");
            }
        }

        // Release the config IO buffer and the resource payload on ALL paths; the
        // original code skipped both when SetupDeviceResource failed, leaking them.
        if (CSLInvalidHandle != configIOMem.hHandle)
        {
            CSLReleaseBuffer(configIOMem.hHandle);
        }

        if (NULL != deviceResourceRequest.pDeviceResourceParam)
        {
            CAMX_FREE(deviceResourceRequest.pDeviceResourceParam);
            deviceResourceRequest.pDeviceResourceParam = NULL;
        }
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Out of memory");
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::ReleaseDevice
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::ReleaseDevice()
{
    CamxResult result = CamxResultSuccess;

    // Nothing to release unless a device handle exists on a valid HW context.
    if ((NULL == GetHwContext()) || (0 == m_hDevice))
    {
        return result;
    }

    result = CSLReleaseDevice(GetHwContext()->GetCSLSession(), m_hDevice);

    if (CamxResultSuccess != result)
    {
        CAMX_ASSERT_ALWAYS_MESSAGE("Failed to release device");
    }
    else
    {
        // Mark the node as no longer holding the device.
        SetDeviceAcquired(FALSE);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::ConfigureIPECapability
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::ConfigureIPECapability()
{
    CamxResult result = CamxResultSuccess;

    GetHwContext()->GetDeviceVersion(CSLDeviceTypeICP, &m_version);

    /// @todo (CAMX-652) Finalize the version number definiation with CSL layer.
    if (0 == m_version.majorVersion)
    {
        /// @todo (CAMX-727) Implement query capability from hardware/firmware.
        m_capability.numIPEIQModules  = sizeof(IQModulesList) / sizeof(IPEIQModuleInfo);
        m_capability.pIPEIQModuleList = IQModulesList;

        // Striping library in firmware will be removed in future. Remove this setting once striping in FW is removed.
        m_capability.swStriping = static_cast<Titan17xContext *>(
            GetHwContext())->GetTitan17xSettingsManager()->GetTitan17xStaticSettings()->IPESwStriping;

        // Input dimension limits depend on whether ICA is in the path.
        m_capability.maxInputWidth[ICA_MODE_DISABLED]  = IPEMaxInputWidthICADisabled;  // ICA Disabled
        m_capability.maxInputHeight[ICA_MODE_DISABLED] = IPEMaxInputHeightICADisabled; // ICA Disabled
        m_capability.maxInputWidth[ICA_MODE_ENABLED]   = IPEMaxInputWidthICAEnabled;   // ICA Enabled
        m_capability.maxInputHeight[ICA_MODE_ENABLED]  = IPEMaxInputHeightICAEnabled;  // ICA Enabled

        m_capability.minInputWidth  = IPEMinInputWidth;
        m_capability.minInputHeight = IPEMinInputHeight;

        // Scaling ratio limits differ between linear and UBWC output formats.
        m_capability.maxDownscale[UBWC_MODE_DISABLED] = IPEMaxDownscaleLinear; // LINEAR Format
        m_capability.maxDownscale[UBWC_MODE_ENABLED]  = IPEMaxDownscaleUBWC;   // UBWC   Format
        m_capability.maxUpscale[UBWC_MODE_DISABLED]   = IPEMaxUpscaleLinear;   // LINEAR Format
        m_capability.maxUpscale[UBWC_MODE_ENABLED]    = IPEMaxUpscaleUBWC;     // UBWC   Format

        m_capability.minOutputWidthUBWC  = IPEMinOutputWidthUBWC;
        m_capability.minOutputHeightUBWC = IPEMinOutputHeightUBWC;

        m_capability.numIPE = GetHwContext()->GetNumberOfIPE();
    }
    else
    {
        result = CamxResultEUnsupported;
        CAMX_ASSERT_ALWAYS_MESSAGE("%s: Unsupported Version Number", __FUNCTION__);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::UpdateIPEIOLimits
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::UpdateIPEIOLimits(
    BufferNegotiationData* pBufferNegotiationData)
{
    UINT numInputPort = 0;
    UINT inputPortId[IPEMaxInput];

    if (NULL == pBufferNegotiationData)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Invalid buffer negotiation data pointer");
        return CamxResultEInvalidPointer;
    }

    // Get Input Port List
    GetAllInputPortIds(&numInputPort, &inputPortId[0]);

    const UINT parentNodeID = GetParentNodeType(inputPortId[0]);

    // If ports in addition to the full port are notified, the minimum input limits
    // change: with Full + DSx ports enabled the IPE input processing limits tighten
    // due to the NPS limitation. The offline (BPS-fed) pipeline has its own
    // multi-pass limits; the single-port limits are identical for both pipelines.
    if (pBufferNegotiationData->numOutputPortsNotified > 1)
    {
        if (BPS == parentNodeID)
        {
            m_capability.minInputWidth  = IPEMinInputWidthMultiPassOffline;
            m_capability.minInputHeight = IPEMinInputHeightMultiPassOffline;
        }
        else
        {
            m_capability.minInputWidth  = IPEMinInputWidthMultiPass;
            m_capability.minInputHeight = IPEMinInputHeightMultiPass;
        }
    }
    else
    {
        m_capability.minInputWidth  = IPEMinInputWidth;
        m_capability.minInputHeight = IPEMinInputHeight;
    }

    return CamxResultSuccess;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// GetModuleProcessingSection
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
IPEProcessingSection GetModuleProcessingSection(
    ISPIQModuleType ipeIQModule)
{
    // Map each IQ module to the IPE processing section it executes in.
    IPEProcessingSection section = IPEProcessingSection::IPEInvalidSection;

    switch (ipeIQModule)
    {
        // Pre-LTM post-processing stripe (PPS before local tone mapping)
        case ISPIQModuleType::IPECAC:
        case ISPIQModuleType::IPECrop:
        case ISPIQModuleType::IPEChromaUpsample:
        case ISPIQModuleType::IPECST:
        case ISPIQModuleType::IPELTM:
            section = IPEProcessingSection::IPEPPSPreLTM;
            break;

        // Post-LTM post-processing stripe
        case ISPIQModuleType::IPEColorCorrection:
        case ISPIQModuleType::IPEGamma:
        case ISPIQModuleType::IPE2DLUT:
        case ISPIQModuleType::IPEChromaEnhancement:
        case ISPIQModuleType::IPEChromaSuppression:
        case ISPIQModuleType::IPESCE:
        case ISPIQModuleType::IPEASF:
        case ISPIQModuleType::IPEUpscaler:
        case ISPIQModuleType::IPEGrainAdder:
        case ISPIQModuleType::IPEDownScaler:
        case ISPIQModuleType::IPEFOVCrop:
        case ISPIQModuleType::IPEClamp:
            section = IPEProcessingSection::IPEPPSPostLTM;
            break;

        // Noise processing stripe (NPS)
        case ISPIQModuleType::IPEICA:
        case ISPIQModuleType::IPERaster22:
        case ISPIQModuleType::IPERasterPD:
        case ISPIQModuleType::IPEANR:
        case ISPIQModuleType::IPETF:
            section = IPEProcessingSection::IPENPS;
            break;

        default:
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Unsupported IQ module type");
            break;
    }

    return section;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// GetProcessingSectionForProfile
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
IPEProcessingSection GetProcessingSectionForProfile(
    IPEProfileId propertyValue)
{
    // Map an instance profile id to the processing section(s) the instance runs.
    // Unknown profiles fall back to IPEAll (the initializer) after logging.
    IPEProcessingSection type = IPEProcessingSection::IPEAll;

    switch (propertyValue)
    {
        case IPEProfileId::IPEProfileIdNPS:
            type = IPEProcessingSection::IPENPS;
            break;
        case IPEProfileId::IPEProfileIdPPS:
            type = IPEProcessingSection::IPEPPS;
            break;
        case IPEProfileId::IPEProfileIdScale:
            // Scale-only profile keeps the IPEAll default; module selection is
            // handled separately in CreateIPEIQModules.
            break;
        case IPEProfileId::IPEProfileIdNoZoomCrop:
            type = IPEProcessingSection::IPENoZoomCrop;
            break;
        case IPEProfileId::IPEProfileIdDefault:
            type = IPEProcessingSection::IPEAll;
            break;
        default:
            // Fix: previous message said "Unsupported IQ module type" (copy-paste
            // from GetModuleProcessingSection) even though this is a profile id.
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Unsupported IPE profile id %d", propertyValue);
            break;
    }

    return type;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::CreateIPEIQModules
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::CreateIPEIQModules()
{
    CamxResult              result                      = CamxResultSuccess;
    IPEIQModuleInfo*        pIQModule                   = m_capability.pIPEIQModuleList;
    IPEModuleCreateData     moduleInputData             = { 0 };
    IPEProcessingSection    instanceSection             = IPEProcessingSection::IPEAll;
    IPEProcessingSection    moduleSection               = IPEProcessingSection::IPEAll;
    BOOL                    moduleDependeciesEnabled    = TRUE;

    instanceSection = GetProcessingSectionForProfile(m_instanceProperty.profileId);

    // This is a special case where we want all IQ blocks, but want to skip the fillzoomwindow
    if (IPEProcessingSection::IPENoZoomCrop == instanceSection)
    {
        m_nodePropDisableZoomCrop = TRUE;
        // set the instanceSection back to IPEALL to simplify reuse the logic for IPEAll
        instanceSection = IPEProcessingSection::IPEAll;
    }

    moduleInputData.initializationData.pipelineIPEData.pDeviceIndex = &m_deviceIndex;
    moduleInputData.initializationData.requestQueueDepth            = GetPipeline()->GetRequestQueueDepth();

    m_numIPEIQModulesEnabled                                        = 0;

    for (UINT count = 0; count < m_capability.numIPEIQModules; count++)
    {
        // Decide once, per module, whether it should be instantiated; the creation
        // sequence below is shared (previously duplicated in the Scale branch).
        BOOL createModule = FALSE;

        if (IPEProfileId::IPEProfileIdScale == m_instanceProperty.profileId)
        {
            // For only scale, need to enable CST and Chroma Enhancement
            // (isEnabled is intentionally not consulted for this profile).
            createModule = ((ISPIQModuleType::IPECST == pIQModule[count].moduleType) ||
                            (ISPIQModuleType::IPEChromaEnhancement == pIQModule[count].moduleType));
            if (TRUE == createModule)
            {
                CAMX_LOG_INFO(CamxLogGroupPProc, " Scale Processing Block");
            }
        }
        else
        {
            moduleDependeciesEnabled = TRUE;

            if (instanceSection != IPEProcessingSection::IPEAll)
            {
                moduleSection = GetModuleProcessingSection(pIQModule[count].moduleType);
                if ((moduleSection == IPEProcessingSection::IPEPPSPreLTM) ||
                    (moduleSection == IPEProcessingSection::IPEPPSPostLTM))
                {
                    moduleSection = IPEProcessingSection::IPEPPS;
                }
                // In case of Invalid Processing section only moduleDependeciesEnabled should be FALSE
                if (instanceSection != moduleSection)
                {
                    moduleDependeciesEnabled = FALSE;
                }
            }

            /// @todo (CAMX-735) Link IPE IQ modules with new Chromatix adapter
            createModule = ((TRUE == pIQModule[count].isEnabled) && (TRUE == moduleDependeciesEnabled));
        }

        if (TRUE == createModule)
        {
            /// @todo (CAMX-728) Setup & initialize IQ pipeline
            moduleInputData.path = pIQModule[count].path;
            result = pIQModule[count].IQCreate(&moduleInputData);
            if (CamxResultSuccess == result)
            {
                m_pEnabledIPEIQModule[m_numIPEIQModulesEnabled] = moduleInputData.pModule;
                m_numIPEIQModulesEnabled++;
            }
            else
            {
                CAMX_ASSERT_ALWAYS_MESSAGE("%s: Failed to Create IQ Module, count = %d", __FUNCTION__, count);
                break;
            }
        }
    }

    // The clean-up for the error case happens outside this function

    return result;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::Cleanup()
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::Cleanup()
{
    CamxResult result = CamxResultSuccess;

    // Destroy every IQ module instantiated for this node and clear its slot
    for (UINT index = 0; index < m_numIPEIQModulesEnabled; index++)
    {
        if (NULL != m_pEnabledIPEIQModule[index])
        {
            m_pEnabledIPEIQModule[index]->Destroy();
            m_pEnabledIPEIQModule[index] = NULL;
        }
    }

    // Release the CSL-backed scratch buffers, then free the tracking structures
    // NOTE(review): m_numScratchBuffers itself is not reset here — confirm callers rely on that
    for (UINT index = 0; index < m_numScratchBuffers; index++)
    {
        if (NULL != m_pScratchMemoryBuffer[index])
        {
            if (CSLInvalidHandle != m_pScratchMemoryBuffer[index]->hHandle)
            {
                CSLReleaseBuffer(m_pScratchMemoryBuffer[index]->hHandle);
            }
            CAMX_FREE(m_pScratchMemoryBuffer[index]);
            m_pScratchMemoryBuffer[index] = NULL;
        }
    }

    m_numIPEIQModulesEnabled = 0;

    // The striping library context exists only when striping runs in UMD (SW striping)
    if (m_capability.swStriping)
    {
        result = IPEStripingLibraryContextDestroy(&m_hStripingLib);
        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Failed to destroy striping library, error = %d", result);
        }
    }

    if (NULL != m_pTuningMetadata)
    {
        CAMX_FREE(m_pTuningMetadata);
        m_pTuningMetadata = NULL;
    }

    // The debug-data writer is reference-counted across IPE node instances; delete on last release
    if (NULL != s_pDebugDataWriter)
    {
        s_debugDataWriterCounter--;

        if (0 == s_debugDataWriterCounter)
        {
            CAMX_DELETE s_pDebugDataWriter;
            s_pDebugDataWriter = NULL;
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::PopulateGeneralTuningMetadata()
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::PopulateGeneralTuningMetadata(
    ISPInputData* pInputData)
{
    IpeIQSettings*          pIPEIQsettings          = static_cast<IpeIQSettings*>(pInputData->pipelineIPEData.pIPEIQSettings);
    ChiTuningModeParameter* pTuningModeParameter    = pInputData->pTuningData;
    TuningModeDebugData*    pTuningDebugData        = &pInputData->pIPETuningMetadata->IPETuningModeDebugData;

    // Copy to a packed structure the IPE module configuration used by FW
    CAMX_STATIC_ASSERT(sizeof(IpeIQSettings) <= sizeof(pInputData->pIPETuningMetadata->IPEModuleConfigData));
    Utils::Memcpy(&pInputData->pIPETuningMetadata->IPEModuleConfigData,
                  pIPEIQsettings,
                  sizeof(IpeIQSettings));

    // Populate node information: identity, request, and instance-level processing configuration
    pInputData->pIPETuningMetadata->IPENodeInformation.instaceId        = InstanceID();
    pInputData->pIPETuningMetadata->IPENodeInformation.requestId        = pInputData->frameNum;
    pInputData->pIPETuningMetadata->IPENodeInformation.isRealTime       = IsRealTime();
    pInputData->pIPETuningMetadata->IPENodeInformation.profileId        = m_instanceProperty.profileId;
    pInputData->pIPETuningMetadata->IPENodeInformation.processingType   = m_instanceProperty.processingType;

    // Record each selection parameter of the tuning mode into the matching debug-data field
    ChiTuningMode* pTuningMode = NULL;
    for (UINT32 paramNumber = 0; paramNumber <  pTuningModeParameter->noOfSelectionParameter; paramNumber++)
    {
        pTuningMode = &pTuningModeParameter->TuningMode[paramNumber];

        switch (pTuningMode->mode)
        {
            case ChiModeType::Default:
                pTuningDebugData->base = static_cast<UINT32>(pTuningMode->subMode.value);
                break;
            case ChiModeType::Sensor:
                pTuningDebugData->sensor = static_cast<UINT32>(pTuningMode->subMode.value);
                break;
            case ChiModeType::Usecase:
                pTuningDebugData->usecase = static_cast<UINT32>(pTuningMode->subMode.usecase);
                break;
            case ChiModeType::Feature1:
                pTuningDebugData->feature1 = static_cast<UINT32>(pTuningMode->subMode.feature1);
                break;
            case ChiModeType::Feature2:
                pTuningDebugData->feature2 = static_cast<UINT32>(pTuningMode->subMode.feature2);
                break;
            case ChiModeType::Scene:
                pTuningDebugData->scene = static_cast<UINT32>(pTuningMode->subMode.scene);
                break;
            case ChiModeType::Effect:
                pTuningDebugData->effect = static_cast<UINT32>(pTuningMode->subMode.effect);
                break;
            default:
                CAMX_LOG_WARN(CamxLogGroupPProc, "IPE: fail to set tuning mode type");
                break;
        }
    }

    CAMX_LOG_VERBOSE(CamxLogGroupPProc,
                     "TuningMode: ReqID: %llu: Default %u, Sensor %u usecase %u feature1 %u feature2 %u scene %u effect %u",
                     pInputData->frameNum,
                     pTuningDebugData->base,
                     pTuningDebugData->sensor,
                     pTuningDebugData->usecase,
                     pTuningDebugData->feature1,
                     pTuningDebugData->feature2,
                     pTuningDebugData->scene,
                     pTuningDebugData->effect);
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::DumpTuningMetadata()
//
// Gathers IPE tuning metadata for the current request and writes it as debug-data tags: real-time instances use the
// first partition of the per-request debug-data buffer and the "...Offline" tag IDs are used for offline instances,
// which write into the second partition at an instance-dependent offset.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::DumpTuningMetadata(
    ISPInputData* pInputData)
{
    CamxResult  result                              = CamxResultSuccess;
    DebugData   debugData                           = { 0 };
    UINT        PropertiesTuning[]                  = { 0 };
    // NOTE(review): function-local static cached across calls (and shared across instances) — confirm single-threaded use
    static UINT metaTagDebugDataAll                 = 0;
    const UINT  length                              = CAMX_ARRAY_SIZE(PropertiesTuning);
    VOID*       pData[length]                       = { 0 };
    UINT64      propertyDataTuningOffset[length]    = { 0 };
    DebugData*  pDebugDataPartial                   = NULL;

    // Real-time nodes read the per-node tuning property; offline nodes read the DebugDataAll vendor tag from input metadata
    if (TRUE == IsRealTime())
    {
        PropertiesTuning[0] = PropertyIDTuningDataIPE;
    }
    else
    {
        VendorTagManager::QueryVendorTagLocation("org.quic.camera.debugdata", "DebugDataAll", &metaTagDebugDataAll);
        PropertiesTuning[0] = metaTagDebugDataAll | InputMetadataSectionMask;
    }

    GetDataList(PropertiesTuning, pData, propertyDataTuningOffset, length);
    pDebugDataPartial = reinterpret_cast<DebugData*>(pData[0]);
    // NOTE(review): InstanceID() > 1 also bails out here — presumably only instances 0 & 1 are supported (see offset
    // computation below) — confirm against the debug-data partition layout
    if (NULL == pDebugDataPartial || NULL == pDebugDataPartial->pData || InstanceID() > 1)
    {
        // Debug-data buffer not available
        CAMX_LOG_WARN(CamxLogGroupPProc, "Debug-data requested but buffer not available");
        return;
    }

    if (TRUE == IsRealTime())
    {
        debugData.pData = pDebugDataPartial->pData;
        debugData.size  = pDebugDataPartial->size;
        // Use first half for Real time data
        debugData.size  = debugData.size / DebugDataPartitionsIPE;
    }
    else // Using copy done for offline processing
    {
        SIZE_T instanceOffset = 0;
        debugData.size  = HAL3MetadataUtil::DebugDataSize(DebugDataType::IPETuning);
        // Use second half for offline data
        // Only instance 0 & 1 supported for debug data
        debugData.size  = debugData.size / DebugDataPartitionsIPE;
        instanceOffset  = (InstanceID() + 1) * debugData.size;
        debugData.pData = Utils::VoidPtrInc(pDebugDataPartial->pData,
                                           (HAL3MetadataUtil::DebugDataOffset(DebugDataType::IPETuning)) + (instanceOffset));
    }

    // Populate any metadata obtained directly from base IPE node
    PopulateGeneralTuningMetadata(pInputData);

    // Re-target the shared writer only when the request or the destination buffer changed since the last call
    if ((s_debugDataRequestId != pInputData->frameNum)                                                              ||
        (FALSE == (s_pDebugDataWriter->IsSameBufferPointer(static_cast<BYTE*>(debugData.pData), debugData.size))))
    {
        // Set new requestId
        s_debugDataRequestId = pInputData->frameNum;
        // Set the buffer pointer
        s_pDebugDataWriter->SetBufferPointer(static_cast<BYTE*>(debugData.pData), debugData.size);
    }

    if (TRUE == IsRealTime())
    {
        // Add IPE tuning metadata tags
        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPENodeInfo,
                                       DebugDataTagType::TuningIQNodeInfo,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPENodeInformation,
                                       sizeof(pInputData->pIPETuningMetadata->IPENodeInformation));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPETuningMode,
                                       DebugDataTagType::TuningModeInfo,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPETuningModeDebugData,
                                       sizeof(pInputData->pIPETuningMetadata->IPETuningModeDebugData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEGammaPackedLUT,
                                       DebugDataTagType::TuningGammaIPECurve,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.gamma),
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.gamma,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.gamma));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEModulesConfigRegister,
                                      DebugDataTagType::UInt32,
                                      CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEModuleConfigData.modulesConfigData),
                                      &pInputData->pIPETuningMetadata->IPEModuleConfigData.modulesConfigData,
                                      sizeof(pInputData->pIPETuningMetadata->IPEModuleConfigData.modulesConfigData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEICEInputPackedLUT,
                                       DebugDataTagType::TuningICELUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEInput],
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEInput]));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEICEReferencePackedLUT,
                                       DebugDataTagType::TuningICELUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEReference],
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEReference]));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEANRRegister,
                                       DebugDataTagType::TuningANRConfig,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEANRData.ANRData),
                                       &pInputData->pIPETuningMetadata->IPEANRData.ANRData,
                                       sizeof(pInputData->pIPETuningMetadata->IPEANRData.ANRData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPETFRegister,
                                       DebugDataTagType::TuningTFConfig,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPETFData.TFData),
                                       &pInputData->pIPETuningMetadata->IPETFData.TFData,
                                       sizeof(pInputData->pIPETuningMetadata->IPETFData.TFData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPECACRegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPECACData.CACConfig),
                                       &pInputData->pIPETuningMetadata->IPECACData.CACConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPECACData.CACConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPECSTRegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPECSTData.CSTConfig),
                                       &pInputData->pIPETuningMetadata->IPECSTData.CSTConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPECSTData.CSTConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPELTMPackedLUT,
                                       DebugDataTagType::TuningLTMLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LTMLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LTMLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPECCRegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPECCData.colorCorrectionConfig),
                                       &pInputData->pIPETuningMetadata->IPECCData.colorCorrectionConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPECCData.colorCorrectionConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPE2DLUTRegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPE2DLUTData.LUT2DConfig),
                                       &pInputData->pIPETuningMetadata->IPE2DLUTData.LUT2DConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPE2DLUTData.LUT2DConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPE2DLUTPackedLUT,
                                       DebugDataTagType::Tuning2DLUTLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LUT2D,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LUT2D));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEChromaEnhancementRegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEChromaEnhancementData.CEConfig),
                                       &pInputData->pIPETuningMetadata->IPEChromaEnhancementData.CEConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPEChromaEnhancementData.CEConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEChromasuppressionRegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEChromaSuppressionData.CSConfig),
                                       &pInputData->pIPETuningMetadata->IPEChromaSuppressionData.CSConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPEChromaSuppressionData.CSConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPESCERegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPESCEData.SCEConfig),
                                       &pInputData->pIPETuningMetadata->IPESCEData.SCEConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPESCEData.SCEConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEASFRegister,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEASFData.ASFConfig),
                                       &pInputData->pIPETuningMetadata->IPEASFData.ASFConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPEASFData.ASFConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEASFPackedLUT,
                                       DebugDataTagType::TuningASFLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ASFLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ASFLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEUpscalerPackedLUT,
                                       DebugDataTagType::TuningUpscalerLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.upscalerLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.upscalerLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEGrainAdderPackedLUT,
                                       DebugDataTagType::TuningGrainAdderLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.grainAdderLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.grainAdderLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPELTMExposureIndex,
                                       DebugDataTagType::Float,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPELTMExposureData.exposureIndex),
                                       &pInputData->pIPETuningMetadata->IPELTMExposureData.exposureIndex,
                                       sizeof(pInputData->pIPETuningMetadata->IPELTMExposureData.exposureIndex));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEASFFace,
                                       DebugDataTagType::TuningFaceData,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEASFFaceDetection,
                                       sizeof(pInputData->pIPETuningMetadata->IPEASFFaceDetection));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEANRFace,
                                       DebugDataTagType::TuningFaceData,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEANRFaceDetection,
                                       sizeof(pInputData->pIPETuningMetadata->IPEANRFaceDetection));

        // Make a copy in main metadata pool
        static const UINT PropertiesDebugData[] = { PropertyIDDebugDataAll };
        VOID* pSrcData[1] = { 0 };
        const UINT lengthAll = CAMX_ARRAY_SIZE(PropertiesDebugData);
        UINT64 propertyDataTuningAllOffset[lengthAll] = { 0 };
        GetDataList(PropertiesDebugData, pSrcData, propertyDataTuningAllOffset, lengthAll);

        // NOTE(review): the result of this query is stored but never checked before publishing the vendor tag — verify
        result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.debugdata", "DebugDataAll", &metaTagDebugDataAll);
        const UINT TuningVendorTag[] = { metaTagDebugDataAll };
        const VOID* pDstData[1] = { pSrcData[0] };
        UINT pDataCount[1] = { sizeof(DebugData) };

        WriteDataList(TuningVendorTag, pDstData, pDataCount, 1);
    }
    else // Handle offline data
    {
        // Add IPE tuning metadata tags
        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPENodeInfoOffline,
                                       DebugDataTagType::TuningIQNodeInfo,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPENodeInformation,
                                       sizeof(pInputData->pIPETuningMetadata->IPENodeInformation));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPETuningModeOffline,
                                       DebugDataTagType::TuningModeInfo,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPETuningModeDebugData,
                                       sizeof(pInputData->pIPETuningMetadata->IPETuningModeDebugData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEGammaPackedLUTOffline,
                                       DebugDataTagType::TuningGammaIPECurve,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.gamma),
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.gamma,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.gamma));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEModulesConfigRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEModuleConfigData.modulesConfigData),
                                       &pInputData->pIPETuningMetadata->IPEModuleConfigData.modulesConfigData,
                                       sizeof(pInputData->pIPETuningMetadata->IPEModuleConfigData.modulesConfigData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEICEInputPackedLUTOffline,
                                       DebugDataTagType::TuningICELUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEInput],
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEInput]));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEICEReferencePackedLUTOffline,
                                       DebugDataTagType::TuningICELUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEReference],
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ICALUT[TuningICEReference]));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEANRRegisterOffline,
                                       DebugDataTagType::TuningANRConfig,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEANRData.ANRData),
                                       &pInputData->pIPETuningMetadata->IPEANRData.ANRData,
                                       sizeof(pInputData->pIPETuningMetadata->IPEANRData.ANRData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPETFRegisterOffline,
                                       DebugDataTagType::TuningTFConfig,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPETFData.TFData),
                                       &pInputData->pIPETuningMetadata->IPETFData.TFData,
                                       sizeof(pInputData->pIPETuningMetadata->IPETFData.TFData));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPECACRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPECACData.CACConfig),
                                       &pInputData->pIPETuningMetadata->IPECACData.CACConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPECACData.CACConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPECSTRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPECSTData.CSTConfig),
                                       &pInputData->pIPETuningMetadata->IPECSTData.CSTConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPECSTData.CSTConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPELTMPackedLUTOffline,
                                       DebugDataTagType::TuningLTMLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LTMLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LTMLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPECCRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPECCData.colorCorrectionConfig),
                                       &pInputData->pIPETuningMetadata->IPECCData.colorCorrectionConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPECCData.colorCorrectionConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPE2DLUTRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPE2DLUTData.LUT2DConfig),
                                       &pInputData->pIPETuningMetadata->IPE2DLUTData.LUT2DConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPE2DLUTData.LUT2DConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPE2DLUTPackedLUTOffline,
                                       DebugDataTagType::Tuning2DLUTLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LUT2D,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.LUT2D));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEChromaEnhancementRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEChromaEnhancementData.CEConfig),
                                       &pInputData->pIPETuningMetadata->IPEChromaEnhancementData.CEConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPEChromaEnhancementData.CEConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEChromasuppressionRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEChromaSuppressionData.CSConfig),
                                       &pInputData->pIPETuningMetadata->IPEChromaSuppressionData.CSConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPEChromaSuppressionData.CSConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPESCERegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPESCEData.SCEConfig),
                                       &pInputData->pIPETuningMetadata->IPESCEData.SCEConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPESCEData.SCEConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEASFRegisterOffline,
                                       DebugDataTagType::UInt32,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPEASFData.ASFConfig),
                                       &pInputData->pIPETuningMetadata->IPEASFData.ASFConfig,
                                       sizeof(pInputData->pIPETuningMetadata->IPEASFData.ASFConfig));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEASFPackedLUTOffline,
                                       DebugDataTagType::TuningASFLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ASFLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.ASFLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEUpscalerPackedLUTOffline,
                                       DebugDataTagType::TuningUpscalerLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.upscalerLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.upscalerLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEGrainAdderPackedLUTOffline,
                                       DebugDataTagType::TuningGrainAdderLUT,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.grainAdderLUT,
                                       sizeof(pInputData->pIPETuningMetadata->IPEDMIData.packedLUT.grainAdderLUT));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPELTMExposureIndexOffline,
                                       DebugDataTagType::Float,
                                       CAMX_ARRAY_SIZE(pInputData->pIPETuningMetadata->IPELTMExposureData.exposureIndex),
                                       &pInputData->pIPETuningMetadata->IPELTMExposureData.exposureIndex,
                                       sizeof(pInputData->pIPETuningMetadata->IPELTMExposureData.exposureIndex));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEASFFaceOffline,
                                       DebugDataTagType::TuningFaceData,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEASFFaceDetection,
                                       sizeof(pInputData->pIPETuningMetadata->IPEASFFaceDetection));

        s_pDebugDataWriter->AddDataTag(DebugDataTagID::TuningIPEANRFaceOffline,
                                       DebugDataTagType::TuningFaceData,
                                       1,
                                       &pInputData->pIPETuningMetadata->IPEANRFaceDetection,
                                       sizeof(pInputData->pIPETuningMetadata->IPEANRFaceDetection));
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::PostMetadata
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::PostMetadata(
    const ISPInputData* pInputData)
{
    CamxResult  result                               = CamxResultSuccess;
    const VOID* ppData[NumIPEMetadataOutputTags]     = { 0 };
    UINT        pDataCount[NumIPEMetadataOutputTags] = { 0 };
    UINT        tagIndex                             = 0;
    UINT        effectMap                            = 0;
    UINT        sceneMap                             = 0;
    UINT        modeMap                              = 0;

    // Appends one value to the output tag tables. The append order must track
    // the entries of IPEMetadataOutputTags exactly.
    auto AppendTagData = [&ppData, &pDataCount, &tagIndex](const VOID* pValue, UINT valueCount)
    {
        pDataCount[tagIndex] = valueCount;
        ppData[tagIndex]     = pValue;
        tagIndex++;
    };

    // Translate the per-request tuning selection (effect / scene sub-modes)
    // into the corresponding metadata map values.
    if (pInputData->pTuningData)
    {
        for (UINT i = 0; i < pInputData->pTuningData->noOfSelectionParameter; i++)
        {
            const ChiModeType currentMode = pInputData->pTuningData->TuningMode[i].mode;
            if (ChiModeType::Effect == currentMode)
            {
                const ChiModeEffectSubModeType effect =
                    pInputData->pTuningData->TuningMode[i].subMode.effect;
                effectMap = IPEEffectMap[static_cast<UINT>(effect)].to;
            }
            if (ChiModeType::Scene == currentMode)
            {
                const ChiModeSceneSubModeType scene =
                    pInputData->pTuningData->TuningMode[i].subMode.scene;
                sceneMap = IPESceneMap[static_cast<UINT>(scene)].to;
            }
        }
    }

    // Get Control mode, Scene mode and effects mode from HAL tag
    static const UINT vendorTagsControlMode[] = { InputControlMode, InputControlSceneMode, InputControlEffectMode };
    const SIZE_T      numTags                 = CAMX_ARRAY_SIZE(vendorTagsControlMode);
    VOID*             pTagData[numTags]       = { 0 };
    UINT64            tagOffsets[numTags]     = { 0 };

    GetDataList(vendorTagsControlMode, pTagData, tagOffsets, numTags);

    // Destination order mirrors vendorTagsControlMode above: mode, scene, effect.
    UINT* const pTagDestinations[numTags] = { &modeMap, &sceneMap, &effectMap };
    for (SIZE_T i = 0; i < numTags; i++)
    {
        if (NULL != pTagData[i])
        {
            Utils::Memcpy(pTagDestinations[i], pTagData[i], sizeof(UINT));
        }
    }

    AppendTagData(&modeMap, 1);
    AppendTagData(&effectMap, 1);
    AppendTagData(&sceneMap, 1);
    AppendTagData(&(pInputData->pCalculatedData->metadata.edgeMode), 1);
    AppendTagData(&(pInputData->pHALTagsData->controlVideoStabilizationMode), 1);
    AppendTagData(&(pInputData->pCalculatedData->metadata.colorCorrectionAberrationMode), 1);
    AppendTagData(&(pInputData->pHALTagsData->noiseReductionMode), 1);
    AppendTagData(&pInputData->pCalculatedData->toneMapData.tonemapMode, 1);
    AppendTagData(&pInputData->pCalculatedData->colorCorrectionMode, 1);
    AppendTagData(&pInputData->pCalculatedData->CCTransformMatrix, 3 * 3);

    // Tonemap curve data is only written when curve points are present; when
    // absent, the last three entries of IPEMetadataOutputTags are dropped from
    // the write.
    if (pInputData->pCalculatedData->toneMapData.curvePoints > 0)
    {
        AppendTagData(&pInputData->pCalculatedData->toneMapData.tonemapCurveBlue,
                      pInputData->pCalculatedData->toneMapData.curvePoints);
        AppendTagData(&pInputData->pCalculatedData->toneMapData.tonemapCurveGreen,
                      pInputData->pCalculatedData->toneMapData.curvePoints);
        AppendTagData(&pInputData->pCalculatedData->toneMapData.tonemapCurveRed,
                      pInputData->pCalculatedData->toneMapData.curvePoints);

        WriteDataList(IPEMetadataOutputTags, ppData, pDataCount, NumIPEMetadataOutputTags);
    }
    else
    {
        WriteDataList(IPEMetadataOutputTags, ppData, pDataCount, NumIPEMetadataOutputTags - 3);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::ProgramIQConfig()
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::ProgramIQConfig(
    ISPInputData* pInputData)
{
    CamxResult      result        = CamxResultSuccess;
    UINT            count         = 0;
    UINT            path          = 0;
    UINT            index         = 0;

    IPEIQModuleData moduleData;

    // Call IQInterface to Set up the Trigger data; real-time pipelines apply
    // the max pipeline delay, offline pipelines use 0.
    Node* pBaseNode = this;
    if (TRUE == IsRealTime())
    {
        CAMX_LOG_INFO(CamxLogGroupPProc, "Realtime ...");
        IQInterface::IQSetupTriggerData(pInputData, pBaseNode, GetStaticSettings()->maxPipelineDelay, NULL);
    }
    else
    {
        CAMX_LOG_INFO(CamxLogGroupPProc, "Offline ...");
        IQInterface::IQSetupTriggerData(pInputData, pBaseNode, 0, NULL);
    }

    if (TRUE == GetHwContext()->GetStaticSettings()->enableIPEIQLogging)
    {
        CAMX_LOG_INFO(CamxLogGroupISP, "Begin of dumping IPE Trigger ------");
        IQInterface::s_interpolationTable.IQTriggerDataDump(&pInputData->triggerData);
        CAMX_LOG_INFO(CamxLogGroupISP, "End of dumping IPE Trigger ------");
    }

    // Execute every enabled IQ module and record its LUT / pass offsets so the
    // command buffers can be patched later.
    for (count = 0; count < m_numIPEIQModulesEnabled; count++)
    {
        if (TRUE == m_adrcInfo.isADRCEnabled)
        {
            // Update AEC Gain values for ADRC use cases, before GTM(includes) will be triggered by shortGain,
            // between GTM & LTM(includes) will be by shortGain*power(DRCGain, gtm_perc) and post LTM will be
            // by shortGain*DRCGain
            IQInterface::UpdateAECGain(m_pEnabledIPEIQModule[count]->GetIQType(), pInputData, m_adrcInfo.percentageOfGTM);
        }

        result = m_pEnabledIPEIQModule[count]->Execute(pInputData);
        if (CamxResultSuccess != result)
        {
            CAMX_ASSERT_ALWAYS_MESSAGE("%s: Failed to Run IQ Config, count %d", __FUNCTION__, count);
            break;
        }

        m_pEnabledIPEIQModule[count]->GetModuleData(&moduleData);

        switch (m_pEnabledIPEIQModule[count]->GetIQType())
        {
            case ISPIQModuleType::IPELTM:
                m_preLTMLUTOffset[ProgramIndexLTM]      = m_pEnabledIPEIQModule[count]->GetLUTOffset();
                break;
            case ISPIQModuleType::IPEGamma:
                m_postLTMLUTOffset[ProgramIndexGLUT]    = m_pEnabledIPEIQModule[count]->GetLUTOffset();
                break;
            case ISPIQModuleType::IPE2DLUT:
                m_postLTMLUTOffset[ProgramIndex2DLUT]   = m_pEnabledIPEIQModule[count]->GetLUTOffset();
                break;
            case ISPIQModuleType::IPEASF:
                m_postLTMLUTOffset[ProgramIndexASF]     = m_pEnabledIPEIQModule[count]->GetLUTOffset();
                break;
            case ISPIQModuleType::IPEUpscaler:
                m_postLTMLUTOffset[ProgramIndexUpscale] = m_pEnabledIPEIQModule[count]->GetLUTOffset();
                break;
            case ISPIQModuleType::IPEGrainAdder:
                m_postLTMLUTOffset[ProgramIndexGRA]     = m_pEnabledIPEIQModule[count]->GetLUTOffset();
                break;
            case ISPIQModuleType::IPEANR:
                // ANR programs one offset per pass plus a single-pass command length
                for (UINT passNum = PASS_NAME_FULL; passNum < PASS_NAME_MAX; passNum++)
                {
                    m_ANRPassOffset[passNum] = moduleData.offsetPass[passNum];
                }
                m_ANRSinglePassCmdBufferSize = moduleData.singlePassCmdLength;
                break;
            case ISPIQModuleType::IPEICA:
                // ICA on the reference path maps to ICA2, input path to ICA1
                path                  = moduleData.IPEPath;
                CAMX_ASSERT((path == IPEPath::REFERENCE) || (path == IPEPath::INPUT));
                index                 =
                    (path == IPEPath::REFERENCE) ? ProgramIndexICA2 : ProgramIndexICA1;
                m_ICALUTOffset[index] = m_pEnabledIPEIQModule[count]->GetLUTOffset();
                break;
            case ISPIQModuleType::IPETF:
                // TF programs one offset per pass plus a single-pass command length
                for (UINT passNum = PASS_NAME_FULL; passNum < PASS_NAME_MAX; passNum++)
                {
                    m_TFPassOffset[passNum] = moduleData.offsetPass[passNum];
                }
                m_TFSinglePassCmdBufferSize = moduleData.singlePassCmdLength;
                break;
            default:
                break;
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::SetIQModuleNumLUT
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void IPENode::SetIQModuleNumLUT(
    ISPIQModuleType type,
    UINT            numLUTs,
    INT             path)
{
    // Record the LUT count for the given module in the table that matches its
    // processing stage (pre-LTM, post-LTM, or ICA). Unlisted module types are
    // left untouched.
    if (ISPIQModuleType::IPELTM == type)
    {
        m_preLTMLUTCount[ProgramIndexLTM]      = numLUTs;
    }
    else if (ISPIQModuleType::IPEGamma == type)
    {
        m_postLTMLUTCount[ProgramIndexGLUT]    = numLUTs;
    }
    else if (ISPIQModuleType::IPE2DLUT == type)
    {
        m_postLTMLUTCount[ProgramIndex2DLUT]   = numLUTs;
    }
    else if (ISPIQModuleType::IPEASF == type)
    {
        m_postLTMLUTCount[ProgramIndexASF]     = numLUTs;
    }
    else if (ISPIQModuleType::IPEUpscaler == type)
    {
        m_postLTMLUTCount[ProgramIndexUpscale] = numLUTs;
    }
    else if (ISPIQModuleType::IPEGrainAdder == type)
    {
        m_postLTMLUTCount[ProgramIndexGRA]     = numLUTs;
    }
    else if (ISPIQModuleType::IPEICA == type)
    {
        // Reference-path ICA maps to ICA2, any other path to ICA1
        const UINT slot      = (path == (IPEPath::REFERENCE)) ? ProgramIndexICA2 : ProgramIndexICA1;
        m_ICALUTCount[slot]  = numLUTs;
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::UpdateIQCmdSize
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void IPENode::UpdateIQCmdSize()
{
    // Accumulates the worst-case command buffer sizes (pre-LTM, post-LTM, NPS,
    // and DMI header buffers) across all enabled IQ modules, and records each
    // module's LUT count via SetIQModuleNumLUT.
    IPEProcessingSection section    = IPEProcessingSection::IPEInvalidSection;
    UINT                 numLUTs    = 0;
    ISPIQModuleType      type;
    INT                  path       = -1;
    IPEIQModuleData      moduleData;

    for (UINT count = 0; count < m_numIPEIQModulesEnabled; count++)
    {
        numLUTs = m_pEnabledIPEIQModule[count]->GetNumLUT();
        type    = m_pEnabledIPEIQModule[count]->GetIQType();
        section = GetModuleProcessingSection(type);
        switch (section)
        {
            case IPEProcessingSection::IPEPPSPreLTM:
                // Each LUT needs one CDM DMI command header in the DMI header buffer
                m_maxCmdBufferSizeBytes[CmdBufferPreLTM]      += m_pEnabledIPEIQModule[count]->GetIQCmdLength();
                m_maxCmdBufferSizeBytes[CmdBufferDMIHeader]   += numLUTs *
                    cdm_get_cmd_header_size(CDMCmdDMI) * RegisterWidthInBytes;
                break;
            case IPEProcessingSection::IPEPPSPostLTM:
                m_maxCmdBufferSizeBytes[CmdBufferPostLTM]     += m_pEnabledIPEIQModule[count]->GetIQCmdLength();
                m_maxCmdBufferSizeBytes[CmdBufferDMIHeader]   += numLUTs *
                    cdm_get_cmd_header_size(CDMCmdDMI) * RegisterWidthInBytes;
                break;
            case IPEProcessingSection::IPENPS:
                // NPS (noise-processing) modules additionally report which IPE
                // path (input/reference) they program
                m_maxCmdBufferSizeBytes[CmdBufferNPS]         +=
                    m_pEnabledIPEIQModule[count]->GetIQCmdLength();
                m_maxCmdBufferSizeBytes[CmdBufferDMIHeader]   +=
                    numLUTs * cdm_get_cmd_header_size(CDMCmdDMI) * RegisterWidthInBytes;
                m_pEnabledIPEIQModule[count]->GetModuleData(&moduleData);
                path                                           = moduleData.IPEPath;
                CAMX_ASSERT((path == IPEPath::REFERENCE) || (path == IPEPath::INPUT));
                break;
            default:
                CAMX_LOG_WARN(CamxLogGroupPProc, "%s: invalid module type %d", __FUNCTION__, type);
                break;
        }
        // NOTE(review): 'path' is only refreshed for NPS-section modules, so
        // non-NPS modules are passed the previous iteration's value (-1 on the
        // first pass). SetIQModuleNumLUT only consumes 'path' for ICA, which is
        // presumably always in the NPS section -- confirm.
        SetIQModuleNumLUT(type, numLUTs, path);
    }

}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::IsFDEnabled
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
BOOL IPENode::IsFDEnabled(
    VOID)
{
    // Reports whether face detection is currently active, based on the
    // published face-detect mode input property.
    static const UINT PropertiesFDSettings[] =
    {
        InputStatisticsFaceDetectMode
    };

    static const UINT   Length          = CAMX_ARRAY_SIZE(PropertiesFDSettings);
    VOID*               pData[Length]   = { 0 };
    UINT64              offsets[Length] = { 1 };

    GetDataList(PropertiesFDSettings, pData, offsets, Length);

    BOOL isEnabled = FALSE;

    if (NULL != pData[0])
    {
        StatisticsFaceDetectModeValues fdMode = {};
        Utils::Memcpy(&fdMode, pData[0], sizeof(StatisticsFaceDetectModeValues));
        // Any mode other than "Off" counts as enabled
        if (StatisticsFaceDetectModeValues::StatisticsFaceDetectModeOff != fdMode)
        {
            isEnabled = TRUE;
        }
    }

    return isEnabled;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::SetDependencies
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::SetDependencies(
    NodeProcessRequestData*  pNodeRequestData,
    UINT                     parentNodeId,
    BOOL                     allOutputPortsAreLoopback)
{
    UINT32     count        = 0;
    UINT32     metaTagFDRoi = 0;
    // Fix: 'result' holds a CamxResult, declare it as such (was UINT32)
    CamxResult result       = CamxResultSuccess;

    // Resolve the OEM FD-results tag up front; it is only added as a
    // dependency below when this query succeeds and FD is enabled.
    result = VendorTagManager::QueryVendorTagLocation(VendorTagSectionOEMFDResults,
           VendorTagNameOEMFDResults, &metaTagFDRoi);

    CAMX_LOG_VERBOSE(CamxLogGroupPProc, "IPE: ProcessRequest: Setting dependency for Req#%llu",
                     pNodeRequestData->pCaptureRequest->requestId);

    // When fed by IFE, wait for the applied crop info that IFE publishes
    if (parentNodeId == IFE)
    {
        UINT32 metaTagAppliedCrop = 0;
        if (CamxResultSuccess == VendorTagManager::QueryVendorTagLocation("org.quic.camera.ifecropinfo",
                                "AppliedCrop", &metaTagAppliedCrop))
        {
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = metaTagAppliedCrop;
        }
    }

    if (TRUE == m_isStatsNodeAvailable)
    {
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Set dependency for real time pipeline");

        // 3A dependency
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDAECFrameControl;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDAWBFrameControl;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDAECStatsControl;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDAWBStatsControl;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDAFStatsControl;

        if (parentNodeId == IFE)
        {
            // IFE dependency
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDIFEDigitalZoom;
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDIFEScaleOutput;
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDIFEGammaOutput;

            if (TRUE == m_FOVCEnabled)
            {
                pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDFOVCFrameInfo;
            }

            // FD ROI dependency is taken with offset 1 (presumably the result
            // of the previous request -- confirm against FD publisher), and
            // only once enough requests have been issued.
            if ((result == CamxResultSuccess) && IsFDEnabled() &&
                ((pNodeRequestData->pCaptureRequest->requestId) > GetStaticSettings()->minReqFdDependency))
            {
                pNodeRequestData->dependencyInfo[0].propertyDependency.offsets[count] = 1;
                pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = metaTagFDRoi;
            }

        }
        else if (IsNodeInPipeline(BPS))
        {
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDBPSGammaOutput;
        }
        else
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "parent Node is not IFE in real time pipeline id %d", parentNodeId);
        }
    }
    else
    {
        if (TRUE == m_OEMStatsSettingEnable)
        {
            if (parentNodeId == IFE)
            {
                pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDIFEDigitalZoom;
                pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDIFEGammaOutput;
            }
            else if (parentNodeId == BPS)
            {
                pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDBPSGammaOutput;
            }
            else
            {
                CAMX_LOG_VERBOSE(CamxLogGroupPProc, "parent Node is not IFE/BPS in OEMSetting pipeline id %d", parentNodeId);
            }
        }
        else
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Set dependency for none real time pipeline");

            if (IsNodeInPipeline(BPS))
            {
                // BPS dependency
                pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = PropertyIDBPSGammaOutput;
            }
        }
    }

    // Don't need to add parent check, because ipe just use this property when parent node is chinode.
    if (TRUE == m_instanceProperty.enableCHICropInfoPropertyDependency)
    {
        UINT32 metaTag = 0;
        // Fix: compare against CamxResultSuccess (QueryVendorTagLocation returns
        // a CamxResult); previously compared against CDKResultSuccess
        if (CamxResultSuccess == VendorTagManager::QueryVendorTagLocation("com.qti.cropregions",
                "ChiNodeResidualCrop", &metaTag))
        {
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] = metaTag;
        }
    }

    if (0 < count)
    {
        /// @todo (CAMX-739) Need to add AWB update support and other dependency here
        pNodeRequestData->dependencyInfo[0].propertyDependency.count = count;
    }

    // Set Dependency for ADRC Info.
    SetADRCDependencies(pNodeRequestData);

    // ICA dependency needed for offline pipeline in case of MFNR / MFSR uscases
    SetICADependencies(pNodeRequestData, allOutputPortsAreLoopback);

    if (0 < pNodeRequestData->dependencyInfo[0].propertyDependency.count)
    {
        pNodeRequestData->dependencyInfo[0].dependencyFlags.hasPropertyDependency = TRUE;
    }

    // ExecuteProcessRequest always requires sequenceId 1, purposefully reporting dep regardless of having a dependency or not
    pNodeRequestData->numDependencyLists                  = 1;
    pNodeRequestData->dependencyInfo[0].processSequenceId = 1;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::InitializeStripingParams
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::InitializeStripingParams(
    IpeConfigIoData* pConfigIOData)
{
    CamxResult     result       = CamxResultSuccess;
    UINT32         titanVersion = static_cast<Titan17xContext *>(GetHwContext())->GetTitanVersion();
    UINT32         hwVersion    = static_cast<Titan17xContext *>(GetHwContext())->GetHwVersion();

    CAMX_ASSERT(NULL != pConfigIOData);

    // Check if striping in UMD is enabled before creating striping library context
    if (m_capability.swStriping)
    {
        result = IPEStripingLibraryContextCreate(pConfigIOData,
                                                 NULL,
                                                 titanVersion,
                                                 hwVersion,
                                                 &m_hStripingLib,
                                                 &m_maxCmdBufferSizeBytes[CmdBufferStriping],
                                                 &m_maxCmdBufferSizeBytes[CmdBufferBLMemory]);

        if (CamxResultSuccess != result)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Stripinglib ctxt failed result %d,ConfigIO %p,titanversion 0x%x,hwversion 0x%x",
                               result, pConfigIOData, titanVersion, hwVersion);
        }

        // Both FW command buffer managers share the same configuration apart
        // from the resource size and the memory flags, so populate the common
        // fields in one place (addresses CAMX-1270).
        auto CreateFWCmdManager = [this](UINT32 bufferId, UINT32 memFlags) -> CamxResult
        {
            ResourceParams params               = { 0 };
            params.resourceSize                 = m_maxCmdBufferSizeBytes[bufferId];
            params.poolSize                     = m_IPECmdBlobCount * params.resourceSize;
            params.usageFlags.cmdBuffer         = 1;
            params.cmdParams.type               = CmdType::FW;
            params.alignment                    = CamxCommandBufferAlignmentInBytes;
            // Striping command buffer will not have any nested address
            params.cmdParams.enableAddrPatching = 0;
            params.cmdParams.maxNumNestedAddrs  = 0;
            params.memFlags                     = memFlags;
            params.pDeviceIndices               = &m_deviceIndex;
            params.numDevices                   = 1;

            return CreateCmdBufferManager(&params, &m_pIPECmdBufferManager[bufferId]);
        };

        if (CamxResultSuccess == result)
        {
            result = CreateFWCmdManager(CmdBufferStriping, CSLMemFlagUMDAccess);
        }

        if (CamxResultSuccess == result)
        {
            // BL memory only needs UMD access when hang dumps are enabled
            result = CreateFWCmdManager(CmdBufferBLMemory,
                                        (TRUE == m_enableIPEHangDump) ? CSLMemFlagUMDAccess : 0);
        }
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::FillStripingParams
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::FillStripingParams(
    IpeFrameProcessData*         pFrameProcessData,
    IpeIQSettings*               pIPEIQsettings,
    CmdBuffer**                  ppIPECmdBuffer,
    CSLICPClockBandwidthRequest* pICPClockBandwidthRequest)
{
    // Runs the UMD striping library for this frame's IQ settings, writes its
    // output into the striping command buffer, patches that buffer's address
    // into the frame-process payload, and records the frame pixel count for
    // the clock/bandwidth request.
    CamxResult                     result          = CamxResultSuccess;
    IPEStripingLibExecuteParams    stripeParams    = { 0 };
    UINT32*                        pStripeMem      = NULL;
    UINT32                         offset;
    IPEStripingLibExecuteMetaData metaDataBuffer   = { 0 };

    CAMX_ASSERT(NULL != ppIPECmdBuffer[CmdBufferStriping]);
    // Reserve the whole striping buffer; the /4 presumably converts bytes to
    // UINT32 words to match the buffer's UINT32* view -- confirm BeginCommands units
    pStripeMem = reinterpret_cast<UINT32*>(
        ppIPECmdBuffer[CmdBufferStriping]->BeginCommands(m_maxCmdBufferSizeBytes[CmdBufferStriping] / 4));

    if (NULL != pStripeMem)
    {
        // ICA1 carries the current-frame zoom window; ICA2 the reference
        // (previous) frame's window
        stripeParams.iq                 = pIPEIQsettings;
        stripeParams.ica1               = &pIPEIQsettings->ica1Parameters;
        stripeParams.ica2               = &pIPEIQsettings->ica2Parameters;
        stripeParams.zoom               = &pIPEIQsettings->ica1Parameters.zoomWindow;
        stripeParams.prevZoom           = &pIPEIQsettings->ica2Parameters.zoomWindow;
        stripeParams.maxNumOfCoresToUse = pFrameProcessData->maxNumOfCoresToUse;

        // Dump the per-module enable state fed to the striping library
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "## rt %d blob %u processingType %d, ica1 pers %u, grid %u, ica2 persp %u, grid %u"
                        "anr %d %d %d %d, tf %d, %d, %d %d, cac %d, ltm %d, cc %d glut %d lut %d, chromaEn %d,chromasup %d"
                        "skin %d, asf %d, gra %d, refine %d, %d, %d",
                        IsRealTime(), m_IPECmdBlobCount, m_instanceProperty.processingType,
                        pIPEIQsettings->ica1Parameters.isPerspectiveEnable,
                        pIPEIQsettings->ica1Parameters.isGridEnable,
                        pIPEIQsettings->ica2Parameters.isPerspectiveEnable,
                        pIPEIQsettings->ica2Parameters.isGridEnable,
                        pIPEIQsettings->anrParameters.parameters[0].moduleCfg.EN,
                        pIPEIQsettings->anrParameters.parameters[1].moduleCfg.EN,
                        pIPEIQsettings->anrParameters.parameters[2].moduleCfg.EN,
                        pIPEIQsettings->anrParameters.parameters[3].moduleCfg.EN,
                        pIPEIQsettings->tfParameters.parameters[PASS_NAME_FULL].moduleCfg.EN,
                        pIPEIQsettings->tfParameters.parameters[PASS_NAME_DC_4].moduleCfg.EN,
                        pIPEIQsettings->tfParameters.parameters[PASS_NAME_DC_16].moduleCfg.EN,
                        pIPEIQsettings->tfParameters.parameters[PASS_NAME_DC_64].moduleCfg.EN,
                        pIPEIQsettings->cacParameters.moduleCfg.EN,
                        pIPEIQsettings->ltmParameters.moduleCfg.EN,
                        pIPEIQsettings->colorCorrectParameters.moduleCfg.EN,
                        pIPEIQsettings->glutParameters.moduleCfg.EN,
                        pIPEIQsettings->lut2dParameters.moduleCfg.EN,
                        pIPEIQsettings->chromaEnhancementParameters.moduleCfg.EN,
                        pIPEIQsettings->chromaSupressionParameters.moduleCfg.EN,
                        pIPEIQsettings->skinEnhancementParameters.moduleCfg.EN,
                        pIPEIQsettings->asfParameters.moduleCfg.EN,
                        pIPEIQsettings->graParameters.moduleCfg.EN,
                        pIPEIQsettings->refinementParameters.dc[0].refinementCfg.TRENABLE,
                        pIPEIQsettings->refinementParameters.dc[1].refinementCfg.TRENABLE,
                        pIPEIQsettings->refinementParameters.dc[2].refinementCfg.TRENABLE);

        result = IPEStripingLibraryExecute(m_hStripingLib, &stripeParams, pStripeMem, &metaDataBuffer);
        if (CamxResultSuccess == result)
        {
            // Patch the striping buffer address into the frame-process
            // payload at stripingLibOutAddr so firmware can locate the output
            offset =
                static_cast<UINT32>(offsetof(IpeFrameProcess, cmdData)) +
                static_cast<UINT32>(offsetof(IpeFrameProcessData, stripingLibOutAddr));
            result = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(
                offset, ppIPECmdBuffer[CmdBufferStriping], 0);
            // Pixel count reported by the striping library drives the ICP
            // clock/bandwidth vote
            pICPClockBandwidthRequest->frameCycles = metaDataBuffer.pixelCount;
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Num pixels = %d", metaDataBuffer.pixelCount);
        }
        else
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Striping Library execution failed %d", result);
        }
    }
    else
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "Invalid Striping memory");
        result = CamxResultENoMemory;
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::PatchBLMemoryBuffer
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::PatchBLMemoryBuffer(
    IpeFrameProcessData* pFrameProcessData,
    CmdBuffer**          ppIPECmdBuffer)
{
    // Publishes the BL (CDM) memory buffer to firmware: records its size in
    // the frame-process payload and patches its address at cdmBufferAddress.
    // No-op when the buffer is absent or sized zero.
    CamxResult result = CamxResultSuccess;

    if ((0 != m_maxCmdBufferSizeBytes[CmdBufferBLMemory]) && (NULL != ppIPECmdBuffer[CmdBufferBLMemory]))
    {
        pFrameProcessData->cdmBufferSize = m_maxCmdBufferSizeBytes[CmdBufferBLMemory];

        const UINT patchOffset =
            static_cast<UINT32>(offsetof(IpeFrameProcess, cmdData)) +
            static_cast<UINT32>(offsetof(IpeFrameProcessData, cdmBufferAddress));

        result = ppIPECmdBuffer[CmdBufferFrameProcess]->AddNestedCmdBufferInfo(patchOffset,
                                                                               ppIPECmdBuffer[CmdBufferBLMemory],
                                                                               0);
    }

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetFaceROI
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetFaceROI(
    ISPInputData* pInputData,
    UINT          parentNodeId)
{
    // Fetches the published face ROIs, translates them from the sensor
    // reference frame into the current IPE input frame (only when IFE is the
    // parent), and fills pInputData->fDData with per-face center/radius.
    // Returns success even when no ROI is published (best-effort consumer).
    CamxResult           result                      = CamxResultSuccess;
    FaceROIInformation   faceRoiData                 = {};
    RectangleCoordinate* pRoiRect                    = NULL;
    CHIRectangle         roiRect                     = {};
    IFECropInfo          cropInfo                    = {};
    CHIDimension         referenceFrameDimension     = {};
    CHIDimension         currentFrameDimension       = {};
    CHIRectangle         currentFrameMapWrtReference = {};
    CHIRectangle         roiWrtReferenceFrame        = {};
    UINT32               metaTagAppliedCrop          = 0;
    UINT32               metaTagFDRoi                = 0;

    result = VendorTagManager::QueryVendorTagLocation(VendorTagSectionOEMFDResults,
           VendorTagNameOEMFDResults, &metaTagFDRoi);

    if (CamxResultSuccess == result)
    {
        result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.ifecropinfo",
               "AppliedCrop", &metaTagAppliedCrop);
    }
    else
    {
        // FD results tag unavailable: nothing to fetch, treated as non-fatal
        CAMX_LOG_VERBOSE(CamxLogGroupCPP, "query FD vendor failed result %d",
            result);
        return CamxResultSuccess;
    }

    // Fix: this array must NOT be 'static'. It is built from tag locations
    // resolved at runtime, and a function-local static is dynamically
    // initialized only on the first call, freezing the first call's values
    // for the lifetime of the process (and is not thread-safe to mutate).
    const UINT GetProps[] =
    {
        metaTagFDRoi | InputMetadataSectionMask, // FD ROI is read from the input metadata pool
        metaTagAppliedCrop,
    };

    const UINT GetPropsLength               = CAMX_ARRAY_SIZE(GetProps);
    VOID*      pData[GetPropsLength]        = { 0 };
    UINT64     offsets[GetPropsLength]      = { 0 , 0 };

    // NOTE(review): offset 2 presumably accounts for the real-time pipeline
    // delay when IFE is the parent (BPS offline uses the same request) -- confirm
    offsets[0] = (BPS == parentNodeId) ? 0 : 2;
    GetDataList(GetProps, pData, offsets, GetPropsLength);

    if (NULL != pData[0])
    {
        Utils::Memcpy(&faceRoiData, pData[0], sizeof(FaceROIInformation));

        // Translate face roi if IFE is parent. If BPS is parent then don't translate
        if (IFE == parentNodeId)
        {
            if (NULL != pData[1])
            {
                // IFE's applied crop maps the current frame into the sensor
                // reference frame
                Utils::Memcpy(&cropInfo, pData[1], sizeof(IFECropInfo));
                currentFrameMapWrtReference.top    = cropInfo.fullPath.top;
                currentFrameMapWrtReference.left   = cropInfo.fullPath.left;
                currentFrameMapWrtReference.width  = cropInfo.fullPath.width;
                currentFrameMapWrtReference.height = cropInfo.fullPath.height;

                CAMX_LOG_VERBOSE(CamxLogGroupCPP, "currentFrameWrtReference T:%d L:%d W:%d H:%d",
                    currentFrameMapWrtReference.top, currentFrameMapWrtReference.left,
                    currentFrameMapWrtReference.width, currentFrameMapWrtReference.height);
            }
            else
            {
                CAMX_LOG_ERROR(CamxLogGroupCPP, "no applied crop data");
                return CamxResultEFailed;
            }

            // Reference frame dimensions come from the active sensor mode
            const SensorMode* pSensorData = NULL;
            GetSensorModeData(&pSensorData);

            if (NULL != pSensorData)
            {
                referenceFrameDimension.height = pSensorData->resolution.outputHeight;
                referenceFrameDimension.width  = pSensorData->resolution.outputWidth;

                CAMX_LOG_VERBOSE(CamxLogGroupCPP, "reference W:%d H:%d",
                    referenceFrameDimension.width, referenceFrameDimension.height);
            }
            else
            {
                CAMX_LOG_ERROR(CamxLogGroupCPP, "no ref crop data");
                return CamxResultEFailed;
            }

            // Input width/height
            currentFrameDimension.width  = m_fullInputWidth;
            currentFrameDimension.height = m_fullInputHeight;
            CAMX_LOG_VERBOSE(CamxLogGroupCPP, "current dim W:%d H:%d",
                currentFrameDimension.width, currentFrameDimension.height);

        }

        // Clamp the face count to what fDData can hold
        pInputData->fDData.numberOfFace = static_cast<UINT16>(
            (faceRoiData.ROICount > MAX_FACE_NUM) ? MAX_FACE_NUM : faceRoiData.ROICount);

        CAMX_LOG_VERBOSE(CamxLogGroupCPP, "Face ROI is published face num %d max %d",
            pInputData->fDData.numberOfFace, MAX_FACE_NUM);

        for (UINT16 i = 0; i < pInputData->fDData.numberOfFace; i++)
        {
            pRoiRect = &faceRoiData.unstabilizedROI[i].faceRect;

            roiWrtReferenceFrame.left   = pRoiRect->left;
            roiWrtReferenceFrame.top    = pRoiRect->top;
            roiWrtReferenceFrame.width  = pRoiRect->width;
            roiWrtReferenceFrame.height = pRoiRect->height;

            // For BPS parents the translation structs are zero-initialized and
            // the ROI passes through untranslated
            roiRect = Translator::ConvertROIFromReferenceToCurrent(
                &referenceFrameDimension, &currentFrameDimension,
                &currentFrameMapWrtReference, &roiWrtReferenceFrame);

            pInputData->fDData.faceCenterX[i] = static_cast<INT16>(roiRect.left + (roiRect.width / 2));
            pInputData->fDData.faceCenterY[i] = static_cast<INT16>(roiRect.top + (roiRect.height / 2));
            pInputData->fDData.faceRadius[i]  = static_cast<INT16>(Utils::MinUINT32(roiRect.width, roiRect.height));

            CAMX_LOG_VERBOSE(CamxLogGroupCPP, " center x:%d y:%d r:%d",
                pInputData->fDData.faceCenterX[i], pInputData->fDData.faceCenterY[i], pInputData->fDData.faceRadius[i]);
        }
    }
    else
    {
        CAMX_LOG_WARN(CamxLogGroupCPP, "Face ROI is not published");
    }
    return CamxResultSuccess;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::SetAAAInputData()
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::SetAAAInputData(
    ISPInputData* pInputData,
    UINT          parentNodeID)
{
    VOID*   pData[IPEVendorTagMax]   = { 0 };
    UINT64  offsets[IPEVendorTagMax] = { 0 };

    // Note, the property order in both tables must match what is defined in the enum IPEVendorTagId
    static const UINT Properties3ARealtime[] =
    {
        PropertyIDAECStatsControl,
        PropertyIDAWBStatsControl,
        PropertyIDAFStatsControl,
        PropertyIDAECFrameControl,
        PropertyIDAWBFrameControl,
    };
    static const UINT Properties3AOffline[] =
    {
        PropertyIDAECStatsControl | InputMetadataSectionMask,
        PropertyIDAWBStatsControl | InputMetadataSectionMask,
        PropertyIDAFStatsControl  | InputMetadataSectionMask,
        PropertyIDAECFrameControl | InputMetadataSectionMask,
        PropertyIDAWBFrameControl | InputMetadataSectionMask,
    };
    static const UINT PropertySize = CAMX_ARRAY_SIZE(Properties3ARealtime);

    if (TRUE == m_isStatsNodeAvailable)
    {
        // Realtime pipeline: stats node publishes the 3A properties directly
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Get 3A properties for realtime pipeline");
        GetDataList(Properties3ARealtime, pData, offsets, PropertySize);
    }
    else
    {
        // Offline pipeline: 3A properties arrive through the input metadata section
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Get 3A properties for non realtime pipeline");
        GetDataList(Properties3AOffline, pData, offsets, PropertySize);
    }

    // Copy each fetched 3A control/frame structure into the ISP input data
    CAMX_ASSERT(NULL != pData[IPEVendorTagAECStats]);
    CAMX_ASSERT(NULL != pInputData->pAECStatsUpdateData);
    Utils::Memcpy(pInputData->pAECStatsUpdateData, pData[IPEVendorTagAECStats], sizeof(AECStatsControl));

    CAMX_ASSERT(NULL != pData[IPEVendorTagAWBStats]);
    CAMX_ASSERT(NULL != pInputData->pAWBStatsUpdateData);
    Utils::Memcpy(pInputData->pAWBStatsUpdateData, pData[IPEVendorTagAWBStats], sizeof(AWBStatsControl));

    CAMX_ASSERT(NULL != pData[IPEVendorTagAFStats]);
    CAMX_ASSERT(NULL != pInputData->pAFStatsUpdateData);
    Utils::Memcpy(pInputData->pAFStatsUpdateData, pData[IPEVendorTagAFStats], sizeof(AFStatsControl));

    CAMX_ASSERT(NULL != pData[IPEVendorTagAECFrame]);
    CAMX_ASSERT(NULL != pInputData->pAECUpdateData);
    Utils::Memcpy(pInputData->pAECUpdateData, pData[IPEVendorTagAECFrame], sizeof(AECFrameControl));

    CAMX_ASSERT(NULL != pData[IPEVendorTagAWBFrame]);
    CAMX_ASSERT(NULL != pInputData->pAWBUpdateData);
    Utils::Memcpy(pInputData->pAWBUpdateData, pData[IPEVendorTagAWBFrame], sizeof(AWBFrameControl));

    pInputData->parentNodeID = parentNodeID;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::HardcodeSettings
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::HardcodeSettings(
    ISPInputData* pInputData)
{
    // Fixed defaults for the IPE pipeline portion of the input data
    pInputData->pipelineIPEData.hasTFRefInput        = 0;
    pInputData->pipelineIPEData.numOfFrames          = 2;
    pInputData->pipelineIPEData.pWarpGeometryData    = NULL;
    pInputData->pipelineIPEData.upscalingFactorMFSR  = 1.0f;

    // Digital zoom disabled by default
    pInputData->pipelineIPEData.isDigitalZoomEnabled = 0;
    pInputData->pipelineIPEData.digitalZoomStartX    = 0;
    pInputData->pipelineIPEData.digitalZoomStartY    = 0;

    // Unity lens position/zoom and scale ratios
    pInputData->lensPosition   = 1.0f;
    pInputData->lensZoom       = 1.0f;
    pInputData->preScaleRatio  = 1.0f;
    pInputData->postScaleRatio = 1.0f;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::SetICADependencies
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::SetICADependencies(
    NodeProcessRequestData*  pNodeRequestData,
    BOOL                     allOutputPortsAreLoopback)
{
    // Registers ICA transform vendor-tag properties as dependencies for this request,
    // based on the instance's processing/stabilization configuration. Appends to the
    // existing dependency count and writes the updated count back at the end.
    // @todo (CAMX-2690) Provide this input based on usecase local variable setting to zero for now
    UINT32 count         = pNodeRequestData->dependencyInfo[0].propertyDependency.count;

    // MFNR blend/postfilter in NPS profile depends on the input perspective transform ([0])
    if (((IPEProcessingType::IPEMFNRBlend == m_instanceProperty.processingType) ||
        (IPEProcessingType::IPEMFNRPostfilter == m_instanceProperty.processingType)) &&
        (IPEProfileId::IPEProfileIdNPS == m_instanceProperty.profileId))
    {
        pNodeRequestData->dependencyInfo[0].dependencyFlags.hasPropertyDependency = TRUE;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[0];
        count++;
    }

    // MCTF depends on the ICA reference params tag ([6], consumed in UpdateICADependencies)
    if ((0 != (IPEStabilizationType::IPEStabilizationMCTF & m_instanceProperty.stabilizationType)) &&
        (GetHwContext()->GetStaticSettings()->enableMCTF))
    {
        pNodeRequestData->dependencyInfo[0].dependencyFlags.hasPropertyDependency = TRUE;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[6];
        count++;
    }

    // EIS2: depends on input perspective ([0]); skipped when all output ports are loopback
    if ((0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType)) &&
        (FALSE == allOutputPortsAreLoopback))
    {
        pNodeRequestData->dependencyInfo[0].dependencyFlags.hasPropertyDependency = TRUE;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[0];
        count++;
        //  Use grid flag and stabilization type to enable this property ([1] = input grid transform)
        if (TRUE == static_cast<Titan17xContext*>(GetHwContext())->GetTitan17xSettingsManager()->
            GetTitan17xStaticSettings()->enableICAInGrid)
        {
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count] = m_IPEICATAGLocation[1];
            count++;
        }

        // Commented out for EIS only. Might be needed for EIS + MCTF
        // pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[3];
        // count++;
    }

    // EIS3: depends on the lookahead perspective ([7]); skipped when all output ports are loopback
    if ((0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType)) &&
        (FALSE == allOutputPortsAreLoopback))
    {
        pNodeRequestData->dependencyInfo[0].dependencyFlags.hasPropertyDependency = TRUE;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[7];
        count++;

        //  Use grid flag and stabilization type to enable this property ([8] = lookahead grid transform)
        if (TRUE == static_cast<Titan17xContext*>(GetHwContext())->GetTitan17xSettingsManager()->
            GetTitan17xStaticSettings()->enableICAInGrid)
        {
            pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count] = m_IPEICATAGLocation[8];
            count++;
        }

        // Commented out for EIS only. Might be needed for EIS + MCTF
        // pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[7];
        // count++;
    }

    // OEM ICA setting: depends on input grid ([1]) and ref interpolation ([4]) tags
    if (TRUE == m_OEMICASettingEnable)
    {
        pNodeRequestData->dependencyInfo[0].dependencyFlags.hasPropertyDependency = TRUE;
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[1];
        count++;

        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count]  = m_IPEICATAGLocation[4];
        count++;
    }
    pNodeRequestData->dependencyInfo[0].propertyDependency.count = count;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::SetADRCDependencies
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::SetADRCDependencies(
    NodeProcessRequestData*  pNodeRequestData)
{
    // Adds a dependency on the upstream ADRC info property, preferring IFE over BPS
    // when both could be present in the pipeline.
    // NOTE(review): unlike SetICADependencies, this does not set
    // dependencyInfo[0].dependencyFlags.hasPropertyDependency when it appends a
    // property — confirm the caller sets the flag, otherwise this dependency may
    // never be honored.
    UINT32 count     = pNodeRequestData->dependencyInfo[0].propertyDependency.count;
    if (TRUE == IsNodeInPipeline(IFE))
    {
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] =
            PropertyIDIFEADRCInfoOutput;
    }
    else if (TRUE == IsNodeInPipeline(BPS))
    {
        pNodeRequestData->dependencyInfo[0].propertyDependency.properties[count++] =
            PropertyIDBPSADRCInfoOutput;
    }
    else
    {
        // No ADRC producer in this pipeline; nothing to depend on
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Don't need Set ADRC Dependency");
    }
    pNodeRequestData->dependencyInfo[0].propertyDependency.count = count;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::UpdateICADependencies
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::UpdateICADependencies(
    ISPInputData* pInputData)
{
    CamxResult     result                               = CamxResultSuccess;
    // Published ICA transform properties, indexed per m_IPEICATAGLocation
    VOID*          pPropertyDataICA[IPEProperties]      = { 0 };
    UINT64         propertyDataICAOffset[IPEProperties] = { 0 };
    UINT           length                               = CAMX_ARRAY_SIZE(m_IPEICATAGLocation); // fixed stray ';;'

    CAMX_ASSERT(length == IPEProperties);

    GetDataList(m_IPEICATAGLocation, pPropertyDataICA, propertyDataICAOffset, length);

    // [0]: input perspective transform (EIS2/MFNR path)
    if (NULL != pPropertyDataICA[0])
    {
        if ((IPEProcessingType::IPEMFNRPostfilter == m_instanceProperty.processingType) &&
            (IPEProfileId::IPEProfileIdDefault == m_instanceProperty.profileId))
        {
            CAMX_LOG_INFO(CamxLogGroupPProc, "Skip ICA transforms for MFNR  NPS Postfilter final stage");
        }
        else if (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType))
        {
            // EIS3 consumes the lookahead transforms at [7]/[8] instead
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "Skip EISv2 ICA transforms for EISv3");
        }
        else
        {
            Utils::Memcpy(&pInputData->ICAConfigData.ICAInPerspectiveParams,
                reinterpret_cast<IPEICAPerspectiveTransform*>(pPropertyDataICA[0]),
                sizeof(pInputData->ICAConfigData.ICAInPerspectiveParams));

            CAMX_LOG_INFO(CamxLogGroupPProc, "perspective IN %d , w %d, h %d, r %d, c %d",
                          pInputData->ICAConfigData.ICAInPerspectiveParams.perspectiveTransformEnable,
                          pInputData->ICAConfigData.ICAInPerspectiveParams.transformDefinedOnWidth,
                          pInputData->ICAConfigData.ICAInPerspectiveParams.transformDefinedOnHeight,
                          pInputData->ICAConfigData.ICAInPerspectiveParams.perspetiveGeometryNumolumns,
                          pInputData->ICAConfigData.ICAInPerspectiveParams.perspectiveGeometryNumRows);
        }

    }

    // [1]: input grid transform, only consumed for EIS2 with in-grid enabled
    if (NULL != pPropertyDataICA[1])
    {
        if ((0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType)) &&
            (TRUE == static_cast<Titan17xContext*>(
             GetHwContext())->GetTitan17xSettingsManager()->GetTitan17xStaticSettings()->enableICAInGrid))
        {
            Utils::Memcpy(&pInputData->ICAConfigData.ICAInGridParams,
                reinterpret_cast<IPEICAGridTransform*>(pPropertyDataICA[1]),
                sizeof(pInputData->ICAConfigData.ICAInGridParams));

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "grid IN %d , w %d, h %d, corner  %d",
                             pInputData->ICAConfigData.ICAInGridParams.gridTransformEnable,
                             pInputData->ICAConfigData.ICAInGridParams.transformDefinedOnWidth,
                             pInputData->ICAConfigData.ICAInGridParams.transformDefinedOnHeight,
                             pInputData->ICAConfigData.ICAInGridParams.gridTransformArrayExtrapolatedCorners);
        }
    }

    // [2]: input interpolation parameters
    if (NULL != pPropertyDataICA[2])
    {
        Utils::Memcpy(&pInputData->ICAConfigData.ICAInInterpolationParams,
            reinterpret_cast<IPEICAInterpolationParams*>(pPropertyDataICA[2]),
            sizeof(pInputData->ICAConfigData.ICAInInterpolationParams));
    }

    // [3]: reference perspective transform
    if (NULL != pPropertyDataICA[3])
    {
        Utils::Memcpy(&pInputData->ICAConfigData.ICARefPerspectiveParams,
            reinterpret_cast<IPEICAPerspectiveTransform*>(pPropertyDataICA[3]),
            sizeof(pInputData->ICAConfigData.ICARefPerspectiveParams));
    }

    // [4]: reference grid transform, gated by the static ref-grid setting
    if (NULL != pPropertyDataICA[4])
    {
        if (TRUE == static_cast<Titan17xContext*>(
            GetHwContext())->GetTitan17xSettingsManager()->GetTitan17xStaticSettings()->enableICARefGrid)
        {
            Utils::Memcpy(&pInputData->ICAConfigData.ICARefGridParams,
                reinterpret_cast<IPEICAGridTransform*>(pPropertyDataICA[4]),
                sizeof(pInputData->ICAConfigData.ICARefGridParams));
        }
    }

    // [5]: reference interpolation parameters
    if (NULL != pPropertyDataICA[5])
    {
        Utils::Memcpy(&pInputData->ICAConfigData.ICARefInterpolationParams,
            reinterpret_cast<IPEICAInterpolationParams*>(pPropertyDataICA[5]),
            sizeof(pInputData->ICAConfigData.ICARefInterpolationParams));
    }

    // [6]: ICA reference parameters (registered as the MCTF dependency in SetICADependencies)
    if (NULL != pPropertyDataICA[6])
    {
        Utils::Memcpy(&pInputData->ICAConfigData.ICAReferenceParams,
            reinterpret_cast<IPEICAPerspectiveTransform*>(pPropertyDataICA[6]),
            sizeof(pInputData->ICAConfigData.ICAReferenceParams));
        CAMX_LOG_VERBOSE(CamxLogGroupPProc, "perspective REF %d , w %d, h %d, r %d, c %d",
                         pInputData->ICAConfigData.ICAReferenceParams.perspectiveTransformEnable,
                         pInputData->ICAConfigData.ICAReferenceParams.transformDefinedOnWidth,
                         pInputData->ICAConfigData.ICAReferenceParams.transformDefinedOnHeight,
                         pInputData->ICAConfigData.ICAReferenceParams.perspetiveGeometryNumolumns,
                         pInputData->ICAConfigData.ICAReferenceParams.perspectiveGeometryNumRows);
    }

    // [7]: EIS3 lookahead perspective transform; overwrites ICAInPerspectiveParams
    if (NULL != pPropertyDataICA[7])
    {

        if (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType))
        {
            Utils::Memcpy(&pInputData->ICAConfigData.ICAInPerspectiveParams,
                          reinterpret_cast<IPEICAPerspectiveTransform*>(pPropertyDataICA[7]),
                          sizeof(pInputData->ICAConfigData.ICAInPerspectiveParams));

            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "perspective lookahead %d , w %d, h %d, r %d, c %d frameNum %llu",
                             pInputData->ICAConfigData.ICAInPerspectiveParams.perspectiveTransformEnable,
                             pInputData->ICAConfigData.ICAInPerspectiveParams.transformDefinedOnWidth,
                             pInputData->ICAConfigData.ICAInPerspectiveParams.transformDefinedOnHeight,
                             pInputData->ICAConfigData.ICAInPerspectiveParams.perspetiveGeometryNumolumns,
                             pInputData->ICAConfigData.ICAInPerspectiveParams.perspectiveGeometryNumRows,
                             pInputData->frameNum);
        }
    }

    // [8]: EIS3 lookahead grid transform, gated by the in-grid setting; overwrites ICAInGridParams
    if (NULL != pPropertyDataICA[8])
    {
        if ((0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType)) &&
           (TRUE == static_cast<Titan17xContext*>(GetHwContext())->GetTitan17xSettingsManager()->
                GetTitan17xStaticSettings()->enableICAInGrid))
        {
            Utils::Memcpy(&pInputData->ICAConfigData.ICAInGridParams,
                          reinterpret_cast<IPEICAGridTransform*>(pPropertyDataICA[8]),
                          sizeof(pInputData->ICAConfigData.ICAInGridParams));
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "grid lookahead %d , w %d, h %d, corner  %d",
                             pInputData->ICAConfigData.ICAInGridParams.gridTransformEnable,
                             pInputData->ICAConfigData.ICAInGridParams.transformDefinedOnWidth,
                             pInputData->ICAConfigData.ICAInGridParams.transformDefinedOnHeight,
                             pInputData->ICAConfigData.ICAInGridParams.gridTransformArrayExtrapolatedCorners);
        }
    }

    // Update margins in pixels
    pInputData->pipelineIPEData.marginDimension.widthPixels = m_stabilizationMargin.widthPixels;
    pInputData->pipelineIPEData.marginDimension.heightLines = m_stabilizationMargin.heightLines;

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::PublishICADependencies
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Publishes default ICA perspective/grid transforms for MFNR and EIS use cases.
///
/// Bug fixes vs. previous revision:
///  - transform/gridTransform were declared inside the if-blocks while their addresses were
///    stored in pData[] and consumed by WriteDataList() after those blocks closed — a use of
///    pointers to out-of-scope automatic objects (undefined behavior). They are now hoisted
///    to function scope so they outlive the WriteDataList() call.
///  - The WriteDataList() failure was logged but the function still returned
///    CamxResultSuccess; it now propagates the actual result.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::PublishICADependencies(
    NodeProcessRequestData* pNodeRequestData)
{
    const VOID*                pData[2]      = { 0 };
    UINT                       pDataCount[2] = { 0 };
    CamxResult                 result        = CamxResultSuccess;
    UINT                       i             = 0;

    // Must stay at function scope: pData[] keeps pointers to these until WriteDataList() runs
    IPEICAPerspectiveTransform transform;
    IPEICAGridTransform        gridTransform;

    CAMX_UNREFERENCED_PARAM(pNodeRequestData);

    // Example as to how perspective and grid are posted
    if ((IPEProcessingType::IPEMFNRBlend                == m_instanceProperty.processingType)          ||
        (IPEProcessingType::IPEMFNRPostfilter           == m_instanceProperty.processingType)          ||
        (0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType)) ||
        (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType)))
    {
        // Identity-style default perspective transform defined on the full input dimensions
        transform.perspectiveConfidence       = 1;
        transform.perspectiveGeometryNumRows  = 1;
        transform.perspetiveGeometryNumolumns = 1;
        transform.perspectiveTransformEnable  = 1;
        transform.ReusePerspectiveTransform   = 0;
        transform.transformDefinedOnWidth     = m_fullInputWidth;
        transform.transformDefinedOnHeight    = m_fullInputHeight;
        Utils::Memcpy(&transform.perspectiveTransformArray,
                      perspArray, sizeof(perspArray));

        pData[i]      = &transform;
        pDataCount[i] = sizeof(transform);
        i++;
    }

    if ((0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType)) ||
        (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType)))
    {
        // Default grid transform built from the static gridArrayX/gridArrayY tables
        gridTransform.gridTransformEnable      = 1;
        gridTransform.reuseGridTransform       = 0;
        gridTransform.transformDefinedOnWidth  = GridTransformDefinedOnWidth;
        gridTransform.transformDefinedOnHeight = GridTransformDefinedOnHeight;

        for (UINT idx = 0; idx < (ICAGridTransformWidth * ICAGridTransformHeight); idx++)
        {
            gridTransform.gridTransformArray[idx].x = gridArrayX[idx];
            gridTransform.gridTransformArray[idx].y = gridArrayY[idx];
        }

        gridTransform.gridTransformArrayExtrapolatedCorners = 0;
        CAMX_STATIC_ASSERT(sizeof(gridArrayX) == sizeof(gridArrayY));

        pData[i]      = &gridTransform;
        pDataCount[i] = sizeof(gridTransform);
        i++;
    }

    result = WriteDataList(&m_IPEICATAGLocation[0], pData, pDataCount, i);
    if (CamxResultSuccess != result)
    {
        CAMX_LOG_ERROR(CamxLogGroupPProc, "WriteDataList failed");
    }
    // Propagate the write result instead of unconditionally reporting success
    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::HardcodeAAAInputData
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::HardcodeAAAInputData(
    ISPInputData* pInputData,
    UINT          parentNodeID)
{
    // Local aliases for the AEC/AWB frame control blocks being populated
    AECFrameControl* pAECFrame = pInputData->pAECUpdateData;
    AWBFrameControl* pAWBFrame = pInputData->pAWBUpdateData;

    // Default AEC values
    pAECFrame->luxIndex                     = 350.0f;
    pAECFrame->exposureInfo[0].exposureTime = 1;
    pAECFrame->exposureInfo[0].linearGain   = 1.0f;
    pAECFrame->exposureInfo[0].sensitivity  = 1.0f;
    pAECFrame->exposureInfo[2].sensitivity  = 2.0f;

    // Default AWB gains/CCT
    pAWBFrame->AWBGains.rGain                 = 2.043310f;
    pAWBFrame->AWBGains.gGain                 = 1.0f;
    pAWBFrame->AWBGains.bGain                 = 1.493855f;
    pAWBFrame->colorTemperature               = 2600;
    pAWBFrame->numValidCCMs                   = 1;
    pAWBFrame->AWBCCM[0].isCCMOverrideEnabled = FALSE;

    pInputData->parentNodeID = parentNodeID;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::IsMultiCameraUsecase
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Returns TRUE when the current camera role (Wide/Tele) indicates a dual-camera use case.
///
/// Fixes vs. previous revision: the MultiCameraIdRole pointer returned by GetDataList() is now
/// NULL-checked before dereference, and the query length constant is passed instead of a literal.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
BOOL IPENode::IsMultiCameraUsecase()
{
    BOOL       isDualCamera = FALSE;
    UINT32     cameraIdTag  = 0;
    CamxResult result       = CamxResultEFailed;

    result = VendorTagManager::QueryVendorTagLocation("com.qti.chi.multicamerainfo", "MultiCameraIdRole", &cameraIdTag);
    if (CamxResultSuccess == result)
    {
        MultiCameraIdRole*  pInputMetadata             = NULL;
        const UINT          cameraIdProperty[]         = { cameraIdTag | InputMetadataSectionMask };
        const static UINT   length                     = CAMX_ARRAY_SIZE(cameraIdProperty);
        VOID*               pData[length]              = { 0 };
        UINT64              cameraIdDataOffset[length] = { 0 };

        GetDataList(cameraIdProperty, pData, cameraIdDataOffset, length);

        pInputMetadata = (static_cast<MultiCameraIdRole*>(pData[0]));
        if (NULL != pInputMetadata)
        {
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "is DualCamera currentCameraRole = %d", pInputMetadata->currentCameraRole);
            switch (pInputMetadata->currentCameraRole)
            {
                case CameraRoleTypeWide:
                case CameraRoleTypeTele:
                    isDualCamera = TRUE;
                    break;
                case CameraRoleTypeDefault:
                default:
                    isDualCamera = FALSE;
                    break;
            }
        }
        else
        {
            // Tag exists but no data was published for this request; treat as single camera
            CAMX_LOG_VERBOSE(CamxLogGroupPProc, "MultiCameraIdRole metadata not available");
        }
    }

    return isDualCamera;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::UpdateNumberofPassesonDimension
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::UpdateNumberofPassesonDimension(
    UINT parentNodeID)
{
    CAMX_UNREFERENCED_PARAM(parentNodeID);

    // Count how many downscaled passes still satisfy the ICA minimum dimensions;
    // the full-resolution pass is always supported.
    UINT supportedPasses = 1;
    UINT scaleFactor     = 4;

    for (UINT pass = PASS_NAME_DC_4; pass < m_numPasses; pass++)
    {
        BOOL dimensionOk = (((m_fullInputWidth / scaleFactor) >= ICAMinWidthPixels) &&
                            ((m_fullInputHeight / scaleFactor) >= ICAMinHeightPixels));
        if (TRUE == dimensionOk)
        {
            supportedPasses++;
        }
        scaleFactor *= 4;
    }

    if (supportedPasses != m_numPasses)
    {
        m_numPasses = supportedPasses;
        CAMX_LOG_INFO(CamxLogGroupIQMod, " Update numberofpasses due to unsupported dimension");
    }
    return CamxResultSuccess;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetOEMStatsConfig
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Fetches OEM-published 3A stats/frame controls from input metadata into pInputData.
///
/// Fix vs. previous revision: the vendor-tag array was declared `static const` but initialized
/// from tag locations queried at runtime. A function-local static is initialized only on the
/// first invocation, so the first call's values (possibly zero if a query failed) were frozen
/// for the lifetime of the process. The array is now a plain const local rebuilt per call.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetOEMStatsConfig(
    ISPInputData* pInputData,
    UINT          parentNodeID)
{
    CamxResult result = CamxResultSuccess;

    UINT32 metadataAECFrameControl   = 0;
    UINT32 metadataAWBFrameControl   = 0;
    UINT32 metadataAECStatsControl   = 0;
    UINT32 metadataAWBStatsControl   = 0;
    UINT32 metadataAFStatsControl    = 0;

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.statsconfigs", "AECFrameControl",
        &metadataAECFrameControl);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: AECFrameControl");

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.statsconfigs", "AWBFrameControl",
        &metadataAWBFrameControl);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: AWBFrameControl");

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.statsconfigs", "AECStatsControl",
        &metadataAECStatsControl);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: AECStatsControl");

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.statsconfigs", "AWBStatsControl",
        &metadataAWBStatsControl);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: AWBStatsControl");

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.statsconfigs", "AFStatsControl",
        &metadataAFStatsControl);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: AFStatsControl");

    // NOT static: tag locations are runtime values; order must match enum IPEVendorTagId
    const UINT vendorTagsControl3A[] =
    {
        metadataAECStatsControl | InputMetadataSectionMask,
        metadataAWBStatsControl | InputMetadataSectionMask,
        metadataAFStatsControl  | InputMetadataSectionMask,
        metadataAECFrameControl | InputMetadataSectionMask,
        metadataAWBFrameControl | InputMetadataSectionMask,
    };

    const SIZE_T numTags                            = CAMX_ARRAY_SIZE(vendorTagsControl3A);
    VOID*        pVendorTagsControl3A[numTags]      = { 0 };
    UINT64       vendorTagsControl3AOffset[numTags] = { 0 };

    GetDataList(vendorTagsControl3A, pVendorTagsControl3A, vendorTagsControl3AOffset, numTags);

    // Asserts mirror SetAAAInputData; presumably input metadata always carries these tags — verify
    CAMX_ASSERT(NULL != pVendorTagsControl3A[IPEVendorTagAECStats]);
    CAMX_ASSERT(NULL != pVendorTagsControl3A[IPEVendorTagAWBStats]);
    CAMX_ASSERT(NULL != pVendorTagsControl3A[IPEVendorTagAFStats]);
    CAMX_ASSERT(NULL != pVendorTagsControl3A[IPEVendorTagAECFrame]);
    CAMX_ASSERT(NULL != pVendorTagsControl3A[IPEVendorTagAWBFrame]);

    Utils::Memcpy(pInputData->pAECStatsUpdateData, pVendorTagsControl3A[IPEVendorTagAECStats], sizeof(AECStatsControl));
    Utils::Memcpy(pInputData->pAWBStatsUpdateData, pVendorTagsControl3A[IPEVendorTagAWBStats], sizeof(AWBStatsControl));
    Utils::Memcpy(pInputData->pAFStatsUpdateData, pVendorTagsControl3A[IPEVendorTagAFStats], sizeof(AFStatsControl));
    Utils::Memcpy(pInputData->pAECUpdateData, pVendorTagsControl3A[IPEVendorTagAECFrame], sizeof(AECFrameControl));
    Utils::Memcpy(pInputData->pAWBUpdateData, pVendorTagsControl3A[IPEVendorTagAWBFrame], sizeof(AWBFrameControl));

    pInputData->parentNodeID = parentNodeID;

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetOEMIQConfig
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Fetches the OEM IPE IQ setting pointer from input metadata into pInputData.
///
/// Fix vs. previous revision: the vendor-tag array was `static const` but initialized from a
/// runtime-queried tag location, freezing the first call's value (possibly zero on query
/// failure) forever. It is now a plain const local rebuilt on every call.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetOEMIQConfig(
    ISPInputData* pInputData,
    UINT          parentNodeID)
{
    CamxResult result = CamxResultSuccess;

    UINT32 metadataIPEIQParam                       = 0;

    result = VendorTagManager::QueryVendorTagLocation("org.quic.camera2.iqsettings", "OEMIPEIQSetting",
        &metadataIPEIQParam);
    CAMX_ASSERT_MESSAGE((CamxResultSuccess == result), "Fail to query: OEMIPEIQSetting");

    // NOT static: tag location is a runtime value
    const UINT vendorTagsControlIQ[] =
    {
        metadataIPEIQParam                      | InputMetadataSectionMask,
    };

    const SIZE_T numTags                            = CAMX_ARRAY_SIZE(vendorTagsControlIQ);
    VOID*        pVendorTagsControlIQ[numTags]      = { 0 };
    UINT64       vendorTagsControlIQOffset[numTags] = { 0 };

    GetDataList(vendorTagsControlIQ, pVendorTagsControlIQ, vendorTagsControlIQOffset, numTags);

    // May legitimately be NULL when the OEM setting is not published for this request
    pInputData->pOEMIQSetting = pVendorTagsControlIQ[IPEIQParam];
    pInputData->parentNodeID = parentNodeID;

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// IPENode::NotifyRequestProcessingError()
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::NotifyRequestProcessingError(
    NodeFenceHandlerData* pFenceHandlerData)
{
    CAMX_ASSERT(NULL != pFenceHandlerData);

    OutputPort*    pOutputPort = pFenceHandlerData->pOutputPort;
    CSLFenceResult fenceResult = pFenceHandlerData->fenceResult;

    // A fence failure on any loopback reference port means the reference frames
    // for temporal filtering are unreliable and must be reset.
    if (CSLFenceResultFailed == fenceResult)
    {
        BOOL isReferencePort = ((IPEOutputPortFullRef == pOutputPort->portId) ||
                                (IPEOutputPortDS4Ref  == pOutputPort->portId) ||
                                (IPEOutputPortDS16Ref == pOutputPort->portId));
        if (TRUE == isReferencePort)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "Fence failure for output port %d req %llu",
                pOutputPort->portId, pFenceHandlerData->requestId);
            m_resetReferenceInput = TRUE;
        }
    }

    // Optionally dump the BL command buffer for the failed request to aid hang debugging
    if (TRUE == m_enableIPEHangDump)
    {
        CAMX_LOG_INFO(CamxLogGroupPProc, "notify error fence back for request %llu", pFenceHandlerData->requestId);

        CmdBuffer* pDumpBuffer =
            CheckCmdBufferWithRequest(pFenceHandlerData->requestId, m_pIPECmdBufferManager[CmdBufferBLMemory]);
        if (NULL == pDumpBuffer)
        {
            CAMX_LOG_ERROR(CamxLogGroupPProc, "cant find buffer");
        }
        else
        {
            DumpDebug(CmdBufferBLMemory, pDumpBuffer, pFenceHandlerData->requestId,
                      InstanceID(), IsRealTime(), m_instanceProperty);
        }
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetStaticMetadata
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief  Initializes default HAL tag data and retrieves per-camera static capabilities.
///
/// Fix vs. previous revision: controlVideoStabilizationMode was assigned NoiseReductionModeFast
/// (wrong enum family — an apparent copy-paste from the noiseReductionMode default). It now
/// defaults to ControlVideoStabilizationModeOff, matching the HAL3 video-stabilization enum.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
VOID IPENode::GetStaticMetadata()
{
    HwCameraInfo    cameraInfo;

    HwEnvironment::GetInstance()->GetCameraInfo(GetPipeline()->GetCameraId(), &cameraInfo);

    // Initialize default metadata
    m_HALTagsData.saturation                        = 5;
    m_HALTagsData.colorCorrectionAberrationMode     = ColorCorrectionAberrationModeFast;
    m_HALTagsData.edgeMode                          = EdgeModeFast;
    m_HALTagsData.controlVideoStabilizationMode     = ControlVideoStabilizationModeOff;
    m_HALTagsData.sharpness                         = 1;
    m_HALTagsData.blackLevelLock                    = BlackLevelLockOff;
    m_HALTagsData.colorCorrectionMode               = ColorCorrectionModeFast;
    m_HALTagsData.controlAEMode                     = ControlAEModeOn;
    m_HALTagsData.controlAWBMode                    = ControlAWBModeAuto;
    m_HALTagsData.controlMode                       = ControlModeAuto;

    m_HALTagsData.noiseReductionMode                = NoiseReductionModeFast;
    m_HALTagsData.shadingMode                       = ShadingModeFast;
    m_HALTagsData.statisticsHotPixelMapMode         = StatisticsHotPixelMapModeOff;
    m_HALTagsData.statisticsLensShadingMapMode      = StatisticsLensShadingMapModeOff;
    m_HALTagsData.tonemapCurves.tonemapMode         = TonemapModeFast;

    // Retrieve the static capabilities for this camera
    CAMX_ASSERT(MaxCurvePoints >= cameraInfo.pPlatformCaps->maxTonemapCurvePoints);
    m_HALTagsData.tonemapCurves.curvePoints = cameraInfo.pPlatformCaps->maxTonemapCurvePoints;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::GetStabilizationMargins
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
CamxResult IPENode::GetStabilizationMargins()
{
    CamxResult          result         = CamxResultSuccess;
    UINT32              marginEISTag   = 0;
    UINT                marginTags[1]  = { 0 };
    VOID*               pData[1]       = { 0 };
    UINT64              offset[1]      = { 0 };
    StabilizationMargin receivedMargin = { 0 };

    // Resolve the vendor tag that carries the stabilization margins for the EIS
    // flavor this IPE instance is configured with (realtime EIS2 vs lookahead EIS3).
    if (0 != (IPEStabilizationType::IPEStabilizationTypeEIS2 & m_instanceProperty.stabilizationType))
    {
        result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.eisrealtime",
                                                          "StabilizationMargins", &marginEISTag);
        CAMX_ASSERT(CamxResultSuccess == result);
    }
    else if (0 != (IPEStabilizationType::IPEStabilizationTypeEIS3 & m_instanceProperty.stabilizationType))
    {
        result = VendorTagManager::QueryVendorTagLocation("org.quic.camera.eislookahead",
                                                          "StabilizationMargins", &marginEISTag);
        CAMX_ASSERT(CamxResultSuccess == result);
    }

    // Only query the usecase metadata when a valid vendor tag was resolved. Previously
    // tag 0 (ORed with the section mask) was queried when neither EIS2 nor EIS3 was
    // enabled, or when the tag lookup failed in release builds.
    if ((CamxResultSuccess == result) && (0 != marginEISTag))
    {
        marginTags[0] = (marginEISTag | UsecaseMetadataSectionMask);
        result        = GetDataList(marginTags, pData, offset, CAMX_ARRAY_SIZE(marginTags));

        if ((CamxResultSuccess == result) && (NULL != pData[0]))
        {
            // Firmware requires even margin dimensions; floor to the nearest even value
            receivedMargin                    = *static_cast<StabilizationMargin*>(pData[0]);
            m_stabilizationMargin.widthPixels = Utils::EvenFloorUINT32(receivedMargin.widthPixels);
            m_stabilizationMargin.heightLines = Utils::EvenFloorUINT32(receivedMargin.heightLines);
        }
    }

    CAMX_LOG_VERBOSE(CamxLogGroupCore,
                     "IPE stabilization margins for stabilization type %d set to %ux%u",
                     m_instanceProperty.stabilizationType,
                     m_stabilizationMargin.widthPixels,
                     m_stabilizationMargin.heightLines);

    return result;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::SetScaleRatios
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
BOOL IPENode::SetScaleRatios(
    ISPInputData*     pInputData,
    UINT              parentNodeID,
    IFECropInfo*      pCropInfo,
    IFEScalerOutput*  pIFEScalerOutput)
{
    // Start from unity: no scaling ahead of or after IPE until proven otherwise
    pInputData->preScaleRatio  = 1.0f;
    pInputData->postScaleRatio = 1.0f;

    if (NULL == pCropInfo)
    {
        CAMX_LOG_INFO(CamxLogGroupPProc, "pCropInfo is NULL");
        return FALSE;
    }

    CAMX_LOG_INFO(CamxLogGroupPProc, "parentNodeID=%d, input width = %d, height = %d, output width = %d, height = %d",
                 parentNodeID, m_fullInputWidth, m_fullInputHeight, m_fullOutputWidth, m_fullOutputHeight);

    if (BPS == parentNodeID)
    {
        // BPS-fed (offline) case: only a post-IPE scale applies. Use the smaller of the
        // width/height crop-to-output ratios so the whole crop fits the output.
        const FLOAT widthRatio  = static_cast<FLOAT>(pCropInfo->fullPath.width)  / static_cast<FLOAT>(m_fullOutputWidth);
        const FLOAT heightRatio = static_cast<FLOAT>(pCropInfo->fullPath.height) / static_cast<FLOAT>(m_fullOutputHeight);

        pInputData->postScaleRatio = (heightRatio < widthRatio) ? heightRatio : widthRatio;

        CAMX_LOG_INFO(CamxLogGroupPProc,
                      "IPE crop width = %d, height = %d, fwidth = %d, fheight = %d, preScaleRatio = %f, postScaleRatio = %f",
                      pCropInfo->fullPath.width,
                      pCropInfo->fullPath.height,
                      m_fullOutputWidth,
                      m_fullOutputHeight,
                      pInputData->preScaleRatio,
                      pInputData->postScaleRatio);
    }
    else if ((IFE == parentNodeID) || (ChiExternalNode == parentNodeID))
    {
        if (NULL == pIFEScalerOutput)
        {
            return FALSE;
        }

        // Realtime case: propagate the IFE scaler's downscale as the pre-scale ratio;
        // otherwise fall back to the crop-to-output ratio as the post-scale ratio.
        const FLOAT scalerRatio = pIFEScalerOutput->scalingFactor;

        if (scalerRatio > 1.0f)
        {
            pInputData->preScaleRatio  = scalerRatio;
            pInputData->postScaleRatio = 1.0f;
        }
        else
        {
            const FLOAT widthRatio  = static_cast<FLOAT>(pCropInfo->fullPath.width)  / static_cast<FLOAT>(m_fullOutputWidth);
            const FLOAT heightRatio = static_cast<FLOAT>(pCropInfo->fullPath.height) / static_cast<FLOAT>(m_fullOutputHeight);

            pInputData->preScaleRatio  = 1.0f;
            pInputData->postScaleRatio = (heightRatio < widthRatio) ? heightRatio : widthRatio;
        }

        CAMX_LOG_INFO(CamxLogGroupPProc,
                      "IFE scaling = %f, crop width = %d, height = %d, preScaleRatio = %f, postScaleRatio = %f",
                      pIFEScalerOutput->scalingFactor,
                      pCropInfo->fullPath.width,
                      pCropInfo->fullPath.height,
                      pInputData->preScaleRatio,
                      pInputData->postScaleRatio);
    }
    else
    {
       // Dual Camera Preview / Video case, implement it later due to SAT complexity
    }

    return TRUE;
}

////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// IPENode::CheckIsIPERealtime
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
BOOL IPENode::CheckIsIPERealtime()
{
    BOOL isRealTime = FALSE;

    if (TRUE == IsRealTime())
    {
        // IPE belongs to a realtime pipeline; a 4-pass configuration is the exception
        // and is not treated as realtime.
        isRealTime = (4 != m_numPasses) ? TRUE : FALSE;
    }
    else
    {
        // Preview / video running inside an offline pipeline: default processing with a
        // small number of reference output ports (so it does not fall into the MFNR category).
        const BOOL isDefaultPreviewVideo =
            ((IPEProcessingType::IPEProcessingTypeDefault == m_instanceProperty.processingType) &&
             (m_numOutputRefPorts > 0) &&
             (m_numOutputRefPorts <= 3));

        // No BPS in the pipeline and at most 3 passes: this is a realtime IPE that
        // happens to live in an offline pipeline.
        const BOOL isStandaloneRealtime = ((m_numPasses <= 3) && (FALSE == IsNodeInPipeline(BPS)));

        if ((TRUE == isDefaultPreviewVideo) || (TRUE == isStandaloneRealtime))
        {
            isRealTime = TRUE;
        }
    }

    return isRealTime;
}

CAMX_NAMESPACE_END
