/**
 @file ctc_usw_interrupt.c

 @author  Copyright (C) 2012 Centec Networks Inc.  All rights reserved.

 @date 2012-10-23

 @version v2.0

 This file define sys functions

*/

/****************************************************************************
*
* Header Files
*
****************************************************************************/
#include "sal.h"
#include "sal_fifo.h"
#include "dal.h"
#include "ctc_l2.h"
#include "ctc_port.h"
#include "ctc_linklist.h"
#include "ctc_warmboot.h"
#include "sys_usw_common.h"
#include "sys_usw_chip.h"
#include "sys_usw_register.h"
#include "sys_usw_interrupt.h"
#include "sys_usw_packet.h"
#include "sys_usw_api.h"
#include "sys_usw_dma.h"
#include "sys_usw_dma_priv.h"
#include "drv_api.h"
/****************************************************************************
*
* Defines and Macros
*
*****************************************************************************/
/*flow stats channel report timer, in ns*/
#ifdef EMULATION_ENV
/* ULL suffix required: 20*10^9 does not fit in a 32-bit signed int, so the
   unsuffixed product overflowed during constant evaluation (undefined behavior) */
#define SYS_USW_DMA_FLOW_STATS_SYNC_TIME  (20*1000000000ULL)
#define SYS_USW_DMA_FLOW_STATS_SYNC_TIME_FROM_TMM  (20*1000000000ULL)
#else
#define SYS_USW_DMA_FLOW_STATS_SYNC_TIME  1800000000UL
#define SYS_USW_DMA_FLOW_STATS_SYNC_TIME_FROM_TMM  1800000000UL
#endif


/*dot1ae channel report timer, in ns (ULL: 5*10^9 overflows 32-bit signed int)*/
#define SYS_USW_DMA_DOT1AE_STATS_SYNC_TIME (5*1000000000ULL)
/*npm stats channel report timer, in ns*/
#define SYS_USW_DMA_NPM_STATS_SYNC_TIME  (1*1000000000UL)
/*npm stats table num*/
#define SYS_USW_DMA_NPM_STATS_SYNC_TBL_NUM  4
/*dma write DsMetNonUcLagBlockMask table size*/
#define SYS_USW_DMA_NONUC_BMASK_SIZE (1024*64)
/*stats fifo sync number (previous comment here was a copy/paste of the line above)*/
#define SYS_USW_DMA_STATS_FIFO_SYNC_NUM 52
/*stats table entry size in words*/
#define SYS_USW_DMA_STATS_WORD 4
/*packets handled per interrupt*/
#define SYS_USW_DMA_PACKETS_PER_INTR 1
/*auto fetch mode special chip address*/
#define SYS_USW_DMA_DIRECT_ADDR 0x80000000
/*lowest pool id usable for packet data pools*/
#define SYS_USW_DMA_MIN_DATA_POOL_ID 1

/****************************************************************************
*
* Global and Declaration
*
*****************************************************************************/
sys_dma_master_t* p_usw_dma_master[CTC_MAX_LOCAL_CHIP_NUM_PP] = {NULL};
extern dal_op_t g_dal_op;
extern int32 drv_usw_ftm_get_flow_stats_table_id(uint8 lchip, uint8 ram_idx, uint32* table_id);
extern uint8 drv_ser_get_tcam_scan_enable(uint8 lchip);
extern int32 sys_usw_flow_stats_get_ram_info(uint8 lchip, uint8 blkid, uint32* offset, uint32* total);
extern int32 sys_duet2_dma_get_data_memory(uint8 lchip, uint8 chan_id, uint32 cur_index, 
    uint32 mem_id, uint32 *cfg_size, uint32 **pp_desc_addr, uint32 **pp_logic_addr);
extern int32 sys_duet2_dma_get_flow_stats_memory(uint8 lchip, uint8 block_id,
    uint32** p_mem, uint8* p_desc_done);
extern bool dal_get_soc_active(uint8 lchip);


/****************************************************************************
*
* Function
*
*****************************************************************************/
#define ______INTERNAL_API______
uint32
_sys_usw_dma_encode_table_op_bmp(uint8 lchip, uint8 tbl_level, uint8 pp_id, uint8 dp_id)
{
    /* Build the DMA I/O operate bitmap from a table's memory level:
       core-level tables carry only the core bit; pp-level tables add
       the pp bit; any other level additionally carries the dp bit.
       All but the pure core-level result are masked by
       DRV_OPER_BMP_PP_MASK2. */
    uint32 core_bit = ((uint32)1 << (pp_id / PP_NUM_PER_CORE)) << SYS_DMA_IO_BMP_CORE;
    uint32 pp_bit = ((uint32)1 << (pp_id % PP_NUM_PER_CORE)) << SYS_DMA_IO_BMP_PP;
    uint32 op_bmp;

    if (MEM_TYPE_PER_CORE == tbl_level)
    {
        op_bmp = core_bit;
    }
    else if ((MEM_TYPE_PER_PP == tbl_level) || (MEM_TYPE_PEER_PP == tbl_level))
    {
        op_bmp = core_bit | pp_bit;
        DRV_OPER_BMP_PP_MASK2(op_bmp);
    }
    else
    {
        op_bmp = core_bit | pp_bit | ((uint32)1 << dp_id);
        DRV_OPER_BMP_PP_MASK2(op_bmp);
    }

    return op_bmp;
}

uint32
_sys_usw_dma_encode_table_op_bmp2(uint8 lchip, uint8 is_read, uint8 core_bmp, uint16 pp_bmp, uint8 dp_bmp)
{
    /* Assemble the DMA I/O operate bitmap from explicit core/pp/dp
       bitmaps. When no core bitmap is given, fall back to the fixed
       defaults 0x45 (read) / 0xFF (write). The result is always
       masked by DRV_OPER_BMP_PP_MASK2. */
    uint32 bmp;

    if (core_bmp == 0)
    {
        bmp = (is_read != 0) ? 0x45 : 0xFF;
    }
    else
    {
        bmp = ((uint32)core_bmp << SYS_DMA_IO_BMP_CORE)
            | ((uint32)pp_bmp << SYS_DMA_IO_BMP_PP)
            | dp_bmp;
    }

    DRV_OPER_BMP_PP_MASK2(bmp);
    return bmp;
}

int32
sys_usw_dma_read_trigger(uint8 lchip, uint32 vld_num)
{
    /* Kick the table-read DMA channel by writing the valid descriptor
       count into DmaCtlTab; a zero count is a no-op. */
    uint32 cmd;

    if (vld_num == 0)
    {
        return CTC_E_NONE;
    }

    lchip = SYS_PP_BASE(lchip);
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, SYS_DMA_TBL_RD_CHAN_ID, cmd, &vld_num);

    return CTC_E_NONE;
}

int32 sys_usw_dma_wait_desc_finish(uint8 lchip, uint8 is_read, uint32 desc_index)
{
    /* Wait (holding the channel mutex) until the table read/write DMA
     * descriptor at desc_index has its done flag set by hardware.
     * @param is_read     non-zero: poll the table-read channel,
     *                    zero: the table-write channel
     * @param desc_index  descriptor slot to poll within that channel
     * @return CTC_E_NONE when done; CTC_E_DMA after ~100 polls without
     *         done; CTC_E_NOT_INIT if the chip goes inactive mid-wait
     */
    uint32  cnt = 0;
    int32   ret = CTC_E_NONE;
    bool    done = FALSE;
    DsDesc_m* p_tx_desc_mem;
    sys_dma_chan_t* p_dma_chan;
    sal_mutex_t* p_mutex = NULL;

    lchip = SYS_PP_BASE(lchip);

    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[is_read?SYS_DMA_TBL_RD_CHAN_ID:SYS_DMA_TBL_WR_CHAN_ID]);
    p_mutex = p_dma_chan->p_mutex;
    DMA_LOCK(p_mutex);

    p_tx_desc_mem = &(p_dma_chan->p_desc[desc_index].desc_info);

    while(cnt < 100)
    {
    #ifndef CTC_HOT_PLUG_DIS
        /* abort if the chip was hot-unplugged or DMA was deinitialized
           while we were waiting */
        if (sys_usw_chip_check_active(lchip) || NULL == p_usw_dma_master[lchip] || !p_usw_dma_master[lchip]->init)
        {
            ret = CTC_E_NOT_INIT;
            break;
        }
    #endif
        /* invalidate CPU cache so the done flag written by hardware is visible */
        SYS_USW_DMA_CACHE_INVALID(lchip, p_tx_desc_mem, sizeof(DsDesc_m));
        /* Duet2 and later chips use different descriptor encapsulations */
        if (DRV_IS_DUET2(lchip))
        {
            if (GetDsDescEncap2(V, done_f, p_tx_desc_mem))
            {
                done = TRUE;
                break;
                /* last transmit is done */
            }
        }
        else
        {
            if (GetDsDescEncap(V, done_f, p_tx_desc_mem))
            {
                done = TRUE;
                break;
                /* last transmit is done */
            }
        }

    #ifndef PACKET_TX_USE_SPINLOCK
        sal_task_sleep(1);
    #else
        /* spinlock build: cannot sleep, busy-wait instead */
        sal_udelay(1000);
    #endif
        cnt++;
#ifdef DMA_DBG_ON
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "wait cnt,%d\n", cnt);
#endif
    }

    if (!done)
    {
#ifdef DMA_DBG_ON
       SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "last transmit is not done,%d\n", desc_index);
#endif
       ret = CTC_E_DMA;
    }

    DMA_UNLOCK(p_mutex);
    return ret;
}

STATIC INLINE int32
_sys_usw_dma_wait_desc_finish(uint8 lchip, DsDesc_m* p_tx_desc_mem,sys_dma_chan_t* p_dma_chan)
{
    /* Poll a single DMA descriptor until hardware sets its done flag.
     * Unlike the public sys_usw_dma_wait_desc_finish(), this helper takes
     * the descriptor pointer directly and does no locking; the caller is
     * expected to hold any required lock.
     * @return CTC_E_NONE when done, CTC_E_DMA on timeout
     */
    uint32  cnt = 0;
    int32   ret = CTC_E_NONE;
    bool    done = FALSE;

#ifdef EMULATION_ENV
    /* emulation is orders of magnitude slower; allow far more polls */
    while(cnt < 10000)
#else
    while(cnt < 100)
#endif
    {
    #ifndef CTC_HOT_PLUG_DIS
        SYS_DMA_INIT_CHECK(lchip);
    #endif
        /* invalidate CPU cache so the done flag written by hardware is visible */
        SYS_USW_DMA_CACHE_INVALID(lchip, p_tx_desc_mem, sizeof(DsDesc_m));
        /* Duet2 and later chips use different descriptor encapsulations */
        if (DRV_IS_DUET2(lchip))
        {
            if (GetDsDescEncap2(V, done_f, p_tx_desc_mem))
            {
                done = TRUE;
                break;
                /* last transmit is done */
            }
        }
        else
        {
            if (GetDsDescEncap(V, done_f, p_tx_desc_mem))
            {
                done = TRUE;
                break;
                /* last transmit is done */
            }
        }
    #ifndef PACKET_TX_USE_SPINLOCK
        sal_task_sleep(1);
    #else
        sal_udelay(1000);
    #endif
        cnt++;
    }

    if (!done)
    {
#ifdef DMA_DBG_ON
       SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "last transmit is not done,%d\n", p_dma_chan->current_index);
#endif
       ret = CTC_E_DMA;
    }
    return ret;
}

STATIC int32
_sys_usw_dma_get_mac_address(uint8 lchip, uint8 mac_type, uint16 mac_id, uint32* p_addr)
{
    /* Resolve the hardware address of the MAC-stats RAM region for the
     * given mac_id. Which stats table and which index within it are used
     * depends on the chip family (TMM/TMG/AT vs. older chips) and, for
     * older chips, on the port speed (mac_type).
     * @param mac_type  CTC_PORT_SPEED_xxx; only consulted on non-TMM chips
     * @param mac_id    MAC id; selects table and entry offset
     * @param p_addr    out: hardware start address of the stats entry
     * @return CTC_E_NONE, or CTC_E_INVALID_PARAM on an unknown mac_type
     */
    uint32 start_addr = 0;
    uint32 tbl_id = 0;
    uint32 index = 0;
    uint32 tbl_step = 0;

    if (DRV_FROM_TMM(lchip))
    {
        if(DRV_IS_TMG(lchip))
        {
            /* TMG: one shared QuadSgmac ram, fixed 40-entry stride per MAC */
            tbl_id = QuadSgmacStatsRam_t;
            index = mac_id * 40;
        }
        else if ((mac_id >= SYS_CPU_MAC_ID_MIN) && (mac_id <= SYS_CPU_MAC_ID_MAX))
        {
            /* CPU MACs: quarter of the XQMAC ram depth per MAC */
            tbl_id = QuadSgmacStatsRam_t;
            index = (mac_id-SYS_CPU_MAC_ID_MIN) *(SYS_STATS_MAC_BASED_STATS_XQMAC_RAM_DEPTH / 4);
        }
        else if (DRV_FROM_AT(lchip))
        {
            /* AT family: two McMac stats rams selected by mac_id bit 2;
               NOTE(review): the index math below mirrors the hardware ram
               layout (8 MACs per ram row, 4 sub-slices per row) -- confirm
               against the chip register spec before changing */
            tbl_id = ((mac_id & 0x7) >> 2) ? McMacStatsRam1_t : McMacStatsRam0_t;
            index = ((mac_id % MCHIP_CAP(SYS_CAP_MAC_NUM_PER_DP)) / 8 )*SYS_STATS_MAC_BASED_STATS_XQMAC_RAM_DEPTH + ((mac_id % MCHIP_CAP(SYS_CAP_MAC_NUM_PER_DP))& 0x3) * (SYS_STATS_MAC_BASED_STATS_XQMAC_RAM_DEPTH / 4);
        }
        else
        {
            /* other TMM chips: ram selected by (mac_id mod 40)/4 using the
               McMacStatsRam table stride */
            tbl_step = McMacStatsRam1_t - McMacStatsRam0_t;
            tbl_id = McMacStatsRam0_t + ((mac_id%(10*4)) >> 2)* tbl_step;
            index = (mac_id/(10*4))*SYS_STATS_MAC_BASED_STATS_XQMAC_RAM_DEPTH + (mac_id & 0x3) * (SYS_STATS_MAC_BASED_STATS_XQMAC_RAM_DEPTH / 4);
        }
    }
    else
    {
        /* pre-TMM chips: one QuadSgmac ram per 4 MACs; sub-40G speeds get a
           40-entry slice, 40G/100G use the whole ram (index 0) */
        tbl_id = QuadSgmacStatsRam0_t + (mac_id>>2);
        switch (mac_type)
        {
            case CTC_PORT_SPEED_1G:
            case CTC_PORT_SPEED_10G:
            case CTC_PORT_SPEED_2G5:
            case CTC_PORT_SPEED_20G:
                index = 40*(mac_id%4);
                break;

            case CTC_PORT_SPEED_40G:
            case CTC_PORT_SPEED_100G:
                index = 0;
                break;
            default:
                return CTC_E_INVALID_PARAM;
        }
    }
    CTC_ERROR_RETURN(drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, tbl_id, index, &start_addr));
    *p_addr = start_addr;
    return CTC_E_NONE;

}

STATIC int32
_sys_usw_dma_reset_channel(uint8 lchip, uint16 chan_id)
{
    /* Reset (clear) one DMA channel and wait for hardware to acknowledge.
     * AT-family chips have a dedicated per-channel ClearCtl table (indexed
     * by chan_id via table_array); older chips use a per-channel bit in
     * the shared DmaClearCtl/DmaClearPend registers. In both cases the
     * pending status is polled up to 0xffff times, then read once more
     * for the final verdict.
     * @return CTC_E_NONE on success, CTC_E_DMA if the clear never completes
     */
    uint32 value = 0;
    uint32 cmd   = 0;
    uint32 cnt = 0;
    uint32 tbl_id = 0;
    uint8  clear_done = 0;
    /* per-channel clear-control tables, ordered by channel id (AT family) */
    uint32 table_array[] = {DmaPktRx0ClearCtl_t, DmaPktRx1ClearCtl_t, DmaPktRx2ClearCtl_t, DmaPktRx3ClearCtl_t,
        DmaPktRx4ClearCtl_t, DmaPktRx5ClearCtl_t, DmaPktRx6ClearCtl_t, DmaPktRx7ClearCtl_t, DmaPktTx0ClearCtl_t,
        DmaPktTx1ClearCtl_t, DmaPktTx2ClearCtl_t, DmaPktTx3ClearCtl_t, DmaRegRd0ClearCtl_t, DmaRegRd1ClearCtl_t,
        DmaRegRd2ClearCtl_t, DmaRegRd3ClearCtl_t, DmaRegRd4ClearCtl_t, DmaRegRd5ClearCtl_t, DmaRegWr0ClearCtl_t,
        DmaRegWr1ClearCtl_t, DmaInfo0ClearCtl_t, DmaInfo1ClearCtl_t, DmaInfo2ClearCtl_t, DmaInfo3ClearCtl_t,
        DmaInfo4ClearCtl_t, DmaInfo5ClearCtl_t, DmaInfo6ClearCtl_t, DmaInfo7ClearCtl_t, DmaScanClearCtl_t, DmaBatchClearCtl_t};

    if (DRV_FROM_AT(lchip))
    {
        tbl_id = table_array[chan_id];
        /* request the clear (field 0 = clear enable) */
        value = 1;
        cmd = DRV_IOW(tbl_id, 0);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));

        /* poll the pending flag (field 1) until it drops, bounded by 0xffff */
        cnt = 0;
        cmd = DRV_IOR(tbl_id, 1);
        do
        {
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
            clear_done = value ? 0 : 1;
            cnt++;
        } while(!clear_done&&(cnt<0xffff));

        /* one final read decides the result */
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
        clear_done = value ? 0 : 1;
    }
    else
    {
        /* older chips: channel selected by bit position in a shared register */
        value = (1 << chan_id);
        cmd = DRV_IOW(DmaClearCtl_t, DmaClearCtl_dmaClearEn_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));

        cnt = 0;
        cmd = DRV_IOR(DmaClearPend_t, DmaClearPend_dmaClearPending_f);
        do
        {
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
            clear_done = !CTC_IS_BIT_SET(value, chan_id);
            cnt++;
        } while(!clear_done&&(cnt<0xffff));

        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
        clear_done = !CTC_IS_BIT_SET(value, chan_id);
    }

    if (!clear_done)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, " Channel %u can not reset\n", chan_id);
        return CTC_E_DMA;
    }

    return CTC_E_NONE;
}

STATIC INLINE int32
_sys_usw_dma_set_tx_chip(uint8 lchip, DsDesc_m* p_tx_desc_mem, ctc_pkt_tx_t* tx_pkt, sys_dma_chan_t* p_dma_chan, uint32 phy_addr)
{
    /* Fill one packet-tx descriptor, flush it to memory, and hand it to
     * the DMA controller by bumping the channel's valid-descriptor count.
     * For zero-copy sends without a completion callback, block until the
     * descriptor is done so the caller may reuse the buffer immediately.
     * @param phy_addr  physical address of the packet buffer (16-byte aligned;
     *                  stored in the descriptor shifted right by 4)
     */
    uint32 cmd = 0;
    uint32 vld_num = 0;
    int32 ret = CTC_E_NONE;

    /* single-fragment packet: both start- and end-of-packet in one desc */
    SetDsDescEncap2(V, u1_pkt_eop_f, p_tx_desc_mem, 1);
    SetDsDescEncap2(V, u1_pkt_sop_f, p_tx_desc_mem, 1);
    /* cfgSize covers payload + bridge header + CRC; realSize is payload only */
    SetDsDescEncap2(V, cfgSize_f, p_tx_desc_mem, tx_pkt->skb.len+SYS_USW_PKT_HEADER_LEN+SYS_USW_DMA_PKT_TX_CRC_LEN);
    SetDsDescEncap2(V, memAddr_f, p_tx_desc_mem, (phy_addr >> 4));
    SetDsDescEncap2(V, done_f, p_tx_desc_mem, 0);
    SetDsDescEncap2(V, realSize_f, p_tx_desc_mem, tx_pkt->skb.len);
#ifdef ARCTIC
    SetDsDescEncap(V,  highAddr_f, p_tx_desc_mem, p_usw_dma_master[lchip]->dma_high_addr); /*from arctic*/
    SetDsDescEncap(V, u0_pktTx_destId_f, p_tx_desc_mem, (tx_pkt->lchip - lchip) & 0x3); /*from arctic*/
#endif
    /* flush the descriptor so hardware sees the fields written above */
    SYS_USW_DMA_CACHE_FLUSH(lchip, p_tx_desc_mem, sizeof(DsDesc_m));
    #if 0
    if (p_dma_chan->auto_fetch_en)
    {
        SetDsDescEncap2(V, valid_f, p_tx_desc_mem, 1);
        if DRV_FROM_AT(lchip)
        {
            SetDsDescEncap(V, descValid0_f, p_tx_desc_mem, 1); /*from arctic*/
        }
    }
    else
    #endif
    {
    #if (0 == SDK_WORK_PLATFORM)
         /* real hardware: the write below is an increment-by-count */
         vld_num =  1;
    #else
         /* simulation: must read-modify-write the absolute valid count */
         {
             uint32 valid_cnt = 0;
             cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
             CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num));
             valid_cnt = GetDmaCtlTab(V, vldNum_f, &vld_num);
             valid_cnt += 1;
             SetDmaCtlTab(V, vldNum_f, &vld_num, valid_cnt);
         }
    #endif
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num));
    }

    /* zero-copy without callback: wait for completion, then drop the buffer
       address from the descriptor so it is not referenced again */
    if(!tx_pkt->callback && (tx_pkt->tx_info.flags & CTC_PKT_FLAG_ZERO_COPY))
    {
       CTC_ERROR_RETURN(_sys_usw_dma_wait_desc_finish(lchip, p_tx_desc_mem, p_dma_chan));
       SetDsDescEncap2(V, memAddr_f, p_tx_desc_mem, 0);
    }
    /* next descriptor, tx_desc_index: 0~tx_desc_num-1*/
    p_dma_chan->current_index =
        (p_dma_chan->current_index == (p_dma_chan->desc_depth - 1)) ? 0 : (p_dma_chan->current_index + 1);

    return ret;
}

#define ______API______
int32
sys_usw_dma_get_flow_stats_sync_mode(uint8 lchip)
{
    /* Return the flow-stats sync mode recorded in the DMA master.
       Forced to 0 when compiled for the SDB memory model (the lookup
       below is then unreachable). */
#if (SDB_MEM_MODEL == SDB_MODE) /*SDB_MEM_MODEL*/
    return 0;
#endif
    return p_usw_dma_master[lchip]->flow_stats_sync_mode;
}

int32
sys_usw_dma_set_cfg_size(uint8 lchip, int32 chan_id, uint32 desc_idx, uint32 enable)
{
    /* Enable or disable DMA processing of a flow-stats descriptor block by
     * rewriting cfgSize in every descriptor of that block (a disabled block
     * gets the sentinel SYS_DMA_DESC_NOT_PROC_SIZE). Only acts on
     * SYS_DMA_FLOW_STATS_CHAN_ID; any other channel is a silent no-op.
     *
     * Sequence: disable the channel, temporarily shorten the RegRd2 trigger
     * timer to drain it quickly, wait for the channel cache to empty, patch
     * the descriptors, then restore the timer and re-enable the channel
     * (the end0/end1 labels undo in reverse order on error).
     *
     * @param desc_idx  starting descriptor/block index; every
     *                  SYS_CAP_STATS_DMA_PP_BLOCK_NUM-th descriptor from it
     *                  is patched
     * @param enable    non-zero: restore the real data size; zero: mark the
     *                  block as not-to-be-processed
     * @return CTC_E_NONE, CTC_E_HW_BUSY if the cache never drains,
     *         CTC_E_NOT_EXIST if the stats table cannot be resolved
     */
    int32 ret = CTC_E_NONE;
    sys_dma_desc_t* p_sys_desc_pad = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 cmd = 0;
    uint32 tbl_id = 0;
    uint32 data_size = 0;
    uint32 dword = 0;
    sys_dma_chan_t* p_chan_info = NULL;
    DmaStaticInfo_m static_info;
    DmaDynInfo_m dyn_info;
    DmaRegRd2TrigCfg_m trigger2_timer;
    uint32 timer_v[2];
    uint32 temp_timer_v[2];
    uint32 cache_cnt = 0;
#ifdef EMULATION_ENV
    uint32 count = 1000000;
    uint8 pp0 = SYS_PP_BASE(lchip) + 2;/*use pp2*/
    uint8 pp1 = SYS_PP_BASE(lchip) + 3;/*use pp3*/
    if (lchip != pp0 && lchip != pp1)
    {
        return CTC_E_NONE;
    }
#else
    uint32 count = 10000;
#endif
    uint16 index = 0;

    lchip = DRV_FROM_AT(lchip)? SYS_PP_BASE(lchip) : lchip;

    SYS_DMA_INIT_CHECK(lchip);
    if (DRV_FROM_AT(lchip) && !p_usw_dma_master[lchip]->flow_stats_sync_mode)
    {
        return CTC_E_NONE;
    }
    sal_memset(&static_info, 0, sizeof(DmaStaticInfo_m));

    p_chan_info = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    if (!p_chan_info->chan_en || chan_id != SYS_DMA_FLOW_STATS_CHAN_ID)
    {
        return CTC_E_NONE;
    }
    p_sys_desc_pad = p_usw_dma_master[lchip]->dma_chan_info[chan_id].p_desc;

    /* step 1: disable the channel so no new descriptors are fetched */
    cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, chan_id, cmd, &static_info));
    SetDmaStaticInfo(V, chanEn_f, &static_info, 0);
    cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, chan_id, cmd, &static_info));

    /* step 2: shrink the RegRd2 trigger timer to 16ns so in-flight work
       drains quickly; the original value is saved for restoration at end1 */
    timer_v[0] = 16;/*16 ns*/
    timer_v[1] = 0;
    cmd = DRV_IOR(DmaRegRd2TrigCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger2_timer));
    GetDmaRegRd2TrigCfg(A, cfgRegRd2TrigNs_f, &trigger2_timer, temp_timer_v);
    SetDmaRegRd2TrigCfg(A, cfgRegRd2TrigNs_f, &trigger2_timer, timer_v);
    cmd = DRV_IOW(DmaRegRd2TrigCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &trigger2_timer),ret,end0);

    /* step 3: determine the real (enabled) cfg size for this block */
    if(DRV_FROM_TMM(lchip))
    {
        if(drv_usw_ftm_get_flow_stats_table_id(lchip, desc_idx%MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM), &tbl_id))
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " drv_usw_ftm_get_flow_stats_table_id error index=[%u] \n",desc_idx);
            ret = CTC_E_NOT_EXIST;
            goto end1;
        }
        dword = TABLE_ENTRY_OFFSET(lchip, tbl_id)>>2;
        data_size = ((DRV_TABLE_MAX_INDEX(lchip, tbl_id)) * dword*4);
    }
    else
    {
        data_size = p_chan_info->data_size;
    }

    /* step 4: wait for the channel's descriptor cache to empty */
    cmd = DRV_IOR(DmaDynInfo_t, DRV_ENTRY_FLAG);
    do
    {
        DRV_IOCTL(lchip, chan_id, cmd, &dyn_info);
        count--;
        cache_cnt = GetDmaDynInfo(V, cacheCnt_f, &dyn_info);
    }
    while(count && cache_cnt);
    if (0 == count)
    {
        ret = CTC_E_HW_BUSY;
        goto end1;
    }

    data_size = enable?data_size:SYS_DMA_DESC_NOT_PROC_SIZE;

    /* step 5: patch cfgSize in every descriptor belonging to this block */
    for(index=desc_idx; index< p_chan_info->desc_num; index=index+MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM))
    {
        p_desc = (DsDesc_m*)&(p_sys_desc_pad[index].desc_info);
        SetDsDescEncap2(V, cfgSize_f, p_desc, data_size);
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));
    }

end1:
    /* restore the original trigger timer */
    SetDmaRegRd2TrigCfg(A, cfgRegRd2TrigNs_f, &trigger2_timer, temp_timer_v);
    cmd = DRV_IOW(DmaRegRd2TrigCfg_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &trigger2_timer);
end0:
    /* re-enable the channel unconditionally */
    cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, chan_id, cmd, &static_info);
    SetDmaStaticInfo(V, chanEn_f, &static_info, 1);
    cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, chan_id, cmd, &static_info);
    return ret;
}

STATIC int32
_sys_usw_dma_get_desc_en_bmp(uint8 lchip, uint8 chan_id, uint32* bmp)
{
    /* Report which flow-stats descriptor blocks are enabled for DMA
     * processing: a block whose cfgSize is not the
     * SYS_DMA_DESC_NOT_PROC_SIZE sentinel has its bit set in bmp.
     * Only meaningful for SYS_DMA_FLOW_STATS_CHAN_ID; any other channel
     * (or a disabled channel) leaves bmp untouched.
     * @param bmp  out bitmap, one bit per stats block
     */
    sys_dma_desc_t* p_sys_desc_pad = NULL;
    DsDesc_m* p_desc = NULL;
    sys_dma_chan_t* p_chan_info = NULL;
    uint16 index = 0;

    p_chan_info = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    if (!p_chan_info->chan_en || chan_id != SYS_DMA_FLOW_STATS_CHAN_ID)
    {
        return CTC_E_NONE;
    }
    p_sys_desc_pad = p_usw_dma_master[lchip]->dma_chan_info[chan_id].p_desc;

    for(index=0; index<MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM); index++)
    {
        p_desc = (DsDesc_m*)&(p_sys_desc_pad[index].desc_info);
        /* fixed: non-Duet2 chips must read the DsDescEncap layout; both
           arms of this ternary previously called GetDsDescEncap2, making
           the DRV_IS_DUET2 check a no-op (cf. the done_f reads elsewhere
           in this file, which use Encap2 for Duet2 and Encap otherwise) */
        if(SYS_DMA_DESC_NOT_PROC_SIZE != (DRV_IS_DUET2(lchip)?GetDsDescEncap2(V, cfgSize_f, p_desc):GetDsDescEncap(V, cfgSize_f, p_desc)))
        {
            CTC_BMP_SET(bmp, index);
        }
    }

    return CTC_E_NONE;
}

STATIC uint32
_sys_usw_dma_get_chan_type(uint8 lchip, uint8 chan_id)
{
    /* Reverse-map a physical channel id to its logical channel type by
       scanning the type enum range. Returns DRV_DMA_MAX_CHAN_ID when no
       type maps to the given channel id. */
    uint32 chan_type = DRV_DMA_PACKET_RX0_CHAN_ID;

    while (chan_type < DRV_DMA_MAX_CHAN_ID)
    {
        if (DRV_ENUM(chan_type) == chan_id)
        {
            break;
        }
        chan_type++;
    }

    return chan_type;
}


#define _DMA_FUNCTION_INTERFACE

void*
sys_usw_dma_tx_alloc(uint8 lchip, uint32 pkt_size)
{
    /* Allocate a DMA-capable packet buffer with header room reserved.
       Sizes below the minimum packet length are rounded up. Returns
       NULL when the chip index is invalid, DMA is not initialized, or
       the allocation fails. */
    void* p_buf;

    if ((lchip >= g_lchip_num) || (NULL == p_usw_dma_master[lchip]))
    {
        return NULL;
    }

    if (pkt_size < SYS_USW_PKT_MIN_PKT_LEN)
    {
        pkt_size = SYS_USW_PKT_MIN_PKT_LEN;
    }

    p_buf = SYS_DMA_ALLOC(lchip, pkt_size + CTC_PKT_HDR_ROOM, 0);
    return p_buf;
}

int32
sys_usw_dma_tx_free(uint8 lchip, void* addr)
{
    /* Return a buffer obtained from sys_usw_dma_tx_alloc(); a NULL
       address is silently ignored. */
    SYS_DMA_INIT_CHECK(lchip);

    if (NULL != addr)
    {
        SYS_DMA_FREE(lchip, addr);
    }

    return CTC_E_NONE;
}

int32
sys_usw_dma_tx_alloc2(uint8 lchip, ctc_pkt_buf_pool_t* p_pool)
{
    /* Hand out p_pool->num pre-allocated packet buffers from the data
     * fifo of rx channel (pool_id - 1) into p_pool->buf_array.
     * @return CTC_E_NONE (including when num is 0); CTC_E_NO_RESOURCE
     *         when the pool has no fifo or too few buffered addresses
     */
    uintptr new_addr = 0;
    sal_fifo_t* p_fifo = NULL;
    uint16 cnt = 0;

    SYS_DMA_INIT_CHECK(lchip);
    CTC_MIN_VALUE_CHECK(p_pool->pool_id, SYS_USW_DMA_MIN_DATA_POOL_ID);
    CTC_MAX_VALUE_CHECK(p_pool->pool_id, SYS_DMA_PACKET_TX0_CHAN_ID);

    if (!p_pool->num)
    {
        return CTC_E_NONE;
    }

    /* pool_id maps to rx channel (pool_id - 1) */
    p_fifo = p_usw_dma_master[lchip]->dma_chan_info[p_pool->pool_id-1].p_data_fifo;
    if (!p_fifo)
    {
        return CTC_E_NO_RESOURCE;
    }

    /* no enough data in data pool */
    if (p_pool->num * sizeof(uint32) > sal_fifo_len(p_fifo))
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, "There is not enough data to alloc from data pool\n");
        return CTC_E_NO_RESOURCE;
    }

    /* alloc data from data pool */
    do{
        /* NOTE(review): addresses are stored in the fifo as 32-bit words;
           this assumes DMA buffer addresses fit in 32 bits (new_addr's
           upper bits stay zero from its initializer) -- confirm on 64-bit
           hosts */
        sal_fifo_get(p_fifo, (uint8*)&new_addr, sizeof(uint32));
        p_pool->buf_array[cnt] = (void*)new_addr;
    }while(++cnt < p_pool->num);

    return CTC_E_NONE;
}

int32
sys_usw_dma_tx_free2(uint8 lchip, ctc_pkt_buf_pool_t* p_pool)
{
    /* Recycle p_pool->num buffers back into the data fifo of rx channel
     * (pool_id - 1). If the fifo was empty before recycling, manually
     * raise the rx-channel DMA function interrupt so pending rx
     * processing resumes with the newly available buffers.
     * @return CTC_E_NONE; CTC_E_NO_RESOURCE when the pool has no fifo;
     *         CTC_E_INVALID_PARAM when the recycle would overflow it
     */
    uintptr new_addr = 0;
    sal_fifo_t* p_fifo = NULL;
    uint16 cnt = 0;
    uint32 cmd = 0;
    uint32 tbl_id = DRV_FROM_AT(lchip) ? DmaCtlIntrFunc0_t : DmaCtlIntrFunc_t;
    uint32 intr_vec[SYS_DMA_INTR_VEC] = {0};
    uint8 is_empty = 0;

    SYS_DMA_INIT_CHECK(lchip);

    CTC_PTR_VALID_CHECK(p_pool->buf_array);
    CTC_MIN_VALUE_CHECK(p_pool->pool_id, SYS_USW_DMA_MIN_DATA_POOL_ID);
    CTC_MAX_VALUE_CHECK(p_pool->pool_id, SYS_DMA_PACKET_TX0_CHAN_ID);

    if (!p_pool->num)
    {
        return CTC_E_NONE;
    }

    p_fifo = p_usw_dma_master[lchip]->dma_chan_info[p_pool->pool_id-1].p_data_fifo;
    if (!p_fifo)
    {
        return CTC_E_NO_RESOURCE;
    }

    /* data num to recycle must not overflow the data pool */
    if (p_pool->num * sizeof(uint32) + sal_fifo_len(p_fifo) > p_fifo->size)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, "data num to recycle will overflow the data pool, max data num is: %u, current data num in pool is %u \n",\
            (uint32)(p_fifo->size/sizeof(uint32)), (uint32)(sal_fifo_len(p_fifo)/sizeof(uint32)));
        return CTC_E_INVALID_PARAM;
    }

    is_empty = sal_fifo_len(p_fifo)==0;
    /* recycle data into data pool (addresses stored as 32-bit words,
       matching sys_usw_dma_tx_alloc2) */
    do{
        new_addr = (uintptr)p_pool->buf_array[cnt];
        sal_fifo_put(p_fifo, (uint8*)&new_addr, sizeof(uint32));
    }while(++cnt < p_pool->num);

    /* if buffer pool is emtpy before recycling data into pool, need trigger dma interrupt for rx process affer recycling */
    if (is_empty)
    {
        intr_vec[0] = 1 << (p_pool->pool_id - 1);/* rx channel id = pool_id - 1 */
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, INTR_INDEX_VAL_SET, cmd, intr_vec));
    }
    return CTC_E_NONE;
}


int32
sys_usw_dma_function_pause(uint8 lchip, uint8 chan_id, uint8 en)
{
    /* Pause or resume a DMA function channel via the chip-specific
       driver hook. */
    int32 ret;

    SYS_DMA_INIT_CHECK(lchip);
    ret = MCHIP_DMA(lchip)->dma_function_pause(lchip, chan_id, en);
    return ret;
}

int32
sys_usw_dma_batch(uint8 lchip, void* p_batch)
{
    /* Submit a DMA batch request through the chip-specific driver hook;
       chips without a batch hook report CTC_E_NOT_SUPPORT. */
    SYS_DMA_INIT_CHECK(lchip);

    if (NULL == MCHIP_DMA(lchip)->dma_batch_func)
    {
        return CTC_E_NOT_SUPPORT;
    }

    return MCHIP_DMA(lchip)->dma_batch_func(lchip, p_batch);
}

/**
 @brief packet DMA TX
*/
int32
sys_usw_dma_pkt_tx(uint8 lchip, ctc_pkt_tx_t* p_pkt_tx)
{
    /* Transmit one packet through the chip-specific DMA driver hook. */
    int32 ret;

    ret = MCHIP_DMA(lchip)->dma_pkt_tx(lchip, p_pkt_tx);
    return ret;
}

/**
 @brief Dma register callback function
*/
int32
sys_usw_dma_register_cb(uint8 lchip, uint8 type, void* cb)
{
    /* Register a DMA event callback into the slot for the given type;
       types at or beyond SYS_DMA_CB_MAX_TYPE are rejected. */
    SYS_DMA_INIT_CHECK(lchip);

    if (SYS_DMA_CB_MAX_TYPE <= type)
    {
        return CTC_E_INVALID_PARAM;
    }

    p_usw_dma_master[lchip]->dma_cb[type] = (DMA_CB_FUN_P)cb;
    return CTC_E_NONE;
}

/**
 @brief Dma register callback function
*/
int32
sys_usw_dma_register_rx_cb(uint8 lchip, CTC_PKT_RX_CALLBACK cb)
{
    /* Record the packet-rx callback invoked for packets received via DMA. */
    SYS_DMA_INIT_CHECK(lchip);
    p_usw_dma_master[lchip]->dma_rx_cb = cb;
    return CTC_E_NONE;
}

/*
Notice:this interface is used to wait dmactl to finish dma op and write back desc to memory
Before using this interface must config dmactl already begin process data and not finish
Now only used for hashdump
*/
int32
sys_usw_dma_wait_desc_done(uint8 lchip, uint8 chan_id)
{
    /* Wait until the channel's current descriptor is marked done by the
     * DMA controller (polls up to 0x1000 times). The caller must already
     * have put the controller to work on this descriptor; see the usage
     * note above (currently only used for hash dump).
     * @return CTC_E_NONE when done, CTC_E_DMA on timeout
     */
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 cur_index = 0;
    uint32 desc_done = 0;
    uint32 wait_cnt = 0;

    SYS_DMA_INIT_CHECK(lchip);
    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;

    p_desc = &p_base_desc[cur_index].desc_info;

    do
    {
        /* invalidate CPU cache so the done flag written by hardware is visible */
        SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(DsDesc_m));
        desc_done = DRV_IS_DUET2(lchip)?GetDsDescEncap2(V, done_f, p_desc):GetDsDescEncap(V, done_f, p_desc);
        if (desc_done)
        {
            break;
        }

#ifndef PACKET_TX_USE_SPINLOCK
        sal_task_sleep(1);
#else
        sal_udelay(1000);
#endif
        wait_cnt++;

    }while(wait_cnt < 0x1000);

    if (desc_done)
    {
        return CTC_E_NONE;
    }
    else
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " [DMA] Desc not done \n");
        return CTC_E_DMA;
    }

}

int32
sys_usw_dma_clear_chan_data(uint8 lchip, uint8 chan_id)
{
    /* Drain and recycle all completed descriptors of a channel: wait for
     * the current descriptor to finish, then walk forward clearing each
     * done descriptor and returning it to the DMA controller, stopping at
     * the first not-done descriptor (which becomes the new current index).
     * Runs under the channel mutex.
     * @return result of the initial sys_usw_dma_wait_desc_done() call
     */
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 cur_index = 0;
    int32 ret = 0;
    uint32 vld_num = 0;
    uint32 cmd = 0;
    uint32 desc_done = 0;
    SYS_DMA_INIT_CHECK(lchip);

    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    DMA_LOCK(p_dma_chan->p_mutex);

    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;

    ret = sys_usw_dma_wait_desc_done(lchip, chan_id);
    if (ret == CTC_E_NONE)
    {
        /* walk the ring (with wrap-around) until a not-done desc is found */
        for(;; cur_index++)
        {
            if (cur_index >= p_dma_chan->desc_depth)
            {
                cur_index = 0;
            }

            p_desc = &p_base_desc[cur_index].desc_info;
            SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(DsDesc_m));

            desc_done = DRV_IS_DUET2(lchip)?GetDsDescEncap2(V, done_f, p_desc):GetDsDescEncap(V, done_f, p_desc);
            if (desc_done)
            {
                /* clear Desc and return Desc to DmaCtl*/
                SetDsDescEncap2(V, done_f, p_desc, 0);
                SetDsDescEncap2(V, reserved0_f, p_desc, 0);
                #if (1 == SDK_WORK_PLATFORM)
                    SetDsDescEncap2(V, realSize_f, p_desc, 0);
                #endif
                SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));

                /* hardware treats the write as increment-by-1; simulation
                   expects the absolute descriptor depth */
                #if (0 == SDK_WORK_PLATFORM)
                    vld_num = 1;
                #else
                    vld_num = p_dma_chan->desc_depth;
                #endif
                cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num);
            }
            else
            {
                break;
            }
        }

        p_dma_chan->current_index = cur_index;
    }

    DMA_UNLOCK(p_dma_chan->p_mutex);
    return ret;

}

int32
sys_usw_dma_knet_reset_en(uint8 lchip, uint8 reset_en)
{
    /* Track whether knet channels should be kept across warmboot:
       a non-zero reset_en clears the keep flag, zero sets it. */
    SYS_DMA_INIT_CHECK(lchip);

    p_usw_dma_master[lchip]->wb_keep_knet = (0 == reset_en) ? 1 : 0;

    return CTC_E_NONE;
}

int32
sys_usw_dma_set_chan_en(uint8 lchip, uint8 chan_id, uint8 chan_en)
{
    /* Enable or disable a DMA channel in hardware (DmaStaticInfo.chanEn)
     * and mirror the state into the software channel info.
     * Silently succeeds without touching hardware when no DMA memory is
     * mapped, or when disabling a knet channel that warmboot wants kept.
     * @return CTC_E_NONE, or CTC_E_INVALID_PARAM for an out-of-range chan_id
     */
    uint32 cmd = 0;
    sys_dma_chan_t* p_chan_info = NULL;
    dal_dma_info_t dma_info;
    DmaStaticInfo_m static_info;
    SYS_DMA_INIT_CHECK(lchip);
    if(chan_id > MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID))
    {
        return CTC_E_INVALID_PARAM;
    }
    sal_memset(&dma_info, 0 ,sizeof(dal_dma_info_t));
    dal_get_dma_info(SYS_MAP_LDEV(lchip), &dma_info);
    /* no DMA memory mapped for this device: nothing to program */
    if (0 == dma_info.size)
    {
        return CTC_E_NONE;
    }
    p_chan_info = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    /* keep knet channels alive across warmboot when requested */
    if (p_chan_info->pkt_knet_en && p_usw_dma_master[lchip]->wb_keep_knet && !chan_en)
    {
        return CTC_E_NONE;
    }

    cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &static_info));
    SetDmaStaticInfo(V, chanEn_f, &static_info, chan_en);
    cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &static_info));

    p_chan_info->chan_en = chan_en;

    return CTC_E_NONE;

}

int32
sys_usw_dma_get_chan_en(uint8 lchip, uint8 chan_id, uint8* chan_en)
{
    /* Report whether the given DMA channel is enabled (software state).
     * @param chan_id  channel index, validated against the chip capability
     *                 (same bound as sys_usw_dma_set_chan_en)
     * @param chan_en  out: the channel's chan_en flag
     * @return CTC_E_NONE, or CTC_E_INVALID_PARAM for an out-of-range chan_id
     */
    sys_dma_chan_t* p_chan_info = NULL;
    SYS_DMA_INIT_CHECK(lchip);

    /* bounds check added for consistency with sys_usw_dma_set_chan_en;
       the previous NULL test on &array[chan_id] was always true and did
       not guard against an out-of-range index */
    if (chan_id > MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID))
    {
        return CTC_E_INVALID_PARAM;
    }

    p_chan_info = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    *chan_en = p_chan_info->chan_en;

    return CTC_E_NONE;
}

int32
sys_usw_dma_get_data_memory(uint8 lchip, uint8 chan_id, uint32 cur_index, uint32 mem_id, uint32 *cfg_size, uint32 **pp_desc_addr, uint32 **pp_logic_addr)
{
    /* For a TCAM-scan style descriptor: return the descriptor's address,
     * the CPU-visible (logic) address of its data buffer, and the cfg
     * size needed to cover all entries of TCAM memory mem_id.
     * @param cur_index      descriptor slot within the channel ring
     * @param mem_id         TCAM memory block id
     * @param cfg_size       out: bytes to transfer, rounded up to whole
     *                       SYS_DMA_TCAM_SCAN_BYTE_PER_UNIT units
     * @param pp_desc_addr   out: address of the descriptor itself
     * @param pp_logic_addr  out: logic address of the descriptor's buffer
     * @return CTC_E_NONE, or CTC_E_NOT_INIT if the channel is disabled
     */
    sys_dma_chan_t *p_chan_info = NULL;
    sys_dma_desc_t *p_base_desc = NULL;
    DsDesc_m *p_desc = NULL;
    uint64 phy_addr = 0;
    uint32 cfg_addr = 0;
    uint32 entry_num = 0;
    uint32 per_entry_size = 0;
    uint8 entry_num_per_unit = 0;

    SYS_DMA_INIT_CHECK(lchip);

    p_chan_info = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    if (!p_chan_info->chan_en)
    {
        return CTC_E_NOT_INIT;
    }
    p_base_desc = p_chan_info->p_desc;
    p_desc = &(p_base_desc[cur_index].desc_info);

    /* rebuild the 64-bit physical address: high word from the master,
       low word from the descriptor's memAddr field (stored >> 4) */
    COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr, (GetDsDescEncap(V, memAddr_f, p_desc) << 4), phy_addr);

    *pp_logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
    *pp_desc_addr = (uint32*)(&p_base_desc[cur_index]);

    CTC_ERROR_RETURN(drv_usw_ftm_get_tcam_memory_info(lchip, mem_id, &cfg_addr, &entry_num,  &per_entry_size, NULL));

    entry_num_per_unit = SYS_DMA_TCAM_SCAN_ENTRY_PER_UNIT;

    /* round the entry count up to whole scan units */
    *cfg_size = (entry_num / entry_num_per_unit + ((entry_num % entry_num_per_unit) ? 1 : 0)) * SYS_DMA_TCAM_SCAN_BYTE_PER_UNIT;

    return CTC_E_NONE;
}

int32
sys_usw_dma_sync_hash_dump(uint8 lchip, dma_dump_cb_parameter_t* p_pa, uint16* p_entry_num, void* p_data )
{
    /* Forward a hash-dump sync request to the chip-specific driver hook. */
    int32 ret;

    SYS_DMA_INIT_CHECK(lchip);
    ret = MCHIP_DMA(lchip)->dma_sync_hash_dump(lchip, (void*)p_pa, p_entry_num, p_data);
    return ret;
}

int32
sys_usw_dma_set_dump_cb(uint8 lchip, void* cb)
{
    /* Install the hash-dump completion callback. */
    SYS_DMA_INIT_CHECK(lchip);

    if (NULL == p_usw_dma_master[lchip])
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
        return CTC_E_NOT_INIT;
    }

    p_usw_dma_master[lchip]->dma_dump_cb = (DMA_DUMP_FUN_P)cb;
    return CTC_E_NONE;
}

int32
sys_usw_dma_get_dump_cb(uint8 lchip, void**cb, void** user_data)
{
    /* Fetch the installed hash-dump callback. Note: user_data is accepted
       for interface compatibility but is not written here. */
    SYS_DMA_INIT_CHECK(lchip);

    if (NULL == p_usw_dma_master[lchip])
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
        return CTC_E_NOT_INIT;
    }

    *cb = p_usw_dma_master[lchip]->dma_dump_cb;
    return CTC_E_NONE;
}


int32
sys_usw_dma_get_packet_rx_chan(uint8 lchip, uint16* p_num)
{
    /* Report the number of packet-rx DMA channels configured. */
    SYS_DMA_INIT_CHECK(lchip);

    *p_num = p_usw_dma_master[lchip]->packet_rx_chan_num;

    return CTC_E_NONE;
}

int32
sys_usw_dma_get_dma_memory(uint8 lchip, uint8 type, uint32** p_mem)
{
    /* Return the non-ucast block-mask DMA buffer. The type parameter is
       accepted for interface compatibility but is not consulted here. */
    SYS_DMA_INIT_CHECK(lchip);

    *p_mem = p_usw_dma_master[lchip]->p_nonuc_bmask_dma;
    return CTC_E_NONE;
}

int32
sys_usw_dma_get_hw_learning_sync(uint8 lchip, uint8* b_sync)
{
    /* Report whether hardware-learning sync is enabled. */
    SYS_DMA_INIT_CHECK(lchip);

    *b_sync = p_usw_dma_master[lchip]->hw_learning_sync;

    return CTC_E_NONE;
}

int32
sys_usw_dma_set_hw_learning_sync(uint8 lchip, uint8 b_sync)
{
    /* Record whether hardware-learning sync is enabled. */
    SYS_DMA_INIT_CHECK(lchip);

    p_usw_dma_master[lchip]->hw_learning_sync = b_sync;

    return CTC_E_NONE;
}

int32
sys_usw_dma_get_flow_stats_memory(uint8 lchip, uint8 block_id, uint32** p_mem, uint8* p_desc_done)
{
    /* Collect the CPU-visible buffer addresses (and optionally the done
     * flags) of every flow-stats descriptor that belongs to block_id.
     * Duet2 chips are delegated to sys_duet2_dma_get_flow_stats_memory().
     * @param block_id     stats block; matched against the low byte of
     *                     each per-pp descriptor's value0
     * @param p_mem        out array, one buffer pointer per matching desc;
     *                     p_mem[0] is set to NULL if the channel is disabled
     * @param p_desc_done  optional out array of per-descriptor done flags
     */
    sys_dma_chan_t* p_dma_chan = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 cur_index = 0;
    uint16 loop_pp_dsc = 0;
    uint16 loop_pp_dep = 0;
    uint64 phy_addr = 0;
#ifdef EMULATION_ENV
    /* emulation models only two pp instances */
    uint8 pp_num = 2;
#else
    uint8 pp_num = SYS_PP_NUM(lchip);
#endif
    uint8 pp_base = SYS_PP_BASE(lchip);

#ifndef CTC_HOT_PLUG_DIS
    SYS_DMA_INIT_CHECK(lchip);
#endif

    p_dma_chan = &p_usw_dma_master[pp_base]->dma_chan_info[SYS_DMA_FLOW_STATS_CHAN_ID];
    if (p_dma_chan->chan_en == 0)
    {
        *p_mem = NULL;
        return CTC_E_NONE;
    }

    if (DRV_IS_DUET2(lchip))
    {
        return sys_duet2_dma_get_flow_stats_memory(lchip, block_id, p_mem, p_desc_done);
    }

    for(loop_pp_dsc=0; loop_pp_dsc < MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM); loop_pp_dsc++)
    {
        /* low byte of value0 records which stats block this slot serves */
        if (block_id != (p_dma_chan->p_desc_info[loop_pp_dsc].value0 & 0xff))
        {
            continue;
        }
        for (loop_pp_dep = 0; loop_pp_dep < pp_num*SYS_DMA_FLOW_STATS_DESC_DEPTH; loop_pp_dep++)
        {
            /* descriptors are interleaved: pp-block slots repeat every
               SYS_CAP_STATS_DMA_PP_BLOCK_NUM entries */
            p_desc = &(p_dma_chan->p_desc[MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM)*loop_pp_dep+loop_pp_dsc].desc_info);
            /* rebuild the 64-bit physical address (memAddr is stored >> 4) */
            COMBINE_64BITS_DATA(p_usw_dma_master[pp_base]->dma_high_addr,(GetDsDescEncap(V, memAddr_f, p_desc)<<4), phy_addr);
            p_mem[cur_index] = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
            if (p_desc_done)
            {
                p_desc_done[cur_index] = GetDsDescEncap(V, done_f, p_desc);
            }
            cur_index++;
        }
    }

    return CTC_E_NONE;
}

/**
 @brief Configure TCAM scan behavior (chip-specific dispatch).

 @param lchip  local chip id
 @param mode   0 = scan once, 1 = continuous scan, 2 = stop scan
 @param timer  scan interval, unit is minutes

 @return result of the chip-specific handler
*/
int32
sys_usw_dma_set_tcam_scan_mode(uint8 lchip, uint8 mode, uint32 timer)
{
    SYS_DMA_INIT_CHECK(lchip);

    return MCHIP_DMA(lchip)->dma_set_tcam_scan_mode(lchip, mode, timer);
}
/**
 @brief Program the MAC-stats DMA read trigger timer.

 @param lchip  local chip id
 @param timer  interval in ms; 0 disables the trigger (the timer register
               itself is left unchanged in that case, only the enable bit
               is cleared)

 @return DRV_E_NONE on success, error code from register access otherwise
*/
int32
sys_usw_dma_set_mac_stats_timer(uint8 lchip, uint32 timer)
{
    uint32 cmd = 0;
    uint64 timer_ns = 0;
    uint32 timer_v[2] = {0};
    DmaRegRd1TrigCfg_m trigger1_timer;
    DmaRegTrigEnCfg_m trigger_ctl;

    SYS_DMA_INIT_CHECK(lchip);

    if (timer)
    {
        /* ms -> ns, scaled by the chip clock down-frequency ratio;
           64-bit value is split into two 32-bit register words */
        timer_ns = (uint64)timer*1000000/DOWN_FRE_RATE;
        timer_v[0] = timer_ns&0xFFFFFFFF;
        timer_v[1] = (timer_ns >> 32) & 0xFFFFFFFF;
        cmd = DRV_IOR(DmaRegRd1TrigCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger1_timer));
        SetDmaRegRd1TrigCfg(A, cfgRegRd1TrigNs_f, &trigger1_timer, timer_v);
        cmd = DRV_IOW(DmaRegRd1TrigCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger1_timer));
    }
    /* the trigger-enable bit lives in a different register on AT-family chips */
    if (DRV_FROM_AT(lchip))
    {
        DmaRegRd1Ctl_m reg_rd_ctl;
        cmd = DRV_IOR(DmaRegRd1Ctl_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
        SetDmaRegRd1Ctl(V, cfgRegRdTrigEn_f, &reg_rd_ctl, timer? 1 : 0);/*set by port stats module*/
        cmd = DRV_IOW(DmaRegRd1Ctl_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    }
    else
    {
        cmd = DRV_IOR(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger_ctl));
        SetDmaRegTrigEnCfg(V, cfgRegRd1TrigEn_f, &trigger_ctl, (timer?1:0));    /*set by stats module*/
        cmd = DRV_IOW(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger_ctl));
    }

    return DRV_E_NONE;
}

/**
 @brief Enable or disable the NPM stats DMA read trigger (RegRd5).

 Read-modify-write of DmaRegTrigEnCfg so only the NPM trigger bit changes.

 @param lchip   local chip id
 @param enable  TRUE to enable, FALSE to disable

 @return DRV_E_NONE on success, error code from register access otherwise
*/
int32
sys_usw_dma_set_npm_stats_enable(uint8 lchip, bool enable)
{
    uint32 cmd = 0;
    DmaRegTrigEnCfg_m trig_cfg;

    SYS_DMA_INIT_CHECK(lchip);

    cmd = DRV_IOR(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_cfg));
    /* set by npm module */
    SetDmaRegTrigEnCfg(V, cfgRegRd5TrigEn_f, &trig_cfg, enable ? 1 : 0);
    cmd = DRV_IOW(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_cfg));

    return DRV_E_NONE;
}

/**
 @brief Configure the periodic packet-TX timer (chip-specific dispatch).

 @param lchip   local chip id
 @param timer   interval in seconds; 0 means disable
 @param enable  enable flag passed through to the chip handler

 @return result of the chip-specific handler
*/
int32
sys_usw_dma_set_pkt_timer(uint8 lchip, uint32 timer, uint8 enable)
{
    SYS_DMA_INIT_CHECK(lchip);

    return MCHIP_DMA(lchip)->dma_set_pkt_timer(lchip, timer, enable);
}

/**
 @brief Bind a packet to a timed TX session (chip-specific dispatch).

 @param lchip       local chip id
 @param session_id  TX session index
 @param p_pkt       packet to transmit for this session

 @return result of the chip-specific handler
*/
int32
sys_usw_dma_set_session_pkt(uint8 lchip, uint16 session_id, ctc_pkt_tx_t* p_pkt)
{
    SYS_DMA_INIT_CHECK(lchip);

    return MCHIP_DMA(lchip)->dma_set_session_pkt(lchip, session_id, p_pkt);
}

/**
 @brief Configure (or destroy) the timed packet-TX engine.

 Requires that packet TX timer support was enabled at init
 (pkt_tx_timer_en), otherwise returns CTC_E_INVALID_CONFIG.

 @param lchip        local chip id
 @param max_session  maximum number of TX sessions
 @param interval     transmit interval
 @param pkt_len      packet length per session
 @param is_destroy   non-zero to tear the configuration down

 @return result of the chip-specific handler, or CTC_E_INVALID_CONFIG
*/
int32
sys_usw_dma_set_packet_timer_cfg(uint8 lchip, uint16 max_session, uint16 interval, uint16 pkt_len, uint8 is_destroy)
{
    SYS_DMA_INIT_CHECK(lchip);

    if (!p_usw_dma_master[lchip]->pkt_tx_timer_en)
    {
        return CTC_E_INVALID_CONFIG;
    }
    return MCHIP_DMA(lchip)->dma_set_packet_timer_cfg(lchip, max_session, interval, pkt_len, is_destroy);
}

/**
 @brief Run a PCIe DMA packet-TX throughput test.

 Puts the packet TX0 channel into hardware test mode, fills the whole
 descriptor ring with copies of p_pkt_tx, lets the chip loop-transmit for
 two minutes, then reads the hardware timestamp/counter logs and prints
 packet count, byte count, pps and bps. All modified registers are restored
 on exit (cleanup runs on both success and error paths).

 Fixes vs. previous version:
 - chan_id is set before the first error exit, so the cleanup path no
   longer resets channel 0 by mistake on an early failure;
 - allocation failures now set ret (previously the function returned
   CTC_E_NONE on out-of-memory);
 - the restore of cfgPktTxCrcPadEn inside the cleanup path no longer does
   "goto error_end" (which could loop forever on a persistent IO error);
 - pps/bps computation guards against a zero elapsed-seconds divisor.

 @param lchip     local chip id
 @param p_pkt_tx  template packet (skb must already contain the bridge header)

 @return CTC_E_NONE on success, error code otherwise
*/
int32
sys_usw_dma_performance_test(uint8 lchip, ctc_pkt_tx_t* p_pkt_tx)
{
    int32    ret = CTC_E_NONE;
    uint32   cmd = 0;
    uint32   value = 1;
    uint32   step = 0;
    uint32   tbl_id = 0;
    DmaCtlTab_m       ctl_tab;
    DmaTsDebugStats_m debug_stats;
    DmaTestModeLog_m  test_log;
    DmaPktTx0Ctl_m pkt_tx_ctl;
    DmaPktTx0Ctl_m pkt_tx_ctl_w;
    DmaPktTxCrcCfg_m tx_crc;
    DmaPktTxCrcCfg_m tx_crc_w;
    DmaRegRd0Ctl_m reg_rd_ctl[6];
    DmaRegRd0Ctl_m reg_rd_ctl_w;
    DmaRegTrigEnCfg_m reg_trig;
    DmaRegTrigEnCfg_m reg_trig_w;
    DmaCtlDrainEnable_m dma_drain;
    DmaCtlDrainEnable_m dma_drain_w;
    void* addr = NULL;
    uint32 start_time[2] = {0};
    uint32 end_time[2] = {0};
    uint32 value_array[2] = {0};
    uint64 byte_count = 0;
    uint64 pkt_count = 0;
    uint64 start_time_v = 0;
    uint64 end_time_v = 0;
    uint64 elapsed_s = 0;
    uint64 phy_addr;
    sys_dma_chan_t* p_dma_chan = NULL;
    uint8  chan_id = 0;

#if(1 == SDK_WORK_PLATFORM)
    /* not meaningful on the software simulation platform */
    return CTC_E_NONE;
#endif
    /* refuse to run while the TX channel still has pending descriptors */
    cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PACKET_TX0_CHAN_ID, DRV_CMD_PP_EN(cmd), &ctl_tab));
    if(GetDmaCtlTab(V, vldNum_f, &ctl_tab))
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, "Dma tx is working, can not do performance test\n");
        return CTC_E_HW_BUSY;
    }
    p_dma_chan = &(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX0_CHAN_ID]);
    /* set before any goto so the cleanup path always resets the right channel */
    chan_id = SYS_DMA_PACKET_TX0_CHAN_ID;
    DMA_LOCK(p_dma_chan->p_mutex);

    /* disable chan first */
    value = 0;
    cmd  = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_chanEn_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, SYS_DMA_PACKET_TX0_CHAN_ID, DRV_CMD_PP_EN(cmd), &value), ret, error_end);
    if (DRV_FROM_AT(lchip))
    {
        /* save and clear desc-valid-check / crc-pad; restored at cleanup */
        cmd = DRV_IOR(DmaPktTx0Ctl_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkt_tx_ctl);
        sal_memcpy(&pkt_tx_ctl_w, &pkt_tx_ctl, sizeof(pkt_tx_ctl_w));
        SetDmaPktTx0Ctl(V, cfgDescValidChkEn_f, &pkt_tx_ctl_w, 0);
        SetDmaPktTx0Ctl(V, cfgPktTxCrcPadEn_f, &pkt_tx_ctl_w, 0);
        cmd = DRV_IOW(DmaPktTx0Ctl_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkt_tx_ctl_w);

        /* save and disable every RegRd trigger so nothing else perturbs the test */
        step = DmaRegRd1Ctl_t - DmaRegRd0Ctl_t;
        for (tbl_id = DmaRegRd0Ctl_t; tbl_id <= DmaRegRd5Ctl_t; tbl_id += step)
        {
            cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
            DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl[(tbl_id - DmaRegRd0Ctl_t)/step]);
            sal_memcpy(&reg_rd_ctl_w, &reg_rd_ctl[(tbl_id - DmaRegRd0Ctl_t)/step], sizeof(reg_rd_ctl_w));
            SetDmaRegRd0Ctl(V, cfgRegRdTrigEn_f, &reg_rd_ctl_w, 0);
            cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
            DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl_w);
        }
    }
    else
    {
        cmd = DRV_IOR(DmaPktTxCrcCfg_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &tx_crc);
        sal_memcpy(&tx_crc_w, &tx_crc, sizeof(tx_crc_w));
        SetDmaPktTxCrcCfg(V, cfgPktTxCrcPadEn_f, &tx_crc_w, 0);
        cmd = DRV_IOW(DmaPktTxCrcCfg_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &tx_crc_w);

        cmd = DRV_IOR(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &reg_trig);
        sal_memset(&reg_trig_w, 0, sizeof(reg_trig_w));
        cmd = DRV_IOW(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &reg_trig_w);
    }

    /* save drain config; enable pkt-tx drain only during the test */
    cmd = DRV_IOR(DmaCtlDrainEnable_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &dma_drain);
    sal_memset(&dma_drain_w, 0, sizeof(dma_drain_w));
    SetDmaCtlDrainEnable(V,dmaPktTxDrainEn_f, &dma_drain_w, 1);
    cmd = DRV_IOW(DmaCtlDrainEnable_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &dma_drain_w);

    CTC_ERROR_GOTO(_sys_usw_dma_reset_channel(lchip, chan_id), ret, error_end);
    value = p_dma_chan->desc_depth;
    cmd  = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_ringDepth_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &value), ret, error_end);
    CTC_ERROR_GOTO(sys_usw_dma_clear_pkt_stats(lchip, 0), ret, error_end);

    /* disable clear-on-read so the counters accumulate during the run */
    value = 0;
    cmd = DRV_IOW(DmaPktStatsCfg_t, DmaPktStatsCfg_clearOnRead_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &value), ret, error_end);

    if(g_dal_op.dma_alloc)
    {
        addr = (void*)SYS_DMA_ALLOC(lchip, p_pkt_tx->skb.len + SYS_USW_PKT_HEADER_LEN, 0);
        if (NULL == addr)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            ret = CTC_E_NO_MEMORY;   /* fix: report the failure to the caller */
            goto error_end;
        }
    }
    else
    {
        ret = CTC_E_NO_MEMORY;       /* fix: no dma_alloc op registered */
        goto error_end;
    }
    sal_memcpy((uint8*)addr, p_pkt_tx->skb.head, p_pkt_tx->skb.len + SYS_USW_PKT_HEADER_LEN);
    phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, addr);

    /* every descriptor in the ring points at the same single-buffer packet */
    for (step = 0; step < p_dma_chan->desc_depth; step++)
    {
        SetDsDescEncap(V, u1_pkt_eop_f, &(p_dma_chan->p_desc[step].desc_info), 1);
        SetDsDescEncap(V, u1_pkt_sop_f, &(p_dma_chan->p_desc[step].desc_info), 1);
        SetDsDescEncap(V, cfgSize_f, &(p_dma_chan->p_desc[step].desc_info), p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN);
        SetDsDescEncap(V, memAddr_f, &(p_dma_chan->p_desc[step].desc_info), (phy_addr >> 4));
        SetDsDescEncap(V, done_f, &(p_dma_chan->p_desc[step].desc_info), 0);
        SetDsDescEncap(V, realSize_f, &(p_dma_chan->p_desc[step].desc_info), p_pkt_tx->skb.len);
#ifdef ARCTIC
        SetDsDescEncap(V,  highAddr_f, &(p_dma_chan->p_desc[step].desc_info), p_usw_dma_master[lchip]->dma_high_addr); /*from arctic*/
        SetDsDescEncap(V, u0_pktTx_destId_f, &(p_dma_chan->p_desc[step].desc_info), (p_pkt_tx->lchip - lchip) & 0x3); /*from arctic*/
#endif
    }

    value = p_dma_chan->desc_depth;
    cmd = DRV_IOW(DmaCtlTab_t, DmaCtlTab_vldNum_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &value), ret, error_end);

    /* enter hardware packet test mode */
    value = 1;
    cmd = DRV_IOW(DmaMiscCfg_t, DmaMiscCfg_cfgDmaPktTestEn_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &value), ret, error_end);

    /* enable chan */
    value = 1;
    cmd  = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_chanEn_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &value), ret, error_end);

    /* let the chip loop-transmit for 2 minutes */
    sal_task_sleep(2*60*1000);

    cmd = DRV_IOR(DmaTsDebugStats_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &debug_stats), ret, error_end);

    cmd = DRV_IOR(DmaTestModeLog_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &test_log), ret, error_end);

    /* chan0/chan1 log start/end timestamps (ns); chan3 packs byte/packet counters */
    GetDmaTsDebugStats(A, dmaTsLogChan0_f, &debug_stats, start_time);
    GetDmaTsDebugStats(A, dmaTsLogChan1_f, &debug_stats, end_time);
    GetDmaTsDebugStats(A, dmaTsLogChan3_f, &debug_stats, value_array);
    byte_count = value_array[1]&0x7FF;
    byte_count = (byte_count<< 32)|value_array[0];
    pkt_count = GetDmaTestModeLog(V, dmaPktTxTestPktCnt_f, &test_log);
    pkt_count = (pkt_count<<20)|((value_array[1]) >> 12);
    start_time_v = start_time[1];
    start_time_v = start_time_v << 32 | start_time[0];
    end_time_v = end_time[1];
    end_time_v = end_time_v << 32 | end_time[0];
    /* fix: guard against divide-by-zero when the window rounds down to 0 s */
    elapsed_s = (end_time_v - start_time_v)/1000/1000/1000;
    if (0 == elapsed_s)
    {
        elapsed_s = 1;
    }

    /* the first 256 packets are warm-up and excluded from the rates */
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-36s\n", "Pcie Performance Test");
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-36s\n", "====================================");
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-16s: %-20"PRIu64"\n", "packet count", pkt_count-256);
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-16s: %-20"PRIu64"\n", "packet byte(kbyte)", (byte_count-256*(p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN))/1024);
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-16s: %-20"PRIu64"\n", "used   time(s)", ((end_time_v-start_time_v)/1000/1000/1000));
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-16s: %-20"PRIu64"\n", "pps", (pkt_count-256)/elapsed_s);
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-16s: %-20"PRIu64"\n", "bps(Mbits/s)", (byte_count-256*(p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN))*8/elapsed_s/1024/1024);
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "%-36s\n", "====================================");
error_end:
    /* cleanup path: best-effort restore, must never jump back to error_end */
    value = 0;
    cmd = DRV_IOW(DmaMiscCfg_t, DmaMiscCfg_cfgDmaPktTestEn_f);
    DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &value);

    sal_task_sleep(1000);

    value = 1;
    cmd = DRV_IOW(DmaPktStatsCfg_t, DmaPktStatsCfg_clearOnRead_f);
    DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &value);
    sys_usw_dma_clear_pkt_stats(lchip, 0);

    _sys_usw_dma_reset_channel(lchip, chan_id);

    value = p_dma_chan->desc_depth;
    cmd  = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_ringDepth_f);
    DRV_FIELD_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &value);
    if(addr && g_dal_op.dma_free)
    {
        SYS_DMA_FREE(lchip, (void*)addr);
    }

    value = 1;
    cmd = DRV_IOW(DmaPktTx0Ctl_t, DmaPktTx0Ctl_cfgPktTxCrcPadEn_f);
    /* fix: previously CTC_ERROR_GOTO(..., error_end) here could loop forever */
    DRV_IOCTL(lchip, 0, cmd, &value);

    p_dma_chan->current_index = 0;
    cmd = DRV_IOW(DmaCtlDrainEnable_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &dma_drain);

    if (DRV_FROM_AT(lchip))
    {
        cmd = DRV_IOW(DmaPktTx0Ctl_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkt_tx_ctl);

        step = DmaRegRd1Ctl_t - DmaRegRd0Ctl_t;
        for (tbl_id = DmaRegRd0Ctl_t; tbl_id <= DmaRegRd5Ctl_t; tbl_id += step)
        {
            cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
            DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl[(tbl_id - DmaRegRd0Ctl_t)/step]);
        }
    }
    else
    {
        cmd = DRV_IOW(DmaPktTxCrcCfg_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &tx_crc);

        cmd = DRV_IOW(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &reg_trig);
    }

    DMA_UNLOCK(p_dma_chan->p_mutex);
    return ret;
}

/**
 @brief Translate a DMA report type plus destination kind into the
        channel/fifo id to program into MapMsgDestMap.

 @param lchip      local chip id (unused; kept for interface symmetry)
 @param type       report type (SYS_DMA_REPORT_TYPE_xxx)
 @param dest_type  destination kind (SYS_DMA_EXP_DEST_TYPE_xxx)
 @param val        [out] resolved channel/fifo id

 @return CTC_E_NONE always
*/
STATIC int32
_sys_usw_dma_export_dest_map(uint8 lchip, uint8 type, uint8 dest_type, uint8* val)
{
    uint8 to_exp = (dest_type == SYS_DMA_EXP_DEST_TYPE_DMA_EXP) ? 1 : 0;

    *val = 0;

    /* ipfix ingress/egress range */
    if (type <= SYS_DMA_REPORT_TYPE_IPFIX_EGS_END)
    {
        *val = to_exp ? 0 : 2;
        return CTC_E_NONE;
    }

    /* resource-monitor range */
    if ((type >= SYS_DMA_REPORT_TYPE_IRM0) && (type <= SYS_DMA_REPORT_TYPE_ERM_EVENT))
    {
        *val = to_exp ? 1 : 3;
        return CTC_E_NONE;
    }

    /* latency monitor range */
    if ((type >= SYS_DMA_REPORT_TYPE_LATENCY_STATS) && (type <= SYS_DMA_REPORT_TYPE_LATENCY_EVENT))
    {
        *val = to_exp ? 1 : 4;
        return CTC_E_NONE;
    }

    /* efd/dlb range */
    if ((type >= SYS_DMA_REPORT_TYPE_EFD) && (type <= SYS_DMA_REPORT_TYPE_DLB))
    {
        *val = to_exp ? 2 : 5;
        return CTC_E_NONE;
    }

    if (type == SYS_DMA_REPORT_TYPE_OAM)
    {
        *val = to_exp ? 2 : 6;
        return CTC_E_NONE;
    }

    /* spn-oam range: eunit destination has its own channel */
    if ((type >= SYS_DMA_REPORT_TYPE_SPNOAM_LOCAL) && (type <= SYS_DMA_REPORT_TYPE_SPNOAM_MASTER))
    {
        if (to_exp)
        {
            *val = 2;
        }
        else
        {
            *val = (dest_type == SYS_DMA_EXP_DEST_TYPE_EUNIT) ? 0 : 7;
        }
        return CTC_E_NONE;
    }

    /* learning/fdb-dump and every remaining type share the same mapping */
    *val = to_exp ? 3 : 0;
    return CTC_E_NONE;
}

/**
 @brief Program MapMsgDestMap for one report type: which destinations
        (host CPU, eCPU0/1, DMA encap fifo) receive the report and on
        which channel/fifo id.

 @param p_map  in: type + dest_bmp (bitmap of SYS_DMA_EXP_DEST_TYPE_xxx);
               out: ring_id / eunit_ring_id / eunit1_ring_id / fifo_id
               filled with the resolved channel ids

 @return CTC_E_NONE on success, CTC_E_INVALID_PARAM / CTC_E_INVALID_CONFIG
         on bad input, or register-access error
*/
int32
sys_usw_dma_export_map(uint8 lchip, sys_usw_dma_export_map_t* p_map)
{
    uint32 cmd = 0;
    uint8 val = 0;
    MapMsgDestMap_m msg_map;

    SYS_DMA_INIT_CHECK(lchip);

    if (p_map->type >= SYS_DMA_REPORT_TYPE_MAX)
    {
        return CTC_E_INVALID_PARAM;
    }

    /* entry is rebuilt from scratch; unselected destinations stay disabled */
    sal_memset(&msg_map, 0, sizeof(MapMsgDestMap_m));

    if (CTC_IS_BIT_SET(p_map->dest_bmp, SYS_DMA_EXP_DEST_TYPE_CPU))
    {
        /*Host CPU*/
        _sys_usw_dma_export_dest_map(lchip, p_map->type, SYS_DMA_EXP_DEST_TYPE_CPU, &val);
        /* AT-family chips carry an explicit enable bit per destination */
        if (DRV_FROM_AT(lchip))
        {
            SetMapMsgDestMap(V, hcpuEn_f, &msg_map, 1);
            SetMapMsgDestMap(V, hcpuChan_f, &msg_map, val);
        }
        else
        {
            SetMapMsgDestMap(V, dmaChan_f, &msg_map, val);
        }
        p_map->ring_id = val;
    }
    if (CTC_IS_BIT_SET(p_map->dest_bmp, SYS_DMA_EXP_DEST_TYPE_EUNIT))
    {
        /*ECPU*/
        _sys_usw_dma_export_dest_map(lchip, p_map->type, SYS_DMA_EXP_DEST_TYPE_EUNIT, &val);
        if (DRV_FROM_AT(lchip))
        {
            SetMapMsgDestMap(V, ecpu0Chan_f, &msg_map, val);
            SetMapMsgDestMap(V, ecpu0En_f, &msg_map, 1);
        }
        else
        {
            SetMapMsgDestMap(V, ecpuChan_f, &msg_map, val);
        }
        p_map->eunit_ring_id = val;
    }
    if (CTC_IS_BIT_SET(p_map->dest_bmp, SYS_DMA_EXP_DEST_TYPE_EUNIT1))
    {
        /* second eCPU exists only on AT-family chips */
        if (!DRV_FROM_AT(lchip))
        {
            return CTC_E_INVALID_CONFIG;
        }

        /*ECPU1*/
        _sys_usw_dma_export_dest_map(lchip, p_map->type, SYS_DMA_EXP_DEST_TYPE_EUNIT1, &val);
        SetMapMsgDestMap(V, ecpu1Chan_f, &msg_map, val);
        SetMapMsgDestMap(V, ecpu1En_f, &msg_map, 1);
        p_map->eunit1_ring_id = val;
    }
    if (CTC_IS_BIT_SET(p_map->dest_bmp, SYS_DMA_EXP_DEST_TYPE_DMA_EXP))
    {
        /*Encap*/
        _sys_usw_dma_export_dest_map(lchip, p_map->type, SYS_DMA_EXP_DEST_TYPE_DMA_EXP, &val);
        SetMapMsgDestMap(V, encapChan_f, &msg_map, val);
        SetMapMsgDestMap(V, encapEn_f, &msg_map, 1);
        p_map->fifo_id = val;
    }
    SetMapMsgDestMap(V, mapBmp_f, &msg_map, p_map->dest_bmp);
    cmd = DRV_IOW(MapMsgDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_map->type, cmd, &msg_map));

    return CTC_E_NONE;
}

/**
 @brief Build the from-CPU bridge packet header for DMA-exported packets.

 Resolves the nexthop, then fills an MsPacketHeader with a from-CPU /
 bypass-IPE profile: highest priority, green color, TTL 64, source port
 set to the reserved OAM-CPU port of this chip.

 @param lchip          local chip id
 @param nhid           nexthop id used to resolve dest_map/dsnh_offset
 @param p_raw_hdr_net  [out] buffer of at least SYS_USW_PKT_HEADER_LEN bytes
                       (TMM/TMG: header is written at an additional
                       SYS_USW_PKT_HDR_TAIL_LEN offset - see note below)

 @return CTC_E_NONE on success, nexthop lookup error otherwise
*/
STATIC int32
_sys_usw_dma_encode_pkt_hdr(uint8 lchip, uint32 nhid, uint32* p_raw_hdr_net)
{
    uint32* p_raw_hdr              = p_raw_hdr_net;
    uint8 gchip = 0;
    uint32 src_port = 0;
    sys_nh_info_dsnh_t nh_info;

    sal_memset(p_raw_hdr, 0, SYS_USW_PKT_HEADER_LEN);
    sal_memset(&nh_info, 0, sizeof(sys_nh_info_dsnh_t));
    CTC_ERROR_RETURN(sys_usw_nh_get_nhinfo(lchip, nhid, &nh_info, 0));

    /* NOTE(review): on TMM/TMG the header starts after the tail area, so the
       Set* writes below land past the region zeroed above - presumably the
       caller's buffer covers header+tail (64 bytes); confirm */
    if ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)))
    {
        p_raw_hdr = p_raw_hdr_net + SYS_USW_PKT_HDR_TAIL_LEN / sizeof(uint32);
    }
    sys_usw_get_gchip_id(lchip, &gchip);
    src_port = CTC_MAP_LPORT_TO_GPORT(gchip, SYS_RSV_PORT_OAM_CPU_ID);
    src_port = SYS_MAP_CTC_GPORT_TO_DRV_GPORT(src_port);

    /* AT-family chips prefix the field names with data0_; values are identical */
    if (DRV_FROM_AT(lchip))
    {
        /* Must be inited */
        SetMsPacketHeader(V, data0_packetType_f, p_raw_hdr, 0);
        /* Support offload data tunnel cryption in hw */
        SetMsPacketHeader(V, data0_fromCpuOrOam_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, data0_ttl_f, p_raw_hdr, 64);
        SetMsPacketHeader(V, data0_macKnown_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, data0_fromCpu_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, data0_nextHopExt_f, p_raw_hdr, nh_info.nexthop_ext);
        SetMsPacketHeader(V, data0_bypassIngressEdit_f, p_raw_hdr, nh_info.bypass_igr_edit);
        SetMsPacketHeader(V, data0_nextHopPtr_f, p_raw_hdr, nh_info.dsnh_offset);
        /* SrcPort */
        SetMsPacketHeader(V, data0_sourcePort_f, p_raw_hdr, src_port);
        SetMsPacketHeader(V, data0_bypassIpe_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, data0_color_f, p_raw_hdr, 3); /*CTC_QOS_COLOR_GREEN*/
        SetMsPacketHeader(V, data0_prio_f, p_raw_hdr, 15);
        SetMsPacketHeader(V, data0_destMap_f, p_raw_hdr, nh_info.dest_map);
    }
    else
    {
        /* Must be inited */
        SetMsPacketHeader(V, packetType_f, p_raw_hdr, 0);
        /* Support offload data tunnel cryption in hw */
        SetMsPacketHeader(V, fromCpuOrOam_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, ttl_f, p_raw_hdr, 64);
        SetMsPacketHeader(V, macKnown_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, u3_other_fromCpu_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, nextHopExt_f, p_raw_hdr, nh_info.nexthop_ext);
        SetMsPacketHeader(V, bypassIngressEdit_f, p_raw_hdr, nh_info.bypass_igr_edit);
        SetMsPacketHeader(V, nextHopPtr_f, p_raw_hdr, nh_info.dsnh_offset);
        /* SrcPort */
        SetMsPacketHeader(V, sourcePort_f, p_raw_hdr, src_port);
        SetMsPacketHeader(V, bypassIpe_f, p_raw_hdr, 1);
        SetMsPacketHeader(V, color_f, p_raw_hdr, 3); /*CTC_QOS_COLOR_GREEN*/
        SetMsPacketHeader(V, prio_f, p_raw_hdr, 15);
        SetMsPacketHeader(V, destMap_f, p_raw_hdr, nh_info.dest_map);
    }

    return CTC_E_NONE;
}

/**
 @brief Configure the DMA export packet encapsulation for one encap fifo:
        packet header (built from the nexthop), metadata, end TLV and
        optional meter/interval timer.

 Fix: end_tlv (and on AT chips pkt_hdr / meta_cfg) were written to hardware
 with uninitialized stack contents; they are now zeroed before the field
 setters run (CERT EXP33-C).

 @param lchip  local chip id
 @param p_cfg  fifo_id (0..3), nhid, metadata[3], end_tlv, interval
               (40-bit max; 0 leaves the meter timer untouched)

 @return CTC_E_NONE on success, CTC_E_INVALID_PARAM or register error
*/
int32
sys_usw_dma_export_pkt_cfg(uint8 lchip, sys_usw_dma_export_pkt_cfg_t* p_cfg)
{
    uint32 pkt_header[64/4] = {0}; /*hdr+tail*/
    uint32 cmd = 0;
    uint32 metadata[3];
    EncapPktCfg_m pkt_cfg;
    EncapMeterCfg_m meter_cfg;
    EncapEndTlvCfg_m end_tlv;
    EncapMiscCfg_m encap_misc_cfg;

    SYS_DMA_INIT_CHECK(lchip);
    if (p_cfg->fifo_id > 3 || p_cfg->interval >> 40)
    {
        return CTC_E_INVALID_PARAM;
    }

    sal_memset(&pkt_cfg, 0, sizeof(EncapPktCfg_m));
    sal_memset(&end_tlv, 0, sizeof(EncapEndTlvCfg_m));  /* fix: was uninitialized */

    CTC_ERROR_RETURN(_sys_usw_dma_encode_pkt_hdr(lchip, p_cfg->nhid, pkt_header));
    /* hardware expects metadata words in reverse order */
    metadata[0] = p_cfg->metadata[2];
    metadata[1] = p_cfg->metadata[1];
    metadata[2] = p_cfg->metadata[0];
    if (DRV_FROM_AT(lchip))
    {
        /* AT chips split header and metadata into two tables */
        EncapPktHdr_m pkt_hdr;
        EncapMetaCfg_m meta_cfg;
        sal_memset(&pkt_hdr, 0, sizeof(EncapPktHdr_m));   /* fix: was uninitialized */
        sal_memset(&meta_cfg, 0, sizeof(EncapMetaCfg_m)); /* fix: was uninitialized */
        SetEncapPktHdr(A, pktHdr_f, &pkt_hdr, pkt_header);
        cmd = DRV_IOW(EncapPktHdr_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_cfg->fifo_id, cmd, &pkt_hdr));
        SetEncapMetaCfg(A, metaData_f, &meta_cfg, metadata);
        cmd = DRV_IOW(EncapMetaCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_cfg->fifo_id, cmd, &meta_cfg));
    }
    else
    {
        SetEncapPktCfg(A, pktHdr_f, &pkt_cfg, pkt_header);
        SetEncapPktCfg(A, metaData_f, &pkt_cfg, metadata);
        cmd = DRV_IOW(EncapPktCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_cfg->fifo_id, cmd, &pkt_cfg));
    }

    SetEncapEndTlvCfg(V, endTlv_f, &end_tlv, p_cfg->end_tlv);
    cmd = DRV_IOW(EncapEndTlvCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_cfg->fifo_id, cmd, &end_tlv));
    cmd = DRV_IOR(EncapMiscCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_misc_cfg));

    /*interval config*/
    if (p_cfg->interval)
    {
        uint32 interval[2] = {0};
        uint32 value = 0;
        interval[0] = p_cfg->interval & 0xffffffff;
        interval[1] = (p_cfg->interval >> 32) & 0xff;

        /* per-fifo meter timer field: base field id + fifo_id */
        cmd = DRV_IOR(EncapMeterCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &meter_cfg));
        DRV_SET_FIELD_A(lchip, EncapMeterCfg_t, EncapMeterCfg_encapMeter_0_timer_f + p_cfg->fifo_id, &meter_cfg, interval);
        cmd = DRV_IOW(EncapMeterCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &meter_cfg));

        value = GetEncapMiscCfg(V, cfgEncapMeterTimerEn_f, &encap_misc_cfg);
        CTC_BIT_SET(value, p_cfg->fifo_id);
        SetEncapMiscCfg(V, cfgEncapMeterTimerEn_f, &encap_misc_cfg, value);
    }
    cmd = DRV_IOW(EncapMiscCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_misc_cfg));
    return CTC_E_NONE;
}

#define _DMA_ISR_BEGIN

/**
 @brief Replenish the table-write DMA channel's valid descriptor count.

 Tops the hardware vldNum back up to the ring depth so the channel always
 has the full set of descriptors available.

 @param lchip  local chip id

 @return CTC_E_NONE (or CTC_E_NOT_INIT via the init check when hot-plug
         checking is compiled in)
*/
int32
_sys_usw_dma_wr_func(uint8 lchip)
{
    uint32 cmd = 0;
    uint32 hw_valid = 0;
    uint32 ring_depth = 0;
    DmaCtlTab_m ctl_tab;

#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif

    ring_depth = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].desc_depth;

    cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
    (DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &ctl_tab));
    hw_valid = GetDmaCtlTab(V, vldNum_f, &ctl_tab);

    /* write back only the shortfall; vldNum is add-on-write */
    if (ring_depth > hw_valid)
    {
        SetDmaCtlTab(V, vldNum_f, &ctl_tab, (ring_depth - hw_valid));
        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &ctl_tab));
    }

    return CTC_E_NONE;
}

/**
@brief DMA packet rx thread for packet rx channel
*/
STATIC void
_sys_usw_dma_pkt_rx_thread(void* param)
{
    sys_dma_thread_t* p_thread_info = (sys_dma_thread_t*)param;
    int32 ret = 0;
    uint8 index = 0;   /* always 0: only used in the single-channel branch */
    uint8 lchip = p_thread_info->lchip;
    uint8 act_chan = 0;

    /* blocks on the sync semaphore posted by the DMA ISR, then drains the
       signalled packet RX channel */
    while (1)
    {
        ret = sal_sem_take(p_thread_info->p_sync_sem, SAL_SEM_FOREVER);
        if (0 != ret)
        {
            continue;
        }
#ifndef CTC_HOT_PLUG_DIS
        SYS_LCHIP_CHECK_ACTIVE_IN_THREAD(lchip);
        ret = sys_usw_chip_get_reset_hw_en(lchip);
        if (ret > 0)
        {
            /*in reset hw recover*/
            return;
        }
#endif
        /* when rx chans use one thread, get chan_id from fifo */
        if (p_thread_info->chan_num > 1)
        {
            sal_fifo_get(p_usw_dma_master[lchip]->pkt_thread_fifo, (unsigned char*)&act_chan, sizeof(uint8));
            MCHIP_DMA(lchip)->dma_pkt_rx(lchip, act_chan, p_thread_info);
        }
        else
        {
            /* single-channel thread: the one bound channel is at index 0 */
            act_chan = p_thread_info->chan_id[index];
            MCHIP_DMA(lchip)->dma_pkt_rx(lchip, act_chan, p_thread_info);
        }
    }

    return;
}

/**
 @brief Kick a TCAM scan DMA pass and wait for it to complete.

 Under the TCAM write lock, writes DESC_NUM+1 into the scan channel's
 vldNum and then polls vldNum until the hardware has consumed all but the
 last descriptor (value == 1), sleeping 1 ms between polls (up to ~20 s,
 longer under emulation).

 @param lchip  local chip id

 @return CTC_E_NONE on completion, CTC_E_HW_BUSY on poll timeout
*/
STATIC int32
_sys_usw_dma_tcam_scan_write_valid_num(uint8 lchip)
{
    uint32 tbl_id;
    uint32 cmd = 0;
    uint32 value = DRV_ENUM(DRV_DMA_TCAM_SCAN_DESC_NUM) + 1;
    uint32 loop = 0;

    /* step1 take tcam lock */
    drv_tcam_write_lock(lchip);
    /* step2 write valid num */
    tbl_id = DmaCtlTab_t;
    cmd = DRV_IOW(tbl_id, DmaCtlTab_vldNum_f);
    DRV_FIELD_IOCTL(lchip, SYS_DMA_TCAM_SCAN_CHAN_ID, DRV_CMD_PP_EN(cmd), &value);

    /* step3 check dma done */
    cmd = DRV_IOR(tbl_id, DmaCtlTab_vldNum_f);
#ifdef EMULATION_ENV
    for (loop = 0; loop < 2000000; loop++)
#else
    for (loop = 0; loop < 20000; loop++)
#endif
    {
        DRV_FIELD_IOCTL(lchip, SYS_DMA_TCAM_SCAN_CHAN_ID, DRV_CMD_PP_EN(cmd), &value);
        if (value == 1)
        {
            break;
        }
        sal_task_sleep(1);
    }

    /* step4 release tcam lock */
    drv_tcam_write_unlock(lchip);

    if (value != 1)
    {
        return CTC_E_HW_BUSY;
    }

    return CTC_E_NONE;
}

/*TM.MX Tcam Scan: thread that re-arms the TCAM scan channel on each ISR
  signal and then unmasks the channel interrupt.*/
STATIC void
_sys_usw_dma_write_valid_num_thread(void* param)
{
    int32 ret = 0;
    uint8 act_chan = 0;
    sys_dma_thread_t* p_thread_info = (sys_dma_thread_t*)param;
    uint8 lchip = p_thread_info->lchip;
    /* AT-family chips use the per-vector function interrupt table */
    uint32 tbl_id = DRV_FROM_AT(lchip) ? DmaCtlIntrFunc0_t : DmaCtlIntrFunc_t;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};

    while (1)
    {
        ret = sal_sem_take(p_thread_info->p_sync_sem, SAL_SEM_FOREVER);
        if (0 != ret)
        {
            continue;
        }
#ifndef CTC_HOT_PLUG_DIS
        SYS_LCHIP_CHECK_ACTIVE_IN_THREAD(lchip);
        ret = sys_usw_chip_get_reset_hw_en(lchip);
        if (ret > 0)
        {
            /*in reset hw recover*/
            return;
        }
#endif
        act_chan = p_thread_info->chan_id[0];

        ret = _sys_usw_dma_tcam_scan_write_valid_num(lchip);
        if (ret)
        {
            /* NOTE(review): exiting here leaves the channel interrupt masked
               (the enable below is skipped) and kills the thread on a single
               HW_BUSY timeout - confirm this is intentional */
            return;
        }
        /* release mask channel isr */
        SYS_USW_DMA_INTR_ENABLE(lchip, tbl_id, mask, act_chan);

    }

    return;
}

/**
 @brief TCAM scan worker thread: on each ISR signal, runs the chip-specific
        scan handler for every bound channel, then unmasks the channel
        interrupt.
*/
STATIC void
_sys_usw_dma_tcam_scan_thread(void* param)
{
    int32 ret = 0;
    uint8 act_chan = 0;
    sys_dma_thread_t* p_thread_info = (sys_dma_thread_t*)param;
    uint8 index = 0;
    uint8 lchip = p_thread_info->lchip;
    /* AT-family chips use the per-vector function interrupt table */
    uint32 tbl_id = DRV_FROM_AT(lchip) ? DmaCtlIntrFunc0_t : DmaCtlIntrFunc_t;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};

    while (1)
    {
        ret = sal_sem_take(p_thread_info->p_sync_sem, SAL_SEM_FOREVER);
        if (0 != ret)
        {
            continue;
        }
#ifndef CTC_HOT_PLUG_DIS
        SYS_LCHIP_CHECK_ACTIVE_IN_THREAD(lchip);
        ret = sys_usw_chip_get_reset_hw_en(lchip);
        if (ret > 0)
        {
            /*in reset hw recover*/
            return;
        }
#endif

        for(index = 0; index < p_thread_info->chan_num; index++)
        {
            act_chan = p_thread_info->chan_id[index];

            /* check channel is enable or not */
            if (p_usw_dma_master[lchip]->dma_chan_info[act_chan].chan_en == 0)
            {
                continue;
            }

            if (MCHIP_DMA(lchip)->dma_tcam_scan_func)
            {
                ret = MCHIP_DMA(lchip)->dma_tcam_scan_func(lchip, act_chan);
            }

            /* NOTE(review): if dma_tcam_scan_func is NULL this tests the
               stale ret from a previous iteration - confirm */
            if (ret == CTC_E_NOT_INIT)
            {
                return;
            }

            /* release mask channel isr */
            SYS_USW_DMA_INTR_ENABLE(lchip, tbl_id, mask, act_chan);

        }

    }

    return;
}
/**
@brief DMA stats thread for stats channel
*/
STATIC void
_sys_usw_dma_stats_thread(void* param)
{
    int32 ret = 0;
    uint8 act_chan = 0;
    sys_dma_thread_t* p_thread_info = (sys_dma_thread_t*)param;
    uint8 index = 0;
    uint8 lchip = p_thread_info->lchip;
    /* AT-family chips use the per-vector function interrupt table */
    uint32 tbl_id = DRV_FROM_AT(lchip) ? DmaCtlIntrFunc0_t : DmaCtlIntrFunc_t;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};

    /* blocks on the sync semaphore posted by the DMA ISR, then services all
       stats channels bound to this thread */
    while (1)
    {
        ret = sal_sem_take(p_thread_info->p_sync_sem, SAL_SEM_FOREVER);
        if (0 != ret)
        {
            continue;
        }
#ifndef CTC_HOT_PLUG_DIS
        SYS_LCHIP_CHECK_ACTIVE_IN_THREAD(lchip);
        ret = sys_usw_chip_get_reset_hw_en(lchip);
        if (ret > 0)
        {
            /*in reset hw recover*/
            return;
        }
#endif

        /* scan all channel using same sync channel, process one by one */
        for(index = 0; index < p_thread_info->chan_num; index++)
        {
            act_chan = p_thread_info->chan_id[index];

            /* check channel is enable or not */
            if (p_usw_dma_master[lchip]->dma_chan_info[act_chan].chan_en == 0)
            {
                continue;
            }

            /* interrupt should be sync channel interrupt */

            ret = MCHIP_DMA(lchip)->dma_stats_func(lchip, act_chan);
            /* module torn down: exit the thread */
            if (ret == CTC_E_NOT_INIT)
            {
                return;
            }

            /* release mask channel isr */
            SYS_USW_DMA_INTR_ENABLE(lchip, tbl_id, mask, act_chan);

        }

    }

    return;
}

/**
 @brief Table-write DMA worker thread: on each ISR signal, replenishes the
        write channel's valid descriptor count and unmasks the interrupt.
*/
void
_sys_usw_dma_wr_thread(void* param)
{
    int32 ret = 0;
    uint8 act_chan = 0;
    sys_dma_thread_t* p_thread_info = (sys_dma_thread_t*)param;
    uint8 index = 0;
    uint8 lchip = p_thread_info->lchip;
    /* AT-family chips use the per-vector function interrupt table */
    uint32 tbl_id = DRV_FROM_AT(lchip) ? DmaCtlIntrFunc0_t : DmaCtlIntrFunc_t;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};

    while (1)
    {
        ret = sal_sem_take(p_thread_info->p_sync_sem, SAL_SEM_FOREVER);
        if (0 != ret)
        {
            continue;
        }
#ifndef CTC_HOT_PLUG_DIS
        SYS_LCHIP_CHECK_ACTIVE_IN_THREAD(lchip);
        ret = sys_usw_chip_get_reset_hw_en(lchip);
        if (ret > 0)
        {
            /*in reset hw recover*/
            return;
        }

#endif

        /* scan all channel using same sync channel, process one by one */
        for(index = 0; index < p_thread_info->chan_num; index++)
        {
            act_chan = p_thread_info->chan_id[index];
            /* check channel is enable or not */
            if (p_usw_dma_master[lchip]->dma_chan_info[act_chan].chan_en == 0)
            {
                continue;
            }
            /* interrupt should be sync channel interrupt */
            ret = _sys_usw_dma_wr_func(lchip);
            /* module torn down: exit the thread */
            if (ret == CTC_E_NOT_INIT)
            {
                return;
            }
            /* release mask channel isr */
            SYS_USW_DMA_INTR_ENABLE(lchip, tbl_id, mask, act_chan);
        }
    }

    return;
}

/**
 @brief Sync thread for "info" DMA channels (learning/ipfix/monitor/oam etc.).

 Blocks on the thread's sync semaphore (given by sys_usw_dma_isr_func after
 it masks the channel interrupt), then for every enabled channel served by
 this thread: runs the per-chip info handler, re-enables the masked
 interrupt, and if the current descriptor is already done again, re-raises
 the function interrupt so no completed descriptor is left unserviced.

 @param[in] param  sys_dma_thread_t* describing lchip and the served channels
*/
STATIC void
_sys_usw_dma_info_thread(void* param)
{
    int32 ret = 0;
    sys_dma_thread_t* p_thread_info = (sys_dma_thread_t*)param;
    uint8 index = 0;
    uint8 act_chan = 0;
    sys_dma_chan_t* p_dma_chan = NULL;
    /*DsDesc_m* p_desc = NULL;*/
    /*uint32 cur_index = 0;*/
    uint32 cmd = 0;
    DmaCtlIntrFunc_m intr_ctl;
    uint8 lchip = p_thread_info->lchip;
    /* AT chips use the per-instance DmaCtlIntrFunc0 table */
    uint32 tbl_id = DRV_FROM_AT(lchip) ? DmaCtlIntrFunc0_t : DmaCtlIntrFunc_t;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};

    /* NOTE(review): cmd is unconditionally reassigned below before any use */
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    while (1)
    {
        /* wait until the ISR signals pending work on one of our channels */
        ret = sal_sem_take(p_thread_info->p_sync_sem, SAL_SEM_FOREVER);
        if (0 != ret)
        {
            continue;
        }
#ifndef CTC_HOT_PLUG_DIS
        SYS_LCHIP_CHECK_ACTIVE_IN_THREAD(lchip);
        ret = sys_usw_chip_get_reset_hw_en(lchip);
        if (ret > 0)
        {
            /* in reset hw recover: terminate the thread */
            return;
        }
#endif

        /* scan all channel using same sync channel, process one by one */
        for(index = 0; index < p_thread_info->chan_num; index++)
        {
            act_chan = p_thread_info->chan_id[index];

            /*if (!CTC_IS_BIT_SET(dma_chan_isr_status[lchip], act_chan))
            {
               continue;
            }
            CTC_BIT_UNSET(dma_chan_isr_status[lchip], act_chan);*/
            /* check channel is enable or not */
            if (p_usw_dma_master[lchip]->dma_chan_info[act_chan].chan_en == 0)
            {
                continue;
            }

            p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[act_chan];
            /*cur_index = p_dma_chan->current_index;*/
            /*p_desc = &(p_dma_chan->p_desc[cur_index].desc_info);*/
            /* per-chip handler drains completed descriptors for this channel */
            ret = MCHIP_DMA(lchip)->dma_info_func(lchip, act_chan);
            if (ret == CTC_E_NOT_INIT)
            {
                /* module de-initialized: unmask and terminate the thread */
                SYS_USW_DMA_INTR_ENABLE(lchip, tbl_id, mask, act_chan);
                return;
            }

            /* release mask channel isr */
            SYS_USW_DMA_INTR_ENABLE(lchip,  tbl_id, mask, act_chan);

            /*inval dma before read*/
            SYS_USW_DMA_CACHE_INVALID(lchip, &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info), sizeof(DsDesc_m));

            /* if the next descriptor completed while we were processing,
               software re-raises the function interrupt so it is serviced */
            if (DRV_IS_DUET2(lchip))
            {
                if (GetDsDescEncap2(V, done_f, &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info)))
                {
                    sal_memset(&intr_ctl, 0, sizeof(DmaCtlIntrFunc_m));
                    SetDmaCtlIntrFunc(V, dmaIntrValidVec_f, &intr_ctl, (1<<act_chan));
                    /* NOTE(review): writes DmaCtlIntrFunc_t directly rather than
                       the chip-specific tbl_id selected above -- confirm this is
                       intended on AT (DmaCtlIntrFunc0) parts */
                    cmd = DRV_IOW(DmaCtlIntrFunc_t, DRV_ENTRY_FLAG);
                    (DRV_IOCTL(lchip, 0, cmd, &intr_ctl));
                }
            }
            else
            {
                if (GetDsDescEncap(V, done_f, &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info)))
                {
                    sal_memset(&intr_ctl, 0, sizeof(DmaCtlIntrFunc_m));
                    SetDmaCtlIntrFunc(V, dmaSupIntrVec_f, &intr_ctl, (1<<act_chan));
                    cmd = DRV_IOW(DmaCtlIntrFunc_t, DRV_ENTRY_FLAG);
                    (DRV_IOCTL(lchip, 0, cmd, &intr_ctl));
                }
            }

        }
    }

    return;
}

/**
@brief DMA function interrupt service routine
*/
/**
 @brief DMA function interrupt service routine.

 For every interrupt-capable channel whose bit is set in p_data: mask the
 channel's function interrupt, queue the channel id to the shared rx fifo
 when several rx channels share one sync thread, and wake the channel's
 sync thread via its semaphore.  The sync thread re-enables the interrupt
 after it has drained the channel.

 @param[in] lchip   local chip id
 @param[in] intr    interrupt vector number (unused here)
 @param[in] p_data  bitmap of channels with a pending function interrupt

 @return CTC_E_NONE on success, error code from sal_sem_give otherwise
*/
int32
sys_usw_dma_isr_func(uint8 lchip, uint32 intr, void* p_data)
{
    uint32* p_func_bmp = (uint32*)p_data;
    uint8* p_chan_list = NULL;
    sys_dma_thread_t* p_sync_thread = NULL;
    uint8 loop = 0;
    uint8 cur_chan = 0;
    uint8 sync_chan_id = 0;
    uint32 wr_cmd = 0;
    /* AT chips use the per-instance DmaCtlIntrFunc0 table */
    uint32 intr_tbl = DRV_FROM_AT(lchip) ? DmaCtlIntrFunc0_t : DmaCtlIntrFunc_t;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};
    uint32 put_len = 0;

    SYS_DMA_INIT_CHECK(lchip);

    wr_cmd = DRV_IOW(intr_tbl, DRV_ENTRY_FLAG);
    p_chan_list = p_usw_dma_master[lchip]->intr_chan_array;

    for (loop = 0; loop < p_usw_dma_master[lchip]->intr_chan_num; loop++)
    {
        cur_chan = p_chan_list[loop];

        /* skip channels without a pending interrupt bit */
        if (!CTC_BMP_ISSET(p_func_bmp, cur_chan))
        {
            continue;
        }

        sync_chan_id = p_usw_dma_master[lchip]->dma_chan_info[cur_chan].sync_chan;
        p_sync_thread = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, sync_chan_id);
        if (NULL == p_sync_thread)
        {
            /*means no need to create sync thread*/
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Dma init wrong please check!!chan:%d \n", cur_chan);
#endif
            continue;
        }

        /* mask this channel until the sync thread has drained it */
        mask[0] = (1 << cur_chan);
        DRV_IOCTL(lchip, INTR_INDEX_MASK_SET, DRV_CMD_PP_EN(wr_cmd), mask);

        /* several rx channels share one thread: queue the channel id */
        if ((p_sync_thread->chan_num > 1) && (cur_chan < SYS_DMA_PACKET_TX0_CHAN_ID))
        {
            put_len = sal_fifo_put(p_usw_dma_master[lchip]->pkt_thread_fifo, (unsigned char*)&p_chan_list[loop], sizeof(uint8));
            if (put_len != sizeof(uint8))
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, " Fifo is full! \n");
            }
        }

        /* wake the sync thread */
        CTC_ERROR_RETURN(sal_sem_give(p_sync_thread->p_sync_sem));
    }

    return CTC_E_NONE;
}


#define _DMA_INIT_BEGIN

/**
@brief DMA sync mechanism init for function interrupt
*/

/**
 @brief Map a DMA ring id to its per-ring control table id (AT chips).

 @param[in]  lchip     local chip id (unused, kept for API symmetry)
 @param[in]  ring_id   DMA ring index (0 .. 29)
 @param[out] table_id  control table id for the ring

 @return CTC_E_NONE on success, CTC_E_INVALID_PARAM if ring_id is out of range
*/
STATIC int32
_sys_at_dma_get_table_id_by_ring(uint8 lchip, uint8 ring_id, uint32 * table_id)
{
    uint32 table_array[] = {DmaPktRx0Ctl_t, DmaPktRx1Ctl_t, DmaPktRx2Ctl_t, DmaPktRx3Ctl_t, DmaPktRx4Ctl_t, DmaPktRx5Ctl_t,
        DmaPktRx6Ctl_t, DmaPktRx7Ctl_t, DmaPktTx0Ctl_t, DmaPktTx1Ctl_t, DmaPktTx2Ctl_t, DmaPktTx3Ctl_t, DmaRegRd0Ctl_t, DmaRegRd1Ctl_t,
        DmaRegRd2Ctl_t, DmaRegRd3Ctl_t, DmaRegRd4Ctl_t, DmaRegRd5Ctl_t, DmaRegWr0Ctl_t, DmaRegWr1Ctl_t, DmaInfo0Ctl_t, DmaInfo1Ctl_t,
        DmaInfo2Ctl_t, DmaInfo3Ctl_t, DmaInfo4Ctl_t, DmaInfo5Ctl_t, DmaInfo6Ctl_t, DmaInfo7Ctl_t, DmaScanCtl_t, DmaBatchCtl_t};

    CTC_PTR_VALID_CHECK(table_id);

    /* fix: reject out-of-range ring ids instead of reading past the array */
    if (ring_id >= sizeof(table_array) / sizeof(table_array[0]))
    {
        return CTC_E_INVALID_PARAM;
    }

    *table_id = table_array[ring_id];

    return CTC_E_NONE;
}

/**
 @brief Create the sync (bottom-half) processing thread for one DMA channel.

 The ISR (sys_usw_dma_isr_func) only masks the channel's interrupt and gives
 the channel's sync semaphore; the thread created here performs the actual
 descriptor processing.  A channel without an entry in the thread vector
 needs no thread and returns success immediately.

 @param[in] lchip  local chip id
 @param[in] chan   DMA channel id (DRV_DMA_*_CHAN_ID)

 @return CTC_E_NONE on success or when no thread is needed,
         CTC_E_NOT_INIT when task creation fails,
         CTC_E_INVALID_PARAM for an unhandled channel id
*/
int32
_sys_usw_dma_sync_init(uint8 lchip, uintptr chan)
{
    int32 ret = 0;
    int32 prio = 0;
    sys_dma_thread_t* p_thread_info = NULL;
    char buffer[SAL_TASK_MAX_NAME_LEN];
    uint64 cpu_mask = 0;
    uint32 tcam_scan_chan = chan;

    switch (GET_CHAN_TYPE(chan))
    {
        /* packet rx channels: one affinity-bound rx thread per channel */
        case DRV_DMA_PACKET_RX0_CHAN_ID:
        case DRV_DMA_PACKET_RX1_CHAN_ID:
        case DRV_DMA_PACKET_RX2_CHAN_ID:
        case DRV_DMA_PACKET_RX3_CHAN_ID:
        case DRV_DMA_PACKET_RX7_CHAN_ID:

            prio = p_usw_dma_master[lchip]->dma_thread_pri[chan];

            p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, chan);
            if (!p_thread_info)
            {
                /*means no need to create sync thread*/
                return CTC_E_NONE;
            }
            else
            {
                sal_sprintf(buffer, "ctcPktRx%d", (uint8)chan);

                cpu_mask = sys_usw_chip_get_affinity(lchip, 0);
                ret = sys_usw_task_create(lchip, &p_thread_info->p_sync_task, buffer,
                                      SAL_DEF_TASK_STACK_SIZE, prio,SAL_TASK_TYPE_PACKET,cpu_mask, _sys_usw_dma_pkt_rx_thread, (void*)p_thread_info);
                if (ret < 0)
                {
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
                    return CTC_E_NOT_INIT;

                }

                sal_memcpy(p_thread_info->desc, buffer, sizeof(buffer));
            }
            break;

        /* channels processed in-line (no sync thread) */
        case DRV_DMA_PACKET_TX0_CHAN_ID:
        case DRV_DMA_PACKET_TX1_CHAN_ID:
        case DRV_DMA_PACKET_TX2_CHAN_ID:
        case DRV_DMA_PACKET_TX3_CHAN_ID:
        case DRV_DMA_TBL_RD_CHAN_ID:
        case DRV_DMA_BATCH_CHAN_ID:
        case DRV_DMA_TBL_WR1_CHAN_ID:
            /*no need TODO */
            break;

        /* table-write channel: dedicated write thread */
        case DRV_DMA_TBL_WR_CHAN_ID:
            prio = p_usw_dma_master[lchip]->dma_thread_pri[chan];
            p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, chan);
            if (!p_thread_info)
            {
                /*means no need to create sync thread*/
                return CTC_E_NONE;
            }
            else
            {
                sal_sprintf(buffer, "Dmawr%d-%d", (uint8)(chan-SYS_DMA_TBL_WR_CHAN_ID), lchip);

                /* create dma learning thread */
                ret = sal_task_create(&p_thread_info->p_sync_task, buffer,
                                      SAL_DEF_TASK_STACK_SIZE, prio, _sys_usw_dma_wr_thread, (void*)p_thread_info);
                if (ret < 0)
                {
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
                    return CTC_E_NOT_INIT;
                }

                sal_memcpy(p_thread_info->desc, buffer, sizeof(buffer));
            }
            break;

        /* stats / scan channels: affinity-bound stats thread */
        case DRV_DMA_PORT_STATS_CHAN_ID:
        case DRV_DMA_FLOW_STATS_CHAN_ID:
        case DRV_DMA_TBL_RD1_CHAN_ID:
        case DRV_DMA_TBL_RD2_CHAN_ID:
        case DRV_DMA_BUF_SCAN_CHAN_ID:
            prio = p_usw_dma_master[lchip]->dma_thread_pri[chan];
            p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, chan);
            if (!p_thread_info)
            {
                /*means no need to create sync thread*/
                return CTC_E_NONE;
            }
            else
            {
                sal_sprintf(buffer, "DmaStats%d", (uint8)(chan-SYS_DMA_PORT_STATS_CHAN_ID));

                cpu_mask = sys_usw_chip_get_affinity(lchip, 0);
                /* create dma stats thread */
                ret = sys_usw_task_create(lchip,&p_thread_info->p_sync_task, buffer,
                                      SAL_DEF_TASK_STACK_SIZE, prio,SAL_TASK_TYPE_STATS,cpu_mask,_sys_usw_dma_stats_thread, (void*)p_thread_info);
                if (ret < 0)
                {
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
                    return CTC_E_NOT_INIT;
                }

                sal_memcpy(p_thread_info->desc, buffer, sizeof(buffer));
            }
            break;

        /* tcam scan channel: TMM parts additionally get a valid-num thread,
           and the scan-error thread is then attached to a separate channel */
        case DRV_DMA_TCAM_SCAN_CHAN_ID: /* TMM */
        case DRV_DMA_REG_MAX_CHAN_ID: /* TM */
            if (DRV_FROM_TMM(lchip))
            {
                prio = p_usw_dma_master[lchip]->dma_thread_pri[chan];
                p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, chan);

                if (!p_thread_info)
                {
                    /*means no need to create sync thread*/
                    return CTC_E_NONE;
                }
                else
                {
                    sal_sprintf(buffer, "WrNum-%d", lchip);
                    /* create a thread to process tcam scan chan desciptors done, recycle desciptors */
                    ret = sal_task_create(&p_thread_info->p_sync_task, buffer,
                                          SAL_DEF_TASK_STACK_SIZE, prio, _sys_usw_dma_write_valid_num_thread, (void*)p_thread_info);
                    if (ret < 0)
                    {
                        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
                        return CTC_E_NOT_INIT;
                    }
                    sal_memcpy(p_thread_info->desc, buffer, sizeof(buffer));
                }
                /* create a thread to process tcam scan error */
                tcam_scan_chan =  MCHIP_CAP(SYS_CAP_DMA_TCAM_SCAN_ERROR_INTR);
            }

            prio = p_usw_dma_master[lchip]->dma_thread_pri[tcam_scan_chan];
            p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, tcam_scan_chan);
            if (!p_thread_info)
            {
                /*means no need to create sync thread*/
                return CTC_E_NONE;
            }
            else
            {

                sal_sprintf(buffer, "TcamScan-%d", lchip);

                ret = sal_task_create(&p_thread_info->p_sync_task, buffer,
                                      SAL_DEF_TASK_STACK_SIZE, prio, _sys_usw_dma_tcam_scan_thread, (void*)p_thread_info);
                if (ret < 0)
                {
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
                    return CTC_E_NOT_INIT;
                }

                sal_memcpy(p_thread_info->desc, buffer, sizeof(buffer));
            }

            break;

    case DRV_DMA_HASHKEY_CHAN_ID:
            break;

    /* info channels: one shared info thread handles all of them */
    case DRV_DMA_LEARNING_CHAN_ID:
    case DRV_DMA_IPFIX_CHAN_ID:
    case DRV_DMA_SDC_CHAN_ID:
    case DRV_DMA_MONITOR_CHAN_ID:
    case DRV_DMA_BUFFER_CHAN_ID:
    case DRV_DMA_LATENCY_CHAN_ID:
    case DRV_DMA_EFD_CHAN_ID:
    case DRV_DMA_OAM_CHAN_ID:
    case DRV_DMA_SC_OAM_CHAN_ID:
        prio = p_usw_dma_master[lchip]->dma_thread_pri[chan];
        p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, chan);
        if (!p_thread_info)
        {
            /*means no need to create sync thread*/
            return CTC_E_NONE;
        }
        else
        {
            sal_sprintf(buffer, "DmaInfo%d", (uint8)(chan-SYS_DMA_LEARNING_CHAN_ID));

            /* create dma learning thread */
            cpu_mask = sys_usw_chip_get_affinity(lchip, 0);
            ret = sys_usw_task_create(lchip,&p_thread_info->p_sync_task, buffer,
                                  SAL_DEF_TASK_STACK_SIZE, prio,SAL_TASK_TYPE_FDB,cpu_mask,_sys_usw_dma_info_thread, (void*)p_thread_info);
            if (ret < 0)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
                return CTC_E_NOT_INIT;

            }

            sal_memcpy(p_thread_info->desc, buffer, sizeof(buffer));
        }
        break;

    default:
        return CTC_E_INVALID_PARAM;
    }

    return CTC_E_NONE;
}

/**
@brief DMA init for pkt dma
*/
/**
 @brief Init one packet DMA channel.

 Allocates the descriptor ring and (optionally) the per-descriptor data
 buffers, then programs DmaStaticInfo / DmaCtlTab / DmaWeightCfg and the
 rx group or per-ring (AT) control registers for the channel.  For knet
 channels the descriptor memory is owned by the kernel driver; only the
 hardware registers are programmed.

 @param[in] lchip        local chip id
 @param[in] p_chan_info  channel configuration (channel id, desc/data
                         counts, buffer sizes, knet/auto-fetch flags)

 @return CTC_E_NONE on success, CTC_E_NO_MEMORY / CTC_E_NO_RESOURCE on
         allocation failure, other error codes from register access

 @note NOTE(review): error paths after the first allocation do not free
       earlier allocations; this matches the surrounding init code, which
       treats init failure as fatal -- confirm the cleanup policy.
*/
STATIC int32
_sys_usw_dma_pkt_init(uint8 lchip, sys_dma_chan_t* p_chan_info)
{
    uint32 desc_num = 0;
    sys_dma_desc_t* p_sys_desc_pad = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 phy_addr = 0;
    DmaStaticInfo_m static_info;
    DmaCtlTab_m  tab_ctl;
    uint32 cmd = 0;
    uint32 tbl_id = 0;
    uint32 field_id = 0;
    void*  p_base_mem_addr = NULL;
    void*  p_mem_addr = NULL;
    DmaWeightCfg_m dma_weight;
    uint32 valid_cnt = 0;
    uint32 real_data_size = 0;
    int32  ret = CTC_E_NONE;
    dal_dma_chan_t knet_dma_chan_info;
    ds_t ds;
    sys_dma_desc_info_t* p_sys_desc_info = NULL;
    sal_fifo_t* p_data_fifo = NULL;
    uint32* data_idx_array = NULL;
    uint32 index = 0;
    uint32 len = 0;
    uint32 data_addr = 0;
    uint32 step = 0;

    CTC_PTR_VALID_CHECK(p_chan_info);
    sal_memset(&ds, 0, sizeof(ds_t));

    /* knet channel: descriptor memory lives in the kernel driver */
    if (p_chan_info->pkt_knet_en )
    {
        if (p_usw_dma_master[lchip]->wb_keep_knet && p_usw_dma_master[lchip]->wb_reloading)
        {
            return CTC_E_NONE;
        }

#ifdef _SAL_LINUX_UM
        sal_memset(&knet_dma_chan_info, 0, sizeof(dal_dma_chan_t));
        knet_dma_chan_info.lchip = lchip;
        knet_dma_chan_info.channel_id = p_chan_info->channel_id;
        knet_dma_chan_info.wb_keep = p_usw_dma_master[lchip]->wb_keep_knet;
        knet_dma_chan_info.active = 1;
        knet_dma_chan_info.desc_num = p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].desc_num;
        knet_dma_chan_info.desc_depth = p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].desc_depth;
        knet_dma_chan_info.data_size = p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].data_size;
        ret = dal_knet_chan_register(lchip, &knet_dma_chan_info);
        if (ret < 0)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Knet channel register fail\n");
            return CTC_E_NO_MEMORY;
        }
        phy_addr = knet_dma_chan_info.mem_base;
        p_sys_desc_pad = (sys_dma_desc_t*)SYS_DMA_PHY_TO_LOGIC(lchip, knet_dma_chan_info.mem_base);
#endif
        goto cfg_hw;
    }

    /* allocate the descriptor ring */
    desc_num  = p_chan_info->desc_depth;
    p_sys_desc_pad = (sys_dma_desc_t*)SYS_DMA_ALLOC(lchip, (desc_num) * sizeof(sys_dma_desc_t), 0);
    if (NULL == p_sys_desc_pad)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
        return CTC_E_NO_MEMORY;
    }
    sal_memset(p_sys_desc_pad, 0, sizeof(sys_dma_desc_t)*desc_num);

    /* allocate and attach per-descriptor data buffers */
    if (p_chan_info->data_size)
    {
        real_data_size = DATA_SIZE_ALIGN(p_chan_info->data_size);
        p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].data_size = real_data_size;
        p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].cfg_size = real_data_size;
        p_chan_info->data_size = real_data_size;
        p_chan_info->cfg_size = real_data_size;
        p_base_mem_addr = SYS_DMA_ALLOC(lchip, real_data_size*p_chan_info->data_num, 0);
        if (NULL == p_base_mem_addr)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_base_mem_addr, 0, real_data_size*p_chan_info->data_num);

        p_sys_desc_info = (sys_dma_desc_info_t*)mem_malloc(MEM_DMA_MODULE, (desc_num)*sizeof(sys_dma_desc_info_t));
        if (!p_sys_desc_info)
        {
            return CTC_E_NO_MEMORY;
        }

        /* desc_num : data_num = 1:n, use fifo for packet rx */
        if (p_chan_info->data_num > p_chan_info->desc_num && p_chan_info->channel_id < SYS_DMA_PACKET_TX0_CHAN_ID)
        {
            p_data_fifo = sal_fifo_create(p_chan_info->data_num*sizeof(uint32));
            if (NULL == p_data_fifo)
            {
                /* 1:N mapping fail, fall back to 1:1 desc/data mapping */
                p_chan_info->data_num = p_chan_info->desc_num;
            }
            else
            {
                p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_data_fifo = p_data_fifo;
                /* all data is available, put every buffer's phy address into the fifo */
                data_idx_array = (uint32*)mem_malloc(MEM_DMA_MODULE, p_chan_info->data_num*sizeof(uint32));
                if (NULL == data_idx_array)
                {
                    return CTC_E_NO_MEMORY;
                }
                for (index = 0; index < p_chan_info->data_num; index++)
                {
                    p_mem_addr = p_base_mem_addr+real_data_size*index;
                    phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
                    data_idx_array[index] = phy_addr;
                }
                /*store phy address in fifo*/
                len = sal_fifo_put(p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_data_fifo,
                        (unsigned char*)data_idx_array, p_chan_info->data_num*sizeof(uint32));
                if (len != p_chan_info->data_num*sizeof(uint32))
                {
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Fifo is not full! \n");
                }
                /* fix: the fifo copied the addresses, the staging array is no
                   longer needed -- free it to avoid a per-channel leak */
                mem_free(data_idx_array);
                data_idx_array = NULL;
            }
        }

        for (desc_num = 0; desc_num < p_chan_info->desc_num; desc_num++)
        {
            p_desc = (DsDesc_m*)&(p_sys_desc_pad[desc_num].desc_info);

            if (p_chan_info->func_type == SYS_DMA_FUNC_PACKET_TX)
            {/* set desc done, optimize dma tx logic */
                SetDsDescEncap2(V, done_f, p_desc, 1);
            }

            /* 1:N mode: pull a buffer address from the fifo; 1:1 mode:
               buffers are laid out contiguously after the base address */
            if (p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_data_fifo)
            {
                len = sal_fifo_get(p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_data_fifo,
                    (unsigned char*)&data_addr, sizeof(uint32));
                if (len < sizeof(uint32))
                {
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Fifo is Empty! \n");
                }
                phy_addr = data_addr;
                p_sys_desc_info[desc_num].data_addr = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
            }
            else
            {
                p_mem_addr = p_base_mem_addr + p_chan_info->data_size*desc_num;
                p_sys_desc_info[desc_num].data_addr = p_mem_addr;
                phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            }

            SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncap(V, highAddr_f, p_desc, p_usw_dma_master[lchip]->dma_high_addr);
            SetDsDescEncap2(V, cfgSize_f, p_desc, p_chan_info->cfg_size);
            if (p_chan_info->auto_fetch_en)
            {
                if (DRV_FROM_TMM(lchip))
                {
                    SetDsDescEncap(V, valid_f, p_desc, 1);
                }
                else
                {
                    SetDsDescEncap2(V, error_f, p_desc, 1);
                    SetDsDescEncap2(V, chipAddr_f, p_desc, SYS_USW_DMA_DIRECT_ADDR);
                }
                /* fix: parenthesize the macro condition */
                if (DRV_FROM_AT(lchip))
                {
                    SetDsDescEncap(V, descValid0_f, p_desc, 1); /*from arctic*/
                }
            }
        }
    }

    /* create chan pool for DMA pkt rx process; fix: create it only once --
       re-creating it for every channel leaked the previous fifo */
    if (NULL == p_usw_dma_master[lchip]->pkt_thread_fifo)
    {
        p_usw_dma_master[lchip]->pkt_thread_fifo = sal_fifo_create(SYS_USW_DMA_CHAN_POOL_SIZE*sizeof(uint8));
        if (NULL == p_usw_dma_master[lchip]->pkt_thread_fifo)
        {
            return CTC_E_NO_RESOURCE;
        }
    }

    /* cfg static infor for dmc channel:MemBase, ring depth */
    sal_memset(&static_info, 0, sizeof(DmaStaticInfo_m));
    phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_sys_desc_pad);

cfg_hw:
    cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));
    SetDmaStaticInfo(V, highBase_f, &static_info, p_usw_dma_master[lchip]->dma_high_addr);
    SetDmaStaticInfo(V, ringBase_f, &static_info, (phy_addr >> 4));
    SetDmaStaticInfo(V, ringDepth_f, &static_info, p_chan_info->desc_depth);
    cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));

    /* cfg DmaCtlTab for VldNum; tx channels also need the tx mutex */
    if ((p_chan_info->channel_id == SYS_DMA_PACKET_TX0_CHAN_ID)
        || (p_chan_info->channel_id == SYS_DMA_PACKET_TX1_CHAN_ID)
        || (p_chan_info->channel_id == SYS_DMA_PACKET_TX2_CHAN_ID)
        || (p_chan_info->channel_id == SYS_DMA_PACKET_TX3_CHAN_ID))
    {
        valid_cnt = p_chan_info->auto_fetch_en?p_chan_info->desc_num:0;
        /* channel mutex, only use fot DMA Tx */
#ifndef PACKET_TX_USE_SPINLOCK
        ret = sal_mutex_create(&(p_chan_info->p_mutex));
#else
        ret = sal_spinlock_create((sal_spinlock_t**)&(p_chan_info->p_mutex));
#endif
        if (ret || !(p_chan_info->p_mutex))
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No resource in ASIC \n");
            return CTC_E_NO_RESOURCE;
        }
    }
    else
    {
        valid_cnt = p_chan_info->desc_num;
    }

    sal_memset(&tab_ctl, 0, sizeof(DmaCtlTab_m));
    cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &tab_ctl));
    SetDmaCtlTab(V, vldNum_f, &tab_ctl, valid_cnt);
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &tab_ctl));

    /* cfg weight and packet rx group stall threshold */
    sal_memset(&dma_weight, 0, sizeof(DmaWeightCfg_m));
    field_id = DmaWeightCfg_cfgChan0Weight_f + p_chan_info->channel_id;
    cmd = DRV_IOR(DmaWeightCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_weight));
    DRV_SET_FIELD_V(lchip, DmaWeightCfg_t, field_id, &dma_weight, (p_chan_info->weight));
    cmd = DRV_IOW(DmaWeightCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_weight));

    if (p_chan_info->channel_id < SYS_DMA_PACKET_TX0_CHAN_ID)
    {
        cmd = DRV_IOR(DmaPktRxGroupMode_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
        SetDmaPktRxGroupMode(V, cfgPktRxGroupMode_f, &ds, 1);
        cmd = DRV_IOW(DmaPktRxGroupMode_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));

        cmd = DRV_IOR(DmaPktRxGroupThrd_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
        step = DmaPktRxGroupThrd_cfgPktRx1GroupThrdHi_f - DmaPktRxGroupThrd_cfgPktRx0GroupThrdHi_f;
        SetDmaPktRxGroupThrd(V, cfgPktRx0GroupThrdHi_f + step * p_chan_info->channel_id, &ds, 50);
        SetDmaPktRxGroupThrd(V, cfgPktRx0GroupThrdLow_f + step * p_chan_info->channel_id, &ds, 20);
        cmd = DRV_IOW(DmaPktRxGroupThrd_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
    }

    /* AT chips: per-ring control table carries weight/byte-order/threshold */
    if (DRV_FROM_AT(lchip))
    {
        host_type_t byte_order = drv_get_host_type(lchip);
        _sys_at_dma_get_table_id_by_ring(lchip, p_chan_info->channel_id, &tbl_id);
        field_id = (p_chan_info->channel_id < SYS_DMA_PACKET_TX0_CHAN_ID) ? DmaPktRx0Ctl_cfgChanWeight_f : DmaPktTx0Ctl_cfgChanWeight_f;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
        DRV_SET_FIELD_V(lchip, tbl_id, field_id, &ds, (p_chan_info->weight));
        if (p_chan_info->channel_id < SYS_DMA_PACKET_TX0_CHAN_ID)
        {
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktRx0Ctl_cfgPktRxByteOrderEn_f, &ds, (byte_order == HOST_LE) ? 1 : 0);
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktRx0Ctl_cfgPktRxByteOrderNum_f, &ds, SYS_USW_PKT_HEADER_LEN / sizeof(uint32) - 1);
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktRx0Ctl_cfgPktRxGroupMode_f, &ds, 1);
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktRx0Ctl_cfgPktRxGroupThrdHi_f, &ds, 50);
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktRx0Ctl_cfgPktRxGroupThrdLow_f, &ds, 20);
        }
        else if (p_chan_info->channel_id >= SYS_DMA_PACKET_TX0_CHAN_ID && p_chan_info->channel_id <= SYS_DMA_PACKET_TX3_CHAN_ID)
        {
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktTx0Ctl_cfgPktTxByteOrderEn_f, &ds, (byte_order == HOST_LE) ? 1 : 0);
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktTx0Ctl_cfgPktTxByteOrderNum_f, &ds, SYS_USW_PKT_HEADER_LEN / sizeof(uint32) - 1);
            DRV_SET_FIELD_V(lchip, tbl_id, DmaPktTx0Ctl_cfgDescValidChkEn_f, &ds, (p_chan_info->channel_id != SYS_DMA_PKT_TX_TIMER_CHAN_ID)?1:0);
        }
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
    }

    /* tx channels: bookkeeping arrays for desc-in-use tracking */
    if ((p_chan_info->channel_id == SYS_DMA_PACKET_TX0_CHAN_ID)
        || (p_chan_info->channel_id == SYS_DMA_PACKET_TX1_CHAN_ID)
        || (p_chan_info->channel_id == SYS_DMA_PACKET_TX2_CHAN_ID)
        || (p_chan_info->channel_id == SYS_DMA_PACKET_TX3_CHAN_ID))
    {
        sys_dma_tx_mem_t* p_tx_mem_info_tmp = NULL;
        /*packet tx need allocate memory for record desc used state */
        p_chan_info->p_desc_check = (sys_dma_tx_check_t*)mem_malloc(MEM_DMA_MODULE, sizeof(sys_dma_tx_check_t)*p_chan_info->desc_num);
        if (NULL == p_chan_info->p_desc_check)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_chan_info->p_desc_check, 0, sizeof(sys_dma_tx_check_t)*p_chan_info->desc_num);
        p_chan_info->p_tx_mem_info = (sys_dma_tx_mem_t*)mem_malloc(MEM_DMA_MODULE, sizeof(sys_dma_tx_mem_t)*p_chan_info->desc_num);
        if (NULL == p_chan_info->p_tx_mem_info)
        {
            mem_free(p_chan_info->p_desc_check);
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_chan_info->p_tx_mem_info, 0, sizeof(sys_dma_tx_mem_t)*p_chan_info->desc_num);

        for (desc_num=0;desc_num<p_chan_info->desc_num;desc_num++)
        {
            p_tx_mem_info_tmp = p_chan_info->p_tx_mem_info+desc_num;
            p_tx_mem_info_tmp->p_mem_addr = p_base_mem_addr+real_data_size*desc_num;
        }
    }

    /* publish channel state into the master structure */
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc = p_sys_desc_pad;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].mem_base = (uintptr)p_base_mem_addr;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc_check = p_chan_info->p_desc_check;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_mutex = p_chan_info->p_mutex;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].threshold =  p_chan_info->desc_depth/4;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_tx_mem_info = p_chan_info->p_tx_mem_info;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc_info  = p_sys_desc_info;
    return CTC_E_NONE;
}
/**
 @brief Build DMA tcam-scan descriptors for one tcam memory block.

 Queries the tcam block's hardware address and size, splits blocks whose
 byte size exceeds the descriptor's 16-bit cfgSize field into two
 half-size descriptors, allocates a DMA data buffer per descriptor and
 fills in the descriptor fields.

 @param[in]     lchip            local chip id
 @param[in]     mem_id           tcam memory block id
 @param[in,out] p_desc_index     next free descriptor slot; advanced by the
                                 number of descriptors built
 @param[in]     p_sys_desc_pad   descriptor ring to fill
 @param[in]     p_sys_desc_info  per-descriptor bookkeeping array

 @return CTC_E_NONE on success (including absent blocks), CTC_E_NO_MEMORY
         if a data buffer cannot be allocated
*/
STATIC int32
_sys_usw_dma_tcam_scan_init(uint8 lchip, uint32 mem_id, uint16* p_desc_index, sys_dma_desc_t* p_sys_desc_pad,
                                                                            sys_dma_desc_info_t* p_sys_desc_info)
{
    uint32 base_addr = 0;
    uint32 cfg_addr = 0;
    uint32 cfg_size = 0;
    uint32 entry_num = 0;
    uint32 per_entry_size = 0;
    uint32 phy_addr = 0;
    void*  p_mem_addr = NULL;
    DsDesc_m* p_desc = NULL;
    drv_ftm_tcam_info_t tcam_info;
    uint8  max_sub_id = 0;
    uint8  sub_id = 0;

    CTC_ERROR_RETURN(drv_usw_ftm_get_tcam_memory_info(lchip, mem_id, &base_addr, &entry_num,  &per_entry_size, &tcam_info));
    if(entry_num == 0)
    {
        /* block not present in this memory profile, nothing to scan */
        return CTC_E_NONE;
    }

    cfg_size = entry_num*tcam_info.entry_offset;
    if(cfg_size > 0xFFFF)
    {
        /* too large for one descriptor's cfgSize field, split in two */
        max_sub_id = 2;
        cfg_size = cfg_size/2;
    }
    else
    {
        max_sub_id = 1;
    }

    for(sub_id = 0; sub_id < max_sub_id; sub_id++)
    {
        /* fix: derive each sub-block address from the block base instead of
           accumulating into cfg_addr (the accumulation only happened to be
           correct because max_sub_id never exceeds 2) */
        cfg_addr = base_addr + (cfg_size*sub_id);

        /* fix: verify the allocation before publishing it in desc_info */
        p_mem_addr = SYS_DMA_ALLOC(lchip, cfg_size, 0);
        if (NULL == p_mem_addr)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_mem_addr, 0, cfg_size);

        p_sys_desc_info[(*p_desc_index)].value0 = mem_id << 8 | sub_id;
        p_sys_desc_info[(*p_desc_index)].value1 = tcam_info.entry_offset;
        p_sys_desc_info[(*p_desc_index)].data_addr = p_mem_addr;
        p_desc = (DsDesc_m*)&(p_sys_desc_pad[(*p_desc_index)].desc_info);

        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);

        SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
        SetDsDescEncap2(V, cfgSize_f, p_desc, cfg_size);
        SetDsDescEncap2(V, chipAddr_f, p_desc, cfg_addr);
        SetDsDescEncap(V, tsAddr_f, p_desc, cfg_addr);/* TMM */
        SetDsDescEncap2(V, dataStruct_f, p_desc, tcam_info.entry_offset>>2);

        if ((*p_desc_index) == 0)
        {
            /*first desc should cfg pause*/
            SetDsDescEncap2(V, pause_f, p_desc, 1);
        }
        (*p_desc_index)++;
    }

    return CTC_E_NONE;
}

/**
 @brief Build the flow-stats DMA descriptor ring for AT-family chips.

 Walks every stats ram of every packet processor (pp) and fills one DMA
 descriptor per (pp, dp, ram) combination into p_sys_desc_pad, allocating a
 DMA data buffer for each descriptor and recording it in p_sys_desc_info so
 the ISR can locate (and ownership code can free) it later.

 @param lchip            local chip id
 @param p_sys_desc_pad   descriptor ring to fill (caller-allocated)
 @param p_sys_desc_info  per-descriptor bookkeeping array (caller-allocated)
 @return CTC_E_NONE on success, CTC_E_NO_MEMORY if a data buffer allocation fails

 NOTE(review): return type is uint32, but CTC_E_NO_MEMORY is a negative
 int32 error code; callers (CTC_ERROR_RETURN) rely on implicit conversion.
 Consider declaring this int32 — confirm against the header prototype first.
*/
uint32
_sys_at_dma_flow_stats_desc_init(uint8 lchip, sys_dma_desc_t* p_sys_desc_pad, sys_dma_desc_info_t* p_sys_desc_info)
{
    uint16 ram = 0;
    uint16 ram_base = 0;
    uint32 tbl_id = 0;
#ifdef ARCTIC
    uint8 core_id = 0;
    uint8 dp_base = 0;
#endif
    uint8 pp_id = 0;
    uint8 dp_id = 0;
    uint8 loop_num = 0;
    uint32 entry_num = 0;
    uint8 loop = 0;
    uint8 loop_pp = 0;
    uint16 dsc_id = 0;
    uint32 data_size = 0;
    uint32 cfg_addr = 0;
    uint32 phy_addr = 0;
    uint32 entry_idx = 0;
    DsDesc_m* p_desc = NULL;
    void*  p_mem_addr = NULL;
    uint8 pp_bmp = SYS_PP_BMP(lchip);
    uint8 pp_num = SYS_PP_NUM(lchip);

    /* in non-fifo-sync mode the first SYS_USW_DMA_STATS_FIFO_SYNC_NUM rams are
     * skipped (presumably handled by the fifo sync path — TODO confirm) */
    ram_base = !p_usw_dma_master[lchip]->flow_stats_sync_mode ? SYS_USW_DMA_STATS_FIFO_SYNC_NUM : 0;
    ram = ram_base;
    do
    {
        drv_usw_ftm_get_flow_stats_table_id(lchip, ram, &tbl_id);
        loop_num = 1;
        entry_idx = 0;
        entry_num = DRV_TABLE_MAX_INDEX(lchip, tbl_id);
#ifdef ARCTIC
        dp_base = 1;
        if (MEM_TYPE_PER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id))
        {
            /* one descriptor per dp */
            loop_num = 2;
        }
        else if (MEM_TYPE_PEER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id))
        {
            /* peer-dp tables are split in half; dual-core mode doubles the loops */
            loop_num = SYS_VCHIP_DUAL_CORE_MODE(lchip) ? 4 :2;
            dp_base = SYS_VCHIP_DUAL_CORE_MODE(lchip) ? 2 :1;
            entry_idx = DRV_TABLE_MAX_INDEX(lchip, tbl_id)/2;
            entry_num = entry_idx;
        }
#endif
        if (!CTC_IS_BIT_SET(pp_bmp, pp_id))
        {
            /* pp not present: emit no descriptors for this ram */
            loop_num = 0;
        }
        for(loop = 0;loop<loop_num;loop++)
        {
            dp_id = loop;
            p_desc = (DsDesc_m*)&(p_sys_desc_pad[dsc_id].desc_info);
            /* only 8-byte-offset rams are DMA-read in full; others get the
             * not-processed marker size */
            data_size = (8 == TABLE_ENTRY_OFFSET(lchip, tbl_id))? (TABLE_ENTRY_OFFSET(lchip, tbl_id)*entry_num):SYS_DMA_DESC_NOT_PROC_SIZE;
            p_mem_addr = SYS_DMA_ALLOC(lchip, data_size, 0);
            if (NULL == p_mem_addr)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
                return CTC_E_NO_MEMORY;
            }
            sal_memset(p_mem_addr, 0, data_size);
            p_sys_desc_info[dsc_id].data_addr = p_mem_addr;
            /* odd loops in dual-core mode address the upper half of the table */
            drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, tbl_id, ((loop & 0x1) && SYS_VCHIP_DUAL_CORE_MODE(lchip))? entry_idx : 0, &cfg_addr);
            /* value0 packs pp_id[15:10] | dp_id[9:8] | ram[7:0] for the ISR */
            p_sys_desc_info[dsc_id].value0 = ((((pp_id << 10) | (dp_id << 8) | ram))& 0xffff);
            /*use burst to read*/
            cfg_addr |= 0x1;

            phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncap2(V, cfgSize_f, p_desc, data_size);
            SetDsDescEncap2(V, chipAddr_f, p_desc, cfg_addr);
            SetDsDescEncap(V, tsAddr_f, p_desc, cfg_addr);
            SetDsDescEncap2(V, dataStruct_f, p_desc, TABLE_ENTRY_SIZE(lchip, tbl_id)>>2);
            /* first descriptor of each stats block pauses the engine */
            SetDsDescEncap2(V, pause_f, p_desc, (0 == dsc_id%MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM))? 1: 0);

#ifdef ARCTIC
{
            SetDsDescEncap(V, highAddr_f, p_desc, p_usw_dma_master[lchip]->dma_high_addr);
            SetDsDescEncap(V, u0_reg_sliceBmp_f, p_desc, 1 << ((pp_id) % PP_NUM_PER_CORE));
            SetDsDescEncap(V, u1_reg_dpBmp_f, p_desc, 1 << (dp_id/dp_base));
            SetDsDescEncap(V, u1_reg_coreBmp_f, p_desc, 1 << (core_id));
}
#endif
            dsc_id++;
        }
        ram++;
        if (ram >= MCHIP_CAP(SYS_CAP_STATS_RAM_NUM))
        {
            /* all rams of this pp done: restart ram walk on the next pp */
            ram = ram_base;
            pp_id++;
            pp_id = pp_id % pp_num;

#ifdef ARCTIC
            core_id = pp_id / PP_NUM_PER_CORE;
#endif
            loop_pp++;
        }
    }
    while(loop_pp < (pp_num*SYS_DMA_FLOW_STATS_DESC_DEPTH) && dsc_id < (MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM)*SYS_DMA_FLOW_STATS_DESC_DEPTH));

    return CTC_E_NONE;
}

/**
@brief DMA init for reg dma
*/
/**
 @brief DMA init for reg dma

 Builds the descriptor ring and per-descriptor DMA data buffers for one
 register-style channel (port stats, flow stats, tcam scan, dot1ae/macsec
 stats, npm stats, table rd/wr, batch), then programs the channel's
 DmaStaticInfo (ring base/depth), DmaCtlTab (valid count) and weight
 registers, and finally records the ring in the per-chip master db.

 @param lchip        local chip id
 @param p_chan_info  channel configuration (channel id, desc depth/num, sizes)
 @return CTC_E_NONE on success, negative CTC_E_xxx on failure

 NOTE(review): allocation failures inside the per-descriptor loops still
 leak earlier loop allocations (as in the original code); only the cheap
 mutex-failure leaks are fixed here.
*/
STATIC int32
_sys_usw_dma_reg_init(uint8 lchip, sys_dma_chan_t* p_chan_info)
{
    uint32 desc_num = 0;
    sys_dma_desc_t* p_sys_desc_pad = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 phy_addr = 0;
    DmaStaticInfo_m static_info;
    DmaCtlTab_m  tab_ctl;
    uint32 cmd = 0;
    uint32 tbl_id = 0;
    uint32 field_id = 0;
    void*  p_mem_addr = NULL;
    DmaWeightCfg_m dma_weight;
    uint32 valid_cnt = 0;
    uint32 cfg_addr = 0;
    DmaPktStatsCfg_m stats_cfg;
    uint16 index = 0;
    uint8 mac_type = 0;
    uint32 tbl_idx = 0;
    int32 ret = 0;
    sys_dma_desc_info_t* p_sys_desc_info = NULL;
    uint32 data_size = 0;
    uint32 ram_id  =0;
    uint8 is_tmm = (DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip));
    ds_t ds;

    CTC_PTR_VALID_CHECK(p_chan_info);
    sal_memset(&ds, 0, sizeof(ds_t));

    desc_num  = p_chan_info->desc_depth;
    if (!DRV_FROM_AT(lchip) || (p_chan_info->channel_id != SYS_DMA_REG_BUF_SCAN_CHAN_ID))
    {
        /*AT: BufferScan not go here*/
        /* cfg desc num */
        p_sys_desc_pad = (sys_dma_desc_t*)SYS_DMA_ALLOC(lchip, (desc_num) * sizeof(sys_dma_desc_t), 0);
        if (NULL == p_sys_desc_pad)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_sys_desc_pad, 0, sizeof(sys_dma_desc_t)*desc_num);

        /* alloc desc info, to store desc data memory in db */
        p_sys_desc_info = (sys_dma_desc_info_t*)mem_malloc(MEM_DMA_MODULE, (desc_num)*sizeof(sys_dma_desc_info_t));
        if (!p_sys_desc_info)
        {
            SYS_DMA_FREE(lchip, p_sys_desc_pad);
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_sys_desc_info, 0, sizeof(sys_dma_desc_info_t)*desc_num);
    }
    if (p_chan_info->channel_id == SYS_DMA_PORT_STATS_CHAN_ID)
    {
        /* one descriptor per mac; sync of stats is serialized by the mutex */
        ret = sal_mutex_create(&(p_chan_info->p_mutex));
        if (ret || !(p_chan_info->p_mutex))
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No resource in ASIC \n");
            /* fix: do not leak the descriptor ring and info array on failure */
            SYS_DMA_FREE(lchip, p_sys_desc_pad);
            mem_free(p_sys_desc_info);
            return CTC_E_NO_RESOURCE;
        }
        for (index = 0; index < p_chan_info->desc_num; index++)
        {
            /* TODO all mac valid */
            p_sys_desc_info[index].value0 = index;
            p_desc = (DsDesc_m*)&(p_sys_desc_pad[index].desc_info);

            p_mem_addr = SYS_DMA_ALLOC(lchip, p_chan_info->data_size, 0);
            p_sys_desc_info[index].data_addr = p_mem_addr;
            if (NULL == p_mem_addr)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
                return CTC_E_NO_MEMORY;
            }

            sal_memset(p_mem_addr, 0, p_chan_info->data_size);
            _sys_usw_dma_get_mac_address(lchip, mac_type, index,  &cfg_addr);
            /*use burst to read*/
            cfg_addr |= 0x1;
            phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncap2(V, cfgSize_f, p_desc, p_chan_info->cfg_size);
            SetDsDescEncap2(V, chipAddr_f, p_desc, cfg_addr);
            SetDsDescEncap(V, tsAddr_f, p_desc, cfg_addr);/* TMM */
            SetDsDescEncap2(V, dataStruct_f, p_desc, SYS_USW_DMA_STATS_WORD);
            if (index == 0)
            {
                /*first desc should cfg pause*/
                SetDsDescEncap2(V, pause_f, p_desc, 1);

            }
            if (DRV_FROM_AT(lchip))
            {
                SetDsDescEncap(V, highAddr_f, p_desc, p_usw_dma_master[lchip]->dma_high_addr);
                SetDsDescEncap(V, u0_reg_sliceBmp_f, p_desc, 0);
                SetDsDescEncap(V, u1_reg_dpBmp_f, p_desc, 0);
                SetDsDescEncap(V, u1_reg_coreBmp_f, p_desc, ((index < 160) || (index >= 320 && index <= 321)) ? 0x1 : 0x2);
            }
        }
    }
    else if (p_chan_info->channel_id == SYS_DMA_FLOW_STATS_CHAN_ID)
    {
        uint32  dword = 0;
        uint32  offset = 0;
        ret = sal_mutex_create(&(p_chan_info->p_mutex));
        if (ret || !(p_chan_info->p_mutex))
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No resource in ASIC \n");
            /* fix: do not leak the descriptor ring and info array on failure */
            SYS_DMA_FREE(lchip, p_sys_desc_pad);
            mem_free(p_sys_desc_info);
            return CTC_E_NO_RESOURCE;
        }

        for (index = 0; index < p_chan_info->desc_num; index++)
        {
            if(DRV_FROM_AT(lchip))
            {
                /* AT builds the whole ring in one shot */
                CTC_ERROR_RETURN(_sys_at_dma_flow_stats_desc_init(lchip, p_sys_desc_pad, p_sys_desc_info));
                break;
            }
            else if(is_tmm)
            {
                if(drv_usw_ftm_get_flow_stats_table_id(lchip, index%MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM), &tbl_id))
                {
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " drv_usw_ftm_get_flow_stats_table_id error index=[%u] \n",index);
                    return CTC_E_NOT_EXIST;
                }
                dword = TABLE_ENTRY_OFFSET(lchip, tbl_id)>>2;
                data_size = ((DRV_TABLE_MAX_INDEX(lchip, tbl_id)) * dword*4);
            }
            else
            {
                sys_usw_flow_stats_get_ram_info(lchip, index%MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM), &tbl_idx, &offset);
                data_size = offset*DRV_TABLE_ENTRY_SIZE(lchip, DsStats_t);
                dword = 2;
            }
            p_mem_addr = SYS_DMA_ALLOC(lchip, data_size, 0);
            p_sys_desc_info[index].data_addr = p_mem_addr;
            if (NULL == p_mem_addr)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
                return CTC_E_NO_MEMORY;
            }
            sal_memset(p_mem_addr, 0, data_size);

            if(DRV_FROM_TMM(lchip))
            {
                drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, tbl_id, 0, &cfg_addr);
            }
            else
            {
                drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, DsStats_t, tbl_idx, &cfg_addr);
            }
            p_sys_desc_info[index].value0 = index%MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM);
            /*use burst to read*/
            cfg_addr |= 0x1;
            p_desc = (DsDesc_m*)&(p_sys_desc_pad[index].desc_info);
            ram_id = index%MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM);
            if (is_tmm)
            {
                /* the status callback may rewrite ram_id; evaluation order of the
                 * original condition is preserved by the inversion below.
                 * fix: was a no-op "data_size = data_size;" then-branch */
                if (!((p_usw_dma_master[lchip]->dma_cb[SYS_DMA_CB_TYPE_FLOW_STATS_RAM_STATUS](lchip, &ram_id)) ||
                    MCHIP_CAP(SYS_CAP_STATS_DMA_QUEUE_BLOCK_ID) == ram_id))
                {
                    /* ram not active: mark descriptor with the not-processed size */
                    data_size = SYS_DMA_DESC_NOT_PROC_SIZE;
                }
            }
            phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncap2(V, cfgSize_f, p_desc, data_size);
            SetDsDescEncap2(V, chipAddr_f, p_desc, cfg_addr);
            SetDsDescEncap(V, tsAddr_f, p_desc, cfg_addr);/* TMM */
            SetDsDescEncap2(V, dataStruct_f, p_desc, DRV_FROM_TMM(lchip)?TABLE_ENTRY_SIZE(lchip, tbl_id)>>2:2);
            SetDsDescEncap2(V, pause_f, p_desc, ((index%MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM) == 0)?1:0));
        }
    }
    else if (p_chan_info->channel_id == SYS_DMA_REG_MAX_CHAN_ID)
    {
        uint16 mem_id = 0;
        /*flow tcam*/
        for(mem_id = DRV_FTM_TCAM_KEY0; mem_id < (DRV_FTM_TCAM_KEY0 + DRV_CONST(DRV_MAX_NOR_TCAM_NUM)); mem_id++)
        {
            CTC_ERROR_RETURN(_sys_usw_dma_tcam_scan_init(lchip, mem_id, &index, p_sys_desc_pad, p_sys_desc_info));
        }

        if(DRV_IS_TSINGMA(lchip))
        {
            /* lpm/queue/cid tcam only exist on TsingMa */
            for(mem_id = DRV_FTM_LPM_TCAM_KEY0; mem_id < (DRV_FTM_LPM_TCAM_KEY0 + DRV_CONST(DRV_MAX_LPM_TCAM_NUM)); mem_id++)
            {
                CTC_ERROR_RETURN(_sys_usw_dma_tcam_scan_init(lchip, mem_id, &index, p_sys_desc_pad, p_sys_desc_info));
            }
            CTC_ERROR_RETURN(_sys_usw_dma_tcam_scan_init(lchip, DRV_FTM_QUEUE_TCAM, &index, p_sys_desc_pad, p_sys_desc_info));
            CTC_ERROR_RETURN(_sys_usw_dma_tcam_scan_init(lchip, DRV_FTM_CID_TCAM, &index, p_sys_desc_pad, p_sys_desc_info));
        }
    }
    else if (p_chan_info->channel_id == SYS_DMA_REG_BUF_SCAN_CHAN_ID)
    {
        /*Monitor buffer scan Process description*/
        CTC_ERROR_RETURN(MCHIP_DMA(lchip)->dma_init_reg_buffer_scan(lchip, p_chan_info));
    }
    else if (p_chan_info->channel_id == SYS_DMA_TBL_RD1_CHAN_ID)
    {
        /* dot1ae/macsec stats descriptors.
         * NOTE(review): the local data_size shadows the outer uint32 with a
         * uint16 -- confirm the largest table never exceeds 64KB. */
        uint16 data_size = 0;
        uint32  dword = 0;
        for (index = 0; index < p_chan_info->desc_depth; index++)
        {
           if (DRV_FROM_AT(lchip))
            {
                uint32 tbl_id[] ={DsXSecRxSecYStats_t,DsXSecRxScStats_t, DsXSecRxSecYByteStats_t, DsXSecTxSecYStats_t, DsXSecTxScStats_t, DsXSecTxSecYByteStats_t };
                dword = TABLE_ENTRY_OFFSET(lchip, tbl_id[index % 6])>>2;
                data_size = DRV_TABLE_MAX_INDEX(lchip, tbl_id[index % 6])* dword*4;
                drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, tbl_id[index % 6], 0, &cfg_addr);
            }
           else if (DRV_IS_TMM(lchip))
            {
                uint32 tbl_id[] ={DsMacsecSecYStats_t,DsMacsecScStats_t, DsMacsecSecYByteStats_t, DsMacsecDestSecYStats_t, DsMacsecTransmitScStats_t, DsMacsecDestSecYByteStats_t };

                dword = TABLE_ENTRY_OFFSET(lchip, tbl_id[index % 6])>>2;
                data_size = DRV_TABLE_MAX_INDEX(lchip, tbl_id[index % 6])/2* dword*4;

                drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, tbl_id[index % 6], DRV_TABLE_MAX_INDEX(lchip, tbl_id[index % 6])/2*(index/6), &cfg_addr);
            }
            else
            {
                if (0 == index % 3)
                {
                    data_size = sizeof(DsDot1AeDecryptGlobalStats_m);
                    dword = sizeof(DsDot1AeDecryptGlobalStats_m) / 4;
                    drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, DsDot1AeDecryptGlobalStats_t, 0, &cfg_addr);
                }
                else if (1 == index % 3)
                {
                    data_size = sizeof(DsDot1AeEncryptStats_m)*256;
                    dword = sizeof(DsDot1AeEncryptStats_m) / 4;
                    drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, DsDot1AeEncryptStats_t, 0, &cfg_addr);
                }
                else
                {
                    data_size = sizeof(DsDot1AeDecryptStats_m)*1024;
                    dword = sizeof(DsDot1AeDecryptStats_m) / 4;
                    drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, DsDot1AeDecryptStats_t, 0, &cfg_addr);
                }
            }
            /*use burst to read*/
            /*cfg_addr |= 0x1;*/
            p_mem_addr = SYS_DMA_ALLOC(lchip, data_size, 0);
            p_sys_desc_info[index].data_addr = p_mem_addr;
            if (NULL == p_mem_addr)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
                return CTC_E_NO_MEMORY;
            }

            sal_memset(p_mem_addr, 0, data_size);
            phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            p_desc = (DsDesc_m*)&(p_sys_desc_pad[index].desc_info);

            SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncap2(V, cfgSize_f, p_desc, data_size);
            SetDsDescEncap2(V, chipAddr_f, p_desc, cfg_addr);
            SetDsDescEncap(V, tsAddr_f, p_desc, cfg_addr);/* TMM */
            SetDsDescEncap2(V, dataStruct_f, p_desc, dword);
            if ((index % 3) == 0)
            {
                /*first desc should cfg pause*/
                SetDsDescEncap2(V, pause_f, p_desc, 1);
            }
            if (DRV_FROM_AT(lchip))
            {
                uint8 dp_id = index/6;

                SetDsDescEncap(V, highAddr_f, p_desc, p_usw_dma_master[lchip]->dma_high_addr);
                SetDsDescEncap(V, u0_reg_sliceBmp_f, p_desc, (1<<(dp_id/2)%SYS_CORE_PP_NUM(lchip)));
                SetDsDescEncap(V, u1_reg_dpBmp_f, p_desc, 1<<(dp_id%2));
                SetDsDescEncap(V, u1_reg_coreBmp_f, p_desc, 1<<(dp_id/SYS_CORE_PP_NUM(lchip)/2));
            }
        }
    }
    else if (p_chan_info->channel_id == SYS_DMA_TBL_RD2_CHAN_ID)
    {
        /* npm (auto-gen packet) stats descriptors */
        uint16 data_size = 0;
        uint32  dword = 0;
        for (index = 0; index < p_chan_info->desc_depth; index++)
        {
                uint32 tbl_id[] ={AutoGenPktTxPktStats_t,AutoGenPktRxPktStats_t, AutoGenPktTxPktAck_t, DsOamTwampStats_t};

                dword = TABLE_ENTRY_OFFSET(lchip, tbl_id[index % p_chan_info->desc_depth])>>2;
                data_size = DRV_TABLE_MAX_INDEX(lchip, tbl_id[index % p_chan_info->desc_depth])* dword*4;

                drv_get_table_property(lchip, DRV_TABLE_PROP_HW_ADDR, tbl_id[index % p_chan_info->desc_depth], 0, &cfg_addr);

            /*use burst to read*/
            /*cfg_addr |= 0x1;*/
            p_mem_addr = SYS_DMA_ALLOC(lchip, data_size, 0);
            p_sys_desc_info[index].data_addr = p_mem_addr;
            if (NULL == p_mem_addr)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
                return CTC_E_NO_MEMORY;
            }

            sal_memset(p_mem_addr, 0, data_size);
            phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            p_desc = (DsDesc_m*)&(p_sys_desc_pad[index].desc_info);

            SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncap2(V, cfgSize_f, p_desc, data_size);
            SetDsDescEncap2(V, chipAddr_f, p_desc, cfg_addr);
            SetDsDescEncap(V, tsAddr_f, p_desc, cfg_addr);/* TMM */
            SetDsDescEncap2(V, dataStruct_f, p_desc, dword);
            if (DRV_FROM_TMM(lchip))
            {
                SetDsDescEncap(V, valid_f, p_desc, 1);
            }
            if ((index % p_chan_info->desc_depth) == 0)
            {
                /*first desc should cfg pause*/
                SetDsDescEncap2(V, pause_f, p_desc, 1);
            }
            if (DRV_FROM_AT(lchip))
            {
                SetDsDescEncap(V, highAddr_f, p_desc, p_usw_dma_master[lchip]->dma_high_addr);
                SetDsDescEncap(V, u0_reg_sliceBmp_f, p_desc, 0);
                SetDsDescEncap(V, u1_reg_dpBmp_f, p_desc, 0);
                SetDsDescEncap(V, u1_reg_coreBmp_f, p_desc, 1);
            }
        }
    }
    else if (p_chan_info->channel_id == SYS_DMA_TBL_WR_CHAN_ID || p_chan_info->channel_id == SYS_DMA_TBL_RD_CHAN_ID
    || p_chan_info->channel_id == SYS_DMA_BATCH_CHAN_ID)
    {
        /*for write channel, create mux*/
        ret = sal_mutex_create(&(p_chan_info->p_mutex));
        if (ret || !(p_chan_info->p_mutex))
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No resource in ASIC \n");
            /* fix: do not leak the descriptor ring and info array on failure */
            SYS_DMA_FREE(lchip, p_sys_desc_pad);
            mem_free(p_sys_desc_info);
            return CTC_E_NO_RESOURCE;
        }
    }
    else if (p_chan_info->channel_id == SYS_DMA_TCAM_SCAN_CHAN_ID)
    {
        if (MCHIP_DMA(lchip)->dma_tcam_scan_reg_init)
        {
            MCHIP_DMA(lchip)->dma_tcam_scan_reg_init(lchip, p_chan_info, p_sys_desc_pad, p_sys_desc_info);
        }
    }

    /* cfg static infor for dmc channel:MemBase, ring depth */
    sal_memset(&static_info, 0, sizeof(DmaStaticInfo_m));
    if (DRV_FROM_AT(lchip) && p_chan_info->channel_id == SYS_DMA_REG_BUF_SCAN_CHAN_ID)
    {
        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_chan_info->p_desc);
    }
    else
    {
        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_sys_desc_pad);
    }
    cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));
    SetDmaStaticInfo(V, highBase_f, &static_info, p_usw_dma_master[lchip]->dma_high_addr);
    SetDmaStaticInfo(V, ringBase_f, &static_info, (phy_addr >> 4));
    SetDmaStaticInfo(V, ringDepth_f, &static_info, p_chan_info->desc_depth);
    cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));

    /* cfg DmaCtlTab for VldNum */
    valid_cnt = p_chan_info->desc_num;

    sal_memset(&tab_ctl, 0, sizeof(DmaCtlTab_m));
    cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &tab_ctl));
    SetDmaCtlTab(V, vldNum_f, &tab_ctl, valid_cnt);
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &tab_ctl));

    /* cfg weight */
    sal_memset(&dma_weight, 0, sizeof(DmaWeightCfg_m));
    field_id = DmaWeightCfg_cfgChan0Weight_f + p_chan_info->channel_id;
    cmd = DRV_IOR(DmaWeightCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_weight));
    DRV_SET_FIELD_V(lchip, DmaWeightCfg_t, field_id, &dma_weight, (p_chan_info->weight));
    cmd = DRV_IOW(DmaWeightCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_weight));

    if (DRV_FROM_AT(lchip))
    {
        /* AT keeps per-ring weight in a ring-specific control table */
        _sys_at_dma_get_table_id_by_ring(lchip, p_chan_info->channel_id, &tbl_id);
        field_id = (p_chan_info->channel_id < SYS_DMA_TBL_WR_CHAN_ID) ? DmaRegRd0Ctl_cfgChanWeight_f : ((p_chan_info->channel_id == SYS_DMA_TCAM_SCAN_CHAN_ID) ? DmaScanCtl_cfgChanWeight_f : DmaRegWr0Ctl_cfgChanWeight_f);
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
        DRV_SET_FIELD_V(lchip, tbl_id, field_id, &ds, (p_chan_info->weight));
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
    }

    /* cfg clear on read */
    cmd = DRV_IOR(DmaPktStatsCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &stats_cfg));
    SetDmaPktStatsCfg(V, clearOnRead_f, &stats_cfg, 1);
    cmd = DRV_IOW(DmaPktStatsCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &stats_cfg));

    if (!(DRV_FROM_AT(lchip) && p_chan_info->channel_id == SYS_DMA_REG_BUF_SCAN_CHAN_ID))
    {
        /*AT: BufferScan not go here*/
        p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc = p_sys_desc_pad;
        p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].mem_base = 0;
        p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_mutex = p_chan_info->p_mutex;
        p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc_info = p_sys_desc_info;
    }
    return CTC_E_NONE;
}

/**
@brief DMA init for info dma
*/
/**
 @brief DMA init for info dma

 Allocates the descriptor ring plus one contiguous DMA data area shared by
 all descriptors of an info channel (learning, hashkey, ipfix, monitor,
 buffer, latency, efd, oam, ...), points each descriptor at its aligned
 slice of the data area, then programs the channel's DmaStaticInfo,
 DmaCtlTab valid count and weight registers, and records everything in the
 per-chip master db.

 @param lchip        local chip id
 @param p_chan_info  channel configuration (channel id, desc depth/num, sizes)
 @return CTC_E_NONE on success, negative CTC_E_xxx on failure
*/
STATIC int32
_sys_usw_dma_info_init(uint8 lchip, sys_dma_chan_t* p_chan_info)
{
    uint32 desc_num = 0;
    sys_dma_desc_t* p_sys_desc_pad = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 phy_addr = 0;
    DmaStaticInfo_m static_info;
    DmaCtlTab_m  tab_ctl;
    uint32 cmd = 0;
    uint32 tbl_id = 0;
    uint32 field_id = 0;
    void*  p_mem_addr = NULL;
    void*  p_base_mem_addr = NULL;
    DmaWeightCfg_m dma_weight;
    uint32 valid_cnt = 0;
    uint32 real_data_size = 0;
    sys_dma_desc_info_t* p_sys_desc_info = NULL;
    ds_t ds;

    CTC_PTR_VALID_CHECK(p_chan_info);
    sal_memset(&ds, 0, sizeof(ds_t));

    desc_num  = p_chan_info->desc_depth;

    /* cfg desc num */
    p_sys_desc_pad = (sys_dma_desc_t*)SYS_DMA_ALLOC(lchip, (desc_num) * sizeof(sys_dma_desc_t), 0);
    if (NULL == p_sys_desc_pad)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
        return CTC_E_NO_MEMORY;
    }

    sal_memset(p_sys_desc_pad, 0, sizeof(sys_dma_desc_t)*desc_num);

    /* one contiguous DMA data area, carved into desc_num aligned slices */
    real_data_size = DATA_SIZE_ALIGN(p_chan_info->data_size);
    p_base_mem_addr = SYS_DMA_ALLOC(lchip, real_data_size*p_chan_info->desc_num, 0);
    if (NULL == p_base_mem_addr)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
        /* fix: do not leak the descriptor ring */
        SYS_DMA_FREE(lchip, p_sys_desc_pad);
        return CTC_E_NO_MEMORY;
    }
    sal_memset(p_base_mem_addr, 0, real_data_size*p_chan_info->desc_num);

    /* alloc memory for storing desc logic address in db */
    p_sys_desc_info = (sys_dma_desc_info_t*)mem_malloc(MEM_DMA_MODULE, (p_chan_info->desc_num)*sizeof(sys_dma_desc_info_t));
    if (!p_sys_desc_info)
    {
        /* fix: do not leak the descriptor ring and the data area */
        SYS_DMA_FREE(lchip, p_base_mem_addr);
        SYS_DMA_FREE(lchip, p_sys_desc_pad);
        return CTC_E_NO_MEMORY;
    }
    /* fix: info array was left uninitialized (sibling _sys_usw_dma_reg_init
     * zeroes it); only data_addr is written below, other fields must be 0 */
    sal_memset(p_sys_desc_info, 0, (p_chan_info->desc_num)*sizeof(sys_dma_desc_info_t));

    /* cfg per desc data */
    for (desc_num = 0; desc_num < p_chan_info->desc_num; desc_num++)
    {
        p_desc = (DsDesc_m*)&(p_sys_desc_pad[desc_num].desc_info);

        /* fix: avoid arithmetic on void* (GCC extension, not ISO C) */
        p_mem_addr = (uint8*)p_base_mem_addr + real_data_size*desc_num;
        p_sys_desc_info[desc_num].data_addr = p_mem_addr;
        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
        SetDsDescEncap2(V, memAddr_f, p_desc, (phy_addr >> 4));
        SetDsDescEncap2(V, cfgSize_f, p_desc, p_chan_info->cfg_size);
        SetDsDescEncap(V, highAddr_f, p_desc, p_usw_dma_master[lchip]->dma_high_addr);
    }

    /* cfg static infor for dmc channel:MemBase, ring depth */
    sal_memset(&static_info, 0, sizeof(DmaStaticInfo_m));
    phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_sys_desc_pad);
    tbl_id = DmaStaticInfo_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));
    SetDmaStaticInfo(V, highBase_f, &static_info, p_usw_dma_master[lchip]->dma_high_addr);
    SetDmaStaticInfo(V, ringBase_f, &static_info, (phy_addr >> 4));
    SetDmaStaticInfo(V, ringDepth_f, &static_info, p_chan_info->desc_depth);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));

    /* cfg DmaCtlTab for VldNum */
    valid_cnt = p_chan_info->desc_num;

    /* skip the vldNum rewrite during warmboot reload when knet keeps state */
    if (!p_usw_dma_master[lchip]->wb_keep_knet || !p_usw_dma_master[lchip]->wb_reloading)
    {
        sal_memset(&tab_ctl, 0, sizeof(DmaCtlTab_m));
        tbl_id = DmaCtlTab_t;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &tab_ctl));
        SetDmaCtlTab(V, vldNum_f, &tab_ctl, valid_cnt);
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &tab_ctl));
    }

    /* cfg weight */
    sal_memset(&dma_weight, 0, sizeof(DmaWeightCfg_m));
    tbl_id = DmaWeightCfg_t;
    field_id = DmaWeightCfg_cfgChan0Weight_f + p_chan_info->channel_id;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_weight));
    DRV_SET_FIELD_V(lchip, tbl_id, field_id, &dma_weight, (p_chan_info->weight));
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_weight));

    if (DRV_FROM_AT(lchip))
    {
        /* AT keeps per-ring weight in a ring-specific info control table */
        _sys_at_dma_get_table_id_by_ring(lchip, p_chan_info->channel_id, &tbl_id);
        field_id = DmaInfo0Ctl_cfgChanWeight_f;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
        DRV_SET_FIELD_V(lchip, tbl_id, field_id, &ds, (p_chan_info->weight));
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
    }

    /* hand ownership of ring / data area / info array to the master db */
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc = p_sys_desc_pad;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].mem_base = (uintptr)p_base_mem_addr;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_mutex = p_chan_info->p_mutex;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc_info  = p_sys_desc_info;

    return CTC_E_NONE;
}

/**
@brief DMA common init
*/
/**
 @brief DMA common init

 Dispatches per-channel initialization by channel type: packet rx/tx
 channels go to _sys_usw_dma_pkt_init, register-style channels to
 _sys_usw_dma_reg_init, info channels to _sys_usw_dma_info_init.
 Afterwards it optionally enables auto/fetch mode for the channel and
 writes chanEn_f. Every handled case leaves chan_en = 0, so the channel is
 written disabled here (presumably enabled later by another routine --
 TODO confirm against caller).

 @param lchip        local chip id
 @param p_chan_info  channel configuration
 @return CTC_E_NONE on success, negative CTC_E_xxx on failure
*/
STATIC int32
_sys_usw_dma_common_init(uint8 lchip, sys_dma_chan_t* p_chan_info)
{
    uint32 cmd = 0;
    uint32 tbl_id = 0;
    DmaStaticInfo_m static_info;
    uint32 chan_en = 0;
    ds_t ds;
    uint32 value = 0;
    sal_memset(&ds, 0, sizeof(ds_t));

    switch(GET_CHAN_TYPE(p_chan_info->channel_id))
    {
    case DRV_DMA_PACKET_RX0_CHAN_ID:
    case DRV_DMA_PACKET_RX1_CHAN_ID:
    case DRV_DMA_PACKET_RX2_CHAN_ID:
    case DRV_DMA_PACKET_RX3_CHAN_ID:
    case DRV_DMA_PACKET_RX7_CHAN_ID:
    case DRV_DMA_PACKET_TX0_CHAN_ID:
    case DRV_DMA_PACKET_TX1_CHAN_ID:
    case DRV_DMA_PACKET_TX2_CHAN_ID:
    case DRV_DMA_PACKET_TX3_CHAN_ID:

        /*using tx1 channel as timer tx channel*/
        if (p_usw_dma_master[lchip]->pkt_tx_timer_en && (p_chan_info->channel_id == SYS_DMA_PKT_TX_TIMER_CHAN_ID))
        {
            /* disable "read null" drop for the timer channel and run it in
             * auto mode with a zero-depth ring (no software descriptors) */
            tbl_id = DmaMiscCfg_t;
            cmd = DRV_IOR(tbl_id, DmaMiscCfg_cfgDmaRdNullDis_f);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &chan_en));
            CTC_BIT_SET(chan_en, (SYS_DMA_PKT_TX_TIMER_CHAN_ID-SYS_DMA_PACKET_TX0_CHAN_ID));
            cmd = DRV_IOW(tbl_id, DmaMiscCfg_cfgDmaRdNullDis_f);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &chan_en));
            p_chan_info->auto_mode_en = 1;

            /* NOTE(review): DRV_FIELD_IOCTL is used here with a full-entry
             * DRV_ENTRY_FLAG cmd, unlike the DRV_IOCTL pattern used elsewhere
             * for whole-entry access -- confirm this is intentional */
            cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, p_chan_info->channel_id, cmd, &static_info));
            SetDmaStaticInfo(V, ringDepth_f, &static_info, 0);
            cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, p_chan_info->channel_id, cmd, &static_info));

            /* chan_en was reused as scratch above; restore to 0 */
            chan_en = 0;
            break;
        }
        CTC_ERROR_RETURN(_sys_usw_dma_pkt_init(lchip, p_chan_info));
        chan_en = 0;
        break;

    case DRV_DMA_TBL_WR_CHAN_ID:
    case DRV_DMA_TBL_RD_CHAN_ID:
    case DRV_DMA_BATCH_CHAN_ID:
    case DRV_DMA_PORT_STATS_CHAN_ID:
    case DRV_DMA_FLOW_STATS_CHAN_ID:
    case DRV_DMA_REG_MAX_CHAN_ID:
    case DRV_DMA_TBL_RD1_CHAN_ID:
    case DRV_DMA_TBL_RD2_CHAN_ID:
    case DRV_DMA_TCAM_SCAN_CHAN_ID:
    case DRV_DMA_TBL_WR1_CHAN_ID:
    case DRV_DMA_BUF_SCAN_CHAN_ID:
        CTC_ERROR_RETURN(_sys_usw_dma_reg_init(lchip, p_chan_info));
        chan_en = 0;
        break;

    case DRV_DMA_LEARNING_CHAN_ID:
    case DRV_DMA_HASHKEY_CHAN_ID:
    case DRV_DMA_IPFIX_CHAN_ID:
    case DRV_DMA_SDC_CHAN_ID:
    case DRV_DMA_MONITOR_CHAN_ID:
    case DRV_DMA_BUFFER_CHAN_ID:
    case DRV_DMA_LATENCY_CHAN_ID:
    case DRV_DMA_EFD_CHAN_ID:
    case DRV_DMA_OAM_CHAN_ID:
    case DRV_DMA_SC_OAM_CHAN_ID:
        CTC_ERROR_RETURN(_sys_usw_dma_info_init(lchip, p_chan_info));
        chan_en = 0;
        break;

    default:
        break;

    }
    /*enable auto mode for tx on timer*/
    if ((p_chan_info->auto_fetch_en && DRV_IS_TSINGMA(lchip))
        || (p_chan_info->auto_mode_en && DRV_FROM_TMM(lchip)))
    {
        tbl_id = DmaCtlAutoMode_t;
        cmd = DRV_IOR(tbl_id, DmaCtlAutoMode_dmaAutoMode_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
        CTC_BIT_SET(value, p_chan_info->channel_id);
        cmd = DRV_IOW(tbl_id, DmaCtlAutoMode_dmaAutoMode_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
    }

    /* TMM additionally selects desc fetch mode per channel */
    if (p_chan_info->auto_fetch_en && DRV_FROM_TMM(lchip))
    {
        DmaDescFetchMode_m fetch_mode;
        cmd = DRV_IOR(DmaDescFetchMode_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &fetch_mode));
        value = GetDmaDescFetchMode(V, cfgDescFetchMode_f, &fetch_mode);
        CTC_BIT_SET(value, p_chan_info->channel_id);
        SetDmaDescFetchMode(V, cfgDescFetchMode_f, &fetch_mode, value);
        cmd = DRV_IOW(DmaDescFetchMode_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &fetch_mode));
    }

    /* write chanEn_f (always 0 here -- channel left disabled by this routine) */
    tbl_id = DmaStaticInfo_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));
    SetDmaStaticInfo(V, chanEn_f, &static_info, chan_en);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_chan_info->channel_id, DRV_CMD_PP_EN(cmd), &static_info));
    return CTC_E_NONE;
}

/**
@brief DMA interrupt init for AT chips.
   Interrupt policy per channel type:
   packet rx: interrupt per EOP
   packet tx: no interrupt needed
   RegRd / RegWr: no interrupt needed (except stats channels: desc count)
   port stats: interrupt per desc count
   Info: timer + per desc
*/
STATIC int32
_sys_at_dma_intr_init(uint8 lchip, ctc_dma_global_cfg_t* p_global_cfg)
{
    uint32 tbl_id = 0;
    uint32 cmd = 0;
    uint16 mac_num = 0; /* uint16: matches _sys_tmm_dma_intr_init; the MCHIP_CAP product may not fit in uint8 */
    uint8 chan_num = 0; /* running index into intr_chan_array */

    DmaPktRx0Ctl_m pkt_rx_ctl;
    DmaPktTx0Ctl_m pkt_tx_ctl;
    DmaRegRd0Ctl_m reg_rd_ctl;
    DmaRegWr0Ctl_m reg_wr_ctl;
    DmaInfo0Ctl_m  info_ctl;
    DmaScanCtl_m   scan_ctl;
    DmaScanIntrCntCfg_m scan_intr_cnt;

    /* Packet Rx Config: interrupt per EOP, timer/count based interrupt disabled */
    /* NOTE(review): only Rx0 accesses use DRV_CMD_PP_EN(); Rx1-3 do not -- confirm this asymmetry is intended */
    tbl_id = DmaPktRx0Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkt_rx_ctl));
    SetDmaPktRx0Ctl(V, cfgPktRxEopIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx0Ctl(V, cfgPktRxDmaIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx0Ctl(V, cfgPktRxIntrTimerEn_f, &pkt_rx_ctl, 0);
    SetDmaPktRx0Ctl(V, cfgPktRxIntrCnt_f, &pkt_rx_ctl, 0);
    SetDmaPktRx0Ctl(V, cfgDescFetchMode_f, &pkt_rx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkt_rx_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX0_CHAN_ID;

    tbl_id = DmaPktRx1Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_ctl));
    SetDmaPktRx1Ctl(V, cfgPktRxEopIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx1Ctl(V, cfgPktRxDmaIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx1Ctl(V, cfgPktRxIntrTimerEn_f, &pkt_rx_ctl, 0);
    SetDmaPktRx1Ctl(V, cfgPktRxIntrCnt_f, &pkt_rx_ctl, 0);
    SetDmaPktRx1Ctl(V, cfgDescFetchMode_f, &pkt_rx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX1_CHAN_ID;

    tbl_id = DmaPktRx2Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_ctl));
    SetDmaPktRx2Ctl(V, cfgPktRxEopIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx2Ctl(V, cfgPktRxDmaIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx2Ctl(V, cfgPktRxIntrTimerEn_f, &pkt_rx_ctl, 0);
    SetDmaPktRx2Ctl(V, cfgPktRxIntrCnt_f, &pkt_rx_ctl, 0);
    SetDmaPktRx2Ctl(V, cfgDescFetchMode_f, &pkt_rx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX2_CHAN_ID;

    tbl_id = DmaPktRx3Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_ctl));
    SetDmaPktRx3Ctl(V, cfgPktRxEopIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx3Ctl(V, cfgPktRxDmaIntrEn_f, &pkt_rx_ctl, 1);
    SetDmaPktRx3Ctl(V, cfgPktRxIntrTimerEn_f, &pkt_rx_ctl, 0);
    SetDmaPktRx3Ctl(V, cfgPktRxIntrCnt_f, &pkt_rx_ctl, 0);
    SetDmaPktRx3Ctl(V, cfgDescFetchMode_f, &pkt_rx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX3_CHAN_ID;

    /* Packet Tx Config: all tx interrupt sources disabled */
    tbl_id = DmaPktTx0Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));
    SetDmaPktTx0Ctl(V, cfgPktTxEopIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx0Ctl(V, cfgPktTxDmaIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx0Ctl(V, cfgPktTxIntrTimerEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx0Ctl(V, cfgPktTxIntrCnt_f, &pkt_tx_ctl, 0);
    SetDmaPktTx0Ctl(V, cfgDescFetchMode_f, &pkt_tx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));

    tbl_id = DmaPktTx1Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));
    SetDmaPktTx1Ctl(V, cfgPktTxEopIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx1Ctl(V, cfgPktTxDmaIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx1Ctl(V, cfgPktTxIntrTimerEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx1Ctl(V, cfgPktTxIntrCnt_f, &pkt_tx_ctl, 0);
    SetDmaPktTx1Ctl(V, cfgDescFetchMode_f, &pkt_tx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));

    tbl_id = DmaPktTx2Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));
    SetDmaPktTx2Ctl(V, cfgPktTxEopIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx2Ctl(V, cfgPktTxDmaIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx2Ctl(V, cfgPktTxIntrTimerEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx2Ctl(V, cfgPktTxIntrCnt_f, &pkt_tx_ctl, 0);
    SetDmaPktTx2Ctl(V, cfgDescFetchMode_f, &pkt_tx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));

    tbl_id = DmaPktTx3Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));
    SetDmaPktTx3Ctl(V, cfgPktTxEopIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx3Ctl(V, cfgPktTxDmaIntrEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx3Ctl(V, cfgPktTxIntrTimerEn_f, &pkt_tx_ctl, 0);
    SetDmaPktTx3Ctl(V, cfgPktTxIntrCnt_f, &pkt_tx_ctl, 0);
    SetDmaPktTx3Ctl(V, cfgDescFetchMode_f, &pkt_tx_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_ctl));

    /* Register DMA Config: RegRd0 (generic table read) needs no interrupt */
    tbl_id = DmaRegRd0Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd0Ctl(V, cfgRegRdDescIntrEn_f, &reg_rd_ctl, 0);
    SetDmaRegRd0Ctl(V, cfgRegRdDmaIntrEn_f, &reg_rd_ctl, 0);
    SetDmaRegRd0Ctl(V, cfgRegRdIntrCnt_f, &reg_rd_ctl, 0);
    SetDmaRegRd0Ctl(V, cfgDescFetchMode_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    /* RegRd1: port stats; interrupt after mac_num descriptors */
    mac_num = MCHIP_CAP(SYS_CAP_STATS_XQMAC_PORT_NUM)*MCHIP_CAP(SYS_CAP_STATS_XQMAC_RAM_NUM);
    tbl_id = DmaRegRd1Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd1Ctl(V, cfgRegRdDescIntrEn_f, &reg_rd_ctl, 0);
    SetDmaRegRd1Ctl(V, cfgRegRdDmaIntrEn_f, &reg_rd_ctl, 1);
    SetDmaRegRd1Ctl(V, cfgRegRdIntrCnt_f, &reg_rd_ctl, mac_num);
    SetDmaRegRd1Ctl(V, cfgDescFetchMode_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    /* RegRd2: flow stats; interrupt per DMA stats block group */
    tbl_id = DmaRegRd2Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd2Ctl(V, cfgRegRdDescIntrEn_f, &reg_rd_ctl, 0);
    SetDmaRegRd2Ctl(V, cfgRegRdDmaIntrEn_f, &reg_rd_ctl, 1);
    SetDmaRegRd2Ctl(V, cfgRegRdIntrCnt_f, &reg_rd_ctl, MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM) * 4);
    SetDmaRegRd2Ctl(V, cfgDescFetchMode_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_FLOW_STATS_CHAN_ID;

    /* buf scan channel: no per-channel ctl setup needed, only register it for dispatch */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_REG_BUF_SCAN_CHAN_ID;

    /* RegRd4: table read 1; interrupt every 3 descriptors */
    tbl_id = DmaRegRd4Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd4Ctl(V, cfgRegRdDescIntrEn_f, &reg_rd_ctl, 0);
    SetDmaRegRd4Ctl(V, cfgRegRdDmaIntrEn_f, &reg_rd_ctl, 1);
    SetDmaRegRd4Ctl(V, cfgRegRdIntrCnt_f, &reg_rd_ctl, 3);
    SetDmaRegRd4Ctl(V, cfgDescFetchMode_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TBL_RD1_CHAN_ID;

    /* RegRd5: table read 2; interrupt every 3 descriptors */
    tbl_id = DmaRegRd5Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd5Ctl(V, cfgRegRdDescIntrEn_f, &reg_rd_ctl, 0);
    SetDmaRegRd5Ctl(V, cfgRegRdDmaIntrEn_f, &reg_rd_ctl, 1);
    SetDmaRegRd5Ctl(V, cfgRegRdIntrCnt_f, &reg_rd_ctl, 3);
    SetDmaRegRd5Ctl(V, cfgDescFetchMode_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TBL_RD2_CHAN_ID;

    /* RegWr0/1: table write channels need no interrupt */
    tbl_id = DmaRegWr0Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));
    SetDmaRegWr0Ctl(V, cfgRegWrDescIntrEn_f, &reg_wr_ctl, 0);
    SetDmaRegWr0Ctl(V, cfgRegWrDmaIntrEn_f, &reg_wr_ctl, 0);
    SetDmaRegWr0Ctl(V, cfgRegWrIntrCnt_f, &reg_wr_ctl, 0);
    SetDmaRegWr0Ctl(V, cfgDescFetchMode_f, &reg_wr_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));

    tbl_id = DmaRegWr1Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));
    SetDmaRegWr1Ctl(V, cfgRegWrDescIntrEn_f, &reg_wr_ctl, 0);
    SetDmaRegWr1Ctl(V, cfgRegWrDmaIntrEn_f, &reg_wr_ctl, 0);
    SetDmaRegWr1Ctl(V, cfgRegWrIntrCnt_f, &reg_wr_ctl, 0);
    SetDmaRegWr1Ctl(V, cfgDescFetchMode_f, &reg_wr_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));

    /* Info DMA Config */
    tbl_id = DmaInfo0Ctl_t;/* learning & aging */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo0Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 1);
    SetDmaInfo0Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 1);
    SetDmaInfo0Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 1);
    SetDmaInfo0Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 0);
    SetDmaInfo0Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_LEARNING_CHAN_ID;

    tbl_id = DmaInfo1Ctl_t;/* hash dump: polled, no interrupt */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo1Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 0);
    SetDmaInfo1Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 0);
    SetDmaInfo1Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 0);
    SetDmaInfo1Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 0);
    SetDmaInfo1Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo2Ctl_t;/* ipfix */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo2Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 1);
    SetDmaInfo2Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 1);
    SetDmaInfo2Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 1);
    SetDmaInfo2Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 0);
    SetDmaInfo2Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_IPFIX_CHAN_ID;

    tbl_id = DmaInfo3Ctl_t;/* monitor: timer + batch of 10 */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo3Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 1);
    SetDmaInfo3Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 0);
    SetDmaInfo3Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 1);
    SetDmaInfo3Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 10);
    SetDmaInfo3Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_BUFFER_CHAN_ID;

    tbl_id = DmaInfo4Ctl_t;/* latency: timer + batch of 10 */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo4Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 1);
    SetDmaInfo4Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 0);
    SetDmaInfo4Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 1);
    SetDmaInfo4Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 10);
    SetDmaInfo4Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_LATENCY_CHAN_ID;

    tbl_id = DmaInfo5Ctl_t;/* efd: timer + batch of 10 */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo5Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 1);
    SetDmaInfo5Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 0);
    SetDmaInfo5Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 1);
    SetDmaInfo5Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 10);
    SetDmaInfo5Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_EFD_CHAN_ID;

    tbl_id = DmaInfo6Ctl_t;/* dlb: timer + batch of 10, not registered for dispatch */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo6Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 1);
    SetDmaInfo6Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 0);
    SetDmaInfo6Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 1);
    SetDmaInfo6Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 10);
    SetDmaInfo6Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo7Ctl_t;/* oam */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo7Ctl(V, cfgInfoDmaIntrEn_f, &info_ctl, 1);
    SetDmaInfo7Ctl(V, cfgInfoDescIntrEn_f, &info_ctl, 1);
    SetDmaInfo7Ctl(V, cfgInfoTimerIntrEn_f, &info_ctl, 1);
    SetDmaInfo7Ctl(V, cfgInfoIntrCnt_f, &info_ctl, 0);
    SetDmaInfo7Ctl(V, cfgDescFetchMode_f, &info_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));


    /* Tcam Scan Config */
    tbl_id = DmaScanCtl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_ctl));
    SetDmaScanCtl(V, cfgScanDescIntrEn_f, &scan_ctl, 1);
    SetDmaScanCtl(V, cfgScanDmaIntrEn_f, &scan_ctl, 1);
    /* fix: field must be cleared in scan_ctl (was mistakenly written into info_ctl) */
    SetDmaScanCtl(V, cfgDescFetchMode_f, &scan_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_ctl));

    sal_memset(&scan_intr_cnt, 0, sizeof(DmaScanIntrCntCfg_m));
    tbl_id = DmaScanIntrCntCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_intr_cnt));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TCAM_SCAN_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_num = chan_num;

    return CTC_E_NONE;
}

/**
@brief DMA interrupt init for TMM/TMG chips.
   Interrupt policy per channel type:
   packet rx: interrupt per EOP
   packet tx: no interrupt needed
   RegRd / RegWr: no interrupt needed (except stats channels: desc count)
   port stats: interrupt per desc count
   Info: timer + per desc
*/
STATIC int32
_sys_tmm_dma_intr_init(uint8 lchip, ctc_dma_global_cfg_t* p_global_cfg)
{
    DmaRegIntrEnCfg_m reg_intr;
    DmaRegRdIntrCntCfg_m reg_intr_cnt;
    DmaInfoIntrEnCfg_m info_intr;
    DmaInfoIntrCntCfg_m intr_cnt;
    DmaPktRxIntrEnCfg_m pkt_rx_intr;
    DmaPktTxIntrEnCfg_m pkt_tx_intr;
    DmaPktRxIntrCntCfg_m pkt_rx_intr_cnt;
    DmaScanIntrCntCfg_m scan_intr_cnt;
    uint32 tbl_id = 0;
    uint32 cmd = 0;
    uint32 filed_value; /* (sic) scratch value for timer-enable fields; 0 = timer interrupt disabled */
    uint16 mac_num = 0; /* port-stats entries per interrupt: XQMAC port num * ram num */
    uint8 chan_num = 0; /* running index into intr_chan_array */

    /* cfg intr for packet rx, only using slice0 dmactl for dma packet, slice1 bufretr forwarding to dmactl0  */
    /* rx channels 0-3 and 7: interrupt per EOP; timer/count interrupts off */
    sal_memset(&pkt_rx_intr, 0, sizeof(DmaPktRxIntrEnCfg_m));
    tbl_id = DmaPktRxIntrEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_intr));
    SetDmaPktRxIntrEnCfg(V, cfgPktRx0EopIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx1EopIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx2EopIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx3EopIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx7EopIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx0DmaIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx1DmaIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx2DmaIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx3DmaIntrEn_f, &pkt_rx_intr, 1);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx7DmaIntrEn_f, &pkt_rx_intr, 1);
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX0_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX1_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX2_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX3_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX7_CHAN_ID;

    /* rx timer-based interrupt disabled on all rx channels */
    filed_value = 0;
    SetDmaPktRxIntrEnCfg(V, cfgPktRx0IntrTimerEn_f, &pkt_rx_intr, filed_value);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx1IntrTimerEn_f, &pkt_rx_intr, filed_value);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx2IntrTimerEn_f, &pkt_rx_intr, filed_value);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx3IntrTimerEn_f, &pkt_rx_intr, filed_value);
    SetDmaPktRxIntrEnCfg(V, cfgPktRx7IntrTimerEn_f, &pkt_rx_intr, filed_value);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_intr));

    /* packet tx: all interrupt sources disabled */
    sal_memset(&pkt_tx_intr, 0, sizeof(DmaPktTxIntrEnCfg_m));
    tbl_id = DmaPktTxIntrEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_intr));
    SetDmaPktTxIntrEnCfg(V, cfgPktTx0EopIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx0DmaIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx1EopIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx1DmaIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx2DmaIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx2EopIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx2IntrTimerEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx3DmaIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx3EopIntrEn_f, &pkt_tx_intr, 0);
    SetDmaPktTxIntrEnCfg(V, cfgPktTx3IntrTimerEn_f, &pkt_tx_intr, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_tx_intr));

    /* rx interrupt counts all 0: interrupt immediately, no batching */
    tbl_id = DmaPktRxIntrCntCfg_t;
    sal_memset(&pkt_rx_intr_cnt, 0, sizeof(pkt_rx_intr_cnt));
    SetDmaPktRxIntrCntCfg(V, cfgPktRx0IntrCnt_f, &pkt_rx_intr_cnt, 0);
    SetDmaPktRxIntrCntCfg(V, cfgPktRx1IntrCnt_f, &pkt_rx_intr_cnt, 0);
    SetDmaPktRxIntrCntCfg(V, cfgPktRx2IntrCnt_f, &pkt_rx_intr_cnt, 0);
    SetDmaPktRxIntrCntCfg(V, cfgPktRx3IntrCnt_f, &pkt_rx_intr_cnt, 0);
    SetDmaPktRxIntrCntCfg(V, cfgPktRx7IntrCnt_f, &pkt_rx_intr_cnt, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_rx_intr_cnt));

    /* cfg intr for Reg Dma(only port stats and dma pkt stats ) */
    /* note: struct is memset here and written wholesale; scan intr is re-enabled later below */
    sal_memset(&reg_intr, 0, sizeof(DmaRegIntrEnCfg_m));
    sal_memset(&reg_intr_cnt, 0, sizeof(reg_intr_cnt));
    tbl_id = DmaRegIntrEnCfg_t;
    SetDmaRegIntrEnCfg(V, cfgRegRd0DescIntrEn_f, &reg_intr, 0);
    SetDmaRegIntrEnCfg(V, cfgRegRd0DmaIntrEn_f, &reg_intr, 0);
    SetDmaRegIntrEnCfg(V, cfgRegRd1DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd2DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd3DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd4DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd5DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegWr0DmaIntrEn_f, &reg_intr, 0);/*TMM*/
    SetDmaRegIntrEnCfg(V, cfgRegWr1DmaIntrEn_f, &reg_intr, 0);/*TMM*/
    SetDmaRegIntrEnCfg(V, cfgScanDescIntrEn_f, &reg_intr, 0);/*TMM*/
    SetDmaRegIntrEnCfg(V, cfgScanDmaIntrEn_f, &reg_intr, 0);/*TMM*/
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr));

    /* per-channel interrupt batch sizes for the RegRd channels */
    mac_num = MCHIP_CAP(SYS_CAP_STATS_XQMAC_PORT_NUM)*MCHIP_CAP(SYS_CAP_STATS_XQMAC_RAM_NUM);
    tbl_id = DmaRegRdIntrCntCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr_cnt));
    SetDmaRegRdIntrCntCfg(V, cfgRegRd1IntrCnt_f, &reg_intr_cnt, mac_num);
    SetDmaRegRdIntrCntCfg(V, cfgRegRd2IntrCnt_f, &reg_intr_cnt, MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM) * 2);
    SetDmaRegRdIntrCntCfg(V, cfgRegRd3IntrCnt_f, &reg_intr_cnt, 0);
    SetDmaRegRdIntrCntCfg(V, cfgRegRd4IntrCnt_f, &reg_intr_cnt, 3);
    /* AT chips use one fewer NPM stats sync table */
    SetDmaRegRdIntrCntCfg(V, cfgRegRd5IntrCnt_f, &reg_intr_cnt, SYS_USW_DMA_NPM_STATS_SYNC_TBL_NUM - (DRV_FROM_AT(lchip)?1:0));
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr_cnt));

    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PORT_STATS_CHAN_ID;/* RegRd1 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_FLOW_STATS_CHAN_ID;/* RegRd2 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_REG_MAX_CHAN_ID;   /* RegRd3 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TBL_RD1_CHAN_ID;   /* RegRd4 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TBL_RD2_CHAN_ID;   /* RegRd5 */

    /* cfg intr for Info Dma */
    sal_memset(&info_intr, 0, sizeof(DmaInfoIntrEnCfg_m));
    tbl_id = DmaInfoIntrEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_intr));
    SetDmaInfoIntrEnCfg(V, cfgInfo0DescIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo1DescIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo2DescIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo3DescIntrEn_f, &info_intr, 0);
    SetDmaInfoIntrEnCfg(V, cfgInfo4DescIntrEn_f, &info_intr, 0);
    SetDmaInfoIntrEnCfg(V, cfgInfo5DescIntrEn_f, &info_intr, 0);/*TMM*/
    SetDmaInfoIntrEnCfg(V, cfgInfo6DescIntrEn_f, &info_intr, 1);/*TMM*/
    SetDmaInfoIntrEnCfg(V, cfgInfo7DescIntrEn_f, &info_intr, 1);/*TMM*/

    SetDmaInfoIntrEnCfg(V, cfgInfo0DmaIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo1DmaIntrEn_f, &info_intr, 0);
    SetDmaInfoIntrEnCfg(V, cfgInfo2DmaIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo3DmaIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo4DmaIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo5DmaIntrEn_f, &info_intr, 1);/*TMM*/
    SetDmaInfoIntrEnCfg(V, cfgInfo6DmaIntrEn_f, &info_intr, 1);/*TMM*/
    SetDmaInfoIntrEnCfg(V, cfgInfo7DmaIntrEn_f, &info_intr, 1);/*TMM*/

    SetDmaInfoIntrEnCfg(V, cfgInfo0TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo1TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo2TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo3TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo4TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo5TimerIntrEn_f, &info_intr, 1);/*TMM*/
    SetDmaInfoIntrEnCfg(V, cfgInfo6TimerIntrEn_f, &info_intr, 1);/*TMM*/
    SetDmaInfoIntrEnCfg(V, cfgInfo7TimerIntrEn_f, &info_intr, 1);/*TMM*/
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_intr));

    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_LEARNING_CHAN_ID; /* info0 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_IPFIX_CHAN_ID;    /* info2 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_BUFFER_CHAN_ID;   /* info3 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_LATENCY_CHAN_ID;  /* info4 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_EFD_CHAN_ID;      /* info5 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_OAM_CHAN_ID;      /* info6 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_SC_OAM_CHAN_ID;   /* info7 */

    /* batch sizes for info channels 3/4/5 (count-based interrupts) */
    tbl_id = DmaInfoIntrCntCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &intr_cnt));
    SetDmaInfoIntrCntCfg(V, cfgInfo3IntrCnt_f, &intr_cnt, 3);
    SetDmaInfoIntrCntCfg(V, cfgInfo4IntrCnt_f, &intr_cnt, 2);
    SetDmaInfoIntrCntCfg(V, cfgInfo5IntrCnt_f, &intr_cnt, 4);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &intr_cnt));

    /* cfg tcam scan */
    /* read-modify-write: re-enable the scan interrupts that the wholesale write above cleared */
    sal_memset(&reg_intr, 0, sizeof(DmaRegIntrEnCfg_m));
    tbl_id = DmaRegIntrEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr));
    SetDmaRegIntrEnCfg(V, cfgScanDmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgScanDescIntrEn_f, &reg_intr, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr));

    /* scan interrupt count cleared to 0 */
    sal_memset(&scan_intr_cnt, 0, sizeof(DmaScanIntrCntCfg_m));
    tbl_id = DmaScanIntrCntCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_intr_cnt));
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TCAM_SCAN_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_num = chan_num;

    return CTC_E_NONE;
}

/**
 @brief DMA interrupt init dispatcher: routes to the TMM/TMG or AT variant,
        otherwise configures interrupt sources for TsingMa-family chips
        (packet rx per EOP, tx disabled, stats by desc count, info by timer+desc).
*/
STATIC int32
_sys_usw_dma_intr_init(uint8 lchip, ctc_dma_global_cfg_t* p_global_cfg)
{
    DmaPktIntrEnCfg_m pkt_intr;
    DmaRegIntrEnCfg_m reg_intr;
    DmaRegRdIntrCntCfg_m reg_intr_cnt;
    DmaInfoIntrEnCfg_m info_intr;
    DmaInfoIntrCntCfg_m intr_cnt;
    DmaPktIntrCntCfg_m  pkt_intr_cnt;
    uint32 tbl_id = 0;
    uint32 cmd = 0;
    uint32 field_value;
    uint16 mac_num = 0; /* uint16: consistent with _sys_tmm_dma_intr_init, avoids truncating the cap product */
    uint8 chan_num = 0; /* running index into intr_chan_array */

    /* newer chip families have their own interrupt layouts */
    if ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)))
    {
        return _sys_tmm_dma_intr_init(lchip, p_global_cfg);
    }
    else if (DRV_FROM_AT(lchip))
    {
        return _sys_at_dma_intr_init(lchip, p_global_cfg);
    }

    /* cfg intr for packet rx, only using slice0 dmactl for dma packet, slice1 bufretr forwarding to dmactl0  */
    sal_memset(&pkt_intr, 0, sizeof(DmaPktIntrEnCfg_m));
    tbl_id = DmaPktIntrEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_intr));
    SetDmaPktIntrEnCfg(V, cfgPktRx0EopIntrEn_f, &pkt_intr, 1);
    SetDmaPktIntrEnCfg(V, cfgPktRx1EopIntrEn_f, &pkt_intr, 1);
    SetDmaPktIntrEnCfg(V, cfgPktRx2EopIntrEn_f, &pkt_intr, 1);
    SetDmaPktIntrEnCfg(V, cfgPktRx3EopIntrEn_f, &pkt_intr, 1);

    SetDmaPktIntrEnCfg(V, cfgPktRx0DmaIntrEn_f, &pkt_intr, 1);
    SetDmaPktIntrEnCfg(V, cfgPktRx1DmaIntrEn_f, &pkt_intr, 1);
    SetDmaPktIntrEnCfg(V, cfgPktRx2DmaIntrEn_f, &pkt_intr, 1);
    SetDmaPktIntrEnCfg(V, cfgPktRx3DmaIntrEn_f, &pkt_intr, 1);
    /* packet tx: all interrupt sources disabled */
    SetDmaPktIntrEnCfg(V, cfgPktTx0EopIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx0DmaIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx1EopIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx1DmaIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx2DmaIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx2EopIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx2IntrTimerEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx3DmaIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx3EopIntrEn_f, &pkt_intr, 0);
    SetDmaPktIntrEnCfg(V, cfgPktTx3IntrTimerEn_f, &pkt_intr, 0);
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX0_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX1_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX2_CHAN_ID;
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PACKET_RX3_CHAN_ID;

    /* rx timer-based interrupt disabled on all rx channels */
    field_value = 0;
    SetDmaPktIntrEnCfg(V, cfgPktRx0IntrTimerEn_f, &pkt_intr, field_value);
    SetDmaPktIntrEnCfg(V, cfgPktRx1IntrTimerEn_f, &pkt_intr, field_value);
    SetDmaPktIntrEnCfg(V, cfgPktRx2IntrTimerEn_f, &pkt_intr, field_value);
    SetDmaPktIntrEnCfg(V, cfgPktRx3IntrTimerEn_f, &pkt_intr, field_value);

    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_intr));

    /* rx interrupt counts all 0: interrupt immediately, no batching
       (the struct is fully built from a memset image, so no read-back is needed) */
    tbl_id = DmaPktIntrCntCfg_t;
    sal_memset(&pkt_intr_cnt, 0, sizeof(pkt_intr_cnt));
    SetDmaPktIntrCntCfg(V, cfgPktRx0IntrCnt_f, &pkt_intr_cnt, 0);
    SetDmaPktIntrCntCfg(V, cfgPktRx1IntrCnt_f, &pkt_intr_cnt, 0);
    SetDmaPktIntrCntCfg(V, cfgPktRx2IntrCnt_f, &pkt_intr_cnt, 0);
    SetDmaPktIntrCntCfg(V, cfgPktRx3IntrCnt_f, &pkt_intr_cnt, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_intr_cnt));

    /* cfg intr for Reg Dma(only port stats and dma pkt stats ) */
    sal_memset(&reg_intr, 0, sizeof(DmaRegIntrEnCfg_m));
    sal_memset(&reg_intr_cnt, 0, sizeof(reg_intr_cnt));
    tbl_id = DmaRegIntrEnCfg_t;
    SetDmaRegIntrEnCfg(V, cfgRegRd0DescIntrEn_f, &reg_intr, 0);
    SetDmaRegIntrEnCfg(V, cfgRegRd0DmaIntrEn_f, &reg_intr, 0);
    SetDmaRegIntrEnCfg(V, cfgRegRd1DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd2DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd3DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd4DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegRd5DmaIntrEn_f, &reg_intr, 1);
    SetDmaRegIntrEnCfg(V, cfgRegWrDmaIntrEn_f, &reg_intr, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr));
    if (DRV_IS_TSINGMA(lchip))
    {
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PORT_STATS_CHAN_ID;/* RegRd1 */
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_FLOW_STATS_CHAN_ID;/* RegRd2 */
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_REG_MAX_CHAN_ID;   /* RegRd3 */
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TBL_RD1_CHAN_ID;   /* RegRd4 */
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_TBL_RD2_CHAN_ID;   /* RegRd5 */
    }
    else
    {
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_PORT_STATS_CHAN_ID;/* RegRd1 */
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_FLOW_STATS_CHAN_ID;/* RegRd2 */
        p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_REG_MAX_CHAN_ID;   /* RegRd3 */
    }

    /* per-channel interrupt batch sizes for the RegRd channels */
    mac_num = MCHIP_CAP(SYS_CAP_STATS_XQMAC_PORT_NUM)*MCHIP_CAP(SYS_CAP_STATS_XQMAC_RAM_NUM);
    tbl_id = DmaRegRdIntrCntCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr_cnt));
    SetDmaRegRdIntrCntCfg(V, cfgRegRd1IntrCnt_f, &reg_intr_cnt, mac_num);
    SetDmaRegRdIntrCntCfg(V, cfgRegRd2IntrCnt_f, &reg_intr_cnt, MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM)*2);
    SetDmaRegRdIntrCntCfg(V, cfgRegRd3IntrCnt_f, &reg_intr_cnt, (DRV_CONST(DRV_DMA_TCAM_SCAN_DESC_NUM)));
    SetDmaRegRdIntrCntCfg(V, cfgRegRd4IntrCnt_f, &reg_intr_cnt, 3);
    SetDmaRegRdIntrCntCfg(V, cfgRegRd5IntrCnt_f, &reg_intr_cnt, 20);

    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_intr_cnt));

    /* cfg intr for Info Dma */
    sal_memset(&info_intr, 0, sizeof(DmaInfoIntrEnCfg_m));
    tbl_id = DmaInfoIntrEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_intr));
    SetDmaInfoIntrEnCfg(V, cfgInfo0DescIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo1DescIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo2DescIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo3DescIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo4DescIntrEn_f, &info_intr, 0);

    SetDmaInfoIntrEnCfg(V, cfgInfo0DmaIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo1DmaIntrEn_f, &info_intr, 0);
    SetDmaInfoIntrEnCfg(V, cfgInfo2DmaIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo3DmaIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo4DmaIntrEn_f, &info_intr, 1);

    SetDmaInfoIntrEnCfg(V, cfgInfo0TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo1TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo2TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo3TimerIntrEn_f, &info_intr, 1);
    SetDmaInfoIntrEnCfg(V, cfgInfo4TimerIntrEn_f, &info_intr, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_intr));

    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_LEARNING_CHAN_ID;/* info0 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_IPFIX_CHAN_ID;/* info2 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_SDC_CHAN_ID;/* info3 */
    p_usw_dma_master[lchip]->intr_chan_array[chan_num++] = SYS_DMA_MONITOR_CHAN_ID;/* info4 */
    p_usw_dma_master[lchip]->intr_chan_num = chan_num;

    /* monitor channel (info4): interrupt every 10 entries */
    tbl_id = DmaInfoIntrCntCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &intr_cnt));
    SetDmaInfoIntrCntCfg(V, cfgInfo4IntrCnt_f, &intr_cnt, 10);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &intr_cnt));

    return CTC_E_NONE;
}

/* DMA timer/trigger initialization for AT-family chips.
 * AT chips expose per-channel timer and control tables
 * (DmaInfoNTimerCfg/DmaInfoNCtl/DmaRegRdNCtl) instead of the shared
 * DmaInfoTimerCfg/DmaRegTrigEnCfg tables used on other chips
 * (see _sys_usw_dma_timer_init).
 *
 * @param lchip  local chip id
 * @return CTC_E_NONE on success, or the DRV_IOCTL error code
 */
STATIC int32
_sys_at_dma_timer_init(uint8 lchip)
{
    DmaInfo1TimerCfg_m info1_timer;
    DmaInfo2TimerCfg_m info2_timer;
    /* NOTE: info4_timer is reused below as a scratch buffer for several
       differently-typed timer tables (Info3/Info5/Info0) — each use is
       preceded by a fresh DRV_IOR read, so no stale data is written back. */
    DmaInfo4TimerCfg_m info4_timer;
    DmaRegRd1TrigCfg_m trigger1_timer;
    DmaRegRd2TrigCfg_m trigger2_timer;
    DmaRegRd4TrigCfg_m trigger4_timer;
    DmaRegRd5TrigCfg_m trigger5_timer;
    DmaScanTrigCfg_m scan_timer;
    DmaRegRd0Ctl_m reg_rd_ctl;
    DmaRegWr0Ctl_m reg_wr_ctl;
    DmaScanCtl_m   scan_ctl;
    DmaInfo0Ctl_m info_ctl;
    uint32 cmd = 0;
    uint32 tbl_id = 0;
    uint64 timer = 0;
    /* 64-bit nanosecond timer value split into two 32-bit words for the
       array-style (A) field accessors */
    uint32 timer_v[2] = {0};

    sal_memset(&info1_timer, 0, sizeof(info1_timer));
    sal_memset(&info4_timer, 0, sizeof(info4_timer));
    sal_memset(&trigger1_timer, 0, sizeof(trigger1_timer));
    sal_memset(&scan_timer, 0, sizeof(scan_timer));

    /* for hashdump dump 1 entry need 16 cycles, cfg timer out is 72k hashram
         16*72*1024*1.67ns = 2ms */
    timer = (uint64)2000000/DOWN_FRE_RATE; /*2ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaInfo1TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info1_timer));
    SetDmaInfo1TimerCfg(A, cfgInfo1TimerNs_f, &info1_timer, timer_v);/* timeout cfg */
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info1_timer));

    /*for monitor set 100mS*/
    /* NOTE(review): unlike _sys_usw_dma_timer_init, this value is NOT scaled
       by DOWN_FRE_RATE — confirm this is intended for AT chips */
    timer = (uint64)100000000; /*100ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    if (DRV_FROM_TMM(lchip))
    {
        /* TMM variant also drives Info3 (SDC) and Info5 timers at 100ms */
        tbl_id = DmaInfo3TimerCfg_t;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
        SetDmaInfo3TimerCfg(A, cfgInfo3TimerNs_f, &info4_timer, timer_v);
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
        tbl_id = DmaInfo5TimerCfg_t;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
        SetDmaInfo5TimerCfg(A, cfgInfo5TimerNs_f, &info4_timer, timer_v);
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
    }
    tbl_id = DmaInfo4TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
    SetDmaInfo4TimerCfg(A, cfgInfo4TimerNs_f, &info4_timer, timer_v);/* timeout cfg */
    tbl_id = DmaInfo4TimerCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));

    /*learning using default*/
    timer = (uint64)1000000/DOWN_FRE_RATE; /*1ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaInfo0TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
    SetDmaInfo0TimerCfg(A, cfgInfo0TimerNs_f, &info4_timer, timer_v);
    tbl_id = DmaInfo0TimerCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));

    /*for ipfix set 100 ms*/
    timer = (uint64)SYS_DMA_EXPORT_IPFIX_MIN_INTERVAL * 1000000 / DOWN_FRE_RATE; /*100 ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaInfo2TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info2_timer));
    SetDmaInfo2TimerCfg(A, cfgInfo2TimerNs_f, &info2_timer, timer_v);/* timeout cfg */
    tbl_id = DmaInfo2TimerCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info2_timer));

    /* Info DMA Config: every info channel gets the same policy —
       threshold 1, request-timer check off, descriptor-timer check on,
       timer-end on. info_ctl is reused as scratch for each table (each
       iteration starts with a fresh DRV_IOR). */
    tbl_id = DmaInfo0Ctl_t;/* learning & aging */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo0Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo0Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo0Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo0Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo1Ctl_t;/* hash dump */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo1Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo1Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo1Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo1Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo2Ctl_t;/* ipfix */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo2Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo2Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo2Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo2Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo3Ctl_t;/* monitor */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo3Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo3Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo3Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo3Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo4Ctl_t;/* latency */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo4Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo4Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo4Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo4Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo5Ctl_t;/* efd */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo5Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo5Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo5Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo5Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo6Ctl_t;/* dlb */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo6Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo6Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo6Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo6Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo7Ctl_t;/* oam */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo7Ctl(V, cfgInfoDmaThrd_f, &info_ctl, 1);
    SetDmaInfo7Ctl(V, cfgInfoReqTimerChk_f, &info_ctl, 0);
    SetDmaInfo7Ctl(V, cfgInfoDescTimerChk_f, &info_ctl, 1);
    SetDmaInfo7Ctl(V, cfgInfoTimerEnd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));


    /* cfg port stats trigger function */
    timer = (uint64)1*60*1000000000/DOWN_FRE_RATE; /*1min*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaRegRd1TrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger1_timer));
    SetDmaRegRd1TrigCfg(A, cfgRegRd1TrigNs_f, &trigger1_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger1_timer));


    /* cfg dma flow stats trigger function */
    timer = (uint64)SYS_USW_DMA_FLOW_STATS_SYNC_TIME_FROM_TMM*4/DOWN_FRE_RATE; /*7.2s*/

    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaRegRd2TrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger2_timer));
    SetDmaRegRd2TrigCfg(A, cfgRegRd2TrigNs_f, &trigger2_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger2_timer));

    /* cfg dma dot1ae stats trigger function */
    /* NOTE(review): hard-coded 1s here, while the non-AT path uses
       SYS_USW_DMA_DOT1AE_STATS_SYNC_TIME (5s) scaled by DOWN_FRE_RATE —
       confirm the AT value is intentional */
    timer = (uint64)1 * 1000 * 1000 * 1000;
    timer_v[0] = timer & 0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaRegRd4TrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger4_timer));
    SetDmaRegRd4TrigCfg(A, cfgRegRd4TrigNs_f, &trigger4_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger4_timer));

    /* cfg dma npm stats trigger function */
    timer = (uint64)1 * 1000 * 1000 * 1000 / DOWN_FRE_RATE;
    timer_v[0] = timer & 0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaRegRd5TrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger5_timer));
    SetDmaRegRd5TrigCfg(A, cfgRegRd5TrigNs_f, &trigger5_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger5_timer));

    /* Reg Read Trig Timer Config: only RegRd2 (flow stats) and RegRd5 (npm)
       are enabled at init; RegRd1 is armed later by the stats module */
    tbl_id = DmaRegRd0Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd0Ctl(V, cfgRegRdTrigEn_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    tbl_id = DmaRegRd1Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd1Ctl(V, cfgRegRdTrigEn_f, &reg_rd_ctl, 0);/*set by port stats module*/
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    tbl_id = DmaRegRd2Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd2Ctl(V, cfgRegRdTrigEn_f, &reg_rd_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    tbl_id = DmaRegRd4Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd4Ctl(V, cfgRegRdTrigEn_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    tbl_id = DmaRegRd5Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd5Ctl(V, cfgRegRdTrigEn_f, &reg_rd_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    /* Reg Write Trig Timer Config: both write triggers disabled */
    tbl_id = DmaRegWr0Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));
    SetDmaRegWr0Ctl(V, cfgRegWrTrigEn_f, &reg_wr_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));

    tbl_id = DmaRegWr1Ctl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));
    SetDmaRegWr1Ctl(V, cfgRegWrTrigEn_f, &reg_wr_ctl, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_wr_ctl));

    /* Tcam Scan Config: configure the periodic scan timer, then enable it */
	#ifdef EMULATION_ENV
	timer = (uint64)5*60*1000000000; /* 5min */
	#else
    timer = (uint64)60*60*1000000000/DOWN_FRE_RATE; /* 60min */
	#endif
    timer_v[0] = timer&0xFFFFFFFF;
    /* high word masked to 16 bits — presumably the hardware field is
       48 bits wide (same masking as the non-AT path); verify vs. regfile */
    timer_v[1] = (timer >> 32) & 0xFFFF;
    tbl_id = DmaScanTrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_timer));
    SetDmaScanTrigCfg(A, cfgScanTrigNs_f, &scan_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_timer));

    tbl_id = DmaScanCtl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_ctl));
    SetDmaScanCtl(V, cfgScanTrigEn_f, &scan_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_ctl));

    return CTC_E_NONE;
}

/**
@brief DMA timer init; only useful for InfoDma and port stats.
  Info0: learning/aging, Info1: HashKey, Info2: Ipfix
  Info3: SDC,
  Info4: Monitor: does not use the timeout interrupt, only the desc-full interrupt
  port stats: reg1
  table read: reg0

*/
/* DMA timer/trigger initialization for non-AT chips (AT chips are delegated
 * to _sys_at_dma_timer_init). Programs the shared info-channel threshold and
 * timer tables, per-channel timeout values, the RegRdN stats trigger timers,
 * and finally the tcam scan trigger.
 *
 * Fix: removed the unused local `DmaInfo0TimerCfg_m info0_timer` (and its
 * memset) — the Info0 timer write below reuses `info4_timer` as scratch.
 *
 * @param lchip  local chip id
 * @return CTC_E_NONE on success, or the DRV_IOCTL error code
 */
STATIC int32
_sys_usw_dma_timer_init(uint8 lchip)
{
    DmaInfoTimerCfg_m global_timer_ctl;
    DmaInfo1TimerCfg_m info1_timer;
    /* NOTE: info4_timer is reused as a scratch buffer for several
       differently-typed timer tables (Info3/Info4/Info5/Info0) — each use is
       preceded by a fresh DRV_IOR read, so no stale data is written back. */
    DmaInfo4TimerCfg_m info4_timer;
    DmaInfo2TimerCfg_m info2_timer;
    DmaRegRd1TrigCfg_m trigger1_timer;
    DmaRegRd2TrigCfg_m trigger2_timer;
    DmaRegRd4TrigCfg_m trigger4_timer;
    DmaRegRd5TrigCfg_m trigger5_timer;
    DmaRegTrigEnCfg_m trigger_ctl;
    DmaInfoThrdCfg_m thrd_cfg;
    DmaPktIntrTimerCfg_m pkt_timer;
    DmaScanTrigCfg_m scan_timer;
    uint32 cmd = 0;
    uint32 tbl_id = 0;
    uint64 timer = 0;
    /* 64-bit nanosecond timer value split into two 32-bit words for the
       array-style (A) field accessors */
    uint32 timer_v[2] = {0};

    if (DRV_FROM_AT(lchip))
    {
        /* AT chips use per-channel timer/control tables */
        return _sys_at_dma_timer_init(lchip);
    }

    sal_memset(&global_timer_ctl, 0, sizeof(global_timer_ctl));
    sal_memset(&info1_timer, 0, sizeof(info1_timer));
    sal_memset(&info4_timer, 0, sizeof(info4_timer));
    sal_memset(&trigger1_timer, 0, sizeof(trigger1_timer));
    sal_memset(&trigger_ctl, 0, sizeof(trigger_ctl));
    sal_memset(&pkt_timer, 0, sizeof(pkt_timer));
    sal_memset(&scan_timer, 0, sizeof(scan_timer));

    /* DMA threshold (batch size) per info channel */
    cmd = DRV_IOR(DmaInfoThrdCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &thrd_cfg));

    SetDmaInfoThrdCfg(V, cfgInfo0DmaThrd_f, &thrd_cfg, 1); /*learning max is 8 */
    SetDmaInfoThrdCfg(V, cfgInfo2DmaThrd_f, &thrd_cfg, 1); /*ipfix max is 4 */

    SetDmaInfoThrdCfg(V, cfgInfo1DmaThrd_f, &thrd_cfg, 1);
    SetDmaInfoThrdCfg(V, cfgInfo3DmaThrd_f, &thrd_cfg, 1);
    SetDmaInfoThrdCfg(V, cfgInfo4DmaThrd_f, &thrd_cfg, 1);
    SetDmaInfoThrdCfg(V, cfgInfo5DmaThrd_f, &thrd_cfg, 1);/*TMM*/
    SetDmaInfoThrdCfg(V, cfgInfo6DmaThrd_f, &thrd_cfg, 1);/*TMM*/
    SetDmaInfoThrdCfg(V, cfgInfo7DmaThrd_f, &thrd_cfg, 1);/*TMM*/
    cmd = DRV_IOW(DmaInfoThrdCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &thrd_cfg));

    /* for packet rx ,100ms trigger interrupt, cfgPktRxIntrTimerCnt 1 means 2ns*/
    tbl_id = DmaPktIntrTimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_timer));
    timer = (uint64)(1000*1000*1000/SYS_USW_DMA_PACKETS_PER_INTR/2);
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    SetDmaPktIntrTimerCfg(A, cfgPktRxIntrTimerCnt_f, &pkt_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &pkt_timer));

    /* for hashdump dump 1 entry need 16 cycles, cfg timer out is 72k hashram
         16*72*1024*1.67ns = 2ms */
    timer = (uint64)2000000/DOWN_FRE_RATE; /*2ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaInfo1TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info1_timer));
    SetDmaInfo1TimerCfg(A, cfgInfo1TimerNs_f, &info1_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info1_timer));

    /*for monitor set 100mS*/
    timer = (uint64)100000000/DOWN_FRE_RATE; /*100ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    if (DRV_FROM_TMM(lchip))
    {
        /* TMM variant also drives Info3 (SDC) and Info5 timers at 100ms */
        tbl_id = DmaInfo3TimerCfg_t;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
        SetDmaInfo3TimerCfg(A, cfgInfo3TimerNs_f, &info4_timer, timer_v);
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
        tbl_id = DmaInfo5TimerCfg_t;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
        SetDmaInfo5TimerCfg(A, cfgInfo5TimerNs_f, &info4_timer, timer_v);
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
    }
    tbl_id = DmaInfo4TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
    SetDmaInfo4TimerCfg(A, cfgInfo4TimerNs_f, &info4_timer, timer_v);
    tbl_id = DmaInfo4TimerCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));

    /*learning using default*/
    timer = (uint64)1000000/DOWN_FRE_RATE; /*1ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaInfo0TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));
    SetDmaInfo0TimerCfg(A, cfgInfo0TimerNs_f, &info4_timer, timer_v);
    tbl_id = DmaInfo0TimerCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info4_timer));

    /*for ipfix set 100 ms*/
    timer = (uint64)SYS_DMA_EXPORT_IPFIX_MIN_INTERVAL * 1000000 / DOWN_FRE_RATE; /*100 ms*/
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaInfo2TimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info2_timer));
    SetDmaInfo2TimerCfg(A, cfgInfo2TimerNs_f, &info2_timer, timer_v);
    tbl_id = DmaInfo2TimerCfg_t;
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info2_timer));

    /* Shared timer policy: every info channel gets descriptor-timer check on,
       request-timer check off, timer-end on */
    tbl_id = DmaInfoTimerCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &global_timer_ctl));

    SetDmaInfoTimerCfg(V, cfgInfo0DescTimerChk_f, &global_timer_ctl, 1);  /*learning*/
    SetDmaInfoTimerCfg(V, cfgInfo0ReqTimerChk_f, &global_timer_ctl, 0);
    SetDmaInfoTimerCfg(V, cfgInfo0TimerEnd_f, &global_timer_ctl, 1);

    SetDmaInfoTimerCfg(V, cfgInfo2DescTimerChk_f, &global_timer_ctl, 1);  /*ipfix*/
    SetDmaInfoTimerCfg(V, cfgInfo2ReqTimerChk_f, &global_timer_ctl, 0);
    SetDmaInfoTimerCfg(V, cfgInfo2TimerEnd_f, &global_timer_ctl, 1);

    SetDmaInfoTimerCfg(V, cfgInfo1DescTimerChk_f, &global_timer_ctl, 1); /*hashdump*/
    SetDmaInfoTimerCfg(V, cfgInfo1ReqTimerChk_f, &global_timer_ctl, 0);
    SetDmaInfoTimerCfg(V, cfgInfo1TimerEnd_f, &global_timer_ctl, 1);

    SetDmaInfoTimerCfg(V, cfgInfo3DescTimerChk_f, &global_timer_ctl, 1);  /*sdc*/
    SetDmaInfoTimerCfg(V, cfgInfo3ReqTimerChk_f, &global_timer_ctl, 0);
    SetDmaInfoTimerCfg(V, cfgInfo3TimerEnd_f, &global_timer_ctl, 1);

    SetDmaInfoTimerCfg(V, cfgInfo4DescTimerChk_f, &global_timer_ctl, 1);/*monitor*/
    SetDmaInfoTimerCfg(V, cfgInfo4ReqTimerChk_f, &global_timer_ctl, 0);
    SetDmaInfoTimerCfg(V, cfgInfo4TimerEnd_f, &global_timer_ctl, 1);

    SetDmaInfoTimerCfg(V, cfgInfo5DescTimerChk_f, &global_timer_ctl, 1);/*TMM*/
    SetDmaInfoTimerCfg(V, cfgInfo5ReqTimerChk_f, &global_timer_ctl, 0);/*TMM*/
    SetDmaInfoTimerCfg(V, cfgInfo5TimerEnd_f, &global_timer_ctl, 1);/*TMM*/

    SetDmaInfoTimerCfg(V, cfgInfo6DescTimerChk_f, &global_timer_ctl, 1);/*TMM*/
    SetDmaInfoTimerCfg(V, cfgInfo6ReqTimerChk_f, &global_timer_ctl, 0);/*TMM*/
    SetDmaInfoTimerCfg(V, cfgInfo6TimerEnd_f, &global_timer_ctl, 1);/*TMM*/

    SetDmaInfoTimerCfg(V, cfgInfo7DescTimerChk_f, &global_timer_ctl, 1);/*TMM*/
    SetDmaInfoTimerCfg(V, cfgInfo7ReqTimerChk_f, &global_timer_ctl, 0);/*TMM*/
    SetDmaInfoTimerCfg(V, cfgInfo7TimerEnd_f, &global_timer_ctl, 1);/*TMM*/
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &global_timer_ctl));

    /* cfg port stats trigger function */
    if (DRV_FROM_TMM(lchip))
    {
        timer = (uint64)1*20*1000000000/DOWN_FRE_RATE; /*20s*/
    }
    else
    {
        timer = (uint64)1*60*1000000000/DOWN_FRE_RATE; /*1min*/
    }
    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaRegRd1TrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger1_timer));
    SetDmaRegRd1TrigCfg(A, cfgRegRd1TrigNs_f, &trigger1_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger1_timer));

    /* cfg dma flow stats trigger function */
    /* All cal for 100G speed, TMM:7.2s other:1.8s*/
    if (DRV_FROM_TMM(lchip))
    {
        timer = (uint64)SYS_USW_DMA_FLOW_STATS_SYNC_TIME_FROM_TMM*4/DOWN_FRE_RATE;
    }
    else
    {
        timer = (uint64)SYS_USW_DMA_FLOW_STATS_SYNC_TIME/DOWN_FRE_RATE; /*1s , emulation 20s*/
    }

    timer_v[0] = timer&0xFFFFFFFF;
    timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
    tbl_id = DmaRegRd2TrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger2_timer));
    SetDmaRegRd2TrigCfg(A, cfgRegRd2TrigNs_f, &trigger2_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger2_timer));
    if(!DRV_IS_DUET2(lchip))
    {
        /* cfg dma dot1ae stats trigger function */
        timer = (uint64)SYS_USW_DMA_DOT1AE_STATS_SYNC_TIME/DOWN_FRE_RATE;
        timer_v[0] = timer&0xFFFFFFFF;
        timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
        tbl_id = DmaRegRd4TrigCfg_t;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger4_timer));
        SetDmaRegRd4TrigCfg(A, cfgRegRd4TrigNs_f, &trigger4_timer, timer_v);
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger4_timer));
    }
    if (DRV_FROM_TMM(lchip))
    {
        /* cfg dma npm stats trigger function */
        timer = (uint64)SYS_USW_DMA_NPM_STATS_SYNC_TIME / DOWN_FRE_RATE;
        timer_v[0] = timer&0xFFFFFFFF;
        timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
        tbl_id = DmaRegRd5TrigCfg_t;
        cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger5_timer));
        SetDmaRegRd5TrigCfg(A, cfgRegRd5TrigNs_f, &trigger5_timer, timer_v);
        cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger5_timer));
    }

    /* Trigger enables: only RegRd2 (flow stats) and RegRd4 (dot1ae) are
       armed here; RegRd1 is armed later by the stats module. Scan trigger
       stays off until its timer is programmed below. */
    tbl_id = DmaRegTrigEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger_ctl));
    SetDmaRegTrigEnCfg(V, cfgRegRd1TrigEn_f, &trigger_ctl, 0);    /*set by stats module*/
    SetDmaRegTrigEnCfg(V, cfgRegRd2TrigEn_f, &trigger_ctl, 1);
    SetDmaRegTrigEnCfg(V, cfgRegRd3TrigEn_f, &trigger_ctl, 0);
    SetDmaRegTrigEnCfg(V, cfgRegRd4TrigEn_f, &trigger_ctl, 1);
    SetDmaRegTrigEnCfg(V, cfgRegRd5TrigEn_f, &trigger_ctl, 0);
    SetDmaRegTrigEnCfg(V, cfgRegWrTrigEn_f, &trigger_ctl, 0);
    SetDmaRegTrigEnCfg(V, cfgRegWr0TrigEn_f, &trigger_ctl, 0);/*TMM*/
    SetDmaRegTrigEnCfg(V, cfgRegWr1TrigEn_f, &trigger_ctl, 0);/*TMM*/
    SetDmaRegTrigEnCfg(V, cfgScanTrigEn_f, &trigger_ctl, 0);/*TMM*/
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger_ctl));

     /* tcam scan */
    timer = (uint64)60*60*1000000000/DOWN_FRE_RATE; /*60min*/
    timer_v[0] = timer&0xFFFFFFFF;
    /* high word masked to 16 bits — presumably the hardware field is
       48 bits wide; verify against the register definition */
    timer_v[1] = (timer >> 32) & 0xFFFF;
    tbl_id = DmaScanTrigCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_timer));
    SetDmaScanTrigCfg(A, cfgScanTrigNs_f, &scan_timer, timer_v);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &scan_timer));

    /* now that the scan timer is programmed, enable the scan trigger */
    sal_memset(&trigger_ctl, 0, sizeof(DmaRegTrigEnCfg_m));
    tbl_id = DmaRegTrigEnCfg_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger_ctl));
    SetDmaRegTrigEnCfg(V, cfgScanTrigEn_f, &trigger_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trigger_ctl));

    return CTC_E_NONE;
}

/* CRC setup for the 4 AT packet DMA rings: on Rx the descriptor carries no
 * CRC (valid/pad both off); on Tx software supplies no CRC and hardware
 * pads one (pad on, valid/check off).
 *
 * @param lchip  local chip id
 * @return CTC_E_NONE on success, or the DRV_IOCTL error code
 */
STATIC int32
_sys_at_dma_crc_init(uint8 lchip)
{
    uint32 rx_tbl[4] = {DmaPktRx0Ctl_t, DmaPktRx1Ctl_t, DmaPktRx2Ctl_t, DmaPktRx3Ctl_t};
    uint32 tx_tbl[4] = {DmaPktTx0Ctl_t, DmaPktTx1Ctl_t, DmaPktTx2Ctl_t, DmaPktTx3Ctl_t};
    DmaPktRx0Ctl_m rx_ctl;
    DmaPktTx0Ctl_m tx_ctl;
    uint32 cmd = 0;
    uint8 ring = 0;

    /* read-modify-write each of the 4 rx/tx ring control tables */
    for (ring = 0; ring < 4; ring++)
    {
        cmd = DRV_IOR(rx_tbl[ring], DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &rx_ctl));
        SetDmaPktRx0Ctl(V, dmaPktRxCrcValid_f, &rx_ctl, 0);
        SetDmaPktRx0Ctl(V, dmaPktRxCrcPadEn_f, &rx_ctl, 0);
        cmd = DRV_IOW(rx_tbl[ring], DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &rx_ctl));

        cmd = DRV_IOR(tx_tbl[ring], DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &tx_ctl));
        SetDmaPktTx0Ctl(V, cfgPktTxCrcValid_f, &tx_ctl, 0);
        SetDmaPktTx0Ctl(V, cfgPktTxCrcChkEn_f, &tx_ctl, 0);
        SetDmaPktTx0Ctl(V, cfgPktTxCrcPadEn_f, &tx_ctl, 1);
        cmd = DRV_IOW(tx_tbl[ring], DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &tx_ctl));
    }

    return CTC_E_NONE;
}

/* CRC setup for packet DMA on non-AT chips (AT chips keep per-ring CRC
 * controls and are delegated to _sys_at_dma_crc_init). All rx rings run
 * with CRC valid/pad disabled; the tx path differs per chip family:
 * TMM carries a CRC from software (valid=1, pad=0), others let hardware
 * pad the CRC (valid=0, pad=1).
 *
 * @param lchip  local chip id
 * @return CTC_E_NONE on success, or the DRV_IOCTL error code
 */
STATIC int32
_sys_usw_dma_crc_init(uint8 lchip)
{
    DmaPktRxCrcCfg_m rx_cfg;
    DmaPktTxCrcCfg_m tx_cfg;
    uint32 cmd = 0;
    uint32 tx_crc_valid = 0;
    uint32 tx_crc_pad = 1;

    if (DRV_FROM_AT(lchip))
    {
        return _sys_at_dma_crc_init(lchip);
    }

    sal_memset(&rx_cfg, 0, sizeof(rx_cfg));
    sal_memset(&tx_cfg, 0, sizeof(tx_cfg));

    /* TMM tx path supplies its own CRC instead of hardware padding */
    if (DRV_IS_TMM(lchip))
    {
        tx_crc_valid = 1;
        tx_crc_pad = 0;
    }

    /* rx: CRC valid/pad disabled on every ring */
    SetDmaPktRxCrcCfg(V, cfgPktRx0CrcValid_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, cfgPktRx0CrcPadEn_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, cfgPktRx1CrcValid_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, cfgPktRx1CrcPadEn_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, cfgPktRx2CrcValid_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, cfgPktRx2CrcPadEn_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, cfgPktRx3CrcValid_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, cfgPktRx3CrcPadEn_f, &rx_cfg, 0);
    SetDmaPktRxCrcCfg(V, dmaPktRxCrcValid_f, &rx_cfg, 0);/*TMM*/
    SetDmaPktRxCrcCfg(V, dmaPktRxCrcPadEn_f, &rx_cfg, 0);/*TMM*/

    SetDmaPktTxCrcCfg(V, cfgPktTxCrcValid_f, &tx_cfg, tx_crc_valid);
    SetDmaPktTxCrcCfg(V, cfgPktTxCrcChkEn_f, &tx_cfg, 0);
    SetDmaPktTxCrcCfg(V, cfgPktTxCrcPadEn_f, &tx_cfg, tx_crc_pad);

    cmd = DRV_IOW(DmaPktRxCrcCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &rx_cfg));

    cmd = DRV_IOW(DmaPktTxCrcCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &tx_cfg));

    return CTC_E_NONE;
}

/**
@brief Get Dma channel config
*/
STATIC int32
_sys_usw_dma_get_chan_cfg(uint8 lchip, uint8 chan_id, ctc_dma_global_cfg_t* ctc_cfg, sys_dma_chan_t* sys_cfg)
{
    uint32 desc_size = 0;
    uint32 desc_num = 0;
    uint32 type = 0;
    uint16 mac_num = 0;
    uint8  chan_idx = 0;
    ctc_dma_chan_cfg_t* tmp_chan_cfg = NULL;

    type = GET_CHAN_TYPE(chan_id);
    switch(type)
    {
        case DRV_DMA_PACKET_RX0_CHAN_ID:
        case DRV_DMA_PACKET_RX1_CHAN_ID:
        case DRV_DMA_PACKET_RX2_CHAN_ID:
        case DRV_DMA_PACKET_RX3_CHAN_ID:
        case DRV_DMA_PACKET_RX7_CHAN_ID:
            /* dma pcket rx using 256bytes as one block for one transfer */
            desc_size = (ctc_cfg->pkt_rx[chan_id].data < 256)?256:(ctc_cfg->pkt_rx[chan_id].data);
            desc_num = (ctc_cfg->pkt_rx[chan_id].desc_num)?(ctc_cfg->pkt_rx[chan_id].desc_num):64;
            desc_num = (desc_num > SYS_DMA_MAX_PACKET_RX_DESC_NUM)?SYS_DMA_MAX_PACKET_RX_DESC_NUM:desc_num;
            sys_cfg->data_num = (ctc_cfg->pkt_rx[chan_id].data_num <= desc_num)?desc_num:ctc_cfg->pkt_rx[chan_id].data_num;
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_PACKET_RX0_CHAN_ID+chan_id;
            sys_cfg->current_index = 0;
#ifdef _SAL_LINUX_UM
            sys_cfg->pkt_knet_en = ctc_cfg->pkt_rx[chan_id].pkt_knet_en;
#endif
            sys_cfg->cfg_size = desc_size;
            sys_cfg->data_size = desc_size;
            sys_cfg->desc_depth = desc_num;
            sys_cfg->desc_num = desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_PACKET_RX;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sys_cfg->auto_fetch_en = (sys_cfg->pkt_knet_en)?1:0;
            /* for 4 dma rx channel, channel 0 is high priority */
            sys_cfg->weight = (chan_id == SYS_DMA_PACKET_RX0_CHAN_ID)?(SYS_DMA_HIGH_WEIRGH + 2):SYS_DMA_HIGH_WEIRGH;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[chan_id], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_PACKET_TX0_CHAN_ID:
        case DRV_DMA_PACKET_TX1_CHAN_ID:
        case DRV_DMA_PACKET_TX2_CHAN_ID:
        case DRV_DMA_PACKET_TX3_CHAN_ID:

            if (p_usw_dma_master[lchip]->pkt_tx_timer_en && (chan_id == SYS_DMA_PKT_TX_TIMER_CHAN_ID))
            {
                sys_cfg->channel_id = chan_id;
                sys_cfg->auto_mode_en = 1;
                sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[chan_id], sys_cfg, sizeof(sys_dma_chan_t));
                break;
            }

            chan_idx = GET_CHAN_TYPE(chan_id) - DRV_DMA_PACKET_TX0_CHAN_ID;
            desc_size = (ctc_cfg->pkt_tx_ext[chan_idx].data < SYS_DMA_TX_PKT_MEM_SIZE)?SYS_DMA_TX_PKT_MEM_SIZE:(ctc_cfg->pkt_tx_ext[chan_idx].data);
            tmp_chan_cfg = ctc_cfg->pkt_tx_ext[chan_idx].desc_num ?
                (&ctc_cfg->pkt_tx_ext[chan_idx]) : (&ctc_cfg->pkt_tx);
            desc_num = (tmp_chan_cfg->desc_num)?(tmp_chan_cfg->desc_num):8;
            desc_num = (desc_num > SYS_DMA_MAX_PACKET_TX_DESC_NUM)?SYS_DMA_MAX_PACKET_TX_DESC_NUM:desc_num;
            sys_cfg->data_num = desc_num;
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = chan_id;
            sys_cfg->current_index = 0;
#ifdef _SAL_LINUX_UM
            sys_cfg->pkt_knet_en = tmp_chan_cfg->pkt_knet_en;
#endif
            sys_cfg->cfg_size = desc_size;
            sys_cfg->data_size = desc_size;
            sys_cfg->desc_depth = desc_num;
            sys_cfg->desc_num = desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_PACKET_TX;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
#if (0 == SDK_WORK_PLATFORM)
            sys_cfg->tx_crc_len = ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip))&&(chan_id != SYS_DMA_FLEXE_TX_RING))?CTC_DMA_PKT_CRC_LEN:0;
#endif
            sys_cfg->auto_fetch_en = (sys_cfg->pkt_knet_en)?1:0;
            /* for 2 dma tx channel, channel tx0 is high priority */
            sys_cfg->weight = (chan_id == SYS_DMA_PACKET_TX0_CHAN_ID)?(SYS_DMA_HIGH_WEIRGH + 2):SYS_DMA_HIGH_WEIRGH;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[chan_id], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_TBL_RD_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_TBL_RD_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 0;
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = 64;
            sys_cfg->desc_num = 0;
            sys_cfg->func_type = SYS_DMA_FUNC_TABLE_R;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_RD_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_TBL_WR_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_TBL_WR_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 0;
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = 64;
            sys_cfg->desc_num = 0;
            sys_cfg->func_type = SYS_DMA_FUNC_TABLE_W;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            /*dma write need allocate memory for record desc used state */
            sys_cfg->p_desc_check = (sys_dma_tx_check_t*)mem_malloc(MEM_DMA_MODULE, sizeof(sys_dma_tx_check_t)*sys_cfg->desc_depth);
            if (NULL == sys_cfg->p_desc_check)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
    			return CTC_E_NO_MEMORY;
            }
            sal_memset(sys_cfg->p_desc_check, 0,  sizeof(sys_dma_tx_check_t)*sys_cfg->desc_depth);
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;
        case DRV_DMA_PORT_STATS_CHAN_ID:
            mac_num = MCHIP_CAP(SYS_CAP_STATS_XQMAC_PORT_NUM)*MCHIP_CAP(SYS_CAP_STATS_XQMAC_RAM_NUM);
            mac_num += (DRV_FROM_TMM(lchip)?SYS_USW_CPU_MAC_NUM:0);
            if (DRV_IS_AT(lchip) && !SYS_VCHIP_DUAL_CORE_MODE(lchip))
            {
                mac_num /= 2;
            }
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_PORT_STATS_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 40*4*4;
            sys_cfg->data_size = 40*4*4;
            sys_cfg->desc_depth = mac_num;
            sys_cfg->desc_num = mac_num;
            sys_cfg->data_num = mac_num;
            sys_cfg->func_type = SYS_DMA_FUNC_STATS;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_PORT_STATS;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PORT_STATS_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_LEARNING_CHAN_ID:
            desc_num = (ctc_cfg->learning.desc_num)?(ctc_cfg->learning.desc_num):64;
            desc_num = (desc_num > SYS_DMA_MAX_LEARNING_DESC_NUM)?SYS_DMA_MAX_LEARNING_DESC_NUM:desc_num;

            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_LEARNING_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = (CTC_LEARNING_CACHE_MAX_INDEX > 64) ? CTC_LEARNING_CACHE_MAX_INDEX : 64;
            sys_cfg->data_size = sys_cfg->cfg_size* (DRV_FROM_AT(lchip) ?  sizeof(CpuInfoLearning_m) : ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)) ? sizeof(CpuInfoLearning_s) : sizeof(sys_dma_learning_info_t)));
            sys_cfg->desc_depth = desc_num;
            sys_cfg->desc_num = desc_num;
            sys_cfg->data_num = desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_HW_LEARNING;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_LERNING;
            sys_cfg->weight = SYS_DMA_MID_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_LEARNING_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_IPFIX_CHAN_ID:
            desc_num = (ctc_cfg->ipfix.desc_num)?(ctc_cfg->ipfix.desc_num):64;
            desc_num = (desc_num > SYS_DMA_MAX_IPFIX_DESC_NUM)?SYS_DMA_MAX_IPFIX_DESC_NUM:desc_num;

            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_IPFIX_CHAN_ID;

            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 64;
            if (DRV_FROM_TMM(lchip))
            {
                sys_cfg->data_size = 64 * sizeof(CpuInfoIpfix_m);
            }
            else
            {
                sys_cfg->data_size = 64 * sizeof(DmaToCpuIpfixAccFifo_m);
            }
            sys_cfg->desc_depth = desc_num;
            sys_cfg->desc_num = desc_num;
            sys_cfg->data_num = desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_IPFIX;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_IPFIX;
            sys_cfg->weight = SYS_DMA_MID_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_IPFIX_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_SDC_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_SDC_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 256;
            sys_cfg->data_size = 256*sizeof(DmaToCpuSdcFifo_m);
            sys_cfg->desc_depth = 64;
            sys_cfg->desc_num = 64;
            sys_cfg->func_type = SYS_DMA_FUNC_SDC;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_SDC_STATS;
            sys_cfg->weight = SYS_DMA_MID_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_SDC_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_MONITOR_CHAN_ID:
            /*for monitor function per monitor interval process 128 entries, Dma allocate 1k entry data memory,
                So entry desc consume time is :1024/128*interval
             */
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_MONITOR_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 1024;
            sys_cfg->data_size = 1024*sizeof(DmaToCpuActMonIrmFifo_m);
            sys_cfg->desc_depth = 12;
            sys_cfg->desc_num = 12;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_MONITOR;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_MONITOR;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_MONITOR_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_BUFFER_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_BUFFER_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 1024;
            sys_cfg->data_size = 1024 * sizeof(CpuInfoIrmMon_m);
            sys_cfg->desc_depth = 12;
            sys_cfg->desc_num = 12;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_BUFFER_MON;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_BUFFER_MON;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_BUFFER_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_LATENCY_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_LATENCY_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 1024;
            sys_cfg->data_size = 1024 * sizeof(CpuInfoLatencyMon_m);
            sys_cfg->desc_depth = 4;
            sys_cfg->desc_num = 4;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_LATENCY_MON;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_LATENCY_MON;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_LATENCY_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_EFD_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_EFD_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 1024;
            sys_cfg->data_size = 1024 * sizeof(CpuInfoEfdMon_m);
            sys_cfg->desc_depth = 16;
            sys_cfg->desc_num = 16;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_EFD_MON;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_EFD_MON;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_EFD_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_HASHKEY_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_HASHKEY_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 256;   /* for optimal should eq dump threshold */
            if (DRV_FROM_TMM(lchip))
            {
                sys_cfg->data_size = 256*TABLE_ENTRY_SIZE(lchip, CpuInfoDump_t);
            }
            else
            {
                sys_cfg->data_size = 256*sizeof(DmaFibDumpFifo_m);
            }
            sys_cfg->desc_depth = 64;
            sys_cfg->desc_num = 64;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = 0;
            sys_cfg->weight = SYS_DMA_MID_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_HASHKEY_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        /*used for sync flow stats, using 4 desc, every desc sync Max Dsstats/4 entry num*/
        case DRV_DMA_FLOW_STATS_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_FLOW_STATS_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_SIZE)*DRV_TABLE_ENTRY_SIZE(lchip, DsStats_t);  /*Notice:for reg dma, size must eq ds(2 n)*entry num */
            sys_cfg->data_size = MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_SIZE)*DRV_TABLE_ENTRY_SIZE(lchip, DsStats_t);
            sys_cfg->desc_depth = MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM)*SYS_DMA_FLOW_STATS_DESC_DEPTH;
            sys_cfg->desc_num = MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM)*SYS_DMA_FLOW_STATS_DESC_DEPTH;
            sys_cfg->func_type = 0;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_FLOW_STATS;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_FLOW_STATS_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_BUF_SCAN_CHAN_ID:
            sys_cfg->chan_en = 1;
            sys_cfg->channel_id = SYS_DMA_REG_BUF_SCAN_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_BUFFER_MON_SCAN;
            sys_cfg->cfg_size = 0;  /*Notice:should get by ftm module when used */
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = 1;
            sys_cfg->desc_num = 1;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = 0;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_REG_BUF_SCAN_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;
        case DRV_DMA_REG_MAX_CHAN_ID:  /*TM tcam scan*/
            sys_cfg->chan_en = drv_ser_get_cfg(lchip, DRV_SER_CFG_TYPE_SCAN_MODE, NULL);
            sys_cfg->channel_id = SYS_DMA_REG_MAX_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 0;  /*Notice:should get by ftm module when used */
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = (DRV_CONST(DRV_DMA_TCAM_SCAN_DESC_NUM));
            sys_cfg->desc_num = (DRV_CONST(DRV_DMA_TCAM_SCAN_DESC_NUM));
            sys_cfg->func_type = 0;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_REG_MAX_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;
        case DRV_DMA_TBL_RD1_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_TBL_RD1_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 0;  /*Using for dot1ae stats in TM */
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = DRV_FROM_AT(lchip)? 96 : ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)) ? 12 : 3);
            sys_cfg->desc_num = DRV_FROM_AT(lchip)? 96 : ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)) ? 12 : 3);
            sys_cfg->func_type = 0;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_DOT1AE_STATS;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_RD1_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;
        case DRV_DMA_TBL_RD2_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_TBL_RD2_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 0;  /*Using for dot1ae stats in TM */
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = SYS_USW_DMA_NPM_STATS_SYNC_TBL_NUM - ((DRV_FROM_AT(lchip))?1:0);
            sys_cfg->desc_num =  SYS_USW_DMA_NPM_STATS_SYNC_TBL_NUM - ((DRV_FROM_AT(lchip))?1:0);
            sys_cfg->func_type = 0;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_NPM_STATS;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_RD2_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;
        case DRV_DMA_OAM_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_OAM_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 512;
            sys_cfg->data_size = 512 * sizeof(CpuInfoOamStats_m);
            sys_cfg->desc_depth = 2;
            sys_cfg->desc_num = 2;
            sys_cfg->func_type = SYS_DMA_FUNC_OAM;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_OAM;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_OAM_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;

        case DRV_DMA_SC_OAM_CHAN_ID:
            sys_cfg->chan_en = 0;
            sys_cfg->channel_id = SYS_DMA_SC_OAM_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 120;
            sys_cfg->data_size = 120 * sizeof(CpuInfoFlexEOam_m);
            sys_cfg->desc_depth = 2;
            sys_cfg->desc_num = 2;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_SC_OAM;
            sys_cfg->cb_type = SYS_DMA_CB_TYPE_SCOAM_EVENT;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_SC_OAM_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;
        case DRV_DMA_TCAM_SCAN_CHAN_ID: /*TM.MX tcam scan */
            sys_cfg->chan_en = drv_ser_get_cfg(lchip, DRV_SER_CFG_TYPE_SCAN_MODE, NULL);
            sys_cfg->channel_id = SYS_DMA_TCAM_SCAN_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 0;  /*Notice:should get by ftm module when used */
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = (DRV_CONST(DRV_DMA_TCAM_SCAN_DESC_NUM)) + 1;/* per descriptor corresponding to one tcam block */
            sys_cfg->desc_num = 1;
            sys_cfg->data_num = sys_cfg->desc_num;
            sys_cfg->func_type = SYS_DMA_FUNC_TCAM_SCAN;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sys_cfg->auto_mode_en = 0;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TCAM_SCAN_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
			if (DRV_FROM_AT(lchip))
            {
                p_usw_dma_master[lchip]->dma_chan_info[chan_id + 2].chan_en  = 1;
            }
			else if ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)))
            {
                p_usw_dma_master[lchip]->dma_chan_info[chan_id + 1].chan_en  = 1;
            }
            break;
        case DRV_DMA_BATCH_CHAN_ID:
            sys_cfg->chan_en = 1;
            sys_cfg->channel_id = SYS_DMA_BATCH_CHAN_ID;
            sys_cfg->current_index = 0;
            sys_cfg->cfg_size = 0;
            sys_cfg->data_size = 0;
            sys_cfg->desc_depth = 1;
            sys_cfg->desc_num = 0;
            sys_cfg->func_type = SYS_DMA_FUNC_BATCH;
            sys_cfg->weight = SYS_DMA_LOW_WEIGHT;
            sys_cfg->sync_chan = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_chan;
            sys_cfg->sync_en = p_usw_dma_master[lchip]->dma_chan_info[chan_id].sync_en;
            sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_BATCH_CHAN_ID], sys_cfg, sizeof(sys_dma_chan_t));
            break;
        default:
            return CTC_E_INVALID_PARAM;

    }

    return CTC_E_NONE;
}

/**
 @brief Bind a DMA channel to a sync thread, merging channels that share a priority.

 Scans channels [start_chan, cur_chan) for one whose recorded thread priority
 equals prio. If one is found, cur_chan is appended to that channel's existing
 thread info and synchronized through it (sync_chan = matched channel).
 Otherwise a new thread info entry with its own sync semaphore is created and
 registered in the thread vector under cur_chan (sync_chan = cur_chan).

 @param[in] lchip       local chip id
 @param[in] start_chan  first channel id to scan for a priority match
 @param[in] cur_chan    channel id being initialized
 @param[in] prio        sync thread priority for cur_chan

 @return CTC_E_NONE on success, else error code
*/
int32
_sys_usw_dma_init_thread(uint8 lchip, uint8 start_chan, uint8 cur_chan, uint16 prio)
{
    uint8 temp_idx = 0;
    uint8 need_merge = 0;
    sys_dma_thread_t* p_thread_info = NULL;
    int32 ret = 0;

    /* look for an already-initialized channel running at the same priority */
    for (temp_idx = start_chan; temp_idx < cur_chan; temp_idx++)
    {
        if (p_usw_dma_master[lchip]->dma_thread_pri[temp_idx] == prio)
        {
            need_merge = 1;
            break;
        }
    }

    if (need_merge)
    {
        /* reuse the matched channel's thread; its vector entry must already exist */
        p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, temp_idx);
        if (!p_thread_info)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Entry not exist \n");
            return CTC_E_NOT_EXIST;
        }
        p_thread_info->lchip = lchip;
        /* NOTE(review): chan_id[] capacity is not visible here; assumes it can
           hold all merged channels -- confirm against sys_dma_thread_t */
        p_thread_info->chan_num++;
        p_thread_info->chan_id[p_thread_info->chan_num - 1] = cur_chan;
        p_usw_dma_master[lchip]->dma_chan_info[cur_chan].sync_chan = temp_idx;
        p_usw_dma_master[lchip]->dma_chan_info[cur_chan].sync_en = 1;
    }
    else
    {
        p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, cur_chan);
        if (p_thread_info)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Entry already exist \n");
            return CTC_E_EXIST;
        }

        /* create new thread info */
        p_thread_info = (sys_dma_thread_t*)mem_malloc(MEM_DMA_MODULE, sizeof(sys_dma_thread_t));
        if (!p_thread_info)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }

        sal_memset(p_thread_info, 0, sizeof(sys_dma_thread_t));

        p_thread_info->chan_num = 1;
        p_thread_info->chan_id[0] = cur_chan;
        p_thread_info->prio = prio;
        p_thread_info->lchip = lchip;

        ret = sal_sem_create(&p_thread_info->p_sync_sem, 0);
        if (ret < 0)
        {
            /* fix: release the allocation on failure to avoid a memory leak */
            mem_free(p_thread_info);
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Feature not initialized \n");
            return CTC_E_NOT_INIT;
        }

        /* fix: check vector-add result; roll back semaphore and memory on failure
           so the channel is not marked sync_en with no thread info behind it */
        if (!ctc_vector_add(p_usw_dma_master[lchip]->p_thread_vector, cur_chan, (void*)p_thread_info))
        {
            sal_sem_destroy(p_thread_info->p_sync_sem);
            mem_free(p_thread_info);
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }

        /* channel synchronizes on its own dedicated thread */
        p_usw_dma_master[lchip]->dma_chan_info[cur_chan].sync_chan = cur_chan;
        p_usw_dma_master[lchip]->dma_chan_info[cur_chan].sync_en = 1;
    }

    return CTC_E_NONE;
}

STATIC int32
_sys_usw_dma_init_db(uint8 lchip, ctc_dma_global_cfg_t* p_cfg)
{
    /* rx channel num should get from enq module, TODO */
    uint8 rx_chan_num = p_cfg->pkt_rx_chan_num;
    uint8 chan_rx_idx = 0;
    uint16 pri = SAL_TASK_PRIO_DEF;
    uint8 idx = 0;
    uint8 chan_num = 0;
    uint8 chan_array[32] = {0};

    p_usw_dma_master[lchip]->packet_rx_chan_num = rx_chan_num;

    /* default enable these function */
    CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_HASHKEY_CHAN_ID);
    if (!DRV_FROM_AT(lchip) || lchip == SYS_PP_BASE(lchip))
    {
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_TBL_RD_CHAN_ID);
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_TBL_WR_CHAN_ID);
    }
#if (0 == SDK_WORK_PLATFORM)
    if (DRV_FROM_AT(lchip) && lchip == SYS_PP_BASE(lchip))
    {
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_BATCH_CHAN_ID);
    }
#endif
    if (!DRV_FROM_TMM(lchip))
    {
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_SDC_CHAN_ID);
    }
    p_usw_dma_master[lchip]->pkt_tx_timer_en = CTC_IS_BIT_SET(p_cfg->func_en_bitmap, CTC_DMA_FUNC_TIMER_PACKET)?1:0;

    /* init packet rx */
    if (CTC_IS_BIT_SET(p_cfg->func_en_bitmap, CTC_DMA_FUNC_PACKET_RX) && (!DRV_FROM_AT(lchip) || lchip == SYS_PP_BASE(lchip)))
    {
        for (chan_rx_idx = 0; chan_rx_idx < rx_chan_num; chan_rx_idx++)
        {
            pri = p_cfg->pkt_rx[chan_rx_idx].priority;
            if (pri == 0)
            {
                pri = SAL_TASK_PRIO_DEF;
            }

            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, chan_rx_idx);
            p_usw_dma_master[lchip]->dma_thread_pri[chan_rx_idx] = pri;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, 0, chan_rx_idx, pri));
        }

        if (DRV_IS_TMM(lchip))
        {
            /* enable flexe packet rx */
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_FLEXE_RX_RING);
            p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_FLEXE_RX_RING] = SAL_TASK_PRIO_DEF+1;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_PACKET_RX0_CHAN_ID, SYS_DMA_FLEXE_RX_RING, (SAL_TASK_PRIO_DEF+1)));
        }
    }

    /* init packet tx */
    if (CTC_IS_BIT_SET(p_cfg->func_en_bitmap, CTC_DMA_FUNC_PACKET_TX))
    {
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX0_CHAN_ID);
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX1_CHAN_ID);

        if(!DRV_IS_DUET2(lchip) && p_usw_dma_master[lchip]->pkt_tx_timer_en)
        {
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX3_CHAN_ID);
        }

        if (DRV_FROM_TMM(lchip))
        {
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX2_CHAN_ID);
        }
    }

    /* init learning */
    if (CTC_IS_BIT_SET(p_cfg->func_en_bitmap, CTC_DMA_FUNC_HW_LEARNING))
    {
        pri = p_cfg->learning.priority;
        if (pri == 0)
        {
            pri = SAL_TASK_PRIO_DEF;
        }

        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_LEARNING_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_LEARNING_CHAN_ID] = pri;
       CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID,
                SYS_DMA_LEARNING_CHAN_ID, pri));
    }

    /* init ipfix */
    if (CTC_IS_BIT_SET(p_cfg->func_en_bitmap, CTC_DMA_FUNC_IPFIX) && MCHIP_FEATURE_EN(lchip, CTC_FEATURE_IPFIX))
    {
        pri = p_cfg->ipfix.priority;
        if (pri == 0)
        {
            pri = SAL_TASK_PRIO_DEF;
        }
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_IPFIX_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_IPFIX_CHAN_ID] = pri;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID, SYS_DMA_IPFIX_CHAN_ID, pri));
    }

    /* init sdc, temply enable sdc always */
    if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_SDC_CHAN_ID))
    {
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_SDC_CHAN_ID] = SAL_TASK_PRIO_DEF;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID, SYS_DMA_SDC_CHAN_ID,
            SAL_TASK_PRIO_DEF));
    }

    /* init monitor */
    if (CTC_IS_BIT_SET(p_cfg->func_en_bitmap, CTC_DMA_FUNC_MONITOR) && MCHIP_FEATURE_EN(lchip, CTC_FEATURE_MONITOR))
    {
        if (DRV_FROM_TMM(lchip))
        {
            /* irm and erm info */
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_BUFFER_CHAN_ID);
            p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_BUFFER_CHAN_ID] = SAL_TASK_PRIO_DEF;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID, SYS_DMA_BUFFER_CHAN_ID,
                                                      SAL_TASK_PRIO_DEF));

            /* latency info */
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_LATENCY_CHAN_ID);
            p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_LATENCY_CHAN_ID] = SAL_TASK_PRIO_DEF;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID, SYS_DMA_LATENCY_CHAN_ID,
                                                      SAL_TASK_PRIO_DEF));

            /* efd and dlb info */
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_EFD_CHAN_ID);
            p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_EFD_CHAN_ID] = SAL_TASK_PRIO_DEF;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID, SYS_DMA_EFD_CHAN_ID,
                                                      SAL_TASK_PRIO_DEF));
            if (DRV_FROM_AT(lchip))
            {
                /* monitor buffer scan */
                CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_REG_BUF_SCAN_CHAN_ID);
                p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_REG_BUF_SCAN_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
                CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_PORT_STATS_CHAN_ID, SYS_DMA_REG_BUF_SCAN_CHAN_ID,
                                                      SAL_TASK_PRIO_NICE_LOW));
            }
        }
        else
        {
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_MONITOR_CHAN_ID);
            p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_MONITOR_CHAN_ID] = SAL_TASK_PRIO_DEF;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID, SYS_DMA_MONITOR_CHAN_ID,
                                                      SAL_TASK_PRIO_DEF));
        }
    }

    /* init port stats*/
    if (CTC_IS_BIT_SET(p_cfg->func_en_bitmap, CTC_DMA_FUNC_STATS) && MCHIP_FEATURE_EN(lchip, CTC_FEATURE_STATS) && (!DRV_FROM_AT(lchip)))
    {
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PORT_STATS_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_PORT_STATS_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_PORT_STATS_CHAN_ID,
             SYS_DMA_PORT_STATS_CHAN_ID, SAL_TASK_PRIO_NICE_LOW));
    }

    if (MCHIP_FEATURE_EN(lchip, CTC_FEATURE_STATS) && (!DRV_FROM_AT(lchip) || lchip == SYS_PP_BASE(lchip)))
    {
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_FLOW_STATS_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_FLOW_STATS_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_PORT_STATS_CHAN_ID,
                 SYS_DMA_FLOW_STATS_CHAN_ID, SAL_TASK_PRIO_NICE_LOW));
    }

    if (p_usw_dma_master[lchip]->pkt_tx_timer_en && DRV_IS_DUET2(lchip))
    {
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_TBL_WR_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_TBL_WR_CHAN_ID,
                 SYS_DMA_TBL_WR_CHAN_ID, SAL_TASK_PRIO_NICE_LOW));
    }

    if (drv_ser_get_tcam_scan_enable(lchip))
    {
        if (DRV_FROM_TMM(lchip))
        {
            /* Modify For TsingMa.MX, Tcam Scan has its own channel and do not use reg read channel any more */
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_TCAM_SCAN_CHAN_ID);

            /* init thread to process tcam scan channel desciptor done */
            p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_TCAM_SCAN_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_TCAM_SCAN_CHAN_ID,
                                                      SYS_DMA_TCAM_SCAN_CHAN_ID, SAL_TASK_PRIO_NICE_LOW));

            /* init thread to process tcam scan error */
	        p_usw_dma_master[lchip]->dma_thread_pri[MCHIP_CAP(SYS_CAP_DMA_TCAM_SCAN_ERROR_INTR)] = SAL_TASK_PRIO_NICE_LOW;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, MCHIP_CAP(SYS_CAP_DMA_TCAM_SCAN_ERROR_INTR), MCHIP_CAP(SYS_CAP_DMA_TCAM_SCAN_ERROR_INTR), SAL_TASK_PRIO_NICE_LOW));
        }
        else
        {
            CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_REG_MAX_CHAN_ID);
            p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_REG_MAX_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
            CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_REG_MAX_CHAN_ID,
                                                      SYS_DMA_REG_MAX_CHAN_ID, SAL_TASK_PRIO_NICE_LOW));
        }
    }
#ifndef EMULATION_ENV
    if(DRV_FROM_TM(lchip) && !DRV_IS_TMG(lchip) && MCHIP_FEATURE_EN(lchip, CTC_FEATURE_DOT1AE) && (!DRV_FROM_AT(lchip) || lchip == SYS_PP_BASE(lchip)))
    {
        /*init dot1ae stats*/
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_TBL_RD1_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_TBL_RD1_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_PORT_STATS_CHAN_ID,
             SYS_DMA_TBL_RD1_CHAN_ID, SAL_TASK_PRIO_NICE_LOW));
    }
#endif
    if(DRV_FROM_TMM(lchip) && MCHIP_FEATURE_EN(lchip, CTC_FEATURE_NPM) && (!DRV_FROM_AT(lchip) || lchip == SYS_PP_BASE(lchip)) )
    {
        /*init npm stats*/
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_TBL_RD2_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_TBL_RD2_CHAN_ID] = SAL_TASK_PRIO_NICE_LOW;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_PORT_STATS_CHAN_ID,
             SYS_DMA_TBL_RD2_CHAN_ID, SAL_TASK_PRIO_NICE_LOW));
    }
    if ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)) && MCHIP_FEATURE_EN(lchip, CTC_FEATURE_NPM))
    {
        /* init oam info */
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_OAM_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_OAM_CHAN_ID] = SAL_TASK_PRIO_DEF;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID,
                                                  SYS_DMA_OAM_CHAN_ID, SAL_TASK_PRIO_DEF));
    }

    if ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)) && MCHIP_FEATURE_EN(lchip, CTC_FEATURE_SC_OAM))
    {
        /* init sc_oam info */
        CTC_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_SC_OAM_CHAN_ID);
        p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_SC_OAM_CHAN_ID] = SAL_TASK_PRIO_DEF;
        CTC_ERROR_RETURN(_sys_usw_dma_init_thread(lchip, SYS_DMA_LEARNING_CHAN_ID,
                                                  SYS_DMA_SC_OAM_CHAN_ID, SAL_TASK_PRIO_DEF));
    }

    /* init chan array using interrupt */
    for (idx = 0; idx < p_usw_dma_master[lchip]->intr_chan_num; idx++)
    {
        if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, p_usw_dma_master[lchip]->intr_chan_array[idx]))
        {
            chan_array[chan_num++] = p_usw_dma_master[lchip]->intr_chan_array[idx];
        }
    }

    if (drv_ser_get_tcam_scan_enable(lchip) && DRV_FROM_TMM(lchip))
    {
        chan_array[chan_num++] = DRV_FROM_AT(lchip)?30:29;/* to process tcam scan error */
    }
    p_usw_dma_master[lchip]->intr_chan_num = chan_num;
    sal_memcpy(p_usw_dma_master[lchip]->intr_chan_array, chan_array, chan_num * sizeof(uint8));

    return CTC_E_NONE;
}

/**
 @brief Move descriptor data from src to dst on one DMA channel.
        If src_desc_idx equals dst_desc_idx the src data buffer is cleared instead.
        Duet2 does not support this, so GetDsDescEncap2 is not needed.

 @param lchip         local chip id
 @param chan_id       dma channel id
 @param src_desc_idx  source descriptor index in the channel's desc ring
 @param dst_desc_idx  destination descriptor index in the channel's desc ring
*/
int32
sys_usw_dma_copy_desc_data(uint8 lchip, uint8 chan_id, uint32 src_desc_idx, uint32 dst_desc_idx)
{
    sys_dma_chan_t* p_chan_info = NULL;
    sys_dma_desc_t *p_base_desc = NULL;
    DsDesc_m *p_src_desc = NULL;
    DsDesc_m *p_dst_desc = NULL;
    uint32* p_src_data = NULL;
    uint32* p_dst_data = NULL;
    uint64 src_phy_addr = 0;
    uint64 dst_phy_addr = 0;

    SYS_DMA_INIT_CHECK(lchip);

    /* reject out-of-range channel id before indexing dma_chan_info[] */
    if (chan_id > MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID))
    {
        return CTC_E_INVALID_PARAM;
    }

    p_chan_info = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    if (!p_chan_info->chan_en)
    {
        return CTC_E_NOT_INIT;
    }

    /* reject out-of-range descriptor indices before indexing the desc ring */
    if ((src_desc_idx >= p_chan_info->desc_num) || (dst_desc_idx >= p_chan_info->desc_num))
    {
        return CTC_E_INVALID_PARAM;
    }

    p_base_desc = p_chan_info->p_desc;
    p_src_desc = &(p_base_desc[src_desc_idx].desc_info);
    /* memAddr is stored right-shifted by 4; rebuild the full 64-bit physical address */
    COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr, (GetDsDescEncap(V, memAddr_f, p_src_desc) << 4), src_phy_addr);
    p_src_data = SYS_DMA_PHY_TO_LOGIC(lchip, src_phy_addr);

    if (src_desc_idx != dst_desc_idx)
    {
        p_dst_desc = &(p_base_desc[dst_desc_idx].desc_info);
        COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr, (GetDsDescEncap(V, memAddr_f, p_dst_desc) << 4), dst_phy_addr);
        p_dst_data = SYS_DMA_PHY_TO_LOGIC(lchip, dst_phy_addr);
        /* NOTE(review): copy length is taken from the dst descriptor while reading the
           src buffer -- assumes both descriptors on one channel share cfgSize; confirm */
        sal_memcpy(p_dst_data, p_src_data, GetDsDescEncap(V, cfgSize_f, p_dst_desc));
    }
    else
    {
        /* same index means "clear": wipe the src data buffer in place */
        sal_memset(p_src_data, 0, GetDsDescEncap(V, cfgSize_f, p_src_desc));
    }

    return CTC_E_NONE;
}

/**
 @brief Disable triggered port stats/flow stats and disable all enabled channels,
        called before resetting the switch core.

 @param lchip       local chip id
 @param user_param  unused callback cookie
*/
int32
sys_usw_dma_reset_hw_clear(uint8 lchip, void* user_param)
{
    uint32 index = 0;
    uint32 cmd = 0;
    DmaRegTrigEnCfg_m trig_en;

    SYS_DMA_INIT_CHECK(lchip);

    /* disable all chip auto trigger action; check the IO result so a failed
       write is not silently ignored before the core reset */
    sal_memset(&trig_en, 0,  sizeof(trig_en));
    cmd = DRV_IOW(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_en));

    /* disable every enabled channel; best-effort, keep going even if one fails */
    for (index = 0; index <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); index++)
    {
        if (!CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
        {
            continue;
        }
        sys_usw_dma_set_chan_en(lchip, index, 0);
    }
    return CTC_E_NONE;
}

/**
 @brief Reset-hw dma callback: rebuild a ctc_dma_global_cfg_t from the current
        master state, deinit and re-init the dma module, then restore the
        registered callbacks and the flow-stats descriptor enable bitmap.

 @param lchip       local chip id
 @param user_param  unused callback cookie
*/
int32
sys_usw_dma_reset_hw(uint8 lchip, void* user_param)
{
    ctc_dma_global_cfg_t dma_cfg;
    uint32 index = 0;
    /* saved across deinit/init so user registrations survive the reset */
    DMA_CB_FUN_P dma_cb_backup[SYS_DMA_CB_MAX_TYPE];
    CTC_PKT_RX_CALLBACK dma_rx_cb;
    /* flow-stats per-block desc enable bitmap, re-applied after init done */
    uint32 bmp_en[2] = {0};

    sal_memset(&dma_cfg, 0, sizeof(ctc_dma_global_cfg_t));

    if (1 == p_usw_dma_master[lchip]->pkt_tx_timer_en )
    {
        CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_TIMER_PACKET);
    }

    /* packet rx: mirror per-channel desc/data/knet settings into the cfg */
    for (index = 0; index < SYS_DMA_RX_CHAN_NUM; index++)
    {
        if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
        {
            dma_cfg.pkt_rx[index].priority = p_usw_dma_master[lchip]->dma_thread_pri[index];

            dma_cfg.pkt_rx[index].desc_num = p_usw_dma_master[lchip]->dma_chan_info[index].desc_num;
            dma_cfg.pkt_rx[index].data = p_usw_dma_master[lchip]->dma_chan_info[index].data_size;
            dma_cfg.pkt_rx[index].pkt_knet_en = p_usw_dma_master[lchip]->dma_chan_info[index].pkt_knet_en;
            CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_PACKET_RX);
        }
    }
    dma_cfg.pkt_rx_chan_num = p_usw_dma_master[lchip]->packet_rx_chan_num;

    /* packet tx: duet2 has 2 tx channels, later chips have 4 */
    if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX0_CHAN_ID)
        || CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX1_CHAN_ID))
    {
        uint8 tx_max_chan = 0;

        CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_PACKET_TX);

        if (p_usw_dma_master[lchip]->pkt_tx_timer_en)
        {
            CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_TIMER_PACKET);
        }

        if (DRV_IS_DUET2(lchip))
        {
            tx_max_chan = SYS_DMA_PACKET_TX1_CHAN_ID;
        }
        else
        {
            tx_max_chan = SYS_DMA_PACKET_TX3_CHAN_ID;
        }
        for (index = SYS_DMA_PACKET_TX0_CHAN_ID; index<= tx_max_chan; index ++)
        {
            /* guard: only mirror channels that map into the cfg's pkt_tx_ext[] array */
            if(GET_CHAN_TYPE(index) - DRV_DMA_PACKET_TX0_CHAN_ID < CTC_DMA_PKT_TX_CHAN_NUM)
            {
            dma_cfg.pkt_tx_ext[GET_CHAN_TYPE(index) - DRV_DMA_PACKET_TX0_CHAN_ID].pkt_knet_en = p_usw_dma_master[lchip]->dma_chan_info[index].pkt_knet_en;
            dma_cfg.pkt_tx_ext[GET_CHAN_TYPE(index) - DRV_DMA_PACKET_TX0_CHAN_ID].desc_num = p_usw_dma_master[lchip]->dma_chan_info[index].desc_num;
            }
        }
    }

    /* learning */
    if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_LEARNING_CHAN_ID))
    {
        dma_cfg.learning.priority = p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_LEARNING_CHAN_ID];
        CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_HW_LEARNING);

        dma_cfg.learning.desc_num =  p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_LEARNING_CHAN_ID].desc_num;
    }
    dma_cfg.hw_learning_sync_en = p_usw_dma_master[lchip]->hw_learning_sync;

    /* ipfix */
    if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_IPFIX_CHAN_ID))
    {
        dma_cfg.ipfix.priority = p_usw_dma_master[lchip]->dma_thread_pri[SYS_DMA_IPFIX_CHAN_ID];
        CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_IPFIX);
        dma_cfg.ipfix.desc_num =  p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_IPFIX_CHAN_ID].desc_num;
    }

    /* sdc, temply enable sdc always */
    if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_SDC_CHAN_ID))
    {
        CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_SDC);
    }

    /* monitor, from tmm, use SYS_DMA_BUFFER_CHAN_ID/SYS_DMA_LATENCY_CHAN_ID/SYS_DMA_EFD_CHAN_ID instead of SYS_DMA_MONITOR_CHAN_ID*/
    if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, DRV_FROM_TMM(lchip)? SYS_DMA_BUFFER_CHAN_ID : SYS_DMA_MONITOR_CHAN_ID))
    {
        CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_MONITOR);
    }

    /* port stats*/
    if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PORT_STATS_CHAN_ID))
    {
        CTC_BIT_SET(dma_cfg.func_en_bitmap, CTC_DMA_FUNC_STATS);
    }

    /* back up registered callbacks before deinit wipes the master struct */
    for (index = 0; index < SYS_DMA_CB_MAX_TYPE; index++)
    {
        dma_cb_backup[index] = p_usw_dma_master[lchip]->dma_cb[index];
    }
    dma_rx_cb = p_usw_dma_master[lchip]->dma_rx_cb;

    /*1. Get the enabled desc for stats*/
    _sys_usw_dma_get_desc_en_bmp(lchip, SYS_DMA_FLOW_STATS_CHAN_ID, bmp_en);

    /* NOTE(review): deinit/init/init_done return values are not checked here --
       presumably intentional best-effort during reset; confirm */
    sys_usw_dma_deinit(lchip);
    sys_usw_dma_init(lchip, &dma_cfg);
    sys_usw_dma_init_done(lchip, 1);

    /*2. registrer cb*/
    for (index = 0; index < SYS_DMA_CB_MAX_TYPE; index++)
    {
         p_usw_dma_master[lchip]->dma_cb[index] = dma_cb_backup[index];
    }
    p_usw_dma_master[lchip]->dma_rx_cb = dma_rx_cb;

    /*3. Config the enable desc after init done*/
    for (index = 0; index < MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM); index++)
    {
        if(CTC_BMP_ISSET(bmp_en, index))
        {
            sys_usw_dma_set_cfg_size(lchip, SYS_DMA_FLOW_STATS_CHAN_ID, index, 1);
        }
    }
    return CTC_E_NONE;
}

/**
 @brief Dump the dma master software database to a file (diagnostic "dump db").

 @param lchip         local chip id
 @param p_f           destination file handle
 @param p_dump_param  unused dump parameter
*/
STATIC int32
_sys_usw_dma_dump_db(uint8 lchip, sal_file_t p_f, ctc_global_dump_db_t* p_dump_param)
{
    uint8 chan_idx = 0;

    SYS_DMA_INIT_CHECK(lchip);

    /* section header */
    SYS_DUMP_DB_LOG(p_f, "%s\n", "# Dma");
    SYS_DUMP_DB_LOG(p_f, "%s\n", "{");
    SYS_DUMP_DB_LOG(p_f, "%s\n", "Master config:");
    SYS_DUMP_DB_LOG(p_f, "%s\n", "----------------------------------------------------------------------------------------------------------------------");

    /* scalar master fields */
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","packet_rx_chan_num",p_usw_dma_master[lchip]->packet_rx_chan_num);
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","dma_en_flag",p_usw_dma_master[lchip]->dma_en_flag);
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","dma_high_addr",p_usw_dma_master[lchip]->dma_high_addr);
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","hw_learning_sync",p_usw_dma_master[lchip]->hw_learning_sync);
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","dma_stats_en",p_usw_dma_master[lchip]->dma_stats_en);
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","pkt_tx_timer_en",p_usw_dma_master[lchip]->pkt_tx_timer_en);
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","tx_timer",p_usw_dma_master[lchip]->tx_timer);
    SYS_DUMP_DB_LOG(p_f, "%-30s:%u\n","wb_reloading",p_usw_dma_master[lchip]->wb_reloading);

    /* per-channel thread priority list */
    SYS_DUMP_DB_LOG(p_f, "%-30s:","dma_thread_pri");
    chan_idx = 0;
    while (chan_idx <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID))
    {
        SYS_DUMP_DB_LOG(p_f, "[%u]",p_usw_dma_master[lchip]->dma_thread_pri[chan_idx]);
        chan_idx++;
    }
    SYS_DUMP_DB_LOG(p_f, "\n");
    SYS_DUMP_DB_LOG(p_f, "%s\n", "----------------------------------------------------------------------------------------------------------------------");

    SYS_DUMP_DB_LOG(p_f, "%s\n", "}");
    return CTC_E_NONE;
}

/**
 @brief Program the packet destination-map tables: host-cpu tx rings, dma encap
        fifo, FlexE tx, and net tx channels, so packets are steered to the
        correct interface/channel pair.

 @param lchip        local chip id
 @param rx_ring_num  number of dma rx rings to map for net tx channel 0
*/
int32
_sys_usw_dma_init_pkt_destmap(uint8 lchip, uint8 rx_ring_num)
{
    uint8 index = 0;
    /* per-channel field stride; assumes DestMap fields use the same stride
       as DestChan fields -- TODO confirm against the register layout */
    uint8 step = MapNetTxPktDestMap_cfgNet0TxChan1DestChan_f - MapNetTxPktDestMap_cfgNet0TxChan0DestChan_f;
    uint32 cmd = 0;
    MapHcpuPktDestMap_m hcpu_dest;
    MapEncapPktDestMap_m encap_dest;
    MapFlexEPktDestMap_m flexe_dest;
    MapNetTxPktDestMap_m nettx_dest;

    sal_memset(&hcpu_dest, 0, sizeof(MapHcpuPktDestMap_m));
    sal_memset(&encap_dest, 0, sizeof(MapEncapPktDestMap_m));
    sal_memset(&flexe_dest, 0, sizeof(MapFlexEPktDestMap_m));
    sal_memset(&nettx_dest, 0, sizeof(MapNetTxPktDestMap_m));

    /* Host Cpu Tx Dma rings destmap: read-modify-write */
    cmd = DRV_IOR(MapHcpuPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &hcpu_dest));

    SetMapHcpuPktDestMap(V, cfgHcpuTxChan0DestMap_f, &hcpu_dest, 1);/* NetRx 0 */
    SetMapHcpuPktDestMap(V, cfgHcpuTxChan1DestMap_f, &hcpu_dest, 1);/* NetRx 0 */
    SetMapHcpuPktDestMap(V, cfgHcpuTxChan2DestMap_f, &hcpu_dest, 5);/* FlexE */
    SetMapHcpuPktDestMap(V, cfgHcpuTxChan3DestMap_f, &hcpu_dest, 1);/* NetRx 0 */

    SetMapHcpuPktDestMap(V, cfgHcpuTxChan0DestChan_f, &hcpu_dest, 0);/* channel 0 */
    SetMapHcpuPktDestMap(V, cfgHcpuTxChan1DestChan_f, &hcpu_dest, 0);/* channel 0 */
    SetMapHcpuPktDestMap(V, cfgHcpuTxChan2DestChan_f, &hcpu_dest, 0);/* do not need config and will choose FlexE ID based on Pkt Hdr */
    SetMapHcpuPktDestMap(V, cfgHcpuTxChan3DestChan_f, &hcpu_dest, 0);/* channel 0 */

    cmd = DRV_IOW(MapHcpuPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &hcpu_dest));

    /* Dma Encap Fifo Tx destmap: read-modify-write */
    cmd = DRV_IOR(MapEncapPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_dest));

    SetMapEncapPktDestMap(V, cfgEncapTxChan0DestMap_f, &encap_dest, 2);/* NetRx 1 */
    SetMapEncapPktDestMap(V, cfgEncapTxChan1DestMap_f, &encap_dest, 2);/* NetRx 1 */
    SetMapEncapPktDestMap(V, cfgEncapTxChan2DestMap_f, &encap_dest, 2);/* NetRx 1 */
    SetMapEncapPktDestMap(V, cfgEncapTxChan3DestMap_f, &encap_dest, 1);/* NetRx 0 */

    SetMapEncapPktDestMap(V, cfgEncapTxChan0DestChan_f, &encap_dest, 0);/* channel 0 */
    SetMapEncapPktDestMap(V, cfgEncapTxChan1DestChan_f, &encap_dest, 1);/* channel 1 */
    SetMapEncapPktDestMap(V, cfgEncapTxChan2DestChan_f, &encap_dest, 2);/* channel 2 */
    SetMapEncapPktDestMap(V, cfgEncapTxChan3DestChan_f, &encap_dest, 3);/* channel 3 */

    cmd = DRV_IOW(MapEncapPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_dest));

    /* FlexE Tx destmap: both FlexE tx channels go to host cpu dma ring 7 */
    cmd = DRV_IOR(MapFlexEPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &flexe_dest));
    SetMapFlexEPktDestMap(V, cfgFlexETxChan0DestMap_f, &flexe_dest, 0);/* HCPU */
    SetMapFlexEPktDestMap(V, cfgFlexETxChan1DestMap_f, &flexe_dest, 0);/* HCPU */

    SetMapFlexEPktDestMap(V, cfgFlexETxChan0DestChan_f, &flexe_dest, 7);/* Pkt Rx Dma ring 7 */
    SetMapFlexEPktDestMap(V, cfgFlexETxChan1DestChan_f, &flexe_dest, 7);/* Pkt Rx Dma ring 7 */
    cmd = DRV_IOW(MapFlexEPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &flexe_dest));

    /* Net Tx destmap: one HCPU dma ring per configured rx ring */
    cmd = DRV_IOR(MapNetTxPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));

    for (index = 0; index < rx_ring_num; index++)
    {
        SetMapNetTxPktDestMap(V, cfgNet0TxChan0DestMap_f + step * index, &nettx_dest, 0);/* Always to HCPU */
        SetMapNetTxPktDestMap(V, cfgNet0TxChan0DestChan_f + step * index, &nettx_dest, index);/* Dma ring ID */
    }

    SetMapNetTxPktDestMap(V, cfgNet1TxChan0DestMap_f, &nettx_dest, 3);/* Always to ECPU */
    SetMapNetTxPktDestMap(V, cfgNet1TxChan0DestChan_f, &nettx_dest, 0);/* Dma ring ID */
    SetMapNetTxPktDestMap(V, cfgNet1TxChan1DestMap_f, &nettx_dest, 4);/* Always to SCPU */
    SetMapNetTxPktDestMap(V, cfgNet1TxChan1DestChan_f, &nettx_dest, 0);/* Dma ring ID */
    cmd = DRV_IOW(MapNetTxPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));

    return CTC_E_NONE;
}

/**
 @brief Arctic variant of packet destination-map programming: maps dma encap
        fifo, NetTx0/1/2 channels, host-cpu tx rings, xfc packets, and the
        ecpu destmap copies.

 @param lchip        local chip id
 @param rx_ring_num  number of dma rx rings to map per NetTx block
*/
int32
_sys_at_dma_init_pkt_destmap(uint8 lchip, uint8 rx_ring_num)
{
    uint8  index = 0;
    uint32 cmd = 0;
    /* per-channel field stride; assumes DestIntf/DestChan fields share the
       same stride across Net0/Net1/Net2 tables -- TODO confirm */
    uint32 step = MapNet0PktDestMap_cfgNet0Chan1DestIntf_f - MapNet0PktDestMap_cfgNet0Chan0DestIntf_f;
    MapHcpuPktDestMap_m hcpu_dest;
    MapNet0PktDestMap_m nettx_dest;
    MapEncapPktDestMap_m encap_dest;
    MapXfcPktDestMap_m xfc_dest;

    sal_memset(&hcpu_dest, 0, sizeof(MapHcpuPktDestMap_m));
    sal_memset(&nettx_dest, 0, sizeof(MapNet0PktDestMap_m));
    sal_memset(&encap_dest, 0, sizeof(MapEncapPktDestMap_m));
    sal_memset(&xfc_dest, 0, sizeof(MapXfcPktDestMap_m));

    /* Dma Encap Fifo to NetRx channel Mapping: encap channel N -> NetRxN/chanN */
    cmd = DRV_IOR(MapEncapPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_dest));
    SetMapEncapPktDestMap(V, cfgEncapChan0DestIntf_f, &encap_dest, 0);/* NetRx0 */
    SetMapEncapPktDestMap(V, cfgEncapChan0DestChan_f, &encap_dest, 0);/* channel0 */
    SetMapEncapPktDestMap(V, cfgEncapChan1DestIntf_f, &encap_dest, 1);/* NetRx1 */
    SetMapEncapPktDestMap(V, cfgEncapChan1DestChan_f, &encap_dest, 1);/* channel1 */
    SetMapEncapPktDestMap(V, cfgEncapChan2DestIntf_f, &encap_dest, 2);/* NetRx2 */
    SetMapEncapPktDestMap(V, cfgEncapChan2DestChan_f, &encap_dest, 2);/* channel2 */
    SetMapEncapPktDestMap(V, cfgEncapChan3DestIntf_f, &encap_dest, 3);/* NetRx3*/
    SetMapEncapPktDestMap(V, cfgEncapChan3DestChan_f, &encap_dest, 3);/* channel3 */
    cmd = DRV_IOW(MapEncapPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_dest));

    /* NetTx channel to DMA Rx Rings Mapping */
    cmd = DRV_IOR(MapNet0PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));

    for (index = 0; index < rx_ring_num; index++)
    {
        SetMapNet0PktDestMap(V, cfgNet0Chan0DestIntf_f + step * index , &nettx_dest, 4);/* HCPU */
        SetMapNet0PktDestMap(V, cfgNet0Chan0DestChan_f + step * index , &nettx_dest, index);/* Dma ring ID */
    }

    cmd = DRV_IOW(MapNet0PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));

    /* NetTx channel to DMACTL1 Rx Rings Mapping */
    /* NOTE(review): nettx_dest is a MapNet0PktDestMap_m buffer reused for the
       Net1/Net2 tables -- assumes identical entry layout; confirm */
    cmd = DRV_IOR(MapNet1PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));
    for (index = 0; index < rx_ring_num; index++)
    {
        SetMapNet1PktDestMap(V, cfgNet1Chan0DestIntf_f + step * index , &nettx_dest, 5);/* ECPUDMA0 */
        SetMapNet1PktDestMap(V, cfgNet1Chan0DestChan_f + step * index , &nettx_dest, index);/* Dma ring ID */
    }
    cmd = DRV_IOW(MapNet1PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));
    /* NetTx channel to DMACTL1 Rx Rings Mapping */
    cmd = DRV_IOR(MapNet2PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));
    for (index = 0; index < rx_ring_num; index++)
    {
        SetMapNet2PktDestMap(V, cfgNet2Chan0DestIntf_f + step * index , &nettx_dest, 6);/* ECPUDMA1 */
        SetMapNet2PktDestMap(V, cfgNet2Chan0DestChan_f + step * index , &nettx_dest, index);/* Dma ring ID */
    }
    cmd = DRV_IOW(MapNet2PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &nettx_dest));
    /* DMA Tx Rings To NetRx Channel Mapping: every tx channel's id N goes to NetRxN/channelN */
    cmd = DRV_IOR(MapHcpuPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &hcpu_dest));
    SetMapHcpuPktDestMap(V, cfgChan0Id0DestIntf_f, &hcpu_dest, 0);/* NetRx0 */
    SetMapHcpuPktDestMap(V, cfgChan0Id0DestChan_f, &hcpu_dest, 0);/* Channel0 */
    SetMapHcpuPktDestMap(V, cfgChan0Id1DestIntf_f, &hcpu_dest, 1);/* NetRx1 */
    SetMapHcpuPktDestMap(V, cfgChan0Id1DestChan_f, &hcpu_dest, 1);/* Channel1 */
    SetMapHcpuPktDestMap(V, cfgChan0Id2DestIntf_f, &hcpu_dest, 2);/* NetRx2 */
    SetMapHcpuPktDestMap(V, cfgChan0Id2DestChan_f, &hcpu_dest, 2);/* Channel2 */
    SetMapHcpuPktDestMap(V, cfgChan0Id3DestIntf_f, &hcpu_dest, 3);/* NetRx3 */
    SetMapHcpuPktDestMap(V, cfgChan0Id3DestChan_f, &hcpu_dest, 3);/* Channel3 */
    SetMapHcpuPktDestMap(V, cfgChan1Id0DestIntf_f, &hcpu_dest, 0);/* NetRx0 */
    SetMapHcpuPktDestMap(V, cfgChan1Id0DestChan_f, &hcpu_dest, 0);/* Channel0 */
    SetMapHcpuPktDestMap(V, cfgChan1Id1DestIntf_f, &hcpu_dest, 1);/* NetRx1 */
    SetMapHcpuPktDestMap(V, cfgChan1Id1DestChan_f, &hcpu_dest, 1);/* Channel1 */
    SetMapHcpuPktDestMap(V, cfgChan1Id2DestIntf_f, &hcpu_dest, 2);/* NetRx2 */
    SetMapHcpuPktDestMap(V, cfgChan1Id2DestChan_f, &hcpu_dest, 2);/* Channel2 */
    SetMapHcpuPktDestMap(V, cfgChan1Id3DestIntf_f, &hcpu_dest, 3);/* NetRx3 */
    SetMapHcpuPktDestMap(V, cfgChan1Id3DestChan_f, &hcpu_dest, 3);/* Channel3 */
    SetMapHcpuPktDestMap(V, cfgChan2Id0DestIntf_f, &hcpu_dest, 0);/* NetRx0 */
    SetMapHcpuPktDestMap(V, cfgChan2Id0DestChan_f, &hcpu_dest, 0);/* Channel0 */
    SetMapHcpuPktDestMap(V, cfgChan2Id1DestIntf_f, &hcpu_dest, 1);/* NetRx1 */
    SetMapHcpuPktDestMap(V, cfgChan2Id1DestChan_f, &hcpu_dest, 1);/* Channel1 */
    SetMapHcpuPktDestMap(V, cfgChan2Id2DestIntf_f, &hcpu_dest, 2);/* NetRx2 */
    SetMapHcpuPktDestMap(V, cfgChan2Id2DestChan_f, &hcpu_dest, 2);/* Channel2 */
    SetMapHcpuPktDestMap(V, cfgChan2Id3DestIntf_f, &hcpu_dest, 3);/* NetRx3 */
    SetMapHcpuPktDestMap(V, cfgChan2Id3DestChan_f, &hcpu_dest, 3);/* Channel3 */
    SetMapHcpuPktDestMap(V, cfgChan3Id0DestIntf_f, &hcpu_dest, 0);/* NetRx0 */
    SetMapHcpuPktDestMap(V, cfgChan3Id0DestChan_f, &hcpu_dest, 0);/* Channel0 */
    SetMapHcpuPktDestMap(V, cfgChan3Id1DestIntf_f, &hcpu_dest, 1);/* NetRx1 */
    SetMapHcpuPktDestMap(V, cfgChan3Id1DestChan_f, &hcpu_dest, 1);/* Channel1 */
    SetMapHcpuPktDestMap(V, cfgChan3Id2DestIntf_f, &hcpu_dest, 2);/* NetRx2 */
    SetMapHcpuPktDestMap(V, cfgChan3Id2DestChan_f, &hcpu_dest, 2);/* Channel2 */
    SetMapHcpuPktDestMap(V, cfgChan3Id3DestIntf_f, &hcpu_dest, 3);/* NetRx3 */
    SetMapHcpuPktDestMap(V, cfgChan3Id3DestChan_f, &hcpu_dest, 3);/* Channel3 */
    cmd = DRV_IOW(MapHcpuPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &hcpu_dest));


    /* xfc packets: write-only (buffer zeroed above, no prior read) */
    SetMapXfcPktDestMap(V, cfgXfcPktDestChan_f, &xfc_dest, 0);
    SetMapXfcPktDestMap(V, cfgXfcPktDestIntf_f, &xfc_dest, 3);
    cmd = DRV_IOW(MapXfcPktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &xfc_dest));

    /* NOTE(review): ecpu0/ecpu1 destmaps are written with the hcpu_dest image --
       assumes MapEcpu*PktDestMap share the MapHcpuPktDestMap layout; confirm */
    cmd = DRV_IOW(MapEcpu0PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &hcpu_dest));
    cmd = DRV_IOW(MapEcpu1PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &hcpu_dest));
    return CTC_E_NONE;
}

/**
 @brief Arctic info-dma configuration: map each info sub-type to its host-cpu
        dma channel, enable info drains, program the packet destmaps, and set
        per-channel descriptor thresholds for the eight info dma controls.

 @param lchip        local chip id
 @param rx_ring_num  number of dma rx rings, forwarded to destmap init
*/
int32
_sys_at_dma_map_info(uint8 lchip, uint8 rx_ring_num)
{
    uint16 index = 0;
    uint32 cmd = 0;
    uint32 field_val = 0;
    uint32 tbl_id = 0;
    MapMsgDestMap_m map_msg;
    DmaInfo0Ctl_m info_ctl;
    MapInfoDrainEn_m drain_en;

    /* map every info sub-type to a host-cpu info channel */
    for (index = 0; index < SYS_DMA_INFO_MAX_SUB_TYPE; index++)
    {
        sal_memset(&map_msg, 0, sizeof(MapMsgDestMap_m));
        /* firstly  read */
        cmd = DRV_IOR(MapMsgDestMap_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, index, cmd, &map_msg));

        /* secondly  set */
        field_val = 0x1;
        SetMapMsgDestMap(V, hcpuEn_f, &map_msg, field_val);

        /* NOTE(review): sub-types 37, 38 and 42+ hit none of the branches
           below, so hcpuChan stays 0x1 (hash dump) -- confirm intentional */
        if (index <= 31)
        {
            field_val = 2;/* ipfix */
        }
        else if (index == 32)
        {
            field_val = 3;/* buffer monitor */
        }
        else if (index == 33 || index == 34)
        {
            field_val = 4;/* Latency monitor */
        }
        else if (index == 35)
        {
            field_val = 6;/* DLB monitor */
        }
        else if (index == 36)
        {
            field_val = 5;/* EFD monitor */
        }
        else if (index == 39 || index == 40)
        {
            field_val = 0;/* learning aging */
        }
        else if (index == 41)
        {
            field_val = 1;/* hash dump */
        }

        SetMapMsgDestMap(V, hcpuChan_f, &map_msg, field_val);

        /* finally write */
        cmd = DRV_IOW(MapMsgDestMap_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, index, cmd, &map_msg));
    }

    /* enable draining for all info sources (4-bit fields take a per-slice mask) */
    tbl_id = MapInfoDrainEn_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &drain_en));
    SetMapInfoDrainEn(V, cfgCoreAgingInfoDrainEn_f, &drain_en, 1);
    SetMapInfoDrainEn(V, cfgDlbInfoDrainEn_f, &drain_en, 0xF);
    SetMapInfoDrainEn(V, cfgEfdInfoDrainEn_f, &drain_en, 0xF);
    SetMapInfoDrainEn(V, cfgFibDumpInfoDrainEn_f, &drain_en, 1);
    SetMapInfoDrainEn(V, cfgFibLearnInfoDrainEn_f, &drain_en, 1);
    SetMapInfoDrainEn(V, cfgIpfixEpeInfoDrainEn_f, &drain_en, 0xF);
    SetMapInfoDrainEn(V, cfgIpfixIpeInfoDrainEn_f, &drain_en, 0xF);
    SetMapInfoDrainEn(V, cfgLatencyInfoDrainEn_f, &drain_en, 0xF);
    SetMapInfoDrainEn(V, cfgMicroBurstInfoDrainEn_f, &drain_en, 1);
    SetMapInfoDrainEn(V, cfgPpAgingInfoDrainEn_f, &drain_en, 0xF);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &drain_en));

    /* propagate destmap init failures instead of silently ignoring them,
       consistent with every other call in this function */
    CTC_ERROR_RETURN(_sys_at_dma_init_pkt_destmap(lchip, rx_ring_num));

    /* Info DMA Config: per-channel ds count and desc-empty threshold */
    tbl_id = DmaInfo0Ctl_t;/* learning & aging */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo0Ctl(V, cfgDmaInfoDs_f, &info_ctl, 3);
    SetDmaInfo0Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo0Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 3);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo1Ctl_t;/* hash dump */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo1Ctl(V, cfgDmaInfoDs_f, &info_ctl, 3);
    SetDmaInfo1Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo1Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 3);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo2Ctl_t;/* ipfix */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo2Ctl(V, cfgDmaInfoDs_f, &info_ctl, 9);
    SetDmaInfo2Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo2Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 9);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo3Ctl_t;/* monitor */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo3Ctl(V, cfgDmaInfoDs_f, &info_ctl, 7);
    SetDmaInfo3Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo3Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 7);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo4Ctl_t;/* lantency */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo4Ctl(V, cfgDmaInfoDs_f, &info_ctl, 3);
    SetDmaInfo4Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo4Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 3);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo5Ctl_t;/* efd */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo5Ctl(V, cfgDmaInfoDs_f, &info_ctl, 1);
    SetDmaInfo5Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo5Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 1);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo6Ctl_t;/* dlb */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo6Ctl(V, cfgDmaInfoDs_f, &info_ctl, 2);
    SetDmaInfo6Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo6Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 2);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    tbl_id = DmaInfo7Ctl_t;/* oam */
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));
    SetDmaInfo7Ctl(V, cfgDmaInfoDs_f, &info_ctl, 4);
    SetDmaInfo7Ctl(V, cfgInfoDescByteSel_f, &info_ctl, 0);
    SetDmaInfo7Ctl(V, cfgInfoDescEmptyThrd_f, &info_ctl, 4);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &info_ctl));

    return CTC_E_NONE;
}

/**
 @brief Default no-op dma callback. Installed in every dma_cb[] slot at init
        (see sys_usw_dma_init) so callers can invoke callbacks without NULL checks.
*/
STATIC int32
_sys_usw_dma_cb_null(uint8 lchip, void* p_data)
{
    return CTC_E_NONE;
}
/**
 @brief init dma module and allocate necessary memory
*/
int32
sys_usw_dma_init(uint8 lchip, ctc_dma_global_cfg_t* dma_global_cfg)
{
    int32 ret = 0;
    uint8 index = 0;
    uint8 vchip_pp_base = 0;
    uint8 pp_id = 0;
    uint32 cmd = 0;
    uint32 field_val = 0;
    DmaCtlDrainEnable_m dma_drain;
#if(0 == SDK_WORK_PLATFORM)
    uint16 init_cnt = 0;
    uint32 init_done = FALSE;
    CpuMapInfoFlushCtl_m cpu_map_info_flush;
#endif
    dal_dma_info_t dma_info;
    DmaMiscCfg_m misc_cfg;
    host_type_t byte_order;
    DmaCtlIntrFunc_m dma_intr_func;
    DmaEndianCtl_m          dma_edn_ctl;
    DmaInfoDsCfg_m info_ds;
    DmaInfoDescCfg_m info_desc;
    uint8 chan = 0;

    uint32 wb_status = 0;
#if (SDB_MEM_MODEL == SDB_MODE)
    return CTC_E_NONE;
#endif

    if (CTC_IS_BIT_SET(dma_global_cfg->func_en_bitmap, CTC_DMA_FUNC_PACKET_RX))
    {
        CTC_VALUE_RANGE_CHECK(dma_global_cfg->pkt_rx_chan_num, 1, SYS_DMA_RX_CHAN_NUM);
    }

    vchip_pp_base = drv_vchip_get_pp_base(lchip);
    pp_id = lchip - vchip_pp_base;
    if (pp_id > 0 && pp_id < PP_NUM_PER_CORE)
    {
        p_usw_dma_master[lchip] = p_usw_dma_master[vchip_pp_base];
        return CTC_E_NONE;
    }
    else if (pp_id > PP_NUM_PER_CORE && pp_id < (PP_NUM_PER_CORE<<1))
    {
        p_usw_dma_master[lchip] = p_usw_dma_master[vchip_pp_base+PP_NUM_PER_CORE];
        return CTC_E_NONE;
    }

    if (p_usw_dma_master[lchip] && p_usw_dma_master[lchip]->wb_reloading != 1)
    {
        return CTC_E_NONE;
    }

    sal_memset(&dma_drain, 0, sizeof(DmaCtlDrainEnable_m));
    if (!p_usw_dma_master[lchip])
    {
        p_usw_dma_master[lchip] = (sys_dma_master_t*)mem_malloc(MEM_DMA_MODULE, sizeof(sys_dma_master_t));
        if (NULL == p_usw_dma_master[lchip])
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }

        sal_memset(p_usw_dma_master[lchip], 0, sizeof(sys_dma_master_t));

        p_usw_dma_master[lchip]->packet_rx_chan_num = dma_global_cfg->pkt_rx_chan_num;
        p_usw_dma_master[lchip]->init = 1;

        for (index = 0; index < SYS_DMA_CB_MAX_TYPE; index++)
        {
           p_usw_dma_master[lchip]->dma_cb[index] = _sys_usw_dma_cb_null;
        }
    }

    /* get core pp base for arctic */
    p_usw_dma_master[lchip]->core_pp_base = sys_usw_vchip_get_core_pp_base(lchip);

    /* init chan type for all dma chan */
    for (chan = 0; chan <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); chan++)
    {
        p_usw_dma_master[lchip]->chan_type[chan] = _sys_usw_dma_get_chan_type(lchip, chan);
    }

    sal_memcpy(&p_usw_dma_master[lchip]->dma_global_cfg, dma_global_cfg, sizeof(ctc_dma_global_cfg_t));
    if (DRV_FROM_AT(lchip))
    {
        p_usw_dma_master[lchip]->flow_stats_sync_mode = 0; /* AT default use fifo sync,if test at dma sync,need set flow_stats_sync_mode = 1*/
        if (p_usw_dma_master[lchip]->flow_stats_sync_mode)
        {
            /* DUAL_CORE_MODE : need process DsStatsQueueUc_t/DsStatsDmaUc_t instance 1 */
            MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM) = SYS_VCHIP_DUAL_CORE_MODE(lchip) ? 66 : 62;

#ifdef EMULATION_ENV
            MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM) = MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM)*2;/*per pp dma sync: 66 =16+24+12+q mc 1+ q uc 4 +dma mc 1+dma uc 4 + porttc mc 2 + porttc uc 2*/
#else
            MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM) = MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM)*SYS_PP_NUM(lchip);
#endif
        }
        else
        {
            /* DUAL_CORE_MODE : need process DsStatsQueueUc_t/DsStatsDmaUc_t instance 1 */
            MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM) = SYS_VCHIP_DUAL_CORE_MODE(lchip) ? 14 : 10;
#ifdef EMULATION_ENV
            MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM) = MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM)*2;/*per pp dma sync: 9 = q mc 1+ q uc 4 +dma mc 1+dma uc 4 + porttc mc 2 + porttc uc 2*/
#else
            MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM) = MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM)*SYS_PP_NUM(lchip);
#endif
        }
    }
    else
    {
        p_usw_dma_master[lchip]->flow_stats_sync_mode = 1;
        MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM) = MCHIP_CAP(SYS_CAP_STATS_DMA_BLOCK_NUM);
    }

    /*allow running without DMA: skip DMA init when no DMA memory is allocated*/
    sal_memset(&dma_info, 0 ,sizeof(dal_dma_info_t));
    dal_get_dma_info(SYS_MAP_LDEV(lchip), &dma_info);
    if (0 == dma_info.size)
    {
        ds_t ds;
        p_usw_dma_master[lchip]->dma_en_flag = CTC_MAX_UINT32_VALUE;
        cmd = DRV_IOR(SupEnClk_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, ds));
        SetSupEnClk(V, cfgEnClkDma_f, ds, 0);
        cmd = DRV_IOW(SupEnClk_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, ds));
        return CTC_E_NONE;
    }

    /* alloc dma memory for writing table in linkagg  */
    if (DRV_FROM_TMM(lchip) && (0 == SDK_WORK_PLATFORM) && (NULL == p_usw_dma_master[lchip]->p_nonuc_bmask_dma))
    {
        p_usw_dma_master[lchip]->p_nonuc_bmask_dma = SYS_DMA_ALLOC(lchip, SYS_USW_DMA_NONUC_BMASK_SIZE, 0);
        if (!p_usw_dma_master[lchip]->p_nonuc_bmask_dma)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            ret =  CTC_E_NO_MEMORY;
            goto error_proc;
        }
        sal_memset(p_usw_dma_master[lchip]->p_nonuc_bmask_dma, 0, SYS_USW_DMA_NONUC_BMASK_SIZE);
    }

    if ((CTC_WB_ENABLE(lchip) && CTC_WB_STATUS(lchip) == CTC_WB_STATUS_RELOADING))
    {
        uint32 rx_en = 0;
        p_usw_dma_master[lchip]->wb_reloading = 1;

        CTC_ERROR_RETURN(sys_usw_global_ctl_get(lchip, CTC_GLOBAL_WARMBOOT_CPU_RX_EN, &rx_en));
        if (rx_en != 2)
        {
            return CTC_E_NONE;
        }

        p_usw_dma_master[lchip]->wb_keep_knet = 1;
        return CTC_E_NONE;
    }

    p_usw_dma_master[lchip]->p_thread_vector = ctc_vector_init(4, (MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID) + 1) / 4  + 1);
    if (NULL == p_usw_dma_master[lchip]->p_thread_vector)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
        ret =  CTC_E_NO_MEMORY;
        goto error_proc;
    }

    sys_usw_dma_set_dump_cb(lchip, sys_usw_dma_sync_hash_dump);
    byte_order = drv_get_host_type(lchip);

    p_usw_dma_master[lchip]->dma_high_addr = dma_info.phy_base_hi;
    p_usw_dma_master[lchip]->p_virt_base = dma_info.virt_base;
    p_usw_dma_master[lchip]->phy_base = dma_info.phy_base_hi;
    p_usw_dma_master[lchip]->phy_base <<= 32;
    p_usw_dma_master[lchip]->phy_base |= dma_info.phy_base;
    p_usw_dma_master[lchip]->op_bmp = (1<<((lchip - SYS_PP_BASE(lchip))/PP_NUM_PER_CORE)) << 10;

    /*vxworks TLP set 128*/
#ifdef _SAL_VXWORKS
    field_val = 0;
    cmd = DRV_IOW(Pcie0SysCfg_t, Pcie0SysCfg_pcie0PcieMaxRdSize_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, 0, cmd, &field_val), ret, error_proc);
#endif

    /*Tsingma soc need config intfbmp*/
    if (dal_get_soc_active(SYS_MAP_LDEV(lchip)))
    {
        field_val = 0xFFFFFFF;
        cmd = DRV_IOW(DmaIntfBmpCfg_t, DmaIntfBmpCfg_dmaDataChanBmp_f);
        CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, 0, cmd, &field_val), ret, error_proc);
        cmd = DRV_IOW(DmaIntfBmpCfg_t, DmaIntfBmpCfg_dmaDescChanBmp_f);
        CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, 0, cmd, &field_val), ret, error_proc);
    }
    CTC_ERROR_GOTO(drv_get_warmboot_status(lchip, &wb_status), ret, error_proc);

    /* 1. init dmactl and wait done */
    field_val = 1;
    cmd = DRV_FROM_TMM(lchip) ? DRV_IOW(DmaCtlInit_t, DmaCtlInit_init_f) : DRV_IOW(DmaCtlInit_t, DmaCtlInit_dmaInit_f);
    CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, 0, cmd, &field_val), ret, error_proc);
#if(0 == SDK_WORK_PLATFORM)
    while (init_cnt < SYS_DMA_INIT_COUNT)
    {
        cmd = DRV_FROM_TMM(lchip) ? DRV_IOR(DmaCtlInitDone_t, DmaCtlInitDone_initDone_f) : DRV_IOR(DmaCtlInitDone_t, DmaCtlInitDone_dmaInitDone_f);
        CTC_ERROR_GOTO(DRV_FIELD_IOCTL(lchip, 0, cmd, &field_val), ret, error_proc);
        

        /* non-zero initDone means hardware init has completed */
        if (field_val)
        {
            init_done = TRUE;
            break;
        }
        init_cnt++;
    }

    if ((init_done == FALSE) && (CTC_WB_STATUS_RELOADING != wb_status))
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "DMACtl init falied!\n");
        ret = CTC_E_NOT_INIT;
        goto error_proc;
    }
    sal_memset(&cpu_map_info_flush, 0, sizeof(CpuMapInfoFlushCtl_m));
    cmd = DRV_IOW(CpuMapInfoFlushCtl_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &cpu_map_info_flush);
#endif
    /*2. init dma endian config host le/be data desc byte order*/
    if (byte_order == HOST_LE)
    {
        cmd = DRV_IOR(DmaMiscCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &misc_cfg), ret, error_proc);

        SetDmaMiscCfg(V, cfgDmaRegRdEndian_f, &misc_cfg, 1);
        SetDmaMiscCfg(V, cfgDmaRegWrEndian_f, &misc_cfg, 1);
        SetDmaMiscCfg(V, cfgDmaInfoEndian_f, &misc_cfg, 1);
        SetDmaMiscCfg(V, cfgToCpuDescEndian_f, &misc_cfg, 0);
        SetDmaMiscCfg(V, cfgFrCpuDescEndian_f, &misc_cfg, 0);

        cmd = DRV_IOW(DmaMiscCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &misc_cfg), ret, error_proc);
    }

    cmd = DRV_IOR(DmaEndianCtl_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &dma_edn_ctl);
    if (byte_order == HOST_LE)
    {
        /*DmaEndianCtl.cfgDmaPktRxEndian  0x0*/
        field_val = 0x0;
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgDmaPktRxEndian_f, &field_val, &dma_edn_ctl);
        if (DRV_FROM_TMM(lchip))
        {
            field_val = 0x1;
            DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgDmaPktTxEndian_f, &field_val, &dma_edn_ctl);
        }
    }
    else
    {
        field_val = 1;
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgDmaRegRdEndian_f, &field_val, &dma_edn_ctl);
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgDmaRegWrEndian_f, &field_val, &dma_edn_ctl);
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgDmaInfoEndian_f, &field_val, &dma_edn_ctl);
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgFrCpuDescEndian_f, &field_val, &dma_edn_ctl);
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgToCpuDescEndian_f, &field_val, &dma_edn_ctl);
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgDmaPktTxEndian_f, &field_val, &dma_edn_ctl);
        field_val = 0;
        DRV_IOW_FIELD(lchip, DmaEndianCtl_t, DmaEndianCtl_cfgDmaPktRxEndian_f, &field_val, &dma_edn_ctl);
    }
    cmd = DRV_IOW(DmaEndianCtl_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &dma_edn_ctl);
#ifdef PCIE_SWAP_EN
    {
        cmd = DRV_IOR(DmaMiscCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &misc_cfg), ret, error_proc);

        /*packet rx/tx config endian to swap*/
        field_val = GetDmaMiscCfg(V, cfgDmaPktRxEndian_f, &misc_cfg);
        field_val = field_val?0:1;
        SetDmaMiscCfg(V, cfgDmaPktRxEndian_f, &misc_cfg, field_val);
        field_val = GetDmaMiscCfg(V, cfgDmaPktTxEndian_f, &misc_cfg);
        field_val = field_val?0:1;
        SetDmaMiscCfg(V, cfgDmaPktTxEndian_f, &misc_cfg, field_val);
        /*reg wr/rd config endian to swap*/
        field_val = GetDmaMiscCfg(V, cfgDmaRegRdEndian_f, &misc_cfg);
        field_val = field_val?0:1;
        SetDmaMiscCfg(V, cfgDmaRegRdEndian_f, &misc_cfg, field_val);
        field_val = GetDmaMiscCfg(V, cfgDmaRegWrEndian_f, &misc_cfg);
        field_val = field_val?0:1;
        SetDmaMiscCfg(V, cfgDmaRegWrEndian_f, &misc_cfg, field_val);
        /*info config endian to swap*/
        field_val = GetDmaMiscCfg(V, cfgDmaInfoEndian_f, &misc_cfg);
        field_val = field_val?0:1;
        SetDmaMiscCfg(V, cfgDmaInfoEndian_f, &misc_cfg, field_val);
        /*cpu desc config endian to swap*/
        field_val = GetDmaMiscCfg(V, cfgToCpuDescEndian_f, &misc_cfg);
        field_val = field_val?0:1;
        SetDmaMiscCfg(V, cfgToCpuDescEndian_f, &misc_cfg, field_val);
        field_val = GetDmaMiscCfg(V, cfgFrCpuDescEndian_f, &misc_cfg);
        field_val = field_val?0:1;
        SetDmaMiscCfg(V, cfgFrCpuDescEndian_f, &misc_cfg, field_val);

        cmd = DRV_IOW(DmaMiscCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &misc_cfg), ret, error_proc);
    }
#endif

    /* 3. init dma intr info  */
    CTC_ERROR_GOTO(_sys_usw_dma_intr_init(lchip, dma_global_cfg), ret, error_proc);
    /* 4. config Info and stats Dma timer */
    CTC_ERROR_GOTO(_sys_usw_dma_timer_init(lchip), ret, error_proc);
    /* 5. config packet crc */
    CTC_ERROR_GOTO(_sys_usw_dma_crc_init(lchip), ret, error_proc);

    /* 6. Map all dma info can push to Host CPU, but whether push or not depends on api enable in the end */
    if ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)))
    {
        _sys_usw_dma_init_pkt_destmap(lchip, dma_global_cfg->pkt_rx_chan_num);

        /* init info dma registers which are new added in tsingma.mx chip */
        cmd = DRV_IOR(DmaInfoDsCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &info_ds), ret, error_proc);
        SetDmaInfoDsCfg(V, cfgDmaInfo0Ds_f, &info_ds, 2);/* Learning & Aging */
        SetDmaInfoDsCfg(V, cfgDmaInfo1Ds_f, &info_ds, 2);/* Hash Dump */
        SetDmaInfoDsCfg(V, cfgDmaInfo2Ds_f, &info_ds, 9);/* Ipfix */
        SetDmaInfoDsCfg(V, cfgDmaInfo3Ds_f, &info_ds, 2);/* Buffer */
        SetDmaInfoDsCfg(V, cfgDmaInfo4Ds_f, &info_ds, 3);/* Latency */
        SetDmaInfoDsCfg(V, cfgDmaInfo5Ds_f, &info_ds, 1);/* EFD & DLB */
        SetDmaInfoDsCfg(V, cfgDmaInfo6Ds_f, &info_ds, 4);/* OAM */
        SetDmaInfoDsCfg(V, cfgDmaInfo7Ds_f, &info_ds, 3);/* SC_OAM */
        cmd = DRV_IOW(DmaInfoDsCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &info_ds), ret, error_proc);

        cmd = DRV_IOR(DmaInfoDescCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &info_desc), ret, error_proc);
        SetDmaInfoDescCfg(V, cfgInfo0DescByteSel_f, &info_desc, 0);/* info default represents entry num */
        SetDmaInfoDescCfg(V, cfgInfo0DescEmptyThrd_f, &info_desc, 2);
        SetDmaInfoDescCfg(V, cfgInfo1DescByteSel_f, &info_desc, 0);
        SetDmaInfoDescCfg(V, cfgInfo1DescEmptyThrd_f, &info_desc, 2);
        SetDmaInfoDescCfg(V, cfgInfo2DescByteSel_f, &info_desc, 0);
        SetDmaInfoDescCfg(V, cfgInfo2DescEmptyThrd_f, &info_desc, 9);
        SetDmaInfoDescCfg(V, cfgInfo3DescByteSel_f, &info_desc, 0);
        SetDmaInfoDescCfg(V, cfgInfo3DescEmptyThrd_f, &info_desc, 2);
        SetDmaInfoDescCfg(V, cfgInfo4DescByteSel_f, &info_desc, 0);
        SetDmaInfoDescCfg(V, cfgInfo4DescEmptyThrd_f, &info_desc, 3);
        SetDmaInfoDescCfg(V, cfgInfo5DescByteSel_f, &info_desc, 0);
        SetDmaInfoDescCfg(V, cfgInfo5DescEmptyThrd_f, &info_desc, 1);
        SetDmaInfoDescCfg(V, cfgInfo6DescByteSel_f, &info_desc, 0);
        SetDmaInfoDescCfg(V, cfgInfo6DescEmptyThrd_f, &info_desc, 4);
        SetDmaInfoDescCfg(V, cfgInfo7DescByteSel_f, &info_desc, 0);
        SetDmaInfoDescCfg(V, cfgInfo7DescEmptyThrd_f, &info_desc, 3);
        cmd = DRV_IOW(DmaInfoDescCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &info_desc), ret, error_proc);
    }
    else if (DRV_FROM_AT(lchip))
    {
        DmaWordOrderCtl_m word_order_ctl;

        sal_memset(&word_order_ctl, 0, sizeof(DmaWordOrderCtl_m));
        SetDmaWordOrderCtl(V, cfgDmaBatchWordOrder_f, &word_order_ctl, 1);
        cmd = DRV_IOW(DmaWordOrderCtl_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &word_order_ctl), ret, error_proc);

        _sys_at_dma_map_info(lchip, dma_global_cfg->pkt_rx_chan_num);
    }

    /*just for uml and cmodel sim platform*/
    if ((1 == SDK_WORK_PLATFORM) || ((0 == SDK_WORK_PLATFORM) && (1 == SDK_WORK_ENV)))
    {
        cmd = DRV_IOR(DmaCtlDrainEnable_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &dma_drain), ret, error_proc);
        SetDmaCtlDrainEnable(V, dmaInfo0DrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaInfo1DrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaInfo2DrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaInfo3DrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaInfo4DrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaPktRxDrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaPktTxDrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaRegRdDrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaRegWrDrainEn_f, &dma_drain, 1);
        SetDmaCtlDrainEnable(V, dmaInfoDrainEn_f, &dma_drain, 0xFF);/* TMM */
        SetDmaCtlDrainEnable(V, dmaScanDrainEn_f, &dma_drain, 1);/* TMM */
        cmd = DRV_IOW(DmaCtlDrainEnable_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 0, cmd, &dma_drain), ret, error_proc);
    }
    #ifndef EMULATION_ENV
    {
        EncapMiscCfg_m encap_misc_cfg;
        EncapPktMaxLength_m encap_len;
        sal_memset(&encap_len, 0, sizeof(encap_len));
        cmd = DRV_IOR(EncapMiscCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_misc_cfg));
        SetEncapMiscCfg(V, cfgEncapEndTimerEn_f, &encap_misc_cfg, 0xF);
        SetEncapMiscCfg(V, cfgEncapPktCrcEn_f, &encap_misc_cfg, 0xF);
        cmd = DRV_IOW(EncapMiscCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_misc_cfg));
        SetEncapPktMaxLength(V, cfgEncap0MaxLength_f, &encap_len, 1400);
        SetEncapPktMaxLength(V, cfgEncap1MaxLength_f, &encap_len, 1400);
        SetEncapPktMaxLength(V, cfgEncap2MaxLength_f, &encap_len, 1400);
        SetEncapPktMaxLength(V, cfgEncap3MaxLength_f, &encap_len, 1400);
        cmd = DRV_IOW(EncapPktMaxLength_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &encap_len));
    }
    #endif
    p_usw_dma_master[lchip]->hw_learning_sync = dma_global_cfg->hw_learning_sync_en;

    /*call interrupt module to register isr*/
    CTC_ERROR_GOTO(sys_usw_interrupt_register_isr(SYS_CHIP_CONVERT(lchip), SYS_INTR_DMA , sys_usw_dma_isr_func), ret, error_proc);

    /*clear dma interrupt*/
    cmd = DRV_IOR(DmaCtlIntrFunc_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, 2, cmd, &dma_intr_func), ret, error_proc);
    cmd = DRV_IOW(DmaCtlIntrFunc_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, 3, cmd, &dma_intr_func), ret, error_proc);

    if (DRV_FROM_AT(lchip))
    {
        DmaCtlIntrFunc0_m dma_intr_func0;
        cmd = DRV_IOR(DmaCtlIntrFunc0_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 2, cmd, &dma_intr_func0), ret, error_proc);
        cmd = DRV_IOW(DmaCtlIntrFunc0_t, DRV_ENTRY_FLAG);
        CTC_ERROR_GOTO(DRV_IOCTL(lchip, 3, cmd, &dma_intr_func0), ret, error_proc);
    }

    drv_ser_register_hw_reset_cb(lchip, DRV_SER_HW_RESET_CB_TYPE_DMA, sys_usw_dma_reset_hw);
    drv_ser_register_hw_reset_cb(lchip, DRV_SER_HW_RESET_CB_TYPE_DMA_CLEAR, sys_usw_dma_reset_hw_clear);

    CTC_ERROR_GOTO(sys_usw_dump_db_register_cb(lchip, CTC_FEATURE_DMA, _sys_usw_dma_dump_db), ret, error_proc);

    if (DRV_IS_DUET2(lchip))
    {
        CTC_ERROR_GOTO(drv_ser_register_dma_data_memory_cb(lchip, sys_duet2_dma_get_data_memory), ret, error_proc);
    }
    else
    {
        CTC_ERROR_GOTO(drv_ser_register_dma_data_memory_cb(lchip, sys_usw_dma_get_data_memory), ret, error_proc);
    }

    return CTC_E_NONE;

error_proc:
    sys_usw_dma_deinit(lchip);
    return ret;
}

/**
 * @brief Recover the dynamic hardware state (ring write pointer, valid
 *        descriptor count and descriptor cache) of one DMA channel,
 *        typically after a warmboot reload or DMA controller reset.
 *
 * Only supported on TMM/TMG chip families; others get CTC_E_NOT_SUPPORT.
 * Channels without a configured descriptor ring (desc_depth == 0) are
 * silently skipped.
 *
 * @param lchip    local chip id
 * @param chan_id  DMA channel id to recover
 * @return CTC_E_NONE on success or a negative CTC error code
 */
int32
_sys_usw_dma_recover_channel_dyn(uint8 lchip, uint8 chan_id)
{
    uint32 cmd = 0;
    DmaDynInfo_m dyn_info;
    sys_dma_chan_t* p_dma_chan;
    DmaCtlTab_m tab_ctl;
    uint8 index = 0;
    uint8 cache_cnt = 0;
    uint16 cache_ptr = 0;
    DmaDescCache_m DescCache;
    DsDesc_m* p_desc = NULL;
    uint16 desc_index = 0;

    SYS_DMA_INIT_CHECK(lchip);
    if (!(DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)))
    {
        return CTC_E_NOT_SUPPORT;
    }
    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    if (!p_dma_chan->desc_depth)
    {
        /* channel has no descriptor ring configured, nothing to recover */
        return CTC_E_NONE;
    }
    /*Step1: recover dma channel desc current index from dyninfo */
    cmd = DRV_IOR(DmaDynInfo_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, chan_id, cmd, &dyn_info);
    p_dma_chan->current_index = GetDmaDynInfo(V, ringWrPtr_f, &dyn_info);
    desc_index = GetDmaDynInfo(V, ringRdPtr_f, &dyn_info);
    cache_cnt = GetDmaDynInfo(V, cacheCnt_f, &dyn_info);
    /* table write/read channel types only need the write pointer restored
       above; NOTE(review): channel types at or beyond DRV_DMA_LEARNING_CHAN_ID
       fall through to the full recovery below -- confirm intended range */
    if (DRV_DMA_TBL_WR_CHAN_ID <= GET_CHAN_TYPE(chan_id) && GET_CHAN_TYPE(chan_id) < DRV_DMA_LEARNING_CHAN_ID)
    {
        return CTC_E_NONE;
    }

    /*Step2: recover dma channel valid desc num to channel depth */
    cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, chan_id, cmd, &tab_ctl);
    if (p_dma_chan->desc_depth > GetDmaCtlTab(V, vldNum_f, &tab_ctl))
    {
        /* write the missing amount so the hardware valid count reaches
           the full ring depth again */
        SetDmaCtlTab(V, vldNum_f, &tab_ctl, (p_dma_chan->desc_depth-GetDmaCtlTab(V, vldNum_f, &tab_ctl)));
        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, chan_id, cmd, &tab_ctl);
    }

    /*Step3: rewrite dma channel desc cache */
    cache_ptr = GetDmaDynInfo(V, cacheRdPtr_f, &dyn_info);
    /* step back cache_cnt entries from the ring read pointer (modulo ring
       depth) to locate the first descriptor still held in the hw cache */
    desc_index = (desc_index + p_dma_chan->desc_depth - cache_cnt)%p_dma_chan->desc_depth;

    for (index = 0; index < cache_cnt; index++)
    {
        if (desc_index >= p_dma_chan->desc_depth)
        {
            /* wrap around the descriptor ring */
            desc_index = (desc_index) % p_dma_chan->desc_depth;
        }
        p_desc = (DsDesc_m*)&p_dma_chan->p_desc[desc_index].desc_info;
        sal_memset(&DescCache, 0, sizeof(DescCache));
        /* mirror the software descriptor's size/address into the hw cache entry */
        SetDmaDescCache(V, cfgSize_f,&DescCache, GetDsDescEncap(V, cfgSize_f, p_desc));
        SetDmaDescCache(V, memAddr_f,&DescCache, GetDsDescEncap(V, memAddr_f, p_desc));
        cmd = DRV_IOW(DmaDescCache_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, (cache_ptr), cmd, &DescCache);
        desc_index++;
        cache_ptr++;
        /* NOTE(review): cache_ptr is not wrapped -- assumes cacheRdPtr +
           cacheCnt stays within the DmaDescCache table; confirm vs hw spec */
    }
    return CTC_E_NONE;
}

/**
 * @brief Finish DMA module initialization: configure and enable every DMA
 *        channel selected by dma_en_flag, apply chip-specific workarounds,
 *        clear the flush controls and register the SDB DMA callbacks.
 *
 * @param lchip          local chip id
 * @param in_chip_reset  non-zero when invoked from a chip-reset flow; the
 *                       warmboot pending-FDB flush is skipped in that case
 * @return CTC_E_NONE on success or a negative CTC error code
 */
int32
sys_usw_dma_init_done(uint8 lchip, uint8 in_chip_reset)
{
    int32 ret = CTC_E_NONE;
    uint8 index = 0;
    uint32 cmd = 0;
    uint32 field_value = 1;
    uint8 pp_id = 0;
    sys_dma_chan_t dma_chan_info;
    ctc_chip_device_info_t device_info;
    CpuMapInfoFlushCtl_m cpu_map_info_flush;
    DmaFlushCtl_m flush_dma;
    uint8  fetch_mode_cnt = 0;  /* number of channels using auto-fetch mode */

#if (SDB_MEM_MODEL == SDB_MODE)
    return CTC_E_NONE;
#endif

    /*dma not alloc memory, not init*/
    if (CTC_MAX_UINT32_VALUE == p_usw_dma_master[lchip]->dma_en_flag)
    {
        p_usw_dma_master[lchip]->dma_en_flag = 0;
        return CTC_E_NONE;
    }

    _sys_usw_dma_init_db(lchip, &p_usw_dma_master[lchip]->dma_global_cfg);

    /* 2.  per channel config, just config the dmactl which have the function  */
    for (index = 0; index <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); index++)
    {
        if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
        {
            sal_memset(&dma_chan_info, 0, sizeof(sys_dma_chan_t));
            CTC_ERROR_GOTO(_sys_usw_dma_get_chan_cfg(lchip, index, &p_usw_dma_master[lchip]->dma_global_cfg, &dma_chan_info), ret, error_proc);

            /* auto-fetch mode is rejected on TsingMa sub-version A */
            if (dma_chan_info.auto_fetch_en && DRV_IS_TSINGMA(lchip) && (SYS_GET_CHIP_VERSION == SYS_CHIP_SUB_VERSION_A))
            {
                ret = CTC_E_NOT_SUPPORT;
                goto error_proc;
            }
            if (dma_chan_info.auto_fetch_en)
            {
                fetch_mode_cnt++;
            }
            /*keep knet function enabled; other (non-knet) packet channels are reset to default status when wb reloading*/
            if (p_usw_dma_master[lchip]->wb_keep_knet && p_usw_dma_master[lchip]->wb_reloading &&
                !dma_chan_info.pkt_knet_en && (GET_CHAN_TYPE(index) <  DRV_DMA_TBL_WR_CHAN_ID))
            {
                CTC_ERROR_GOTO(_sys_usw_dma_reset_channel(lchip, index), ret, error_proc);
            }

            CTC_ERROR_GOTO(_sys_usw_dma_common_init(lchip, &dma_chan_info), ret, error_proc);
            CTC_ERROR_GOTO(_sys_usw_dma_sync_init(lchip, index), ret, error_proc);

            /*keep knet function enabled; table-write..tcam-scan channels recover their dynamic state when wb reloading*/
            if (p_usw_dma_master[lchip]->wb_keep_knet && p_usw_dma_master[lchip]->wb_reloading && !dma_chan_info.pkt_knet_en
                && (DRV_DMA_TBL_WR_CHAN_ID <= GET_CHAN_TYPE(index)) &&(GET_CHAN_TYPE(index )<  DRV_DMA_TCAM_SCAN_CHAN_ID))
            {
                CTC_ERROR_GOTO(_sys_usw_dma_recover_channel_dyn(lchip, index), ret, error_proc);
            }
        }
    }

    sys_usw_mchip_set_feature_en(lchip,CTC_FEATURE_DMA);

    sys_usw_chip_get_device_info(lchip, &device_info);
    /* TsingMa version-3 workaround: when the packet-tx timer is off and any
       channel uses auto-fetch, program reserved control bits --
       NOTE(review): bit layout presumably comes from vendor errata, confirm */
    if ((device_info.version_id == 3) && DRV_IS_TSINGMA(lchip) && !p_usw_dma_master[lchip]->pkt_tx_timer_en && fetch_mode_cnt)
    {
        uint32 field_value = 0;  /* intentionally shadows the outer field_value */

        cmd = DRV_IOR(DmaCtlReserved_t, DmaCtlReserved_reserved_f);
        DRV_FIELD_IOCTL(lchip, 0, cmd, &field_value);
        /* clear bit15 and bits[13:9], then set bit15 and value 2 in [13:9] */
        field_value &= ~(1<<15|0x1F<<9);
        field_value |= (1<<15|2<<9);
        cmd = DRV_IOW(DmaCtlReserved_t, DmaCtlReserved_reserved_f);
        DRV_FIELD_IOCTL(lchip, 0, cmd, &field_value);

        field_value = 1;
        cmd = DRV_IOW(DmaMiscCfg_t, DmaMiscCfg_cfgDescReqNum_f);
        DRV_FIELD_IOCTL(lchip, 0, cmd, &field_value);
    }
    if ((DRV_IS_TMM(lchip) || DRV_IS_TMG(lchip)))
    {
        /* TM_MX default do not use fetch Mode */
        cmd = DRV_IOR(DmaCtlReserved_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, cmd, &field_value);
        field_value |= 0x2;
        cmd = DRV_IOW(DmaCtlReserved_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, cmd, &field_value);
    }
    if (DRV_IS_TMG(lchip))
    {
        /* disable DMA fetch stall (TMG register) */
        field_value = 1;
        cmd = DRV_IOW(DmaFetchStallDis_t, DmaFetchStallDis_cfgFetchStallDis_f);
        DRV_FIELD_IOCTL(lchip, 0, cmd, &field_value);
    }
    if (DRV_IS_AT(lchip))
    {
        /* disable DMA fetch stall (AT uses a different register than TMG) */
        field_value = 1;
        cmd = DRV_IOW(DmaMiscCfg_t, DmaMiscCfg_cfgFetchStallDis_f);
        DRV_FIELD_IOCTL(lchip, 0, cmd, &field_value);
    }
    if (lchip != SYS_PP_BASE(lchip))
    {
        /* non-base PP selects the alternate DMA interface */
        field_value = 1;
        cmd = DRV_IOW(DmaIntfSel_t, DmaIntfSel_dmaIntfSel_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &field_value));
    }

    /* enable every configured channel in hardware and mirror it in software */
    for (index = 0; index <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); index++)
    {
        if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
        {
            cmd = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_chanEn_f);
            field_value = 1;
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, index, DRV_CMD_PP_EN(cmd), &field_value));

            p_usw_dma_master[lchip]->dma_chan_info[index].chan_en = 1;
        }
    }
    CTC_ERROR_RETURN(sys_usw_set_dma_channel_drop_en(lchip, FALSE));
    /* zero the cpu-map-info and dma flush controls */
    sal_memset(&cpu_map_info_flush, 0, sizeof(CpuMapInfoFlushCtl_m));
    cmd = DRV_IOW(CpuMapInfoFlushCtl_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &cpu_map_info_flush);

    sal_memset(&flush_dma, 0, sizeof(DmaFlushCtl_m));
    cmd = DRV_IOW(DmaFlushCtl_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &flush_dma);

    if (CTC_WB_ENABLE(lchip) && (0 == in_chip_reset))
    {
        ctc_l2_flush_fdb_t flush_fdb;
        /*flush pending entries so they are relearned once warmboot reloading is done*/
        sal_memset(&flush_fdb, 0, sizeof(ctc_l2_flush_fdb_t));
        flush_fdb.flush_flag = CTC_L2_FDB_ENTRY_PENDING;
        flush_fdb.flush_type = CTC_L2_FDB_ENTRY_OP_ALL;
        sys_usw_l2_flush_fdb(lchip, &flush_fdb);
    }

    p_usw_dma_master[lchip]->wb_reloading = 0;
    /* register DMA table read/write callbacks for every PP on this core */
    for (pp_id = SYS_CORE_PP_BASE(lchip); pp_id < SYS_CORE_PP_BASE(lchip) + SYS_CORE_PP_NUM(lchip); pp_id ++)
    {
        drv_sdb_register_dma_write_cb(pp_id, (DRV_DMA_WRITE_CB)MCHIP_DMA(lchip)->dma_write_table);
        drv_sdb_register_dma_read_cb(pp_id, (DRV_DMA_READ_CB)MCHIP_DMA(lchip)->dma_read_table);
        drv_sdb_register_dma_wait_done_cb(pp_id, sys_usw_dma_wait_desc_finish);
        drv_sdb_register_dma_trigger_cb(pp_id, sys_usw_dma_read_trigger);
    }
    return CTC_E_NONE;

error_proc:
    sys_usw_dma_deinit(lchip);
    return ret;
}

/**
 * @brief Vector-traversal callback used during DMA deinit to release the
 *        heap memory attached to a thread-vector node.
 *
 * @param node_data  node payload to free; NULL entries are ignored
 * @param user_data  unused traversal context
 * @return CTC_E_NONE always
 */
STATIC int32
_sys_usw_dma_free_node_data(void* node_data, void* user_data)
{
    if (NULL == node_data)
    {
        return CTC_E_NONE;
    }

    mem_free(node_data);

    return CTC_E_NONE;
}

int32
sys_usw_dma_deinit(uint8 lchip)
{
    sys_dma_thread_t* p_thread_info = NULL;
    sys_dma_chan_t*   p_dma_chan = NULL;
    uint32 index = 0;
    uint8 sync_chan = 0;
    uint32  desc_index = 0;
    void* logic_addr = 0;
    uint32 low_phy_addr = 0;
    uint64 phy_addr = 0;
    DsDesc_m* p_desc = NULL;
    uint32 cmd = 0;
    uint16 rx_flush = 0;
    uint16 tx_flush = 0;
    DmaFlushCtl_m flush_dma;
    CpuMapInfoFlushCtl_m cpu_map_info_flush;
    uint32 data_addr = 0;
    uint32 len = 0;
    DmaPktTx0Ctl_m tx_ctl;
    DmaPktRx0Ctl_m rx_ctl;
    uint32 tbl_id = 0;
#if (SDK_WORK_PLATFORM==1)
    uint32 chan_id = 0;
#endif
    uint8 vchip_pp_base = 0;
    uint8 pp_id = 0;

    LCHIP_CHECK(lchip);

    vchip_pp_base = drv_vchip_get_pp_base(lchip);
    pp_id = lchip - vchip_pp_base;
    if (pp_id > 0 && pp_id < PP_NUM_PER_CORE)
    {
        p_usw_dma_master[lchip] = NULL;
        return CTC_E_NONE;
    }
    else if (pp_id > PP_NUM_PER_CORE && pp_id < (PP_NUM_PER_CORE<<1))
    {
        p_usw_dma_master[lchip] = NULL;
        return CTC_E_NONE;
    }

    if (NULL == p_usw_dma_master[lchip])
    {
        return CTC_E_NONE;
    }
    sal_memset(&flush_dma,0,sizeof(DmaFlushCtl_m));

    if (!p_usw_dma_master[lchip]->wb_keep_knet)
    {
        sys_usw_set_dma_channel_drop_en(lchip, TRUE);
    }
    /*disable all dma channel*/
    if (!CTC_WB_ENABLE(lchip) || (SDK_WORK_PLATFORM==1))
    {
        for (index = 0; index <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); index++)
        {
            if (!CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
            {
                continue;
            }
            p_dma_chan = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[index];
            if (p_dma_chan->pkt_knet_en && p_usw_dma_master[lchip]->wb_keep_knet)
            {
                continue;
            }
            sys_usw_dma_set_chan_en(lchip, index, 0);
        }
    }

#if (SDK_WORK_PLATFORM==1)
    /* disable ECPU dma channel for uml dma simulation */
    if (DRV_IS_AT(lchip))
    {
        DmaStaticInfo_m static_info;
        uint32 start_chan = MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID)+1;
        uint32 end_chan = start_chan + (MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID) + 1) * 2 - 1;

        for (chan_id = start_chan; chan_id <= end_chan; chan_id++)
        {
            cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &static_info));
            SetDmaStaticInfo(V, chanEn_f, &static_info, 0);
            cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_IOCTL(lchip, chan_id, DRV_CMD_PP_EN(cmd), &static_info));
        }
    }
#endif

    /*enable flush function*/
    for (index = 0; index <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); index++)
    {
        if (!CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
        {
            continue;
        }
        p_dma_chan = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[index];
        if (p_dma_chan->pkt_knet_en && p_usw_dma_master[lchip]->wb_keep_knet)
        {
            continue;
        }
    
        if (index < SYS_DMA_PACKET_TX0_CHAN_ID)
        {
            rx_flush |= 1 << index;
            SetDmaFlushCtl(V, dmaPktRxFlushEn_f, &flush_dma, rx_flush);
            if (DRV_IS_AT(lchip))
            {
                _sys_at_dma_get_table_id_by_ring(lchip, index, &tbl_id);
                cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, 0, cmd, &rx_ctl);
                SetDmaPktRx0Ctl(V, dmaPktRxFlushEn_f, &rx_ctl, 1);
                cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, 0, cmd, &rx_ctl);
            }
        }
        else if (index < SYS_DMA_PACKET_TX3_CHAN_ID)
        {
            tx_flush |= 1 << (index-SYS_DMA_PACKET_TX0_CHAN_ID);
            if (DRV_IS_AT(lchip))
            {
                _sys_at_dma_get_table_id_by_ring(lchip, index, &tbl_id);
                cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, 0, cmd, &tx_ctl);
                SetDmaPktTx0Ctl(V, dmaPktTxFlushEn_f, &tx_ctl, 1);
                cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, 0, cmd, &tx_ctl);
            }
        }
    }

    SetDmaFlushCtl(V, dmaPktRxFlushEn_f, &flush_dma, rx_flush);
    SetDmaFlushCtl(V, dmaPktTxFlushEn_f, &flush_dma, tx_flush);
    cmd = DRV_IOW(DmaFlushCtl_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &flush_dma);
    
    sal_memset(&cpu_map_info_flush, 0xFF, sizeof(CpuMapInfoFlushCtl_m));
    cmd = DRV_IOW(CpuMapInfoFlushCtl_t, DRV_ENTRY_FLAG);
    DRV_IOCTL(lchip, 0, cmd, &cpu_map_info_flush);

    if (!p_usw_dma_master[lchip]->wb_keep_knet)
    {
        /*reset dmactl */
        MCHIP_DMA(lchip)->dma_reset(lchip);
    }

    p_usw_dma_master[lchip]->init = 0;

    for (index = 0; index < SYS_DMA_CB_MAX_TYPE; index++)
    {
        p_usw_dma_master[lchip]->dma_cb[index] = _sys_usw_dma_cb_null;
    }
    for (index = 0; index <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); index++)
    {
        if (!CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
        {
            continue;
        }
        p_dma_chan = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[index];
        if (p_dma_chan->p_mutex)
        {
            if ((p_dma_chan->channel_id == SYS_DMA_PACKET_TX0_CHAN_ID)
                || (p_dma_chan->channel_id == SYS_DMA_PACKET_TX1_CHAN_ID)
                || (p_dma_chan->channel_id == SYS_DMA_PACKET_TX2_CHAN_ID)
                || (p_dma_chan->channel_id == SYS_DMA_PACKET_TX3_CHAN_ID))
            {
#ifndef PACKET_TX_USE_SPINLOCK
                sal_mutex_destroy(p_dma_chan->p_mutex);
#else
                sal_spinlock_destroy((sal_spinlock_t*)p_dma_chan->p_mutex);
#endif
            }
            else
            {
                sal_mutex_destroy(p_dma_chan->p_mutex);
            }
            p_dma_chan->p_mutex = NULL;
        }
        if (p_dma_chan->p_desc_check)
        {
            mem_free(p_dma_chan->p_desc_check);
            p_dma_chan->p_desc_check = NULL;
        }
        if(p_dma_chan->p_desc_info)
        {
            mem_free(p_dma_chan->p_desc_info);
            p_dma_chan->p_desc_info= NULL;
        }
        if(p_dma_chan->p_tx_mem_info)
        {
            mem_free(p_dma_chan->p_tx_mem_info);
            p_dma_chan->p_tx_mem_info= NULL;
        }
        if (p_dma_chan->p_data_fifo)
        {
            do{
                len = sal_fifo_get(p_dma_chan->p_data_fifo, (uint8*)&data_addr, sizeof(uint32));
                if(len == sizeof(uint32))
                {
                    logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, data_addr);
                    SYS_DMA_FREE(lchip, logic_addr);
                }
            }while(len == sizeof(uint32));
            sal_fifo_destroy(p_dma_chan->p_data_fifo);
            p_dma_chan->p_data_fifo = NULL;
        }
    }

    /* From TMM, need free tcan scam task */
    for (index=0; index<= (MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID) + (DRV_FROM_TMM(lchip)?1:0)); index++)
    {
        sync_chan = p_usw_dma_master[lchip]->dma_chan_info[index].sync_chan;
        p_thread_info = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, sync_chan);
        if (p_thread_info)
        {
            if (NULL != p_thread_info->p_sync_sem)
            {
                sal_sem_give(p_thread_info->p_sync_sem);
                sal_task_sleep(1);
                sal_task_destroy(p_thread_info->p_sync_task);
                p_thread_info->p_sync_task = NULL;
                sal_sem_destroy(p_thread_info->p_sync_sem);
                p_thread_info->p_sync_sem = NULL;
            }
        }
    }

    /*free vector data*/
    ctc_vector_traverse(p_usw_dma_master[lchip]->p_thread_vector, (vector_traversal_fn)_sys_usw_dma_free_node_data, NULL);
    ctc_vector_release(p_usw_dma_master[lchip]->p_thread_vector);

    for (index = 0; index <= MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID); index++)
    {
        if (!CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, index))
        {
            continue;
        }
        p_dma_chan = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[index];
        if(NULL == p_dma_chan)
        {
            continue;
        }
        if (p_dma_chan->pkt_knet_en)
        {
            continue;
        }
        if(p_dma_chan->mem_base)
        {
            SYS_DMA_FREE(lchip, (void*)p_dma_chan->mem_base);
            p_dma_chan->mem_base = (uintptr)0;
        }
        else
        {
            for (desc_index = 0; desc_index < p_dma_chan->desc_depth; desc_index++)
            {
                p_desc = &p_dma_chan->p_desc[desc_index].desc_info;
                if (p_desc)
                {
                    low_phy_addr = DRV_IS_DUET2(lchip)?GetDsDescEncap2(V, memAddr_f, p_desc):GetDsDescEncap(V, memAddr_f, p_desc);
                    COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr, low_phy_addr<<4, phy_addr);
                    if (0 != phy_addr)
                    {
                        logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
                        SYS_DMA_FREE(lchip, (void*)logic_addr);

                    }
                }
            }
        }
        if (p_dma_chan && (NULL != p_dma_chan->p_desc))
        {
            SYS_DMA_FREE(lchip, p_dma_chan->p_desc);
            p_dma_chan->p_desc = NULL;
        }
    }

    if (p_usw_dma_master[lchip]->p_nonuc_bmask_dma)
    {
        SYS_DMA_FREE(lchip, (void*)p_usw_dma_master[lchip]->p_nonuc_bmask_dma);
    }

    /* free pkt rx fifo */
    sal_fifo_destroy(p_usw_dma_master[lchip]->pkt_thread_fifo);
    p_usw_dma_master[lchip]->pkt_thread_fifo = NULL;

    /*free master*/
    mem_free(p_usw_dma_master[lchip]);

    return CTC_E_NONE;
}

/**
 @brief Wake up every DMA sync task so it can run to completion.

 Walks all DMA channels (plus the extra TMM tcam-scan channel when the
 chip is from TMM) and posts each channel's sync semaphore via
 sal_sem_give(). Tasks blocked on those semaphores are released; no
 task or semaphore is destroyed here.

 @param[in] lchip  local chip id

 @return CTC_E_NONE always (after the init check passes)
*/
int32
sys_usw_dma_free_task(uint8 lchip)
{
    uint32 chan_idx = 0;
    /* From TMM there is one extra sync channel for the tcam scan task */
    uint32 chan_max = MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID) + (DRV_FROM_TMM(lchip) ? 1 : 0);
    uint8 sync_chan_id = 0;
    sys_dma_thread_t* p_thread = NULL;

    SYS_DMA_INIT_CHECK(lchip);

    for (chan_idx = 0; chan_idx <= chan_max; chan_idx++)
    {
        sync_chan_id = p_usw_dma_master[lchip]->dma_chan_info[chan_idx].sync_chan;
        p_thread = ctc_vector_get(p_usw_dma_master[lchip]->p_thread_vector, sync_chan_id);
        if (NULL == p_thread)
        {
            continue;
        }
        if (NULL == p_thread->p_sync_sem)
        {
            continue;
        }
        sal_sem_give(p_thread->p_sync_sem);
    }

    return CTC_E_NONE;
}

