/**
 @file sys_duet2_dma.c

 @date 2022-02-08

 @version v1.0

 This file contains the DMA APIs of the sys layer.
*/

/***************************************************************
 *
 * Header Files
 *
 ***************************************************************/
#include "sal.h"
#include "sal_fifo.h"
#include "dal.h"
#include "sys_usw_common.h"
#include "sys_usw_interrupt.h"
#include "sys_usw_packet.h"
#include "sys_usw_dma.h"
#include "../sys_usw_dma_priv.h"

#define SYS_PKT_U3_OTHER_FROM_CPU 8
#if (HOST_IS_LE == 1 && SDK_WORK_PLATFORM == 0)
#define GetDsDescEncapD2(X, ...)   DRV_GET_FLD(X, DsDescLittle, ##__VA_ARGS__)
#define SetDsDescEncapD2(X, ...)   DRV_SET_FLD(X, DsDescLittle, ##__VA_ARGS__)
#else
#define GetDsDescEncapD2(X, ...) DRV_GET_FLD(X, DsDesc, ##__VA_ARGS__)
#define SetDsDescEncapD2(X, ...) DRV_SET_FLD(X, DsDesc, ##__VA_ARGS__)
#endif

/**
 * Collect the logic (virtual) addresses -- and optionally the done flags --
 * of the DMA data memories serving flow-stats for one stats block.
 *
 * @param lchip       local chip id
 * @param block_id    stats block id, matched against the low byte of each
 *                    descriptor's value0 bookkeeping field
 * @param p_mem       [out] filled with the logic address of each matching
 *                    descriptor's data memory (caller sizes the array)
 * @param p_desc_done [out, optional] per-entry descriptor done flag; may be NULL
 * @return CTC_E_NONE on success
 */
int32
sys_duet2_dma_get_flow_stats_memory(uint8 lchip, uint8 block_id, uint32** p_mem, uint8* p_desc_done)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 cur_index = 0;
    uint16 loop_pp_dsc = 0;
    uint16 loop_pp_dep = 0;
    uint64 phy_addr = 0;
#ifdef EMULATION_ENV
    uint8 pp_num = 2;   /* emulation fixes the packet-processor count at 2 */
#else
    uint8 pp_num = SYS_PP_NUM(lchip);
#endif
    uint8 pp_base = SYS_PP_BASE(lchip);

#ifndef CTC_HOT_PLUG_DIS
    SYS_DMA_INIT_CHECK(lchip);
#endif

    /* flow-stats descriptors all live on the base pp's flow-stats channel */
    p_dma_chan = &p_usw_dma_master[pp_base]->dma_chan_info[SYS_DMA_FLOW_STATS_CHAN_ID];
    for(loop_pp_dsc=0; loop_pp_dsc < MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM); loop_pp_dsc++)
    {
        /* the low byte of value0 records which stats block this slot serves */
        if (block_id != (p_dma_chan->p_desc_info[loop_pp_dsc].value0 & 0xff))
        {
            continue;
        }
        for (loop_pp_dep = 0; loop_pp_dep < pp_num*SYS_DMA_FLOW_STATS_DESC_DEPTH; loop_pp_dep++)
        {
            /* descriptors for the blocks are interleaved: stride equals the
             * pp block count, offset selects the block */
            p_desc = &(p_dma_chan->p_desc[MCHIP_CAP(SYS_CAP_STATS_DMA_PP_BLOCK_NUM)*loop_pp_dep+loop_pp_dsc].desc_info);
            /* memAddr stores the physical address shifted right by 4; rebuild
             * the full 64-bit address with the recorded high 32 bits */
            COMBINE_64BITS_DATA(p_usw_dma_master[pp_base]->dma_high_addr,(GetDsDescEncapD2(V, memAddr_f, p_desc)<<4), phy_addr);
            p_mem[cur_index] = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
            if (p_desc_done)
            {
                p_desc_done[cur_index] = GetDsDescEncapD2(V, done_f, p_desc);
            }
            cur_index++;
        }
    }

    return CTC_E_NONE;
}

/**
 * Report one descriptor of a DMA channel (its address and the logic address
 * of the data memory it points to) and compute the buffer size needed to
 * dump the given TCAM memory.
 *
 * @param lchip         local chip id
 * @param chan_id       DMA channel id; the channel must be enabled
 * @param cur_index     descriptor index within the channel's ring
 * @param mem_id        TCAM memory id, resolved via drv_usw_ftm_get_tcam_memory_info
 * @param cfg_size      [out] dump size in bytes, entry count rounded up to
 *                      whole scan units
 * @param pp_desc_addr  [out] address of the descriptor entry itself
 * @param pp_logic_addr [out] logic (virtual) address of the descriptor's data memory
 * @return CTC_E_NONE on success, CTC_E_NOT_INIT if the channel is disabled
 */
int32
sys_duet2_dma_get_data_memory(uint8 lchip, uint8 chan_id, uint32 cur_index, uint32 mem_id, uint32 *cfg_size, uint32 **pp_desc_addr, uint32 **pp_logic_addr)
{
    sys_dma_chan_t *p_chan_info = NULL;
    sys_dma_desc_t *p_base_desc = NULL;
    DsDesc_m *p_desc = NULL;
    uint64 phy_addr = 0;
    uint32 cfg_addr = 0;
    uint32 entry_num = 0;
    uint32 per_entry_size = 0;
    uint8 entry_num_per_unit = 0;
    
    SYS_DMA_INIT_CHECK(lchip);

    p_chan_info = (sys_dma_chan_t*)&p_usw_dma_master[lchip]->dma_chan_info[chan_id];
    if (!p_chan_info->chan_en)
    {
        return CTC_E_NOT_INIT;
    }
    p_base_desc = p_chan_info->p_desc;
    p_desc = &(p_base_desc[cur_index].desc_info);

    /* memAddr stores the physical address >> 4; rebuild the 64-bit address */
    COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr, (GetDsDescEncapD2(V, memAddr_f, p_desc) << 4), phy_addr);

    *pp_logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
    *pp_desc_addr = (uint32*)(&p_base_desc[cur_index]);

    CTC_ERROR_RETURN(drv_usw_ftm_get_tcam_memory_info(lchip, mem_id, &cfg_addr, &entry_num,  &per_entry_size, NULL));

    entry_num_per_unit = SYS_DMA_TCAM_SCAN_ENTRY_PER_UNIT;

    /* ceil(entry_num / entries-per-unit) scan units, each a fixed byte size */
    *cfg_size = (entry_num / entry_num_per_unit + ((entry_num % entry_num_per_unit) ? 1 : 0)) * SYS_DMA_TCAM_SCAN_BYTE_PER_UNIT;

    return CTC_E_NONE;
}

/**
 * Configure the TCAM-scan trigger of the DMA engine.
 *
 * @param lchip local chip id
 * @param mode  0: default interval; 1: periodic with 'timer' in minutes;
 *              2: trigger disabled. Any other value is rejected.
 * @param timer period in minutes, used only for mode 1
 * @return DRV_E_NONE on success, CTC_E_INVALID_PARAM for a bad mode/timer
 */
int32
sys_duet2_dma_set_tcam_scan_mode(uint8 lchip, uint8 mode, uint32 timer)
{
    DmaRegRd3TrigCfg_m trig_cfg;
    DmaRegTrigEnCfg_m  trig_en;
    uint64 interval_ns = 0;
    uint32 ns_words[2] = {0};
    uint32 seconds = 0;    /* 0 means the trigger is left disabled */
    uint32 cmd = 0;

    if (mode > 2)
    {
        return CTC_E_INVALID_PARAM;
    }

    if (0 == mode)
    {
        seconds = 1;
    }
    else if (1 == mode)
    {
        /*the max value is 60bits ns,so the max timer is (1<<62-1)/1000/1000/1000/60 minutes*/
        /*the max value is 48 bit from TM*/
        if (timer > 76861433)
        {
            return CTC_E_INVALID_PARAM;
        }
        seconds = timer * 60;
    }

    if (seconds)
    {
        /* convert to nanoseconds, scaled by the down-frequency ratio, then
         * program the 64-bit interval as two 32-bit words */
        interval_ns = (uint64)seconds * 1000000 * 1000 / DOWN_FRE_RATE;
        ns_words[0] = interval_ns & 0xFFFFFFFF;
        ns_words[1] = (interval_ns >> 32) & 0xFFFFFFFF;
        cmd = DRV_IOR(DmaRegRd3TrigCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_cfg));
        SetDmaRegRd3TrigCfg(A, cfgRegRd3TrigNs_f, &trig_cfg, ns_words);
        cmd = DRV_IOW(DmaRegRd3TrigCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_cfg));
    }

    /* enable the trigger only when a non-zero interval was programmed */
    cmd = DRV_IOR(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_en));
    SetDmaRegTrigEnCfg(V, cfgRegRd3TrigEn_f, &trig_en, (seconds ? 1 : 0));
    cmd = DRV_IOW(DmaRegTrigEnCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_en));

    return DRV_E_NONE;
}


/**
 * Pause (en = 1) or resume (en = 0) the drain function of one DMA channel.
 *
 * DmaCtlDrainEnable is read-modified-written: info channels each own a single
 * enable bit, while packet-rx, packet-tx and register-read channels share a
 * per-channel bitmap field.
 *
 * @param lchip   local chip id
 * @param chan_id DMA channel id, up to MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID)
 * @param en      1 pauses the channel (clears drain enable), 0 resumes it
 * @return CTC_E_NONE on success, CTC_E_INVALID_PARAM for a bad channel id
 */
int32
sys_duet2_dma_function_pause(uint8 lchip, uint8 chan_id, uint8 en)
{
    DmaCtlDrainEnable_m dma_drain;
    uint32 cmd = 0;
    /* pause clears the drain-enable bit, resume sets it */
    uint32 value = (en)? 0 : 1;

    SYS_DMA_INIT_CHECK(lchip);
    if (chan_id > MCHIP_CAP(SYS_CAP_DMA_MAX_CHAN_ID))
    {
        return CTC_E_INVALID_PARAM;
    }

    cmd = DRV_IOR(DmaCtlDrainEnable_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_drain));
    switch (GET_CHAN_TYPE(chan_id))
    {
        /* info0 is shared by learning plus several TM-only channels */
        case DRV_DMA_LEARNING_CHAN_ID:
        case DRV_DMA_BUFFER_CHAN_ID:/* Only TMM */
        case DRV_DMA_LATENCY_CHAN_ID:/* Only TMM */
        case DRV_DMA_EFD_CHAN_ID:/* Only TMM */
        case DRV_DMA_OAM_CHAN_ID:/* Only TMM */
        case DRV_DMA_SC_OAM_CHAN_ID:/* Only TMM */
            SetDmaCtlDrainEnable(V, dmaInfo0DrainEn_f, &dma_drain, value);
            break;

        case DRV_DMA_HASHKEY_CHAN_ID:
            SetDmaCtlDrainEnable(V, dmaInfo1DrainEn_f, &dma_drain, value);
            break;

        case DRV_DMA_IPFIX_CHAN_ID:
            SetDmaCtlDrainEnable(V, dmaInfo2DrainEn_f, &dma_drain, value);
            break;

        case DRV_DMA_SDC_CHAN_ID:
            SetDmaCtlDrainEnable(V,dmaInfo3DrainEn_f, &dma_drain, value);
            break;

        case DRV_DMA_MONITOR_CHAN_ID:
            SetDmaCtlDrainEnable(V,dmaInfo4DrainEn_f, &dma_drain, value);
            break;

        /* packet-rx channels share one bitmap field, one bit per channel.
         * NOTE(review): case labels use DRV_DMA_* ids while the bit offset is
         * computed from SYS_DMA_* ids -- assumed numerically equal; confirm. */
        case DRV_DMA_PACKET_RX0_CHAN_ID:
        case DRV_DMA_PACKET_RX1_CHAN_ID:
        case DRV_DMA_PACKET_RX2_CHAN_ID:
        case DRV_DMA_PACKET_RX3_CHAN_ID:
        case DRV_DMA_PACKET_RX4_CHAN_ID:
        case DRV_DMA_PACKET_RX5_CHAN_ID:
        case DRV_DMA_PACKET_RX6_CHAN_ID:
        case DRV_DMA_PACKET_RX7_CHAN_ID:
            value =  GetDmaCtlDrainEnable(V,dmaPktRxDrainEn_f, &dma_drain);
            if (en)
            {
                value &= ~(1 << (chan_id - SYS_DMA_PACKET_RX0_CHAN_ID));
            }
            else
            {
                value |= (1 << (chan_id - SYS_DMA_PACKET_RX0_CHAN_ID));
            }
            SetDmaCtlDrainEnable(V,dmaPktRxDrainEn_f, &dma_drain, value);
            break;

       /* packet-tx channels: same bitmap scheme as rx */
       case DRV_DMA_PACKET_TX0_CHAN_ID:
       case DRV_DMA_PACKET_TX1_CHAN_ID:
       case DRV_DMA_PACKET_TX2_CHAN_ID:
       case DRV_DMA_PACKET_TX3_CHAN_ID:
            value =  GetDmaCtlDrainEnable(V,dmaPktTxDrainEn_f, &dma_drain);
            if (en)
            {
                value &= ~(1 << (chan_id - SYS_DMA_PACKET_TX0_CHAN_ID));
            }
            else
            {
                value |= (1 << (chan_id - SYS_DMA_PACKET_TX0_CHAN_ID));
            }
            SetDmaCtlDrainEnable(V,dmaPktTxDrainEn_f, &dma_drain, value);
            break;

        /* register-read style channels: same bitmap scheme */
        case DRV_DMA_TBL_RD_CHAN_ID:
        case DRV_DMA_PORT_STATS_CHAN_ID:
        case DRV_DMA_FLOW_STATS_CHAN_ID:
        case DRV_DMA_REG_MAX_CHAN_ID:
        case DRV_DMA_TBL_RD1_CHAN_ID:
        case DRV_DMA_BUF_SCAN_CHAN_ID:
            value =  GetDmaCtlDrainEnable(V,dmaRegRdDrainEn_f, &dma_drain);
            if (en)
            {
                value &= ~(1 << (chan_id - SYS_DMA_TBL_RD_CHAN_ID));
            }
            else
            {
                value |= (1 << (chan_id - SYS_DMA_TBL_RD_CHAN_ID));
            }
            SetDmaCtlDrainEnable(V,dmaRegRdDrainEn_f, &dma_drain, value);
            break;

        case DRV_DMA_TBL_WR_CHAN_ID:
            SetDmaCtlDrainEnable(V, dmaRegWrDrainEn_f, &dma_drain, value);
            break;

        default:
            /* channel type without drain control: write back unchanged */
            break;
    }

    cmd = DRV_IOW(DmaCtlDrainEnable_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_drain));

    return CTC_E_NONE;
}

/**
 * Accumulate the hardware DMA packet-rx counters (channels 0..3) into the
 * software dma_stats entries 0..3 (frame, byte, drop and error counts).
 *
 * @param lchip local chip id
 * @return CTC_E_NONE on success
 */
int32
sys_duet2_dma_sync_pkt_rx_stats(uint8 lchip)
{
    uint32 cmd = 0;
    DmaPktRxStats_m stats;
    SYS_DMA_INIT_CHECK(lchip);

    /* Zero the local copy before the hardware read, consistent with
     * sys_duet2_dma_sync_pkt_tx_stats(); avoids accumulating stack garbage
     * should the read leave any field untouched. */
    sal_memset(&stats, 0, sizeof(DmaPktRxStats_m));
    cmd = DRV_IOR(DmaPktRxStats_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &stats));
    p_usw_dma_master[lchip]->dma_stats[0].u1.total_pkt_cnt += GetDmaPktRxStats(V, dmaPktRx0FrameCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[0].u2.total_byte_cnt += GetDmaPktRxStats(V, dmaPktRx0ByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[0].u3.drop_cnt      += GetDmaPktRxStats(V, dmaPktRx0DropCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[0].u4.error_cnt    += GetDmaPktRxStats(V, dmaPktRx0ErrorCnt_f, &stats);

    p_usw_dma_master[lchip]->dma_stats[1].u1.total_pkt_cnt += GetDmaPktRxStats(V, dmaPktRx1FrameCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[1].u2.total_byte_cnt += GetDmaPktRxStats(V, dmaPktRx1ByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[1].u3.drop_cnt      += GetDmaPktRxStats(V, dmaPktRx1DropCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[1].u4.error_cnt    += GetDmaPktRxStats(V, dmaPktRx1ErrorCnt_f, &stats);

    p_usw_dma_master[lchip]->dma_stats[2].u1.total_pkt_cnt += GetDmaPktRxStats(V, dmaPktRx2FrameCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[2].u2.total_byte_cnt += GetDmaPktRxStats(V, dmaPktRx2ByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[2].u3.drop_cnt      += GetDmaPktRxStats(V, dmaPktRx2DropCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[2].u4.error_cnt    += GetDmaPktRxStats(V, dmaPktRx2ErrorCnt_f, &stats);

    p_usw_dma_master[lchip]->dma_stats[3].u1.total_pkt_cnt += GetDmaPktRxStats(V, dmaPktRx3FrameCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[3].u2.total_byte_cnt += GetDmaPktRxStats(V, dmaPktRx3ByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[3].u3.drop_cnt      += GetDmaPktRxStats(V, dmaPktRx3DropCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[3].u4.error_cnt    += GetDmaPktRxStats(V, dmaPktRx3ErrorCnt_f, &stats);

    return CTC_E_NONE;
}

/**
 * Accumulate the hardware DMA packet-tx counters (channels 0..1) into the
 * software dma_stats entries 4..5 (good/bad frame and byte counts).
 *
 * @param lchip local chip id
 * @return CTC_E_NONE on success
 */
int32
sys_duet2_dma_sync_pkt_tx_stats(uint8 lchip)
{
    uint32 cmd = 0;
    DmaPktTxStats_m stats;
    SYS_DMA_INIT_CHECK(lchip);

    /* zero the local copy before the hardware read */
    sal_memset(&stats, 0, sizeof(DmaPktTxStats_m));
    cmd = DRV_IOR(DmaPktTxStats_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &stats));
    p_usw_dma_master[lchip]->dma_stats[4].u4.bad_byte_cnt += GetDmaPktTxStats(V, dmaPktTx0BadByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[4].u3.bad_pkt_cnt    += GetDmaPktTxStats(V, dmaPktTx0BadFrameCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[4].u2.good_byte_cnt  += GetDmaPktTxStats(V, dmaPktTx0GoodByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[4].u1.good_pkt_cnt   += GetDmaPktTxStats(V, dmaPktTx0GoodFrameCnt_f, &stats);

    p_usw_dma_master[lchip]->dma_stats[5].u4.bad_byte_cnt += GetDmaPktTxStats(V, dmaPktTx1BadByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[5].u3.bad_pkt_cnt    += GetDmaPktTxStats(V, dmaPktTx1BadFrameCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[5].u2.good_byte_cnt  += GetDmaPktTxStats(V, dmaPktTx1GoodByteCnt_f, &stats);
    p_usw_dma_master[lchip]->dma_stats[5].u1.good_pkt_cnt   += GetDmaPktTxStats(V, dmaPktTx1GoodFrameCnt_f, &stats);

    return CTC_E_NONE;
}

/**
 * Service completed descriptors on the TCAM-scan DMA channel: for every
 * done descriptor, hand the dumped TCAM memory to the SER module for
 * recovery, then clear the descriptor and return it to hardware.
 *
 * Runs under the channel mutex.
 *
 * @param lchip local chip id
 * @param chan  DMA channel id of the TCAM-scan channel
 * @return CTC_E_NONE on success
 */
int32
sys_duet2_dma_tcam_scan_func(uint8 lchip, uint16 chan)
{
    sys_dma_chan_t* p_dma_chan;
    sys_dma_desc_t* p_base_desc;
    DsDesc_m* p_desc;
    uint32 cur_index;
    uint32 process_cnt = 0;
    drv_ser_dma_tcam_param_t tcam_param;

#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif
    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan];
    DMA_LOCK(p_dma_chan->p_mutex);
    p_base_desc = p_dma_chan->p_desc;

    /*D2/TM: tcam scan dump memory */
    {
        uint8  mode = 0;
        /*uint32 interval = 0;*/
        drv_ser_scan_info_t ecc_scan_info;

        sal_memset(&ecc_scan_info, 0, sizeof(drv_ser_scan_info_t));
        ecc_scan_info.type = DRV_SER_SCAN_TYPE_TCAM;
        drv_ser_get_cfg(lchip, DRV_SER_CFG_TYPE_SCAN_MODE,&ecc_scan_info);
        mode =  ecc_scan_info.mode;
        /*interval =  ecc_scan_info.scan_interval;*/

        /* scan mode 0: disable the hardware trigger (mode 2, timer 0) */
        if(mode == 0)
        {
            sys_duet2_dma_set_tcam_scan_mode(lchip, 2, 0);
        }
    }

    /* walk from the first unprocessed descriptor, stop at the first not-done
     * one (note: walk does not wrap; the index wraps only when committing) */
    for (cur_index = p_dma_chan->current_index; cur_index < p_dma_chan->desc_depth; cur_index++)
    {
        p_desc = &(p_base_desc[cur_index].desc_info);
        SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(DsDesc_m));

        if (0 == GetDsDescEncapD2(V, done_f, p_desc))
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc\n", process_cnt);
#endif
            break;
        }

        process_cnt++;

        /* value1 holds the dump size; invalidate the dumped data region */
        SYS_USW_DMA_CACHE_INVALID(lchip, p_dma_chan->p_desc_info[cur_index].data_addr, p_dma_chan->p_desc_info[cur_index].value1);
        sal_memset(&tcam_param, 0, sizeof(tcam_param));

        /* value0 encodes mem id (high bits) and sub mem id (low byte) */
        GetDsDescEncapD2(A, timestamp_f, p_desc, &(tcam_param.time_stamp));
        tcam_param.mem_id = p_dma_chan->p_desc_info[cur_index].value0 >> 8;
        tcam_param.sub_mem_id = p_dma_chan->p_desc_info[cur_index].value0&0xFF;
        tcam_param.p_memory = p_dma_chan->p_desc_info[cur_index].data_addr;
        tcam_param.entry_size = p_dma_chan->p_desc_info[cur_index].value1;
        drv_ser_set_cfg(lchip, DRV_SER_CFG_TYPE_DMA_RECOVER_TCAM, &tcam_param);

        /* return the descriptor to hardware */
        SetDsDescEncapD2(V, done_f, p_desc, 0);
        #if(1 == SDK_WORK_PLATFORM)
            SetDsDescEncapD2(V, realSize_f, p_desc, 0);
        #endif

        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));
    }
    if(process_cnt)
    {
        /* tell hardware how many descriptors were freed, then advance */
        SYS_USW_DMA_CLEAR_MULTI_DESC_WITH_IO(lchip, p_dma_chan, process_cnt);
        p_dma_chan->current_index = ((p_dma_chan->current_index + process_cnt) % (p_dma_chan->desc_depth));
    }

    DMA_UNLOCK(p_dma_chan->p_mutex);

    return CTC_E_NONE;
}

/**
 * Poll a tx descriptor until hardware marks it done.
 *
 * Polls up to 100 times (10000 under emulation), sleeping 1 ms between
 * attempts (busy udelay when PACKET_TX_USE_SPINLOCK is defined).
 *
 * @return CTC_E_NONE when the descriptor is done, CTC_E_DMA on timeout
 */
STATIC INLINE int32
_sys_duet2_dma_wait_desc_finish(uint8 lchip, DsDesc_m* p_tx_desc_mem,sys_dma_chan_t* p_dma_chan)
{
    uint32  poll = 0;
    uint32  max_poll;
    int32   result = CTC_E_NONE;
    bool    finished = FALSE;

#ifdef EMULATION_ENV
    max_poll = 10000;
#else
    max_poll = 100;
#endif

    for (poll = 0; poll < max_poll; poll++)
    {
    #ifndef CTC_HOT_PLUG_DIS
        SYS_DMA_INIT_CHECK(lchip);
    #endif
        /* re-read the descriptor from memory, not a stale cache line */
        SYS_USW_DMA_CACHE_INVALID(lchip, p_tx_desc_mem, sizeof(DsDesc_m));
        if (GetDsDescEncapD2(V, done_f, p_tx_desc_mem))
        {
            /* the previous transmit on this descriptor has completed */
            finished = TRUE;
            break;
        }
    #ifndef PACKET_TX_USE_SPINLOCK
        sal_task_sleep(1);
    #else
        sal_udelay(1000);
    #endif
    }

    if (!finished)
    {
#ifdef DMA_DBG_ON
       SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "last transmit is not done,%d\n", p_dma_chan->current_index);
#endif
       result = CTC_E_DMA;
    }
    return result;
}

/**
 * Transmit an array of packets in one burst on the packet-TX1 DMA channel.
 *
 * Each packet occupies exactly one descriptor (zero-copy: the skb head is
 * mapped directly, no segmentation). The routine waits only for the last
 * descriptor of the span to be free, fills all descriptors, then tells the
 * hardware the new valid-descriptor count via DmaCtlTab.
 *
 * NOTE(review): unlike sys_duet2_dma_pkt_tx(), no channel mutex is taken
 * here -- confirm that callers serialize access to the TX1 ring.
 *
 * @param lchip          local chip id
 * @param p_pkt_tx_array array of packets to send
 * @param count          number of packets; must not exceed the desc depth
 * @return CTC_E_NONE on success
 */
int32
sys_duet2_dma_pkt_tx_array(uint8 lchip, ctc_pkt_tx_t** p_pkt_tx_array, uint32 count)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    uint32 pkt_idx = 0;
    DsDesc_m* p_tx_desc_mem;    
    ctc_pkt_tx_t* p_pkt_tx = NULL;
    uint64 phy_addr;
    uint32 cur_index = 0;
    uint16 pkt_len = 0;
    uint16 desc_cnt = 0;
    uint32 cmd = 0;
    uint32 vld_num = 0;
    uint32 tmp_index = 0;
    uint32 intr_vec[SYS_DMA_INTR_VEC] = {0};
    uint8 is_empty = 0;

    /*1. get dma chan info, use SYS_DMA_PACKET_TX1_CHAN_ID defaultly */
    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID]);
    if (count > p_dma_chan->desc_depth)
    {
#ifdef DMA_DBG_ON
       SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, "Count is more than desc-depth %u, not support\n", p_dma_chan->desc_depth);
#endif
        return CTC_E_NOT_SUPPORT;
    }
    cur_index = p_dma_chan->current_index;

    /*2. wait the end descriptor that current tx will use done  */
    tmp_index = (cur_index+count);
    tmp_index = (tmp_index >= p_dma_chan->desc_depth) ?(tmp_index-p_dma_chan->desc_depth):tmp_index;
    p_tx_desc_mem = &(p_dma_chan->p_desc[tmp_index].desc_info);
    CTC_ERROR_RETURN(_sys_duet2_dma_wait_desc_finish(lchip, p_tx_desc_mem, p_dma_chan));

    /*3. configure desc one by one*/
    for (pkt_idx = 0; pkt_idx < count; pkt_idx++)
    {
        /* pool_id != 0 means the previous tx on this desc borrowed an rx
         * buffer: return its address to the owning rx channel's fifo */
        if (p_dma_chan->p_desc_check[cur_index].pool_id)
        {
            uint32 data_address = 0;
            sal_fifo_t* p_data_fifo = NULL;
            /*recycle data address*/
            p_data_fifo =  p_usw_dma_master[lchip]->dma_chan_info[p_dma_chan->p_desc_check[cur_index].pool_id-1].p_data_fifo;
            if (NULL != p_data_fifo)
            {
                is_empty = sal_fifo_len(p_data_fifo) == 0;
                data_address = p_dma_chan->p_desc_check[cur_index].phy_address;
                sal_fifo_put(p_data_fifo, (uint8*)&data_address, sizeof(uint32));
                if (is_empty)
                {/* if buffer pool is empty before putting , need trigger DMA interrupt affer putting for rx process */
                    intr_vec[0] = 1<<(p_dma_chan->p_desc_check[cur_index].pool_id-1);/* rx channel id = pool_id - 1 */
                    cmd = DRV_IOW(DmaCtlIntrFunc_t, DRV_ENTRY_FLAG);
                    CTC_ERROR_RETURN(DRV_IOCTL(lchip, INTR_INDEX_VAL_SET, cmd, intr_vec));
                }
            }
        }

        p_tx_desc_mem = &(p_dma_chan->p_desc[cur_index].desc_info);

        p_pkt_tx = p_pkt_tx_array[pkt_idx];

        /* use zero copy mode defaultly, skb.head is dma logic address */
        phy_addr = (p_pkt_tx->l2p_addr_func) ? p_pkt_tx->l2p_addr_func((void*)p_pkt_tx->skb.head, p_pkt_tx->l2p_user_data):\
            SYS_DMA_LOGIC_TO_PHY(lchip, (void*)p_pkt_tx->skb.head);
        pkt_len = p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN;

        /* one desc per packet: sop and eop both set; memAddr takes the
         * physical address shifted right by 4 */
        SetDsDescEncapD2(V, u1_pkt_eop_f, p_tx_desc_mem, 1);
        SetDsDescEncapD2(V, u1_pkt_sop_f, p_tx_desc_mem, 1);
        SetDsDescEncapD2(V, cfgSize_f, p_tx_desc_mem, pkt_len);
        SetDsDescEncapD2(V, memAddr_f, p_tx_desc_mem, (phy_addr >> 4));
        SetDsDescEncapD2(V, done_f, p_tx_desc_mem, 0);
        SetDsDescEncapD2(V, realSize_f, p_tx_desc_mem, pkt_len-SYS_USW_PKT_HEADER_LEN);
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_tx_desc_mem, sizeof(DsDesc_m));

        desc_cnt++;

        /* store phy_address & pool_id in db to recycle fifo when tx done */
        p_dma_chan->p_desc_check[cur_index].phy_address = phy_addr;
        p_dma_chan->p_desc_check[cur_index].pool_id = p_pkt_tx->skb.pool_id;

        cur_index++;
        if (cur_index >= p_dma_chan->desc_depth)
        {
            cur_index = 0;
        }
    }

    /*4. update current index*/
    p_dma_chan->current_index = cur_index;

    /*5. update desc vld_num */
#if (0 == SDK_WORK_PLATFORM)
    vld_num = desc_cnt;
#else
    /* simulation: DmaCtlTab vldNum must be read-modify-written */
    {
        uint32 valid_cnt = 0;
        cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num));
        valid_cnt = GetDmaCtlTab(V, vldNum_f, &vld_num);
        valid_cnt += desc_cnt;
        SetDmaCtlTab(V, vldNum_f, &vld_num, valid_cnt);
    }
#endif
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num));

    return CTC_E_NONE;
}

/**
 * Core packet-transmit routine: maps (zero-copy) or copies the packet into
 * one or more DMA descriptors, kicks the hardware via DmaCtlTab, recycles
 * borrowed rx buffers parked on completed descriptors, and fires the stored
 * per-descriptor tx-done callback.
 *
 * Caller must hold the channel tx mutex (see sys_duet2_dma_pkt_tx).
 *
 * @param lchip      local chip id
 * @param p_pkt_tx   packet to send (skb plus tx info/flags/callback)
 * @param p_dma_chan tx channel to use
 * @return CTC_E_NONE on success, else wait/ioctl error code
 */
int32
_sys_duet2_dma_do_packet_tx(uint8 lchip, ctc_pkt_tx_t* p_pkt_tx, sys_dma_chan_t* p_dma_chan)
{
    DsDesc_m* p_tx_desc_mem;
    sys_dma_tx_mem_t* p_tx_mem_info;
    uint64 phy_addr;
    int32 ret = 0;
    uint32 cur_index = p_dma_chan->current_index;
    uint16 data_size = p_dma_chan->data_size;
    /* total bytes still to send, packet header included */
    uint16 left_pkt_len = p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN;
    uint16 pkt_len_offset = 0;
    uint16 pkt_len = 0;
    uint8 is_eop = 0;
    uint8 desc_cnt = 0;
    uint32 desc_depth = p_dma_chan->desc_depth;
    uint32 cmd = 0;
    uint32 vld_num = 0;
    uint32 last_index = cur_index;
    uint32 intr_vec[SYS_DMA_INTR_VEC] = {0};
    uint8 is_empty = 0;

    do
    {
        if (cur_index >= desc_depth)
        {
            cur_index = 0;
        }

        /*1. Free last & old tx info*/
        p_tx_desc_mem = &(p_dma_chan->p_desc[cur_index].desc_info);
        p_tx_mem_info = p_dma_chan->p_tx_mem_info + cur_index;

        ret = _sys_duet2_dma_wait_desc_finish(lchip, p_tx_desc_mem, p_dma_chan);
        /* pool_id != 0 means the previous tx on this desc borrowed an rx
         * buffer: return its address to the owning rx channel's fifo */
        if (ret == CTC_E_NONE && p_dma_chan->p_desc_check[cur_index].pool_id)
        {
            uint32 data_address = 0;
            sal_fifo_t* p_data_fifo = NULL;
            /*recycle data address*/
            p_data_fifo =  p_usw_dma_master[lchip]->dma_chan_info[p_dma_chan->p_desc_check[cur_index].pool_id-1].p_data_fifo;
            if (NULL != p_data_fifo)
            {
                is_empty = sal_fifo_len(p_data_fifo) == 0;
                data_address = p_dma_chan->p_desc_check[cur_index].phy_address;
                sal_fifo_put(p_data_fifo, (uint8*)&data_address, sizeof(uint32));
                if (is_empty)
                {/* if buffer pool is empty before putting , need trigger DMA interrupt affer putting for rx process */
                    intr_vec[0] = 1<<(p_dma_chan->p_desc_check[cur_index].pool_id-1);/* rx channel id = pool_id - 1 */
                    cmd = DRV_IOW(DmaCtlIntrFunc_t, DRV_ENTRY_FLAG);
                    CTC_ERROR_RETURN(DRV_IOCTL(lchip, INTR_INDEX_VAL_SET, cmd, intr_vec));
                }
            }
        }
        /* fire the tx-done callback of the transmit that previously used
         * this descriptor (runs even when the wait timed out) */
        if (p_tx_mem_info->callback)
        {
            p_tx_mem_info->callback(p_tx_mem_info->p_pkt_addr, p_tx_mem_info->user_data);
        }
        if (ret != CTC_E_NONE)
        {
            return ret;
        }

        /* remember this packet's callback for when its tx completes */
        p_tx_mem_info->callback = p_pkt_tx->callback;
        p_tx_mem_info->user_data = p_pkt_tx->user_data;
        p_tx_mem_info->p_pkt_addr = p_tx_mem_info->callback ? p_pkt_tx->skb.data : NULL;
        if(p_pkt_tx->tx_info.flags & CTC_PKT_FLAG_ZERO_COPY)
        {
            is_eop = 1;/*zero copy must only use one desc*/
            phy_addr = (p_pkt_tx->l2p_addr_func) ? p_pkt_tx->l2p_addr_func((void*)p_pkt_tx->skb.head, p_pkt_tx->l2p_user_data):\
                SYS_DMA_LOGIC_TO_PHY(lchip, (void*)p_pkt_tx->skb.head);

            /* mapped address must match the channel's high 32 bits and be
             * 16-byte aligned (memAddr stores the address >> 4) */
            if(p_pkt_tx->l2p_addr_func && (p_usw_dma_master[lchip]->dma_high_addr != (phy_addr>>32) || (0 != (phy_addr&0xF))))
            {
                return CTC_E_INVALID_PARAM;
            }
            pkt_len = left_pkt_len;
            left_pkt_len = 0;
        }
        else
        {
            void* new_addr = p_tx_mem_info->p_mem_addr;
            /* short packets are copied and zero-padded up to the minimum */
            if(p_pkt_tx->skb.len< SYS_USW_PKT_MIN_PKT_LEN )
            {
                sal_memcpy((uint8*)new_addr, p_pkt_tx->skb.head, p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN);
                sal_memset((uint8*)new_addr + p_pkt_tx->skb.len + SYS_USW_PKT_HEADER_LEN, 0,SYS_USW_PKT_MIN_PKT_LEN - p_pkt_tx->skb.len);
                is_eop = 1;
                pkt_len = SYS_USW_PKT_MIN_PKT_LEN+SYS_USW_PKT_HEADER_LEN;
            }
            else
            {
                /* segment: copy at most data_size bytes per descriptor */
                is_eop = (left_pkt_len<= data_size)?1:0;
                pkt_len = is_eop?left_pkt_len:data_size;
                sal_memcpy((uint8*)new_addr, p_pkt_tx->skb.head+pkt_len_offset, pkt_len);
            }
            SYS_USW_DMA_CACHE_FLUSH(lchip, new_addr, pkt_len);

            if (0 == is_eop)
            {
                pkt_len_offset += data_size;
                left_pkt_len = (left_pkt_len > data_size)?(left_pkt_len-data_size):0;
            }
            else
            {
                pkt_len_offset = 0;
                left_pkt_len = 0;
            }
            phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, new_addr);
        }

        /* NOTE(review): u1_pkt_eop_f is written as constant 1 even when
         * is_eop == 0 for a multi-descriptor packet (likewise sop on
         * non-first descriptors) -- confirm whether hardware derives
         * sop/eop elsewhere or these should use is_eop / first-desc flags */
        SetDsDescEncapD2(V, u1_pkt_eop_f, p_tx_desc_mem, 1);
        SetDsDescEncapD2(V, u1_pkt_sop_f, p_tx_desc_mem, 1);
        SetDsDescEncapD2(V, cfgSize_f, p_tx_desc_mem, pkt_len);
        SetDsDescEncapD2(V, memAddr_f, p_tx_desc_mem, (phy_addr >> 4));
        SetDsDescEncapD2(V, done_f, p_tx_desc_mem, 0);
        SetDsDescEncapD2(V, realSize_f, p_tx_desc_mem, pkt_len-SYS_USW_PKT_HEADER_LEN);
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_tx_desc_mem, sizeof(DsDesc_m));

        /* store phy_address & pool_id in db to recycle fifo when tx done */
        p_dma_chan->p_desc_check[cur_index].phy_address = phy_addr;
        p_dma_chan->p_desc_check[cur_index].pool_id = p_pkt_tx->skb.pool_id;

        desc_cnt++;
        cur_index++;

        if(is_eop)
        {
            #if (0 == SDK_WORK_PLATFORM)
            vld_num = desc_cnt;
            #else
            /* simulation: DmaCtlTab vldNum must be read-modify-written */
            {
                uint32 valid_cnt = 0;
                cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
                CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num));
                valid_cnt = GetDmaCtlTab(V, vldNum_f, &vld_num);
                valid_cnt += 1;
                SetDmaCtlTab(V, vldNum_f, &vld_num, valid_cnt);
            }
            #endif
            cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num));

            /* synchronous zero-copy without callback or pool buffer: the
             * caller owns the data, so wait for completion before return */
            if(!p_pkt_tx->callback && (p_pkt_tx->tx_info.flags & CTC_PKT_FLAG_ZERO_COPY) && 0 == p_dma_chan->p_desc_check[last_index].pool_id)
            {
                ret = _sys_duet2_dma_wait_desc_finish(lchip, p_tx_desc_mem, p_dma_chan);
                if (ret != CTC_E_NONE)
                {
                    return ret;
                }
                SetDsDescEncapD2(V, memAddr_f, p_tx_desc_mem, 0);
            }
            /* next descriptor, tx_desc_index: 0~tx_desc_num-1*/
            p_dma_chan->current_index = cur_index% desc_depth;
        }
    }while(left_pkt_len>0);
    return ret;
}

/**
 * Send one packet via the DMA packet-TX path.
 *
 * Verifies the tx channels are enabled, picks the tx ring from the packet's
 * priority (TX0 for low priority or when the tx timer is enabled, else TX1),
 * then runs _sys_duet2_dma_do_packet_tx under the ring's tx mutex.
 *
 * @param lchip    local chip id
 * @param p_pkt_tx packet to send
 * @return CTC_E_NONE on success, CTC_E_NOT_SUPPORT if tx is disabled
 */
int32
sys_duet2_dma_pkt_tx(uint8 lchip, ctc_pkt_tx_t* p_pkt_tx)
{
    sys_dma_chan_t* p_chan = NULL;
    uint8 tx0_en = 0;
    uint8 tx1_en = 0;
    uint8 ring = 0;
    int32 ret = CTC_E_NONE;

    SYS_DMA_INIT_CHECK(lchip);

   /* packet length check */
    tx0_en = CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX0_CHAN_ID);
    tx1_en = CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX1_CHAN_ID);
    if (!tx0_en || (!tx1_en && !p_usw_dma_master[lchip]->pkt_tx_timer_en))
    {
#ifdef DMA_DBG_ON
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, "Dma Packet Tx  Function is not enabled!!!\n");
#endif
        return CTC_E_NOT_SUPPORT;
    }

    /* use which channel should by the packet's priority */
    ring = (p_pkt_tx->tx_info.priority <= SYS_DMA_RING_SELECT_PRIROITY || p_usw_dma_master[lchip]->pkt_tx_timer_en)?0:1;
    p_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX0_CHAN_ID + ring]);

    DMA_TX_LOCK(p_chan->p_mutex);
    ret = _sys_duet2_dma_do_packet_tx(lchip, p_pkt_tx, p_chan);
    DMA_TX_UNLOCK(p_chan->p_mutex);

    return ret;
}

STATIC INLINE int32
_sys_duet2_dma_pkt_rx_func(uint8 lchip, uint8 chan)
{
    ctc_pkt_buf_t pkt_buf[64];
    ctc_pkt_rx_t pkt_rx;
    ctc_pkt_rx_t* p_pkt_rx = &pkt_rx;
    sys_dma_chan_t* p_dma_chan;
    sys_dma_desc_t* p_base_desc;
    DsDesc_m* p_desc;
    uint32 cur_index;
    uint32 buf_count = 0;
    uint64 phy_addr;
    uint32 process_cnt = 0;
    uint32 is_sop;
    uint32 is_eop;
    uint8 need_eop = 0;
    uint32 wait_cnt = 0;
    uint32 desc_count = 0;
    uint32 desc_depth = 0;
	uint8 data_fifo_valid = 0;

    sal_memset(p_pkt_rx, 0, sizeof(ctc_pkt_rx_t));
    p_pkt_rx->mode = CTC_PKT_MODE_DMA;
    p_pkt_rx->pkt_buf = pkt_buf;
    p_pkt_rx->lchip = lchip;

#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif

    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan];
    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;
    desc_depth = p_dma_chan->desc_depth;
    data_fifo_valid = p_dma_chan->p_data_fifo?1:0;

    for (;; cur_index++)
    {
#ifndef CTC_HOT_PLUG_DIS
        SYS_DMA_INIT_CHECK(lchip);

        if (sys_usw_chip_check_active(lchip))
        {
            break;
        }
#endif

        if (cur_index >= desc_depth)
        {
            cur_index = 0;
        }

        p_desc = &(p_base_desc[cur_index].desc_info);
        if (0 == GetDsDescEncapD2(V, done_f, p_desc))
        {
            if (need_eop)
            {
#ifdef DMA_DBG_ON
                 SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Desc not done, But need eop!!desc index %d\n", cur_index);
#endif

                while(wait_cnt < 0xffff)
                {
                    SYS_USW_DMA_CACHE_INVALID(lchip, p_base_desc, sizeof(sys_dma_desc_t)*desc_depth);
                    if (GetDsDescEncapD2(V, done_f, p_desc))
                    {
                        break;
                    }
                    wait_cnt++;
                }

                /* Cannot get EOP, means no EOP packet error, just clear desc*/
                if (wait_cnt >= 0xffff)
                {
#ifdef DMA_DBG_ON
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "No EOP, desc index %d, buf_count %d\n", cur_index, buf_count);
#endif
                    if(desc_count)  SYS_USW_DMA_CLEAR_MULTI_DESC_WITH_IO(lchip, p_dma_chan,desc_count);
                    desc_count = 0;
                   buf_count = 0;
                   need_eop = 0;
                   break;
                }
                wait_cnt = 0;
            }
            else
            {
#ifdef DMA_DBG_ON
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc index %d\n", process_cnt, cur_index);
#endif
                if(desc_count)  SYS_USW_DMA_CLEAR_MULTI_DESC_WITH_IO(lchip, p_dma_chan, desc_count);
                desc_count = 0;
                break;
            }
        }

        is_sop = GetDsDescEncapD2(V, u1_pkt_sop_f, p_desc);
        is_eop = GetDsDescEncapD2(V, u1_pkt_eop_f, p_desc);
        if (is_sop)
        {
            /*Before get EOP, next packet SOP come, no EOP packet error, drop error packet */
            if (need_eop)
            {
                buf_count = 0;
            }
            p_pkt_rx->pkt_len = 0;
            need_eop = 1;
        }

        /* Cannot get SOP, means no SOP packet error, just clear desc*/
        if (0 == buf_count)
        {
            if (0 == is_sop)
            {
#ifdef DMA_DBG_ON
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "[DMA] PKT RX error, lchip %d chan %d index %d first is not SOP\n", lchip, chan, cur_index);
#endif
                goto error_proc;
            }
        }

        p_pkt_rx->pkt_buf[buf_count].data = (uint8 *)(p_dma_chan->p_desc_info[cur_index].data_addr);
        p_pkt_rx->pkt_buf[buf_count].len = GetDsDescEncapD2(V, realSize_f, p_desc);
        p_pkt_rx->pkt_len += p_pkt_rx->pkt_buf[buf_count].len;

        if (data_fifo_valid)
        {
            uint32 new_address = 0;
            uint32 len = 0;
            
            p_pkt_rx->pkt_buf[buf_count].pool_id = (chan+1);
            /*step1:alloc new data address*/
            len = sal_fifo_get(p_dma_chan->p_data_fifo, (unsigned char*)&new_address, sizeof(uint32));
            if (len < sizeof(uint32))
            {
                if(desc_count)  SYS_USW_DMA_CLEAR_MULTI_DESC_WITH_IO(lchip, p_dma_chan, desc_count);
                goto error_proc2;
            }
        
            /*step2:set new address to desc*/
            SetDsDescEncapD2(V, memAddr_f, p_desc, new_address >> 4);

            /*step3:update logic address in db*/
            COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr, new_address, phy_addr);
            p_dma_chan->p_desc_info[cur_index].data_addr = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
        }

        /*Max desc num for one packet is 64, so, % 64*/
        buf_count = ((buf_count+1) & 0x3f);

        if (is_eop)
        {
            p_pkt_rx->buf_count = buf_count;
            p_pkt_rx->dma_chan = chan;
            SYS_DMA_CB_IN_CNT_ADD(lchip, chan);
            p_usw_dma_master[lchip]->dma_rx_cb(p_pkt_rx);
            SYS_DMA_CB_OUT_CNT_ADD(lchip, chan);
            process_cnt += buf_count;
            buf_count = 0;
            need_eop = 0;
        }

error_proc:
        SetDsDescEncapD2(V, done_f, p_desc, 0);
        SetDsDescEncapD2(V, reserved0_f, p_desc, 0);
        #if(1 == SDK_WORK_PLATFORM)
            /*Uml need clear realsize*/
            SetDsDescEncapD2(V, realSize_f, p_desc, 0);
        #endif
        desc_count++;
        if(desc_count >= p_dma_chan->threshold && (!need_eop))
        {
            SYS_USW_DMA_CLEAR_MULTI_DESC_WITH_IO(lchip, p_dma_chan, desc_count);
            desc_count = 0;
            if(process_cnt >= desc_depth )
            {
                cur_index++;
                break;
            }
        }
    }

error_proc2:
#ifndef CTC_HOT_PLUG_DIS
    SYS_DMA_INIT_CHECK(lchip);
#endif
    p_dma_chan->current_index = (cur_index>=desc_depth)?(cur_index%desc_depth):cur_index;

    return CTC_E_NONE;
}

/**
 @brief Packet-RX entry for one DMA channel: either hand off to KNET (kernel
        networking takes the packets) or drain the channel in user space,
        then re-arm the channel's masked interrupt.

 @param lchip        local chip id
 @param chan_id      DMA channel to service
 @param thread_info  unused here (kept for the dispatch-table signature)

 @return CTC_E_NONE on success; CTC_E_NOT_INIT is propagated without
         re-enabling the ISR when hot-plug support is compiled in.
*/
int32
sys_duet2_dma_pkt_rx(uint8 lchip, uint8 chan_id, void* thread_info)
{
    int32 rv = CTC_E_NONE;
    uint32 vec_tmp[SYS_DMA_INTR_VEC] = {0};
    sys_dma_chan_t* p_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan_id];

    if (!p_chan->pkt_knet_en)
    {
        rv = _sys_duet2_dma_pkt_rx_func(lchip, chan_id);
#ifndef CTC_HOT_PLUG_DIS
        if (CTC_E_NOT_INIT == rv)
        {
            /* DMA module gone (hot unplug): do not touch interrupt registers */
            return CTC_E_NOT_INIT;
        }
#endif
    }

    /* release mask channel isr */
    SYS_USW_DMA_INTR_ENABLE(lchip, DmaCtlIntrFunc_t, vec_tmp, chan_id);

    return rv;
}

/**
 @brief Look up the active TX-timer session occupying a given descriptor index.

 @param lchip     local chip id
 @param desc_idx  descriptor ring slot to search for
 @param p_id      [out] session id whose desc_idx matches (only valid sessions)

 @return CTC_E_NONE when found, CTC_E_NOT_EXIST otherwise.
*/
STATIC INLINE int32
_sys_duet2_dma_get_tx_session_from_desc_index(uint8 lchip, uint32 desc_idx, uint16* p_id)
{
    uint16 sess = 0;

    for (sess = 0; sess < SYS_PKT_MAX_TX_SESSION; sess++)
    {
        /* only sessions in the enabled state own a ring slot */
        if (p_usw_dma_master[lchip]->tx_session[sess].state
            && (p_usw_dma_master[lchip]->tx_session[sess].desc_idx == desc_idx))
        {
            *p_id = sess;
            return CTC_E_NONE;
        }
    }

    return CTC_E_NOT_EXIST;
}

/**
 @brief Start or stop the DMA-driven periodic packet TX timer
        (timer unit is second, 0 means disable).

 Uses two cooperating channels: SYS_DMA_PACKET_TX1_CHAN_ID transmits the
 session packets, SYS_DMA_TBL_WR_CHAN_ID performs timer-triggered register
 writes. Enable path arms both channels and programs the trigger period;
 disable path stops both channels and clears the trigger/interrupt enables.

 @return CTC_E_INVALID_CONFIG when the feature was never provisioned
         (see sys_duet2_dma_set_packet_timer_cfg), CTC_E_NOT_READY when no
         session has been added yet, else CTC_E_NONE / DRV error codes.
*/
int32
sys_duet2_dma_set_pkt_timer(uint8 lchip, uint32 timer, uint8 enable)
{
    uint32 cmd = 0;
    DmaStaticInfo_m static_info;
    uint32 session_num = 0;
    DmaCtlTab_m tab_ctl;
    uint32 timer_v[2] = {0};
    DmaRegWrTrigCfg_m wr_timer;
    uint32 value_en = 0;
    uint64 timer_l = 0;
    uint8 tx_session_chan = 0;

    tx_session_chan = SYS_DMA_PACKET_TX1_CHAN_ID;
    /* feature must be provisioned (desc rings allocated) before use */
    if (!p_usw_dma_master[lchip]->pkt_tx_timer_en || !p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].p_desc)
    {
        return CTC_E_INVALID_CONFIG;
    }

    if (enable == TRUE)
    {
        uint32 valid_num = 0;

        /* at least one TX session must already be installed */
        if (!p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].desc_num)
        {
            return CTC_E_NOT_READY;
        }

        /* remember period so desc-count changes can re-derive per-desc interval */
        p_usw_dma_master[lchip]->tx_timer = timer;

        /* top up the table-write channel's valid descriptor count to its full depth */
        cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &tab_ctl));
        valid_num = GetDmaCtlTab(V, vldNum_f, &tab_ctl);
        if (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].desc_num > valid_num)
        {
            SetDmaCtlTab(V, vldNum_f, &tab_ctl, (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].desc_num-valid_num));
            cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &tab_ctl));
        }

        /* enable the packet TX channel */
        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &static_info));
        SetDmaStaticInfo(V, chanEn_f, &static_info, 1);
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &static_info));
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].chan_en = 1;

        /* enable the table-write (trigger) channel */
        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &static_info));
        SetDmaStaticInfo(V, chanEn_f, &static_info, 1);
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &static_info));
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].chan_en = 1;

        /*cfg real ring depth*/
        session_num = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth;
        cmd = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_ringDepth_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &session_num));

        /*cfg wr register trigger interrupt per 255 desc (or per ring depth if smaller)*/
        session_num = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].desc_depth;
        value_en = (session_num<255)?session_num:255;
        cmd = DRV_IOW(DmaRegWrIntrCntCfg_t, DmaRegWrIntrCntCfg_cfgRegWrIntrCnt_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value_en));

        /* per-descriptor trigger period = total period / number of sessions.
           NOTE(review): cfgRegWrTrigNs_f is named as nanoseconds, but
           timer[s] * 1000000 yields microseconds — the comment in the
           original said "ms"; confirm the intended unit against the chip spec */
        session_num = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth;
        timer_l = (uint64)timer*1000000/session_num; /*uints is ms*/
        timer_v[0] = timer_l&0xFFFFFFFF;
        timer_v[1] = (timer_l >> 32) & 0xFFFFFFFF;
        cmd = DRV_IOR(DmaRegWrTrigCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &wr_timer));
        SetDmaRegWrTrigCfg(A, cfgRegWrTrigNs_f, &wr_timer, timer_v);
        cmd = DRV_IOW(DmaRegWrTrigCfg_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &wr_timer));
        value_en = 1;
    }
    else
    {
        /* disable path: stop both channels, then clear trigger/interrupt enables below */
        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &static_info));
        SetDmaStaticInfo(V, chanEn_f, &static_info, 0);
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &static_info));
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].chan_en = 0;

        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &static_info));
        SetDmaStaticInfo(V, chanEn_f, &static_info, 0);
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &static_info));
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].chan_en = 0;
        value_en = 0;
    }

    /* shared enable bits: register-write trigger and its DMA interrupt */
    cmd = DRV_IOW(DmaRegTrigEnCfg_t, DmaRegTrigEnCfg_cfgRegWrTrigEn_f);
    CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value_en));

    cmd = DRV_IOW(DmaRegIntrEnCfg_t, DmaRegIntrEnCfg_cfgRegWrDmaIntrEn_f);
    CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value_en));

    return CTC_E_NONE;

}

/**
 @brief Install, refresh or remove one packet for the periodic DMA packet-TX timer.

 @param lchip       local chip id
 @param session_id  index into p_usw_dma_master[lchip]->tx_session[]
 @param p_pkt       packet to transmit; CTC_PKT_FLAG_SESSION_PENDING_EN set in
                    tx_info.flags means "do not transmit" (stage / remove)

 A staging DMA buffer (header + payload) is always allocated and filled. For an
 enabled session it is hooked into the TX descriptor ring; when a running
 session is removed, the last ring entry is moved into the vacated slot and,
 if the timer is already active, the TX channel is quiesced and the trigger
 timer re-programmed for the new ring depth.

 Fixes vs. previous revision:
  - the staging buffer is no longer leaked on the CTC_E_DMA error paths, on
    session-lookup failure, or in the remove path (where it is never used);
  - when relocating the last session's descriptor, its cfgSize is now read
    from that session's OWN descriptor (whose cache is invalidated right
    before) instead of from the vacated slot, so memAddr and cfgSize stay
    paired per session.
*/
int32
sys_duet2_dma_set_session_pkt(uint8 lchip, uint16 session_id, ctc_pkt_tx_t* p_pkt)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    DsDesc_m* p_sys_desc = NULL;
    uint64 phy_addr = 0;
    void*  p_mem_addr = NULL;
    uint32 desc_idx = 0;
    uint8 tx_enable = 0;
    uint32 max_session = 0;
    uint32 phy_addr_low = 0;
    uint8 is_update = 0;
    uint32 cmd = 0;
    uint32 value = 0;
    uint32 tx_start = 0;
    uint16 last_session_id = 0;
    uint8 is_remove = 0;
    uint8 tx_session_chan = 0;
    int32 ret = CTC_E_NONE;

    tx_session_chan = SYS_DMA_PACKET_TX1_CHAN_ID;
    /* feature must be provisioned first (see sys_duet2_dma_set_packet_timer_cfg) */
    if (!p_usw_dma_master[lchip]->pkt_tx_timer_en || !p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].p_desc)
    {
        return CTC_E_INVALID_CONFIG;
    }

    tx_enable = !CTC_FLAG_ISSET(p_pkt->tx_info.flags, CTC_PKT_FLAG_SESSION_PENDING_EN);
    max_session = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].desc_depth;

    /* new session: reject when the TX ring is already full */
    if (tx_enable && !p_usw_dma_master[lchip]->tx_session[session_id].state &&
        (p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].desc_num >= max_session))
    {
        return CTC_E_INVALID_PARAM;
    }

    if (p_pkt->skb.len > p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].data_size-SYS_USW_PKT_HEADER_LEN)
    {
        return CTC_E_INVALID_PARAM;
    }

    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan];

    /* allocate and fill the staging buffer (bridge header + payload) */
    p_mem_addr = SYS_DMA_ALLOC(lchip, (p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN), 0);
    if (NULL == p_mem_addr)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
        return CTC_E_NO_MEMORY;
    }
    sal_memset(p_mem_addr, 0, (p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN));
    phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
    phy_addr_low = phy_addr&0xFFFFFFFF;

    sal_memcpy((uint8*)p_mem_addr, p_pkt->skb.head, p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN);

    /* NOTE(review): the CTC_ERROR_RETURN macros below still leak p_mem_addr on
       (rare) register-I/O failure; closing those paths needs a goto-cleanup
       restructure of the whole function */
    cmd = DRV_IOR(DmaStaticInfo_t, DmaStaticInfo_chanEn_f);
    CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &tx_start));
    if (tx_enable)
    {
        if (p_usw_dma_master[lchip]->tx_session[session_id].state)
        {
            /*session already tx enable, update desc info in place*/
            desc_idx = p_usw_dma_master[lchip]->tx_session[session_id].desc_idx;
        }
        else
        {
            /*new added session, append behind the current ring tail*/
            desc_idx = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth;
        }
        p_sys_desc = &(p_dma_chan->p_desc[desc_idx].desc_info);
        SetDsDescEncapD2(V, memAddr_f, p_sys_desc, (phy_addr_low >> 4));
        SetDsDescEncapD2(V, cfgSize_f, p_sys_desc, (p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN));
        SetDsDescEncapD2(V, u1_pkt_eop_f, p_sys_desc, 1);
        SetDsDescEncapD2(V, u1_pkt_sop_f, p_sys_desc, 1);
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_sys_desc, sizeof(DsDesc_m));

        if (!p_usw_dma_master[lchip]->tx_session[session_id].state)
        {
            p_usw_dma_master[lchip]->tx_session[session_id].desc_idx = desc_idx;
            p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_num++;
            p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth++;
        }
    }
    else if (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_num &&
        p_usw_dma_master[lchip]->tx_session[session_id].state)
    {
        /*remove tx session: the session owning the last ring slot will be relocated*/
        ret = _sys_duet2_dma_get_tx_session_from_desc_index(lchip,
            (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_num-1), &last_session_id);
        if (CTC_E_NONE != ret)
        {
            SYS_DMA_FREE(lchip, p_mem_addr);  /* fix: release staging buffer on error */
            return ret;
        }
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_num--;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth--;
        is_remove = 1;
    }

    /* ring membership changed while the timer is running -> channel must be quiesced */
    is_update = (p_usw_dma_master[lchip]->tx_session[session_id].state != tx_enable) && tx_start;
    if (is_update)
    {
        uint32 timer_v[2] = {0};
        DmaRegWrTrigCfg_m wr_timer;
        uint64 timer_l = 0;
        uint16 cnt = 0;
        uint8 clear_done = 0;

        /*1. disable table wr trigger */
        value = 0;
        cmd = DRV_IOW(DmaRegTrigEnCfg_t, DmaRegTrigEnCfg_cfgRegWrTrigEn_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));

        /*2. confirm tx channel valid num is 0*/
        value = 0;
        do
        {
            cmd = DRV_IOR(DmaCtlTab_t, DmaCtlTab_vldNum_f);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &value));
            cnt++;
        } while(value&&(cnt<0xffff));

        cmd = DRV_IOR(DmaCtlTab_t, DmaCtlTab_vldNum_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &value));
        if (value)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Tx timer channel cannot clear cnt:%u\n", cnt);
            SYS_DMA_FREE(lchip, p_mem_addr);  /* fix: release staging buffer on error */
            return CTC_E_DMA;
        }

        /*3. clear dma channel dynamic info*/
        value = (1 << SYS_DMA_PACKET_TX1_CHAN_ID);
        cmd = DRV_IOW(DmaClearCtl_t, DmaClearCtl_dmaClearEn_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));

        /*3.1 wait clear action done*/
        cnt = 0;
        do
        {
            cmd = DRV_IOR(DmaClearCtl_t, DmaClearCtl_dmaClearPending_f);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
            clear_done = !CTC_IS_BIT_SET(value, SYS_DMA_PACKET_TX1_CHAN_ID);
            cnt++;
        } while(!clear_done&&(cnt<0xffff));

        cmd = DRV_IOR(DmaClearCtl_t, DmaClearCtl_dmaClearPending_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
        clear_done = !CTC_IS_BIT_SET(value, SYS_DMA_PACKET_TX1_CHAN_ID);
        if (!clear_done)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Tx timer channel cannot clear cnt:%u\n", cnt);
            SYS_DMA_FREE(lchip, p_mem_addr);  /* fix: release staging buffer on error */
            return CTC_E_DMA;
        }

        if (is_remove)
        {
            uint16 last_size = 0;
            DsDesc_m* p_last_desc = NULL;

            /*disable session tx function: move the last session's desc content
              into the removed position so the ring stays dense*/
            desc_idx = p_usw_dma_master[lchip]->tx_session[session_id].desc_idx;
            p_sys_desc = &(p_dma_chan->p_desc[desc_idx].desc_info);
            p_last_desc = &(p_dma_chan->p_desc[p_usw_dma_master[lchip]->tx_session[last_session_id].desc_idx].desc_info);
            phy_addr_low = (uint32)p_usw_dma_master[lchip]->tx_session[last_session_id].phy_addr;

            SYS_USW_DMA_CACHE_INVALID(lchip, p_last_desc, sizeof(DsDesc_m));

            /* fix: take the moved session's cfgSize from ITS OWN (freshly
               invalidated) descriptor, not from the slot being vacated */
            last_size = GetDsDescEncapD2(V, cfgSize_f, p_last_desc);
            SetDsDescEncapD2(V, memAddr_f, p_sys_desc, (phy_addr_low >> 4));
            SetDsDescEncapD2(V, cfgSize_f, p_sys_desc, last_size);
            SYS_USW_DMA_CACHE_FLUSH(lchip, p_sys_desc, sizeof(DsDesc_m));
            p_usw_dma_master[lchip]->tx_session[last_session_id].desc_idx = desc_idx;
        }

        /*4. update tx channel ring depth*/
        value = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth;
        cmd = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_ringDepth_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &value));

        /*5. re-config table wr trigger timer for the new ring depth*/
        value = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth;
        if(value)
        {
            timer_l = (uint64)p_usw_dma_master[lchip]->tx_timer*1000000/value;
            timer_v[0] = timer_l&0xFFFFFFFF;
            timer_v[1] = (timer_l >> 32) & 0xFFFFFFFF;
            cmd = DRV_IOR(DmaRegWrTrigCfg_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &wr_timer));
            SetDmaRegWrTrigCfg(A, cfgRegWrTrigNs_f, &wr_timer, timer_v);
            cmd = DRV_IOW(DmaRegWrTrigCfg_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &wr_timer));

            /*6. recover table wr trigger function*/
            value = 1;
            cmd = DRV_IOW(DmaRegTrigEnCfg_t, DmaRegTrigEnCfg_cfgRegWrTrigEn_f);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, cmd, &value));
        }
    }
    else if (is_remove)
    {
        uint16 last_size = 0;
        DsDesc_m* p_last_desc = NULL;

        /* timer not running: just move the last session into the removed position */
        desc_idx = p_usw_dma_master[lchip]->tx_session[session_id].desc_idx;
        p_sys_desc = &(p_dma_chan->p_desc[desc_idx].desc_info);
        p_last_desc = &(p_dma_chan->p_desc[p_usw_dma_master[lchip]->tx_session[last_session_id].desc_idx].desc_info);

        phy_addr_low = (uint32)p_usw_dma_master[lchip]->tx_session[last_session_id].phy_addr;

        SYS_USW_DMA_CACHE_INVALID(lchip, p_last_desc, sizeof(DsDesc_m));
        /* fix: cfgSize must come from the moved (last) descriptor, see is_update branch */
        last_size = GetDsDescEncapD2(V, cfgSize_f, p_last_desc);
        SetDsDescEncapD2(V, memAddr_f, p_sys_desc, (phy_addr_low >> 4));
        SetDsDescEncapD2(V, cfgSize_f, p_sys_desc, last_size);
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_sys_desc, sizeof(DsDesc_m));
        p_usw_dma_master[lchip]->tx_session[last_session_id].desc_idx = desc_idx;
    }

    /*store session info: release this session's previous staging buffer*/
    if (p_usw_dma_master[lchip]->tx_session[session_id].phy_addr)
    {
        uint32* logic_addr = NULL;
        /*free old memory*/
        logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, p_usw_dma_master[lchip]->tx_session[session_id].phy_addr);
        SYS_DMA_FREE(lchip, logic_addr);
        p_usw_dma_master[lchip]->tx_session[session_id].phy_addr = 0;
    }

    p_usw_dma_master[lchip]->tx_session[session_id].state = tx_enable;
    if (!is_remove)
    {
        p_usw_dma_master[lchip]->tx_session[session_id].phy_addr = phy_addr;
    }
    else
    {
        /* fix: a removed session never references the new staging buffer — free it */
        SYS_DMA_FREE(lchip, p_mem_addr);
    }

    return CTC_E_NONE;
}

/**
 @brief Synchronously drain hash-dump (FIB) entries from the HashKey DMA channel.

 @param lchip        local chip id
 @param p_param      dma_dump_cb_parameter_t*: threshold / fifo_full / entry_count
 @param p_entry_num  [out] number of valid DmaFibDumpFifo_m entries copied out
 @param p_data       [out] caller buffer receiving DmaFibDumpFifo_m entries

 Polls descriptors (up to ~1s each, 1ms sleep per try) and copies each burst
 into p_data until the hardware marks the last entry. Returns CTC_E_DMA on
 descriptor timeout, CTC_E_INVALID_PARAM when the accumulated entry count
 reaches p_param->threshold before the dump finishes.
*/
int32
sys_duet2_dma_sync_hash_dump(uint8 lchip, void* p_param, uint16* p_entry_num, void* p_data )
{
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    DsDesc_m* p_desc = NULL;
    DmaFibDumpFifo_m* p_dump = NULL;
    DmaFibDumpFifo_m* p_addr_start = NULL;   /* write cursor into caller buffer */
    uint32 cur_index = 0;
    uint8 process_cnt = 0;                   /* descriptors consumed this call */
    uint16 wait_cnt = 0;
    uint8 dma_done = 0;
    uint32 real_size = 0;                    /* entries delivered in current desc */
    /*uint32 cfg_size = 0;*/
    uint64 phy_addr = 0;
    uint32 entry_num = 0;                    /* total entries accumulated */
    int32 ret = 0;
    uint8 end_flag = 0;
    uint32 vld_num = 0;
    uint32 cmd = 0;
    dma_dump_cb_parameter_t* p_pa = (dma_dump_cb_parameter_t*)p_param;

    p_addr_start = (DmaFibDumpFifo_m*)p_data;
    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_HASHKEY_CHAN_ID];
    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;

    for(;; cur_index++)
    {
        dma_done = 0;
        wait_cnt = 0;
        /* ring wrap */
        if (cur_index >= p_dma_chan->desc_depth)
        {
            cur_index = 0;
        }

        /* poll done bit, re-invalidating cache each try (~1000 x 1ms) */
        p_desc = &p_base_desc[cur_index].desc_info;
        do
        {
            SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(DsDesc_m));
            if (GetDsDescEncapD2(V, done_f, p_desc)) 
            {
                dma_done = 1;
                break;
            }

            sal_task_sleep(1);
            wait_cnt++;

        }while(wait_cnt < 1000);

        if (dma_done == 0)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Desc is not done!! \n");
#endif
            ret = CTC_E_DMA;
            /* dma not done,  need end current dma operate */
            goto end;
        }

        process_cnt++;

        /* get current desc real size */
        real_size = GetDsDescEncapD2(V, realSize_f, p_desc);

        /*cfg_size = GetDsDescEncapD2(V, cfgSize_f, p_desc);*/
        /* phy_addr rebuilt but only the logic address cached in p_desc_info is used below */
        COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr,             \
                                GetDsDescEncapD2(V, memAddr_f, p_desc)<<4, phy_addr);

        p_dump = (DmaFibDumpFifo_m*)(p_dma_chan->p_desc_info[cur_index].data_addr);
        entry_num += real_size;

        /* check the last one */
        if (GetDmaFibDumpFifo(V, isLastEntry_f, &p_dump[real_size-1]))
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "is lastEntry! \n");
#endif

            if (GetDmaFibDumpFifo(V, isMac_f, &p_dump[real_size-1]))
            {
#ifdef DMA_DBG_ON
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Last entry is Mac entry! \n");
#endif
                /* query is end */
                *p_entry_num = entry_num;
                sal_memcpy((uint8*)p_addr_start, (uint8*)p_dump, sizeof(DmaFibDumpFifo_m)*real_size);
            }
            else
            {
#ifdef DMA_DBG_ON
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Last entry isnot Mac entry! \n");
#endif

                /* query is end, but last is invalid: drop the terminator entry */
                *p_entry_num = entry_num-1;
                sal_memcpy((uint8*)p_addr_start, (uint8*)p_dump, sizeof(DmaFibDumpFifo_m)*(real_size-1));
            }

            end_flag = 1;
        }
        else
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Not Last entry but is Mac entry! \n");
#endif

            /* guard against overrunning the caller's buffer */
            if (entry_num >= p_pa->threshold)
            {
#ifdef DMA_DBG_ON
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Exceed, entry_num:%d, threshold:%d real_size:%d!! \n", entry_num, p_pa->threshold, real_size);
#endif
                ret = CTC_E_INVALID_PARAM;
                /* clear desc before bailing out so hardware can reuse it */
                SetDsDescEncapD2(V, done_f, p_desc, 0);
                SetDsDescEncapD2(V, reserved0_f, p_desc, 0);
                #if (1 == SDK_WORK_PLATFORM)
                    /* Uml need clear realsize */
                    SetDsDescEncapD2(V, realSize_f, p_desc, 0);
                #endif
                SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));

                #if (0 == SDK_WORK_PLATFORM)
                    vld_num = 1;
                #else
                    vld_num = p_dma_chan->desc_depth;
                #endif
                /* NOTE(review): DRV_IOW with DRV_ENTRY_FLAG is passed a lone uint32
                   (&vld_num) rather than a DmaCtlTab_m — likely relies on vldNum
                   being the first field; confirm against driver layout */
                cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num);
                goto end;
            }

            /* process next desc */
            sal_memcpy((uint8*)p_addr_start, (uint8*)p_dump, sizeof(DmaFibDumpFifo_m)*real_size);
            p_addr_start += real_size;

            /* caller-side FIFO filled exactly: stop after this desc */
            if(p_pa->fifo_full && entry_num == p_pa->entry_count)
            {
                    *p_entry_num = entry_num;
                    end_flag = 1;
            }
        }

        /* clear desc */
        SetDsDescEncapD2(V, done_f, p_desc, 0);
        SetDsDescEncapD2(V, reserved0_f, p_desc, 0);
        #if (1 == SDK_WORK_PLATFORM)
            /* Uml need clear realsize */
            SetDsDescEncapD2(V, realSize_f, p_desc, 0);
        #endif
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));

        /* hand the descriptor back to hardware */
        #if (0 == SDK_WORK_PLATFORM)
            vld_num = 1;
        #else
            vld_num = p_dma_chan->desc_depth;
        #endif
        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num);

        if (end_flag == 1)
        {
            break;
        }
    }

end:
    /* advance ring position by the number of descriptors consumed */
    p_dma_chan->current_index = ((p_dma_chan->current_index + process_cnt) % (p_dma_chan->desc_depth));
    return ret;

}

/**
 @brief Create (is_destroy==0) or tear down (is_destroy!=0) the descriptor
        rings used by the periodic packet TX timer.

 @param lchip        local chip id
 @param max_session  capacity of both rings (packet TX and table-write)
 @param interval     unused in this function body
 @param pkt_len      max payload length per session (data_size = pkt_len + header)
 @param is_destroy   0: allocate/program rings; non-zero: free rings and buffers

 Create path builds two rings: SYS_DMA_PACKET_TX1_CHAN_ID (one desc per future
 session, sized later) and SYS_DMA_TBL_WR_CHAN_ID (one desc per session, each
 writing a shared 4-byte buffer to a chip register). Destroy path frees desc
 data buffers and the rings themselves, then clears the session table.
*/
int32
sys_duet2_dma_set_packet_timer_cfg(uint8 lchip, uint16 max_session, uint16 interval, uint16 pkt_len, uint8 is_destroy)
{
    uint32 desc_num = 0;
    sys_dma_desc_t* p_sys_desc_pad = NULL;    /* packet TX ring */
    DsDesc_m* p_desc = NULL;
    uint32 phy_addr = 0;
    uint32 cmd = 0;
    DmaStaticInfo_m static_info;
    DmaWeightCfg_m dma_weight;
    uint32 cfg_addr = 0;
    void*  p_mem_addr = NULL;                 /* 4-byte buffer shared by all table-write descs */
    sys_dma_desc_t* p_sys_desc_tbl = NULL;    /* table-write ring */

    if (!is_destroy)
    {
        /* refuse double-create */
        if (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].p_desc ||
            p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].p_desc)
        {
            return CTC_E_IN_USE;
        }

        /*alloc table write trigger member data*/
        p_mem_addr = SYS_DMA_ALLOC(lchip, 4, 0);
        if (NULL == p_mem_addr)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }

        /*1. process packet tx channel*/
        p_sys_desc_pad = (sys_dma_desc_t*)SYS_DMA_ALLOC(lchip, (max_session) * sizeof(sys_dma_desc_t), 0);
        if (NULL == p_sys_desc_pad)
        {
            SYS_DMA_FREE(lchip, p_mem_addr);
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }

        p_sys_desc_tbl = (sys_dma_desc_t*)SYS_DMA_ALLOC(lchip, (max_session)*sizeof(sys_dma_desc_t), 0);
        if (!p_sys_desc_tbl)
        {
            SYS_DMA_FREE(lchip, p_mem_addr);
            SYS_DMA_FREE(lchip, p_sys_desc_pad);
            return CTC_E_NO_MEMORY;
        }

        sal_memset(p_sys_desc_tbl, 0, sizeof(sys_dma_desc_t)*max_session);
        sal_memset(p_sys_desc_pad, 0, sizeof(sys_dma_desc_t)*max_session);

        /* pre-format packet TX descs; NOTE(review): phy_addr is still 0 here,
           so memAddr is written as 0 — the real packet buffer address appears
           to be patched in later by sys_duet2_dma_set_session_pkt; confirm
           the cfgSize 41 placeholder is intended */
        for (desc_num = 0; desc_num < max_session; desc_num++)
        {
            p_desc = &(p_sys_desc_pad[desc_num].desc_info);

            SetDsDescEncapD2(V, u1_pkt_eop_f, p_desc, 1);
            SetDsDescEncapD2(V, u1_pkt_sop_f, p_desc, 1);
            SetDsDescEncapD2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncapD2(V, cfgSize_f, p_desc, 41);
            SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));
        }

        /*2. process table write trigger channel: every desc writes the shared
          4-byte value 1 to chip address 0x28b80 + chan*4 (presumably the TX
          channel's vldNum kick register — verify against register map)*/
        for (desc_num = 0; desc_num < max_session; desc_num++)
        {
            p_desc = &(p_sys_desc_tbl[desc_num].desc_info);

            *((uint32*)p_mem_addr) = 1;
            phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            cfg_addr = 0x00028b80 + SYS_DMA_PACKET_TX1_CHAN_ID*4;
            SetDsDescEncapD2(V, memAddr_f, p_desc, (phy_addr >> 4));
            SetDsDescEncapD2(V, cfgSize_f, p_desc, 4);
            SetDsDescEncapD2(V, chipAddr_f, p_desc, cfg_addr);
            SetDsDescEncapD2(V, dataStruct_f, p_desc, 1);
            SetDsDescEncapD2(V, pause_f, p_desc, 1);

            SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));
        }

        /*3. cfg static infor for dma channel:MemBase, ring depth */
        sal_memset(&static_info, 0, sizeof(DmaStaticInfo_m));
        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_sys_desc_pad);
        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &static_info));
        SetDmaStaticInfo(V, highBase_f, &static_info, p_usw_dma_master[lchip]->dma_high_addr);
        SetDmaStaticInfo(V, ringBase_f, &static_info, (phy_addr >> 4));
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, SYS_DMA_PACKET_TX1_CHAN_ID, cmd, &static_info));

        /* TX ring depth is NOT set here; it grows as sessions are added */
        sal_memset(&static_info, 0, sizeof(DmaStaticInfo_m));
        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_sys_desc_tbl);
        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &static_info));
        SetDmaStaticInfo(V, highBase_f, &static_info, p_usw_dma_master[lchip]->dma_high_addr);
        SetDmaStaticInfo(V, ringBase_f, &static_info, (phy_addr >> 4));
        SetDmaStaticInfo(V, ringDepth_f, &static_info, max_session);
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &static_info));

        /*4. cfg weight */
        sal_memset(&dma_weight, 0, sizeof(DmaWeightCfg_m));
        cmd = DRV_IOR(DmaWeightCfg_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, 0, cmd, &dma_weight));
        DRV_SET_FIELD_V(lchip, DmaWeightCfg_t, DmaWeightCfg_cfgChan4Weight_f, &dma_weight, SYS_DMA_LOW_WEIGHT);
        DRV_SET_FIELD_V(lchip, DmaWeightCfg_t, DmaWeightCfg_cfgChan5Weight_f, &dma_weight, SYS_DMA_LOW_WEIGHT);
        cmd = DRV_IOW(DmaWeightCfg_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, 0, cmd, &dma_weight));

        /* record ring ownership in software; TX ring starts empty (depth 0) */
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].p_desc = p_sys_desc_pad;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].mem_base = 0;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].channel_id = SYS_DMA_PACKET_TX1_CHAN_ID;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_num = 0;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].desc_depth = 0;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].data_size = pkt_len+SYS_USW_PKT_HEADER_LEN;

        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].p_desc = p_sys_desc_tbl;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].mem_base = 0;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].channel_id = SYS_DMA_TBL_WR_CHAN_ID;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].desc_num = max_session;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].desc_depth = max_session;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].data_size = 4;
        CTC_ERROR_RETURN(sys_usw_dma_set_chan_en(lchip, SYS_DMA_TBL_WR_CHAN_ID, 1));
    }
    else
    {
        uint64 phy_addr = 0;
        uint32* logic_addr = NULL;
        uint8  chan_id[2] = {SYS_DMA_PACKET_TX1_CHAN_ID, SYS_DMA_TBL_WR_CHAN_ID};
        uint8  index  = 0;
        uint8  mem_free = 1;

        /* nothing to destroy if rings were never created */
        if (!p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID].p_desc ||
            !p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].p_desc)
        {
            return CTC_E_NONE;
        }

        for (index = 0; index < 2; index++)
        {
            mem_free = 1;
            p_sys_desc_pad = p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].p_desc;
            /*clear data: free each desc's data buffer (reconstructed from memAddr)*/
            for (desc_num = 0; desc_num < p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].desc_depth; desc_num++)
            {
                p_desc = &(p_sys_desc_pad[desc_num].desc_info);
                SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(DsDesc_m));
                COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr,             \
                                GetDsDescEncapD2(V, memAddr_f, p_desc)<<4, phy_addr);
                logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
                if (logic_addr && mem_free)
                {
                    /*table wr desc data using only one data memory, just need free once
                      (index 1 == table-write channel; its descs all share p_mem_addr)*/
                    SYS_DMA_FREE(lchip, logic_addr);
                    if (index == 1)
                    {
                        mem_free = 0;
                    }
                }
            }

            /* free the ring itself and reset the software channel record */
            SYS_DMA_FREE(lchip, p_sys_desc_pad);
            p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].p_desc = NULL;
            p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].mem_base = 0;
            p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].channel_id = 0;
            p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].desc_num = 0;
            p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].desc_depth = 0;
            p_usw_dma_master[lchip]->dma_chan_info[chan_id[index]].data_size = 0;

        }

        /* drop all session bookkeeping */
        sal_memset(p_usw_dma_master[lchip]->tx_session, 0, sizeof(sys_dma_timer_session_t)*SYS_PKT_MAX_TX_SESSION);
    }

    return CTC_E_NONE;
}

/**
@brief  DMA info function process

 Drains completed descriptors on an info-type DMA channel (e.g. learning,
 monitor, IPFIX export) and dispatches each data block to the callback
 registered for the channel's cb_type.  Invoked from the DMA interrupt
 sync thread after the channel's interrupt has been masked; re-enables the
 channel interrupt before returning and re-raises it if more work arrived
 in the meantime.

 @param lchip  local chip id
 @param chan   DMA channel id within dma_chan_info[]

 @return CTC_E_NONE on success, CTC_E_INVALID_PARAM if a descriptor reports
         a real size larger than the channel's configured buffer size
*/
int32
sys_duet2_dma_info_func(uint8 lchip, uint8 chan)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 cur_index = 0;
    uint32 desc_done = 0;
    uint32 process_cnt = 0;
    uint32 real_size = 0;
    sys_dma_info_t dma_info;
    uint32* logic_addr = NULL;
    uint32 timestamp[2] = {0};
    uint64 tm_ns = 0;
    uint32 cmd = 0;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};
    uint32 vld_num = 0;

#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif
    sal_memset(&dma_info, 0, sizeof(sys_dma_info_t));

    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan];

    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;
    /* invalidate the whole descriptor ring so the CPU sees the done/realSize
       fields the DMA engine wrote to memory */
    SYS_USW_DMA_CACHE_INVALID(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*p_dma_chan->desc_depth);

    while(1)
    {
#ifndef CTC_HOT_PLUG_DIS
        /* re-check each iteration: module may be deinited concurrently (hot plug) */
        SYS_DMA_INIT_CHECK(lchip);
#endif
        /* ring wrap-around */
        if (cur_index >= p_usw_dma_master[lchip]->dma_chan_info[chan].desc_depth)
        {
            cur_index = 0;
        }

        p_desc = &(p_base_desc[cur_index].desc_info);
        desc_done = GetDsDescEncapD2(V, done_f, p_desc);

        /* get realsize from desc */
        real_size = GetDsDescEncapD2(V, realSize_f, p_desc);

        /* stop at the first descriptor hardware has not completed */
        if (!desc_done)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc\n", process_cnt);
#endif
            break;
        }

        /* logic (virtual) address of the data buffer bound to this desc */
        logic_addr = p_dma_chan->p_desc_info[cur_index].data_addr;

        if (real_size > p_dma_chan->cfg_size)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Info realsize error real:%d cfg:%d\n", real_size, p_dma_chan->cfg_size);
#endif
            /* NOTE(review): this early return skips the SYS_USW_DMA_INTR_ENABLE
               at the tail, so the channel interrupt stays masked; presumably a
               deliberate fail-stop on a corrupt descriptor -- confirm */
            return CTC_E_INVALID_PARAM;
        }
        process_cnt++;
        dma_info.p_data = logic_addr;
        dma_info.entry_num = real_size;
        dma_info.base_lchip = lchip;
        if(SYS_DMA_IPFIX_CHAN_ID == chan)
        {
            /* IPFIX descs carry a 64-bit nanosecond timestamp in two words;
               split it into seconds/nanoseconds for the callback */
            GetDsDescEncapD2(A, timestamp_f, p_desc, timestamp);

            tm_ns = timestamp[1];
            tm_ns = (tm_ns << 32);
            tm_ns |= timestamp[0];

            dma_info.seconds = tm_ns/1000000000;
            dma_info.nanoseconds = tm_ns - dma_info.seconds*1000000000;
        }

        /* in/out counters bracket the callback so a stuck callback is visible */
        SYS_DMA_CB_IN_CNT_ADD(lchip, chan);
        p_usw_dma_master[lchip]->dma_cb[p_dma_chan->cb_type](lchip, (void*)&dma_info);
        SYS_DMA_CB_OUT_CNT_ADD(lchip, chan);

        /* recycle the descriptor: clear done (and realSize on the simulation
           platform) and flush it back so hardware can reuse it */
        SetDsDescEncapD2(V, done_f, p_desc, 0);
        SetDsDescEncapD2(V, reserved0_f, p_desc, 0);
        #if (1 == SDK_WORK_PLATFORM)
            SetDsDescEncapD2(V, realSize_f, p_desc, 0);
        #endif
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));

        /* return desc credit to the engine: 1 on real hardware, the full
           depth on the simulation platform */
        #if (0 == SDK_WORK_PLATFORM)
            vld_num = 1;
        #else
            vld_num = p_dma_chan->desc_depth;
        #endif
        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, p_dma_chan->channel_id, cmd, &vld_num);

        cur_index++;
        /* one interrupt process 1000 entry, for other channel using same sync channel to be processed in time */
        if (process_cnt >= 1000)
        {
            break;
        }
    }
#ifndef CTC_HOT_PLUG_DIS
    SYS_DMA_INIT_CHECK(lchip);
#endif
    p_dma_chan->current_index = (cur_index>=p_dma_chan->desc_depth)?(cur_index%p_dma_chan->desc_depth):cur_index;

    /* release mask channel isr */
    SYS_USW_DMA_INTR_ENABLE(lchip,  DmaCtlIntrFunc_t, mask, chan);

    /*inval dma before read*/
    SYS_USW_DMA_CACHE_INVALID(lchip, &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info), sizeof(DsDesc_m));
    /* if another desc completed while we were draining, re-raise the channel
       interrupt by software so the new work is not left waiting */
    if (GetDsDescEncapD2(V, done_f, &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info)))
    {
#ifdef DMA_DBG_ON
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Dma Trigger interrupt chan:%d \n", chan);
#endif
        sal_memset(mask, 0,SYS_DMA_INTR_VEC*sizeof(uint32));
        SetDmaCtlIntrFunc(V, dmaIntrValidVec_f, mask, (1<<chan));
        cmd = DRV_IOW(DmaCtlIntrFunc_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, 0, cmd, mask));
    }

    return CTC_E_NONE;
}

/**
@brief DMA stats function process

 Drains completed descriptors on a stats-type DMA channel, dispatching each
 data block (plus the per-desc value0 extension word) to the callback
 registered for the channel's cb_type, then returns the processed
 descriptors to hardware in one batch.  Also syncs packet rx/tx DMA stats
 when stats collection is enabled.  Runs under the channel mutex.

 @param lchip  local chip id
 @param chan   DMA channel id within dma_chan_info[]

 @return CTC_E_NONE on success, CTC_E_INVALID_PARAM if a descriptor's real
         size does not match its configured size
*/
int32
sys_duet2_dma_stats_func(uint8 lchip, uint8 chan)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    sys_dma_desc_info_t* p_desc_info = NULL;
    DsDesc_m* p_desc = NULL;
    uint32 cur_index = 0;
    uint32 desc_done = 0;
    uint32 process_cnt = 0;
    uint32 real_size = 0;
    uint32 cfg_size = 0;
    sys_dma_reg_t dma_reg;

#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif
    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan];
    DMA_LOCK(p_dma_chan->p_mutex);
    p_base_desc = p_dma_chan->p_desc;
    p_desc_info = p_dma_chan->p_desc_info;

    /* gg dma stats process desc from index 0 to max_desc*/
    SYS_USW_DMA_CACHE_INVALID(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*p_dma_chan->desc_depth);
    for (cur_index = p_dma_chan->current_index; cur_index < p_dma_chan->desc_depth; cur_index++)
    {
        p_desc = &(p_base_desc[cur_index].desc_info);
        desc_done = GetDsDescEncapD2(V, done_f, p_desc);
        cfg_size = GetDsDescEncapD2(V, cfgSize_f, p_desc);
        /* stop at the first descriptor hardware has not completed */
        if (0 == desc_done)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc\n", process_cnt);
#endif
            break;
        }

        process_cnt++;
        /* SYS_DMA_DESC_NOT_PROC_SIZE marks a placeholder desc: recycle it
           without invoking the callback */
        if (SYS_DMA_DESC_NOT_PROC_SIZE != cfg_size)
        {
            /* get current desc data memory logic address in db */
            dma_reg.p_data = p_dma_chan->p_desc_info[cur_index].data_addr;

            /* get realsize from desc */
            real_size = GetDsDescEncapD2(V, realSize_f, p_desc);
            if ((real_size != cfg_size) && (0 != cfg_size))
            {
#ifdef DMA_DBG_ON
                /* report the desc's own cfg_size (the value actually compared),
                   not the channel-wide default */
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "ag stats realsize error real:%d cfg:%d\n", real_size, cfg_size);
#endif
                DMA_UNLOCK(p_dma_chan->p_mutex);
                return CTC_E_INVALID_PARAM;
            }

            dma_reg.p_ext = &(p_desc_info[cur_index].value0); /*uint16*/

            /* in/out counters bracket the callback so a stuck callback is visible */
            SYS_DMA_CB_IN_CNT_ADD(lchip, chan);
            p_usw_dma_master[lchip]->dma_cb[p_dma_chan->cb_type](lchip, (void*)&dma_reg);
            SYS_DMA_CB_OUT_CNT_ADD(lchip, chan);
        }
        /* recycle the descriptor and flush it back for hardware reuse */
        SetDsDescEncapD2(V, done_f, p_desc, 0);
        #if(1 == SDK_WORK_PLATFORM)
            SetDsDescEncapD2(V, realSize_f, p_desc, 0);
        #endif
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(DsDesc_m));
    }

    if(process_cnt)
    {
         /* return all processed desc credits in one batch and advance the ring */
         SYS_USW_DMA_CLEAR_MULTI_DESC_WITH_IO(lchip, p_dma_chan, process_cnt);
         p_dma_chan->current_index = ((p_dma_chan->current_index + process_cnt) % (p_dma_chan->desc_depth));
    }

    if(p_usw_dma_master[lchip]->dma_stats_en)
    {
        sys_duet2_dma_sync_pkt_rx_stats(lchip);
        sys_duet2_dma_sync_pkt_tx_stats(lchip);
    }
    DMA_UNLOCK(p_dma_chan->p_mutex);

    return CTC_E_NONE;
}

/**
@brief  Read one or more table entries from the chip via the table-read DMA channel

 tbl_cfg (sys_dma_tbl_rw_t) carries the chip address, entry geometry and the
 destination buffer.  user_dma_mode == 0 allocates an internal bounce buffer
 and copies the result into tbl_cfg->buffer; non-zero modes use the caller's
 DMA-capable buffer directly.  For user_dma_mode == 2 the hardware kick and
 completion wait are skipped: only the descriptor is prepared and its index
 returned in tbl_cfg->desc_index for the caller to drive.  Runs under the
 table-read channel mutex.
*/
int32
sys_duet2_dma_read_table(uint8 lchip, void* p_tbl_cfg)
{

    int32 ret = CTC_E_NONE;
    sys_dma_tbl_rw_t* tbl_cfg = (sys_dma_tbl_rw_t*)p_tbl_cfg;
    DsDesc_m* p_tbl_desc = NULL;
    uint32* p_tbl_buff = NULL;
    uint32 words_in_chip = 0;   /* stride of one entry in the DMA buffer, in words */
    uint32 words_num = 0;       /* payload length of one entry, in words */
    uint32 entry_num = 0;
    uint32 tbl_buffer_len = 0;
    uint32 tbl_addr = 0;
    uint16 idx = 0;
    uint32* p_src = NULL;
    uint32* p_dst = NULL;
    uint32 cmd = 0;
    uint32 vld_num = 1;
    uint32 tbl_buffer_max_size = 0xFFFF;   /* cfgSize field limit in the descriptor */
    sys_dma_chan_t* p_dma_chan = NULL;
    uint64 phy_addr = 0;
    sal_mutex_t* p_mutex = NULL;

    SYS_DMA_INIT_CHECK(lchip);
    CTC_PTR_VALID_CHECK(tbl_cfg);

    /* mask bit(1:0) */
    tbl_addr = tbl_cfg->tbl_addr;

    words_num = (tbl_cfg->entry_len / SYS_DMA_WORD_LEN);
    entry_num = tbl_cfg->entry_num;
    words_in_chip = tbl_cfg->entry_offset>>2;
    tbl_buffer_len = entry_num * tbl_cfg->entry_offset;

    /* check data size should smaller than desc's cfg MAX size */
    if (tbl_buffer_len > tbl_buffer_max_size)
    {
        return CTC_E_INVALID_PARAM;
    }
    if (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_RD_CHAN_ID].chan_en == 0)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Dma Read Function is not enabled!!!\n");
        return CTC_E_NOT_SUPPORT;
    }

    if(!tbl_cfg->user_dma_mode)
    {
        /* mode 0: allocate a DMA-capable bounce buffer; freed at error_proc
           on both the success and failure paths */
        if (NULL == g_dal_op.dma_alloc)
        {
            return CTC_E_DRV_FAIL;
        }
        p_tbl_buff = SYS_DMA_ALLOC(lchip, tbl_buffer_len, 0);
        if (NULL == p_tbl_buff)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_tbl_buff, 0, tbl_buffer_len);
    }
    else
    {
        /* caller supplies a DMA-capable buffer */
        p_tbl_buff = tbl_cfg->buffer;
    }

    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_RD_CHAN_ID]);
    p_mutex = p_dma_chan->p_mutex;

    DMA_LOCK(p_mutex);
    /* build the descriptor at the ring's current slot: memAddr is the
       physical buffer address >> 4, dataStruct 0 encodes a 64-word entry */
    phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, (void*)p_tbl_buff);
    p_tbl_desc = &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info);
    sal_memset(p_tbl_desc, 0, sizeof(DsDesc_m));
    SetDsDescEncapD2(V, memAddr_f, p_tbl_desc, ((phy_addr & 0xFFFFFFFF) >> 4));
    SetDsDescEncapD2(V, cfgSize_f, p_tbl_desc, tbl_buffer_len);
    SetDsDescEncapD2(V, chipAddr_f, p_tbl_desc, (tbl_addr));
    SetDsDescEncapD2(V, dataStruct_f, p_tbl_desc, ((words_num == 64)?0:words_num));

    SYS_USW_DMA_CACHE_FLUSH(lchip, p_tbl_desc, sizeof(DsDesc_m));
    if (2 != tbl_cfg->user_dma_mode)
    {
    /* kick the channel and wait for hardware to complete this descriptor */
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, SYS_DMA_TBL_RD_CHAN_ID, cmd, &vld_num),ret,error_proc);

    ret = _sys_duet2_dma_wait_desc_finish(lchip,p_tbl_desc,p_dma_chan);
    if(ret != CTC_E_NONE)
    {
      goto error_proc;
    }
    /* hardware wrote the buffer: invalidate before the CPU reads it */
    SYS_USW_DMA_CACHE_INVALID_WITH_PHY_ADDR(lchip, phy_addr, entry_num*tbl_cfg->entry_offset);

    if(!tbl_cfg->user_dma_mode)
    {
        /* get read result: compact each entry from its in-chip stride
           (words_in_chip) down to its payload length (words_num) */
        for (idx = 0; idx < entry_num; idx++)
        {
            p_src = p_tbl_buff + idx * words_in_chip;
            p_dst = tbl_cfg->buffer + idx * words_num;
            sal_memcpy(p_dst, p_src, words_num*SYS_DMA_WORD_LEN);
        }
    }
    GetDsDescEncapD2(A, timestamp_f, p_tbl_desc, &(tbl_cfg->time_stamp));
    }
    else
    {
        /* mode 2: completion is deferred, hand the desc index back to the caller */
        tbl_cfg->desc_index = p_dma_chan->current_index;
    }
    p_dma_chan->current_index =
        ((p_dma_chan->current_index + 1) == p_dma_chan->desc_depth) ? 0 : (p_dma_chan->current_index + 1);
/* note: the success path falls through here on purpose, to free the mode-0
   bounce buffer (its contents were already copied out above) */
error_proc:
    if ((!tbl_cfg->user_dma_mode) && g_dal_op.dma_free)
    {
        SYS_DMA_FREE(lchip, p_tbl_buff);
    }
    DMA_UNLOCK(p_mutex);
    return ret;

}

/**
@brief  Write one or more table entries to the chip via the table-write DMA channel

 tbl_cfg (sys_dma_tbl_rw_t) carries the chip address, entry geometry and the
 source buffer.  user_dma_mode == 0 copies the caller's data into an internal
 bounce buffer whose ownership stays with this module: the buffer is freed
 the NEXT time the same descriptor slot is reused (tracked by the is_used
 flag), after waiting for that earlier write to finish.  user_dma_mode == 1
 uses the caller's DMA-capable buffer and waits for completion before
 returning; any other non-zero mode only kicks the hardware and returns the
 descriptor index in tbl_cfg->desc_index for the caller to drive completion.
 Runs under the table-write channel mutex.

 @return CTC_E_NONE on success, else a CTC_E_xxx error code
*/
int32
sys_duet2_dma_write_table(uint8 lchip, void* p_tbl_cfg)
{
    int32 ret = CTC_E_NONE;
    sys_dma_tbl_rw_t* tbl_cfg = (sys_dma_tbl_rw_t*)p_tbl_cfg;
    DsDesc_m* p_tbl_desc = NULL;
    uint32* p_tbl_buff = NULL;
    uint32 words_in_chip = 0;   /* stride of one entry in the DMA buffer, in words */
    uint32 words_num = 0;       /* payload length of one entry, in words */
    uint32 entry_num = 0;
    uint32 tbl_buffer_len = 0;
    uint32 tbl_addr = 0;
    uint16 idx = 0;
    uint32* p_src = NULL;
    uint32* p_dst = NULL;
    uint32 vld_num = 0;
    uint32 cmd =0;
    uint32 tbl_buffer_max_size = 0xFFFF;   /* cfgSize field limit in the descriptor */
    sys_dma_chan_t* p_dma_chan = NULL;
    uint64 phy_addr = 0;
    sal_mutex_t* p_mutex = NULL;

    SYS_DMA_INIT_CHECK(lchip);
    /* consistent with sys_duet2_dma_read_table: reject NULL cfg before use */
    CTC_PTR_VALID_CHECK(tbl_cfg);

    /* mask bit(1:0) */
    tbl_addr = tbl_cfg->tbl_addr;
    /*tbl_addr &= 0xfffffffc;*/

    words_num = (tbl_cfg->entry_len / SYS_DMA_WORD_LEN);
    /* copy_mode writes one source entry to every destination entry */
    entry_num = tbl_cfg->copy_mode ? 1 : tbl_cfg->entry_num;
    words_in_chip = tbl_cfg->entry_offset>>2;

    tbl_buffer_len = tbl_cfg->entry_num * tbl_cfg->entry_offset;

    /* check data size should smaller than desc's cfg MAX size */
    if (tbl_buffer_len > tbl_buffer_max_size)
    {
        return CTC_E_INVALID_PARAM;
    }

    /* table-write DMA shares hardware with the packet-tx timer session */
    if (p_usw_dma_master[lchip]->pkt_tx_timer_en)
    {
        return CTC_E_INVALID_CONFIG;
    }

    if (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID].chan_en == 0)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Dma Write Function is not enabled!!!\n");
        return CTC_E_NOT_SUPPORT;
    }

    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID]);
    p_mutex = p_dma_chan->p_mutex;
    DMA_LOCK(p_mutex);

    p_tbl_desc = &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info);
    /*need to free the last dma memory*/
    if (p_dma_chan->p_desc_check[p_dma_chan->current_index].is_used)
    {
        /* a previous mode-0 write still owns this slot: wait for it to
           finish, then release its bounce buffer */
        ret = _sys_duet2_dma_wait_desc_finish(lchip,p_tbl_desc,p_dma_chan);
        if(ret != CTC_E_NONE)
        {
          goto error_proc_0;
        }

        if (g_dal_op.dma_free)
        {
            /* recover the buffer's physical address from the descriptor
               (memAddr stores phy_addr >> 4); phy_addr is reassigned below
               before its next use */
            COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr,
                            (GetDsDescEncapD2(V, memAddr_f, p_tbl_desc)<<4), phy_addr);
            SYS_DMA_FREE(lchip, SYS_DMA_PHY_TO_LOGIC(lchip, (phy_addr)));
        }
        p_dma_chan->p_desc_check[p_dma_chan->current_index].is_used = 0;
    }
    if(!tbl_cfg->user_dma_mode)
    {
        /* mode 0: copy caller data into a DMA-capable bounce buffer,
           expanding each entry to its in-chip stride (words_in_chip) */
        if (NULL == g_dal_op.dma_alloc)
        {
            ret = CTC_E_DRV_FAIL;
            goto error_proc_0;
        }
        p_tbl_buff = SYS_DMA_ALLOC(lchip, tbl_buffer_len, 0);
        if (NULL == p_tbl_buff)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            ret = CTC_E_NO_MEMORY;
            goto error_proc_0;
        }

        for (idx = 0; idx < entry_num; idx++)
        {
            p_dst = p_tbl_buff + idx * words_in_chip;
            p_src = tbl_cfg->buffer + idx * words_num;

            sal_memcpy(p_dst, p_src, words_num*SYS_DMA_WORD_LEN);
        }
    }
    else
    {
        /* caller supplies a DMA-capable buffer */
        p_tbl_buff = tbl_cfg->buffer;
    }

    /* flush the data buffer so hardware reads what the CPU wrote */
    phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, p_tbl_buff);
    SYS_USW_DMA_CACHE_FLUSH_WITH_PHY_ADDR(lchip, phy_addr, tbl_buffer_len);

    /* build the descriptor: memAddr is the physical buffer address >> 4,
       dataStruct 0 encodes a 64-word entry */
    sal_memset(p_tbl_desc, 0, sizeof(DsDesc_m));
    SetDsDescEncapD2(V, memAddr_f, p_tbl_desc, ((phy_addr & 0xFFFFFFFF) >> 4));
    SetDsDescEncapD2(V, cfgSize_f, p_tbl_desc, (tbl_buffer_len));
    SetDsDescEncapD2(V, chipAddr_f, p_tbl_desc, (tbl_addr));
    SetDsDescEncapD2(V, dataStruct_f, p_tbl_desc, ((words_num == 64)?0:words_num));
    SYS_USW_DMA_CACHE_FLUSH(lchip, p_tbl_desc, sizeof(DsDesc_m));

    /* table DMA  valid num */
    vld_num = 1;
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, cmd, &vld_num), ret, error_proc);

    if(!tbl_cfg->user_dma_mode)
    {
        /* mode 0: defer bounce-buffer free to the next reuse of this slot */
        p_dma_chan->p_desc_check[p_dma_chan->current_index].is_used = 1;
    }
    else if(1 == tbl_cfg->user_dma_mode)
    {
        /* mode 1: synchronous -- wait for hardware to finish before return */
        ret = _sys_duet2_dma_wait_desc_finish(lchip,p_tbl_desc,p_dma_chan);
        if(ret != CTC_E_NONE)
        {
          goto error_proc;
        }
    }

    tbl_cfg->desc_index = p_dma_chan->current_index;
    p_dma_chan->current_index =
        ((p_dma_chan->current_index + 1) == p_dma_chan->desc_depth) ? 0 : (p_dma_chan->current_index + 1);

    DMA_UNLOCK(p_mutex);
    return CTC_E_NONE;
error_proc:

    if ((!tbl_cfg->user_dma_mode) && g_dal_op.dma_free)
    {
        SYS_DMA_FREE(lchip, p_tbl_buff);
    }
error_proc_0:
    DMA_UNLOCK(p_mutex);
    return ret;

}

/**
@brief  Soft-reset the DmaCtl block and clear the DMA credit counters of
        every module that exchanges credits with it.

 @param lchip  local chip id
 @return 0 always (register writes are fire-and-forget, as before)
*/
int32
sys_duet2_dma_reset(uint8 lchip)
{
    /* {encoded write command, ioctl index} for each credit-status field to
       clear; iteration order matches the original module sequence:
       flow acc, bufretrv, fib acc/aging, qmgr, epe header, bufstore, oam fwd */
    struct
    {
        uint32 cmd;
        uint8  index;
    } credit_clr[] =
    {
        /*1. flow acc*/
        { DRV_IOW(FlowAccCreditStatus_t, FlowAccCreditStatus_flowAccDmaCreditUsed_f), 0 },
        { DRV_IOW(FlowAccAdCreditStatus_t, FlowAccAdCreditStatus_dmaFlowAccCreditUsed_f), 0 },
        /*2. bufretr */
        { DRV_IOW(BufRetrvCreditStatus_t, BufRetrvCreditStatus_bufRetrvDma0CreditUsed_f), 0 },
        { DRV_IOW(BufRetrvCreditStatus_t, BufRetrvCreditStatus_bufRetrvDma1CreditUsed_f), 0 },
        { DRV_IOW(BufRetrvCreditStatus_t, BufRetrvCreditStatus_bufRetrvDma2CreditUsed_f), 0 },
        { DRV_IOW(BufRetrvCreditStatus_t, BufRetrvCreditStatus_bufRetrvDma3CreditUsed_f), 0 },
        /*3. fibacc */
        { DRV_IOW(FibAccCreditUsed_t, FibAccCreditUsed_fibAccDmaCreditUsed_f), 0 },
        { DRV_IOW(FibAccCreditStatus_t, FibAccCreditStatus_fibAccDmaCreditUsed_f), 0 },
        { DRV_IOW(PpAgingCreditStatus_t, PpAgingCreditStatus_dmaReqCreditUsed_f), 0 },
        { DRV_IOW(CoreAgingCreditStatus_t, CoreAgingCreditStatus_dmaReqCreditUsed_f), 0 },
        /*4. Qmgr */
        { DRV_IOW(QMgrEnqCreditStatus_t, QMgrEnqCreditStatus_dmaQMgrCreditUsed_f), 0 },
        /*5. EPEHdr */
        { DRV_IOW(EpeHdrProcCreditStatus_t, EpeHdrProcCreditStatus_dmaCreditUsed_f), 0 },
        /*6. Bufstore (DP instances 0 and 1) */
        { DRV_IOW(DmaBufStoreCreditStatus_t, DmaBufStoreCreditStatus_dmaBufStoreCreditUsed_f), 0 },
        { DRV_IOW(BufStoreDpCreditStatus_t, BufStoreDpCreditStatus_dmaBufStoreCreditUsed_f), 0 },
        { DRV_IOW(BufStoreDpCreditStatus_t, BufStoreDpCreditStatus_dmaBufStoreCreditUsed_f), 1 },
        { DRV_IOW(BufStoreProcUcCreditStatus_t, BufStoreProcUcCreditStatus_ermToDmaCreditUsed_f), 0 },
        /*7. OamFwd */
        { DRV_IOW(OamFwdCreditStatus_t, OamFwdCreditStatus_dmaCreditUsed_f), 0 },
    };
    uint32 value = 0;
    uint32 reset_cmd = 0;
    uint32 i = 0;

    /* pulse the DmaCtl soft-reset bit: assert, hold ~1ms, de-assert */
    reset_cmd = DRV_IOW(SupResetCtl_t, SupResetCtl_resetDmaCtl_f);
    value = 1;
    DRV_FIELD_IOCTL(lchip, 0, reset_cmd, &value);

    sal_task_sleep(1);

    value = 0;
    DRV_FIELD_IOCTL(lchip, 0, reset_cmd, &value);

    /*clear credit about dmactl related module*/
    for (i = 0; i < sizeof(credit_clr) / sizeof(credit_clr[0]); i++)
    {
        DRV_FIELD_IOCTL(lchip, credit_clr[i].index, credit_clr[i].cmd, &value);
    }

    return 0;
}

