#if defined(ARCTIC)
/**
 @file sys_at_dma.c

 @date 2022-02-08

 @version v1.0

 The file contains AT dma APIs of sys layer
*/

/***************************************************************
 *
 * Header Files
 *
 ***************************************************************/
#include "sal.h"
#include "dal.h"
#include "sys_usw_common.h"
#include "sys_usw_interrupt.h"
#include "sys_usw_packet.h"
#include "sys_usw_dma.h"
#include "../sys_usw_dma_priv.h"

extern int32
_sys_tmm_dma_tcam_scan_init(uint8 lchip, uint32 mem_id, uint16* p_desc_index, sys_dma_desc_t* p_sys_desc_pad, sys_dma_desc_info_t* p_sys_desc_info);
extern int32
sys_tmm_dma_function_pause(uint8 lchip, uint8 chan_id, uint8 en);

extern int32
sys_tsingma_dma_sync_pkt_rx_stats(uint8 lchip);
extern int32
sys_tsingma_dma_sync_pkt_tx_stats(uint8 lchip);

STATIC INLINE void
_sys_usw_dma_decode_table_op_bmp(uint8 lchip, uint32 op_bmp, uint8* o_pp_id, uint8* o_dp_id)
{
    /* op_bmp layout: [1:0] dp bitmap, [9:2] pp bitmap, [11:10] core bitmap.
       Decode each field to a single index; bit0 wins when both bits of a
       2-bit field are set, and the lowest set bit wins in the pp field. */
    uint8 core_sel = 0;
    uint8 pp_sel = 0;
    uint8 dp_sel = 0;
    uint32 remain = op_bmp;

    /* dp field */
    if (remain & 0x3)
    {
        dp_sel = CTC_IS_BIT_SET(remain, 0) ? 0 : 1;
    }

    remain >>= 2;
    /* pp field: index of the lowest set bit within PP_NUM_PER_CORE */
    if (remain & 0xFF)
    {
        for (pp_sel = 0; pp_sel < PP_NUM_PER_CORE; pp_sel++)
        {
            if (CTC_IS_BIT_SET(remain, pp_sel))
            {
                break;
            }
        }
    }

    remain >>= 8;
    /* core field */
    if (remain & 0x3)
    {
        core_sel = CTC_IS_BIT_SET(remain, 0) ? 0 : 1;
    }

    /* output parameters are optional */
    if (NULL != o_pp_id)
    {
        *o_pp_id = core_sel * PP_NUM_PER_CORE + pp_sel;
    }
    if (NULL != o_dp_id)
    {
        *o_dp_id = dp_sel;
    }
}

/**
 @brief Initialize the TCAM-scan descriptor ring and scan-control registers.

 Builds one scan descriptor per TCAM memory (flow, lpm and the fixed "other"
 TCAMs) for every pp on this core, then disables auto-scan mode and programs
 the scan hash granularity.

 @param lchip            local chip id (base pp; per-pp init uses lchip+pp_id)
 @param p_chan_info      DMA channel info (unused in the visible code path)
 @param p_sys_desc_pad   descriptor memory shared by all scan-init calls
 @param p_sys_desc_info  per-descriptor bookkeeping, filled by the init calls

 @return CTC_E_NONE on success, first failing error code otherwise
*/
int32
sys_at_dma_tcam_scan_reg_init(uint8 lchip, sys_dma_chan_t* p_chan_info, sys_dma_desc_t* p_sys_desc_pad, sys_dma_desc_info_t* p_sys_desc_info)
{
    uint8 pp_id = 0;
    uint16 index = 0;   /* running descriptor index, advanced by each init call */
    uint16 mem_id = 0;
    uint32 tbl_id = 0;
    uint32 cmd = 0;
    DmaScanCfg_m dma_scan_cfg;
    ds_t ds;

    sal_memset(&dma_scan_cfg, 0, sizeof(dma_scan_cfg));

    /* mem_id 0xffffffff: one leading init call before the per-memory loop
       (presumably a global/header descriptor — TODO confirm against
       _sys_tmm_dma_tcam_scan_init) */
    CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip, 0xffffffff, &index, p_sys_desc_pad, p_sys_desc_info));

    pp_id = 0;
    do{
        /* flow tcam */
        for (mem_id = DRV_FTM_TCAM_KEY0; mem_id < DRV_FTM_TCAM_KEY0 + DRV_CONST(DRV_MAX_NOR_TCAM_NUM); mem_id++)
        {
            CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip+pp_id, mem_id, &index, p_sys_desc_pad, p_sys_desc_info));
        }

        /* lpm tcam */
        for (mem_id = DRV_FTM_LPM_TCAM_KEY0; mem_id < (DRV_FTM_LPM_TCAM_KEY0 + DRV_CONST(DRV_MAX_LPM_TCAM_NUM)); mem_id++)
        {
            CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip+pp_id, mem_id, &index, p_sys_desc_pad, p_sys_desc_info));
        }

        /* other tcam */
        CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip+pp_id, DRV_FTM_CID_TCAM, &index, p_sys_desc_pad, p_sys_desc_info));
        CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip+pp_id, DRV_FTM_SEL_TCAM, &index, p_sys_desc_pad, p_sys_desc_info));
        CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip+pp_id, DRV_FTM_SEL_TCAM_EGR, &index, p_sys_desc_pad, p_sys_desc_info));
        CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip+pp_id, DRV_FTM_RMAC_TCAM, &index, p_sys_desc_pad, p_sys_desc_info));
        CTC_ERROR_RETURN(_sys_tmm_dma_tcam_scan_init(lchip+pp_id, DRV_FTM_UDF_TCAM, &index, p_sys_desc_pad, p_sys_desc_info));
    }while(++pp_id < PP_NUM_PER_CORE);

    /* make sure entry's hash value has been written into data memory before enabling auto mode, default do not enable auto mode */
    tbl_id = DmaScanCtl_t;
    cmd = DRV_IOR(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));
    SetDmaScanCtl(V, dmaAutoMode_f, &ds, 0);
    cmd = DRV_IOW(tbl_id, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &ds));

    cmd = DRV_IOW(DmaScanCfg_t, DRV_ENTRY_FLAG);
    SetDmaScanCfg(V, cfgScanHashType_f, &dma_scan_cfg, 4);    /* per unit 8 entry */
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dma_scan_cfg));

    return CTC_E_NONE;
}

/**
 @brief Process completed TCAM-scan DMA descriptors.

 Walks the scan channel's descriptor ring from current_index, and for every
 done descriptor with a valid scan result decodes the pp from the slice
 bitmap and hands the scan data to the SER module for TCAM recovery.
 Processed descriptors have their done bit cleared and are flushed back.

 @param lchip  local chip id
 @param chan   caller's channel id (the hard-coded channel 28 is used instead)

 @return CTC_E_NONE always
*/
int32
sys_at_dma_tcam_scan_func(uint8 lchip, uint16 chan)
{
    uint8  pp_id = 0;
    uint8  scan_valid = 0;
    uint16 op_bmp = 0;
    uint32 cur_index = 0;
    uint32 process_cnt = 0;
    drv_ser_dma_tcam_param_t tcam_param;
    ds_desc_at_t* p_desc;
    sys_dma_desc_t* p_base_desc;
    sys_dma_chan_t* p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[28];/* SYS_DMA_TCAM_SCAN_CHAN_ID */

    DMA_LOCK(p_dma_chan->p_mutex);
    p_base_desc = p_dma_chan->p_desc;

    /* NOTE(review): p_dma_chan->current_index is read but never advanced in
       this function — presumably maintained elsewhere; confirm. */
    for (cur_index = p_dma_chan->current_index; cur_index < p_dma_chan->desc_depth; cur_index++)
    {
        p_desc = (ds_desc_at_t*)&(p_base_desc[cur_index].desc_info);
        SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(ds_desc_at_t));

        /* stop at the first descriptor hardware has not finished */
        if (!p_desc->done)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc\n", process_cnt);
#endif
            break;
        }

        process_cnt++;

        /* invalidate the scan data buffer before reading it */
        SYS_USW_DMA_CACHE_INVALID(lchip, p_dma_chan->p_desc_info[cur_index].data_addr, p_dma_chan->p_desc_info[cur_index].value1);
        sal_memset(&tcam_param, 0, sizeof(tcam_param));

        scan_valid = p_desc->scan_valid;
        if (!scan_valid)
        {
            /* NOTE(review): this skip leaves the done bit set and the desc
               unflushed — confirm that is intentional for invalid scans. */
            continue;
        }
        /* op_bmp[11:10]->core bmp, op_bmp[9:2]->pp bmp, op_bmp[1:0]->dp bmp */
        op_bmp = (p_desc->u0.reg.slice_bmp)<<2;

        _sys_usw_dma_decode_table_op_bmp(lchip, op_bmp, &pp_id, NULL);
        tcam_param.mem_id = p_dma_chan->p_desc_info[cur_index].value0;
        tcam_param.time_stamp[0] = p_desc->u2.scan.tcam_scan_addr;
        /* repack core/slice bitmaps into time_stamp[1] for the SER module */
        op_bmp = (p_desc->ts_shift_or_dp_core_bmp >> 2) & 0x3;
        op_bmp <<= 4;
        op_bmp |= p_desc->u0.reg.slice_bmp;
        op_bmp <<= 2;
        tcam_param.time_stamp[1] = op_bmp;
        drv_ser_set_cfg(lchip+pp_id, DRV_SER_CFG_TYPE_DMA_RECOVER_TCAM, &tcam_param);

        /* recycle the descriptor for hardware */
        p_desc->done = 0;
        #if(1 == SDK_WORK_PLATFORM)
            p_desc->real_size = 0;
        #endif
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(ds_desc_at_t));
    }
    DMA_UNLOCK(p_dma_chan->p_mutex);
    return CTC_E_NONE;
}

STATIC INLINE int32
_sys_at_dma_wait_desc_finish(uint8 lchip, ds_desc_at_t* p_tx_desc_mem,sys_dma_chan_t* p_dma_chan)
{
    /* Poll the descriptor's done bit until the previous transmit completes
       or the retry budget runs out. The emulation build gets a much larger
       budget because descriptor completion is far slower there. */
#ifdef EMULATION_ENV
    const uint32 max_poll = 10000;
#else
    const uint32 max_poll = 100;
#endif
    uint32 poll;

    for (poll = 0; poll < max_poll; poll++)
    {
    #ifndef CTC_HOT_PLUG_DIS
        SYS_DMA_INIT_CHECK(lchip);
    #endif
        /* re-read the descriptor from memory, not from cache */
        SYS_USW_DMA_CACHE_INVALID(lchip, p_tx_desc_mem, sizeof(ds_desc_at_t));
        if (p_tx_desc_mem->done)
        {
            /* last transmit is done */
            return CTC_E_NONE;
        }
    #ifndef PACKET_TX_USE_SPINLOCK
        sal_task_sleep(1);
    #else
        sal_udelay(1000);
    #endif
    }

#ifdef DMA_DBG_ON
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "last transmit is not done,%d\n", p_dma_chan->current_index);
#endif
    return CTC_E_DMA;
}

/**
 @brief Transmit an array of packets in zero-copy mode on TX channel 1.

 One descriptor is built per packet (sop and eop set together). Before
 reuse, each descriptor's previously attached rx-pool buffer (if any) is
 recycled back to its data fifo; when that fifo was empty, a DMA function
 interrupt is raised so the rx side resumes. Finally vldNum is bumped by
 the number of descriptors written.

 @param lchip           local chip id (remapped to core_pp_base)
 @param p_pkt_tx_array  array of packet pointers to send
 @param count           number of packets; must not exceed desc_depth

 @return CTC_E_NONE on success, CTC_E_NOT_SUPPORT / CTC_E_DMA / IOCTL errors
*/
int32
sys_at_dma_pkt_tx_array(uint8 lchip, ctc_pkt_tx_t** p_pkt_tx_array, uint32 count)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    uint32 pkt_idx = 0;
    ds_desc_at_t* p_tx_desc_mem;
    ctc_pkt_tx_t* p_pkt_tx = NULL;
    ds_desc_at_t desc_db;
    uint64 phy_addr;
    uint32 cur_index = 0;
    uint16 pkt_len = 0;
    uint16 desc_cnt = 0;
    uint32 vld_num = 0;
    uint32 tmp_index = 0;
    uint32 intr_vec[SYS_DMA_INTR_VEC] = {0};
    uint8 is_empty = 0;
    uint32 cmd = 0;


    lchip = p_usw_dma_master[lchip]->core_pp_base;

    /*1. get dma chan info, use SYS_DMA_PACKET_TX1_CHAN_ID defaultly */
    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX1_CHAN_ID]);
    if (count > p_dma_chan->desc_depth)
    {
#ifdef DMA_DBG_ON
       SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, "Count is more than desc-depth %u, not support\n", p_dma_chan->desc_depth);
#endif
        return CTC_E_NOT_SUPPORT;
    }
    cur_index = p_dma_chan->current_index;

    /*2. wait the end descriptor that current tx will use done  */
    /* if the descriptor at (cur_index+count) mod depth is done, every
       descriptor this batch will overwrite has already been consumed */
    tmp_index = (cur_index+count);
    tmp_index = (tmp_index >= p_dma_chan->desc_depth) ?(tmp_index-p_dma_chan->desc_depth):tmp_index;
    p_tx_desc_mem = (ds_desc_at_t*)&(p_dma_chan->p_desc[tmp_index].desc_info);
    CTC_ERROR_RETURN(_sys_at_dma_wait_desc_finish(lchip, p_tx_desc_mem, p_dma_chan));

    /*3. configure desc one by one*/
    for (pkt_idx = 0; pkt_idx < count; pkt_idx++)
    {
        if (p_dma_chan->p_desc_check[cur_index].pool_id)
        {
            uint32 data_address = 0;
            sal_fifo_t* p_data_fifo = NULL;
            /*recycle data address*/
            p_data_fifo =  p_usw_dma_master[lchip]->dma_chan_info[p_dma_chan->p_desc_check[cur_index].pool_id-1].p_data_fifo;
            if (NULL != p_data_fifo)
            {
                is_empty = sal_fifo_len(p_data_fifo) == 0;
                data_address = p_dma_chan->p_desc_check[cur_index].phy_address;
                sal_fifo_put(p_data_fifo, (uint8*)&data_address, sizeof(uint32));
                if (is_empty)
                {/* if buffer pool is empty before putting , need trigger DMA interrupt affer putting for rx process */
                    intr_vec[0] = 1<<(p_dma_chan->p_desc_check[cur_index].pool_id-1);/* rx channel id = pool_id - 1 */
                    cmd = DRV_IOW(DmaCtlIntrFunc0_t, DRV_ENTRY_FLAG);
                    CTC_ERROR_RETURN(DRV_IOCTL(lchip, INTR_INDEX_VAL_SET, DRV_CMD_PP_EN(cmd), intr_vec));
                }
            }

        }

        p_tx_desc_mem = (ds_desc_at_t*)&(p_dma_chan->p_desc[cur_index].desc_info);

        p_pkt_tx = p_pkt_tx_array[pkt_idx];

        /* use zero copy mode defaultly, skb.head is dma logic address */
        phy_addr = (p_pkt_tx->l2p_addr_func) ? p_pkt_tx->l2p_addr_func((void*)p_pkt_tx->skb.head, p_pkt_tx->l2p_user_data):\
            SYS_DMA_LOGIC_TO_PHY(lchip, (void*)p_pkt_tx->skb.head);
        pkt_len =p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN;

        /* clear valid before rewriting the descriptor so hardware never
           sees a half-written entry */
        p_tx_desc_mem->valid = 0;
        sal_memset(&desc_db, 0, sizeof(ds_desc_at_t));
        desc_db.u0.pkt_tx.sop = 1;
        desc_db.u0.pkt_tx.eop = 1;
        /* 20-bit cfg size is split across two fields */
        desc_db.cfg_size_20_11_0 = pkt_len&0xFFF;
        desc_db.cfg_size_20_19_12 = (pkt_len>>12)&0xFF;
        desc_db.mem_addr = (phy_addr >> 4);   /* 16-byte-aligned address field */
        desc_db.done = 0;
        desc_db.real_size = pkt_len-SYS_USW_PKT_HEADER_LEN;
        desc_db.u0.pkt_tx.dest_id = (p_pkt_tx->lchip - lchip) & 0x3;
        sal_memcpy(p_tx_desc_mem, &desc_db, 12);   /* first 12 bytes of the descriptor */
        p_tx_desc_mem->high_addr = p_usw_dma_master[lchip]->dma_high_addr;
        p_tx_desc_mem->valid = 1;
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_tx_desc_mem, sizeof(ds_desc_at_t));

        /* store phy_address & pool_id in db to recycle fifo when tx done */
        p_dma_chan->p_desc_check[cur_index].phy_address = phy_addr;
        p_dma_chan->p_desc_check[cur_index].pool_id = p_pkt_tx->skb.pool_id;

        desc_cnt++;
        cur_index++;
        if (cur_index >= p_dma_chan->desc_depth)
        {
            cur_index = 0;
        }
    }

    /*4. update current index*/
    p_dma_chan->current_index = cur_index;

    /*5. update desc vld_num */
#if (0 == SDK_WORK_PLATFORM)
    vld_num = desc_cnt;
    CTC_ERROR_RETURN(drv_ioctl_write_dma(lchip, p_dma_chan->channel_id, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &vld_num));
#else
    {
        /* uml: read-modify-write vldNum through the register model */
        uint32 valid_cnt = 0;
        cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num));
        valid_cnt = GetDmaCtlTab(V, vldNum_f, &vld_num);
        valid_cnt += desc_cnt;
        SetDmaCtlTab(V, vldNum_f, &vld_num, valid_cnt);
        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num));
    }
#endif

    return CTC_E_NONE;
}


/**
 @brief Transmit one packet on the given DMA tx channel, splitting it across
        several descriptors when it exceeds the channel's data_size.

 For each descriptor slot the function first waits for the previous transmit
 on that slot to finish, recycles the slot's attached rx-pool buffer (if any)
 back to its data fifo (raising a DMA function interrupt when the fifo was
 empty), and invokes any pending tx-done callback. It then fills the slot
 either zero-copy (single descriptor, caller-supplied DMA-able buffer) or by
 copying a data_size-sized fragment into the channel's tx memory. On EOP the
 vldNum counter is bumped to kick hardware.

 @param lchip       local chip id (already remapped to core_pp_base by caller)
 @param p_pkt_tx    packet to send (skb.head must include the packet header)
 @param p_dma_chan  tx channel selected by the caller; caller holds its lock

 @return CTC_E_NONE on success, CTC_E_DMA on descriptor timeout,
         CTC_E_INVALID_PARAM on a misaligned zero-copy address
*/
STATIC INLINE int32
_sys_at_dma_do_packet_tx(uint8 lchip, ctc_pkt_tx_t* p_pkt_tx, sys_dma_chan_t* p_dma_chan)
{
    ds_desc_at_t* p_tx_desc_mem;
    ds_desc_at_t desc_db;
    sys_dma_tx_mem_t* p_tx_mem_info;
    uint64 phy_addr;
    int32 ret = 0;
    uint32 cur_index = p_dma_chan->current_index;
    uint16 data_size = p_dma_chan->data_size;
    uint16 left_pkt_len = p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN;
    uint16 pkt_len_offset = 0;
    uint16 pkt_len = 0;
    uint8 is_sop = 0;
    uint8 is_eop = 0;
    uint8 desc_cnt = 0;
    uint32 desc_depth = p_dma_chan->desc_depth;
    uint32 vld_num = 0;
    uint32 last_index = cur_index;
    uint32 intr_vec[SYS_DMA_INTR_VEC] = {0};
    uint8 is_empty = 0;
    uint32 cmd = 0;


    sal_memset(&desc_db, 0, sizeof(ds_desc_at_t));
    do
    {
        if (cur_index >= desc_depth)
        {
            cur_index = 0;
        }

        p_tx_desc_mem = (ds_desc_at_t*)&(p_dma_chan->p_desc[cur_index].desc_info);
        p_tx_mem_info = p_dma_chan->p_tx_mem_info + cur_index;

        /* make sure the previous tx on this slot finished before reuse */
        ret = _sys_at_dma_wait_desc_finish(lchip, p_tx_desc_mem, p_dma_chan);
        if (ret == CTC_E_NONE && p_dma_chan->p_desc_check[cur_index].pool_id)
        {
            uint32 data_address = 0;
            sal_fifo_t* p_data_fifo = NULL;
            /*recycle data address*/
            p_data_fifo =  p_usw_dma_master[lchip]->dma_chan_info[p_dma_chan->p_desc_check[cur_index].pool_id-1].p_data_fifo;
            if (NULL != p_data_fifo)
            {
                is_empty = sal_fifo_len(p_data_fifo) == 0;
                data_address = p_dma_chan->p_desc_check[cur_index].phy_address;
                sal_fifo_put(p_data_fifo, (uint8*)&data_address, sizeof(uint32));
                if (is_empty)
                {/* if buffer pool is empty before putting , need trigger DMA interrupt affer putting for rx process */
                    intr_vec[0] = 1<<(p_dma_chan->p_desc_check[cur_index].pool_id-1);/* rx channel id = pool_id - 1 */
                    cmd = DRV_IOW(DmaCtlIntrFunc0_t, DRV_ENTRY_FLAG);
                    CTC_ERROR_RETURN(DRV_IOCTL(lchip, INTR_INDEX_VAL_SET, DRV_CMD_PP_EN(cmd), intr_vec));
                }
            }
        }
        /* fire the tx-done callback left by the previous user of this slot */
        if (p_tx_mem_info->callback)
        {
            p_tx_mem_info->callback(p_tx_mem_info->p_pkt_addr, p_tx_mem_info->user_data);
        }
        if (ret != CTC_E_NONE)
        {
            return ret;
        }

        is_sop = (0 == desc_cnt)?1:0;

        p_tx_mem_info->callback = p_pkt_tx->callback;
        p_tx_mem_info->user_data = p_pkt_tx->user_data;
        p_tx_mem_info->p_pkt_addr = p_tx_mem_info->callback ? p_pkt_tx->skb.data : NULL;
        if(p_pkt_tx->tx_info.flags & CTC_PKT_FLAG_ZERO_COPY)
        {
            is_eop = 1;/*zero copy must only use one desc*/
            phy_addr = (p_pkt_tx->l2p_addr_func) ? p_pkt_tx->l2p_addr_func((void*)p_pkt_tx->skb.head, p_pkt_tx->l2p_user_data):\
                SYS_DMA_LOGIC_TO_PHY(lchip, (void*)p_pkt_tx->skb.head);

            /* caller-translated addresses must sit in the configured high-address
               window and be 16-byte aligned (mem_addr drops the low 4 bits) */
            if(p_pkt_tx->l2p_addr_func && (p_usw_dma_master[lchip]->dma_high_addr != (phy_addr>>32) || (0 != (phy_addr&0xF))))
            {
                return CTC_E_INVALID_PARAM;
            }
            pkt_len = left_pkt_len;
            left_pkt_len = 0;
        }
        else
        {
            void* new_addr = p_tx_mem_info->p_mem_addr;
            if(p_pkt_tx->skb.len< SYS_USW_PKT_MIN_PKT_LEN )
            {
                /* pad short packets up to the minimum packet length */
                sal_memcpy((uint8*)new_addr, p_pkt_tx->skb.head, p_pkt_tx->skb.len+SYS_USW_PKT_HEADER_LEN);
                sal_memset((uint8*)new_addr + p_pkt_tx->skb.len + SYS_USW_PKT_HEADER_LEN, 0,SYS_USW_PKT_MIN_PKT_LEN - p_pkt_tx->skb.len);
                is_eop = 1;
                pkt_len = SYS_USW_PKT_MIN_PKT_LEN+SYS_USW_PKT_HEADER_LEN;
            }
            else
            {
                /* copy at most data_size bytes per descriptor */
                is_eop = (left_pkt_len<= data_size)?1:0;
                pkt_len = is_eop?left_pkt_len:data_size;
                sal_memcpy((uint8*)new_addr, p_pkt_tx->skb.head+pkt_len_offset, pkt_len);
            }
            SYS_USW_DMA_CACHE_FLUSH(lchip, new_addr, pkt_len);

            if (0 == is_eop)
            {
                pkt_len_offset += data_size;
                left_pkt_len = (left_pkt_len > data_size)?(left_pkt_len-data_size):0;
            }
            else
            {
                pkt_len_offset = 0;
                left_pkt_len = 0;
            }
            phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, new_addr);
        }
        /* clear valid first so hardware never sees a half-written descriptor */
        p_tx_desc_mem->valid = 0;
        desc_db.u0.pkt_tx.sop = is_sop;
        desc_db.u0.pkt_tx.eop = is_eop;
        /* 20-bit cfg size split across two fields */
        desc_db.cfg_size_20_11_0 = pkt_len&0xFFF;
        desc_db.cfg_size_20_19_12 = (pkt_len>>12)&0xFF;
        desc_db.mem_addr = (phy_addr >> 4);
        desc_db.done = 0;
        desc_db.real_size = pkt_len-SYS_USW_PKT_HEADER_LEN;
        desc_db.u0.pkt_tx.dest_id = (p_pkt_tx->lchip - lchip) & 0x3;
        sal_memcpy(p_tx_desc_mem, &desc_db, 12);
        p_tx_desc_mem->high_addr = p_usw_dma_master[lchip]->dma_high_addr;
        p_tx_desc_mem->valid = 1;
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_tx_desc_mem, sizeof(ds_desc_at_t));

        /* store phy_address & pool_id in db to recycle fifo when tx done */
        p_dma_chan->p_desc_check[cur_index].phy_address = phy_addr;
        p_dma_chan->p_desc_check[cur_index].pool_id = p_pkt_tx->skb.pool_id;

        desc_cnt++;
        cur_index++;

        if(is_eop)
        {
            #if (0 == SDK_WORK_PLATFORM)
            {
                vld_num = desc_cnt;
                CTC_ERROR_RETURN(drv_ioctl_write_dma(lchip, p_dma_chan->channel_id,  (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &vld_num));
            }
            #else
            {
                uint32 valid_cnt = 0;
                cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
                /* fix: wrap with DRV_CMD_PP_EN like every other DmaCtlTab
                   IOCTL in this file (read in sys_at_dma_pkt_tx_array and
                   the paired write below both use it) */
                CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num));
                valid_cnt = GetDmaCtlTab(V, vldNum_f, &vld_num);
                valid_cnt += 1;
                SetDmaCtlTab(V, vldNum_f, &vld_num, valid_cnt);
                cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
                CTC_ERROR_RETURN(DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num));
            }
            #endif

            /* synchronous zero-copy without callback: wait for completion so
               the caller may reuse its buffer immediately */
            if(!p_pkt_tx->callback && (p_pkt_tx->tx_info.flags & CTC_PKT_FLAG_ZERO_COPY) && 0 == p_dma_chan->p_desc_check[last_index].pool_id)
            {
                ret = _sys_at_dma_wait_desc_finish(lchip, p_tx_desc_mem, p_dma_chan);
                if (ret != CTC_E_NONE)
                {
                    return ret;
                }
                p_tx_desc_mem->mem_addr = 0;
            }
            /* next descriptor, tx_desc_index: 0~tx_desc_num-1*/
            p_dma_chan->current_index = (cur_index)% desc_depth;
        }
    }while(left_pkt_len>0);
    return ret;
}

/**
 @brief Send one packet via DMA, selecting the tx channel by packet priority.

 @param lchip     caller's local chip id (remapped to the master's core_pp_base)
 @param p_pkt_tx  packet to transmit

 @return CTC_E_NONE on success, CTC_E_NOT_SUPPORT when tx channels are
         disabled, otherwise the underlying tx error
*/
int32
sys_at_dma_pkt_tx(uint8 lchip, ctc_pkt_tx_t* p_pkt_tx)
{
    sys_dma_chan_t* p_chan = NULL;
    uint8 tx_chan = 0;
    int32 ret = CTC_E_NONE;

    SYS_DMA_INIT_CHECK(lchip);
    lchip = p_usw_dma_master[p_pkt_tx->lchip]->core_pp_base;

    /* both packet-tx channels must be enabled */
    if (!CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX0_CHAN_ID)
        || !CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, SYS_DMA_PACKET_TX1_CHAN_ID))
    {
#ifdef DMA_DBG_ON
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, "Dma Packet Tx  Function is not enabled!!!\n");
#endif
        return CTC_E_NOT_SUPPORT;
    }

    /* low priority -> channel 0, high priority -> channel 1 */
    tx_chan = (p_pkt_tx->tx_info.priority <= SYS_DMA_RING_SELECT_PRIROITY) ? 0 : 1;
    p_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PACKET_TX0_CHAN_ID + tx_chan]);

    DMA_TX_LOCK(p_chan->p_mutex);
    ret = _sys_at_dma_do_packet_tx(lchip, p_pkt_tx, p_chan);
    DMA_TX_UNLOCK(p_chan->p_mutex);

    return ret;
}

/**
 @brief Drain completed rx descriptors on one DMA channel and deliver packets.

 Walks the ring from current_index, assembling one packet from SOP to EOP
 (up to 64 buffer fragments) and handing each complete packet to the
 registered dma_rx_cb. When the channel owns a data fifo, each consumed
 buffer is replaced by a fresh one pulled from the fifo before the
 descriptor is recycled. Descriptors are returned to hardware in batches
 of p_dma_chan->threshold via DmaCtlTab writes. Packets that lose their
 SOP or EOP are dropped and their descriptors recycled.

 @param lchip  local chip id
 @param chan   rx channel index within dma_chan_info

 @return CTC_E_NONE (init-check macros may return early with an error)
*/
STATIC INLINE int32
_sys_at_dma_pkt_rx_func(uint8 lchip, uint8 chan)
{
    ctc_pkt_buf_t pkt_buf[64];   /* max 64 fragments per packet */
    ctc_pkt_rx_t pkt_rx;
    ctc_pkt_rx_t* p_pkt_rx = &pkt_rx;
    sys_dma_chan_t* p_dma_chan;
    sys_dma_desc_t* p_base_desc;
    volatile ds_desc_at_t* p_desc;   /* hardware-written descriptor */
    ds_desc_at_t tmp_desc;
    uint32 cur_index;
    uint32 buf_count = 0;    /* fragments collected for the current packet */
    uint64 phy_addr;
    uint32 process_cnt = 0;  /* fragments delivered since entry */
    uint32 is_sop;
    uint32 is_eop;
    uint8 need_eop = 0;      /* set between SOP and EOP of a packet */
    uint32 wait_cnt = 0;
    uint32 desc_count = 0;   /* recycled descs not yet returned to hardware */
    uint32 desc_depth = 0;
    uint8 data_fifo_valid = 0;
#if (1 == SDK_WORK_PLATFORM)
    uint32 cmd = 0;
#endif

    sal_memset(p_pkt_rx, 0, sizeof(ctc_pkt_rx_t));
    p_pkt_rx->mode = CTC_PKT_MODE_DMA;
    p_pkt_rx->pkt_buf = pkt_buf;
    p_pkt_rx->lchip = lchip;

#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif

    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan];
    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;
    desc_depth = p_dma_chan->desc_depth;
    data_fifo_valid = p_dma_chan->p_data_fifo?1:0;

    for (;; cur_index++)
    {
#ifndef CTC_HOT_PLUG_DIS
        SYS_DMA_INIT_CHECK(lchip);

        if (sys_usw_chip_check_active(lchip))
        {
            break;
        }
#endif

        if (cur_index >= desc_depth)
        {
            cur_index = 0;
        }

        p_desc = (ds_desc_at_t*)&(p_base_desc[cur_index].desc_info);
        if (0 == p_desc->done)
        {
            if (need_eop)
            {
                /* mid-packet: give hardware a bounded chance to finish */
#ifdef DMA_DBG_ON
                 SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Desc not done, But need eop!!desc index %d\n", cur_index);
#endif
                wait_cnt = 0;
                while(wait_cnt < 0xffff)
                {
                    SYS_USW_DMA_CACHE_INVALID(lchip, p_base_desc, sizeof(sys_dma_desc_t)*desc_depth);
                    if (p_desc->done)
                    {
                        break;
                    }
                    wait_cnt++;
                }

                /* Cannot get EOP, means no EOP packet error, just clear desc*/
                if (wait_cnt >= 0xffff)
                {
#ifdef DMA_DBG_ON
                    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "No EOP, desc index %d, buf_count %d\n", cur_index, buf_count);
#endif
                    /* write DmaCtlTab_t to recycle desciptors, for uml, write desc-depth */
                    if(desc_count)
                    {
                        SYS_USW_DMA_CACHE_FLUSH(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*desc_depth);
#if (0 == SDK_WORK_PLATFORM)
                        drv_ioctl_write_dma(lchip, p_dma_chan->channel_id, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &desc_count);
#else
                        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
                        DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &desc_depth);
#endif
                    }
                    desc_count = 0;
                    buf_count = 0;
                    need_eop = 0;
                    break;
                }
            }
            else
            {
                /* no pending packet: flush recycled descs and stop */
#ifdef DMA_DBG_ON
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc index %d\n", process_cnt, cur_index);
#endif
                if(desc_count)
                {
                    SYS_USW_DMA_CACHE_FLUSH(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*desc_depth);
#if (0 == SDK_WORK_PLATFORM)
                    drv_ioctl_write_dma(lchip, p_dma_chan->channel_id, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &desc_count);
#else
                    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
                    DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &desc_depth);
#endif
                }
                desc_count = 0;
                break;
            }
        }

        /* snapshot the first word of the volatile descriptor */
        *((uint32*)&tmp_desc) = *((uint32*)p_desc);
        is_sop = tmp_desc.u0.pkt_tx.sop;
        is_eop = tmp_desc.u0.pkt_tx.eop;
        if (is_sop)
        {
            /*Before get EOP, next packet SOP come, no EOP packet error, drop error packet */
            if (need_eop)
            {
                buf_count = 0;
            }
            p_pkt_rx->pkt_len = 0;
            need_eop = 1;
        }

        /* Cannot get SOP, means no SOP packet error, just clear desc*/
        if (0 == buf_count)
        {
            if (0 == is_sop)
            {
#ifdef DMA_DBG_ON
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "[DMA] PKT RX error, lchip %d chan %d index %d first is not SOP\n", lchip, chan, cur_index);
#endif
                goto error_proc;
            }
        }

        /* append this fragment to the packet being assembled */
        p_pkt_rx->pkt_buf[buf_count].data = (uint8 *)(p_dma_chan->p_desc_info[cur_index].data_addr);
        p_pkt_rx->pkt_buf[buf_count].len = p_desc->real_size;
        p_pkt_rx->pkt_len += p_pkt_rx->pkt_buf[buf_count].len;

        if (data_fifo_valid)
        {
            uint32 new_address = 0;
            uint32 len = 0;

            p_pkt_rx->pkt_buf[buf_count].pool_id = (chan+1);
            /*step1:alloc new data address*/
            len = sal_fifo_get(p_dma_chan->p_data_fifo, (unsigned char*)&new_address, sizeof(uint32));
            if (len < sizeof(uint32))
            {
                /* pool exhausted: flush recycled descs and bail out */
                if(desc_count)
                {
                    SYS_USW_DMA_CACHE_FLUSH(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*desc_depth);
#if (0 == SDK_WORK_PLATFORM)
                    drv_ioctl_write_dma(lchip, p_dma_chan->channel_id, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &desc_count);
#else
                    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
                    DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &desc_depth);
#endif
                }
                goto error_proc2;
            }

            /*step2:set new address to desc*/
            p_desc->mem_addr = new_address >> 4;

            /*step3:update logic address in db*/
            COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr, new_address, phy_addr);
            p_dma_chan->p_desc_info[cur_index].data_addr =   SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
        }

        /*Max desc num for one packet is 64, so, buf_count % 64*/
        buf_count = ((buf_count+1) & 0x3f);

        if (is_eop)
        {
            /* complete packet: hand it to the registered rx callback */
            p_pkt_rx->buf_count = buf_count;
            p_pkt_rx->dma_chan = chan;
            SYS_DMA_CB_IN_CNT_ADD(lchip, chan);
            p_usw_dma_master[lchip]->dma_rx_cb(p_pkt_rx);
            SYS_DMA_CB_OUT_CNT_ADD(lchip, chan);
            process_cnt += buf_count;
            buf_count = 0;
            need_eop = 0;
        }

error_proc:
        /* recycle the descriptor for hardware */
        p_desc->done = 0;
        #if(1 == SDK_WORK_PLATFORM)
            /*Uml need clear realsize*/
            p_desc->real_size = 0;
        #endif

        desc_count++;
        /* return descriptors in batches, but never mid-packet */
        if(desc_count >= p_dma_chan->threshold && (!need_eop))
        {
            SYS_USW_DMA_CACHE_FLUSH(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*desc_depth);
#if (0 == SDK_WORK_PLATFORM)
            drv_ioctl_write_dma(lchip, p_dma_chan->channel_id, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &desc_count);
#else
            cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
            DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &desc_depth);
#endif
            desc_count = 0;
            /* bound one invocation to at most a full ring of fragments */
            if(process_cnt >= desc_depth )
            {
                cur_index++;
                break;
            }
        }
    }

error_proc2:
#ifndef CTC_HOT_PLUG_DIS
    SYS_DMA_INIT_CHECK(lchip);
#endif
    p_dma_chan->current_index = (cur_index>=desc_depth)?(cur_index%desc_depth):cur_index;

    return CTC_E_NONE;
}

/**
 @brief Rx entry for one DMA channel: drain descriptors, then unmask the isr.

 When kernel-network mode owns the channel, no user-space processing is
 done and only the channel interrupt is re-enabled.

 @param lchip        local chip id
 @param chan_id      rx channel index within dma_chan_info
 @param thread_info  unused in the visible code path

 @return CTC_E_NONE, or the rx-processing error
*/
int32
sys_at_dma_pkt_rx(uint8 lchip, uint8 chan_id, void* thread_info)
{
    int32 ret = CTC_E_NONE;
    uint32 intr_vec[SYS_DMA_INTR_VEC] = {0};
    sys_dma_chan_t* p_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan_id];

    if (p_chan->pkt_knet_en)
    {
        /* kernel-network handles the packets; just release the masked isr */
        SYS_USW_DMA_INTR_ENABLE(lchip, DmaCtlIntrFunc0_t, intr_vec, chan_id);
        return CTC_E_NONE;
    }

    ret = _sys_at_dma_pkt_rx_func(lchip, chan_id);
#ifndef CTC_HOT_PLUG_DIS
    if (CTC_E_NOT_INIT == ret)
    {
        /* chip removed while processing: leave the isr masked */
        return CTC_E_NOT_INIT;
    }
#endif

    /* release mask channel isr */
    SYS_USW_DMA_INTR_ENABLE(lchip, DmaCtlIntrFunc0_t, intr_vec, chan_id);

    return ret;
}

/* timer unit is second; 0 means disable */
int32
sys_at_dma_set_pkt_timer(uint8 lchip, uint32 timer, uint8 enable)
{
    uint32 cmd = 0;
    DmaStaticInfo_m static_info;
    uint32 session_num = 0;
    uint32 field_val = 0;
    uint8 tx_session_chan = 0;
    DmaPktTx3Ctl_m pkttx_ctl;

    tx_session_chan = SYS_DMA_PKT_TX_TIMER_CHAN_ID;

    if (!p_usw_dma_master[lchip]->pkt_tx_timer_en || !p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].p_desc)
    {
        return CTC_E_INVALID_CONFIG;
    }

    if (enable == TRUE)
    {
        if (!p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].desc_num)
        {
            return CTC_E_NOT_READY;
        }

        if (p_usw_dma_master[lchip]->tx_timer && (p_usw_dma_master[lchip]->tx_timer != timer))
        {
            return CTC_E_INVALID_CONFIG;
        }

        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &static_info));
        if (GetDmaStaticInfo(V, chanEn_f, &static_info))
        {
            return CTC_E_HW_BUSY;
        }

        /*cfg real ring depth*/
        session_num = p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].desc_depth;

        cmd = DRV_IOW(DmaStaticInfo_t, DmaStaticInfo_ringDepth_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &session_num));

#if (0 == SDK_WORK_PLATFORM)
        CTC_ERROR_RETURN(drv_ioctl_read_dma(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &field_val));
        if (field_val < session_num)
        {
            field_val = (session_num - field_val);
            CTC_ERROR_RETURN(drv_ioctl_write_dma(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &field_val));
        }
#else
        cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &field_val));
        if (field_val < session_num)
        {
            field_val = (session_num - field_val);
            cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
            CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &field_val));
        }
#endif
        cmd = DRV_IOR(DmaPktTx3Ctl_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkttx_ctl));
        SetDmaPktTx3Ctl(V, dmaAutoMode_f, &pkttx_ctl, 1);
        SetDmaPktTx3Ctl(V, cfgPktTxByteOrderEn_f, &pkttx_ctl, ((drv_get_host_type(lchip) == HOST_LE) ? 1 : 0));
        SetDmaPktTx3Ctl(V, cfgPktTxByteOrderNum_f, &pkttx_ctl, (SYS_USW_PKT_HEADER_LEN / sizeof(uint32) - 1));
        cmd = DRV_IOW(DmaPktTx3Ctl_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkttx_ctl));

        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &static_info));
        SetDmaStaticInfo(V, chanEn_f, &static_info, 1);
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &static_info));
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].chan_en = 1;
    }
    else
    {
        uint32 cnt = 0;
        uint32 clear_done = 0;

        cmd = DRV_IOR(DmaPktTx3Ctl_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkttx_ctl));
        SetDmaPktTx3Ctl(V, dmaAutoMode_f, &pkttx_ctl, 0);
        cmd = DRV_IOW(DmaPktTx3Ctl_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &pkttx_ctl));

        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &static_info));
        SetDmaStaticInfo(V, chanEn_f, &static_info, 0);
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &static_info));
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].chan_en = 0;
        /* clear dma channel dynmamic*/

        cnt = 0;
        do
        {
            cmd = DRV_IOR(DmaDynInfo_t, DmaDynInfo_cacheCnt_f);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &field_val));
            cnt++;
        } while (field_val && (cnt < 0xffff));
        if (field_val)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Tx timer channel dyninfo cachecnt:%u\n", field_val);
            return CTC_E_DMA;
        }
        field_val = 1;
        cmd = DRV_IOW(DmaPktTx3ClearCtl_t, DmaPktTx3ClearCtl_dmaClearEn_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &field_val));

        cnt = 0;
        do
        {
            cmd = DRV_IOR(DmaPktTx3ClearCtl_t, DmaPktTx3ClearCtl_dmaClearPending_f);
            CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &field_val));
            clear_done = !field_val;
            cnt++;
        } while (!clear_done && (cnt < 0xffff));

        cmd = DRV_IOR(DmaPktTx3ClearCtl_t, DmaPktTx3ClearCtl_dmaClearPending_f);
        CTC_ERROR_RETURN(DRV_FIELD_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), &field_val));
        clear_done = !field_val;
        if (!clear_done)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " Tx timer channel cannot clear cnt:%u\n", cnt);
            return CTC_E_DMA;
        }

        field_val = 0;
#if (0 == SDK_WORK_PLATFORM)
        CTC_ERROR_RETURN(drv_ioctl_write_dma(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &field_val));
#else
        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, DRV_CMD_PP_EN(cmd), &field_val));

#endif
    }

    return CTC_E_NONE;
}

/**
 @brief Attach a packet to (or detach it from) a periodic-tx timer session.

 When CTC_PKT_FLAG_SESSION_PENDING_EN is NOT set in p_pkt->tx_info.flags the
 session is (re)armed: a DMA buffer holding header+payload is installed into
 the timer channel descriptor identified by the session. When the flag is set
 the session is disarmed and its buffer released.

 @param lchip       local chip id
 @param session_id  timer session index (currently doubles as the desc index)
 @param p_pkt       packet to transmit; skb.head must hold header + payload
 @return CTC_E_NONE on success, error code otherwise

 Fix vs. previous revision: the new packet buffer is now allocated and filled
 BEFORE the channel is paused and the old buffer freed. The old order left the
 channel paused and the session pointing at freed memory whenever the
 allocation failed.
*/
int32
sys_at_dma_set_session_pkt(uint8 lchip, uint16 session_id, ctc_pkt_tx_t* p_pkt)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    ds_desc_at_t* p_sys_desc = NULL;
    ds_desc_at_t tmp_desc;
    uint64 phy_addr = 0;
    void*  p_mem_addr = NULL;
    uint32 desc_idx = 0;
    uint8 tx_enable = 0;
    uint8 tx_session_chan = 0;

    tx_session_chan = SYS_DMA_PKT_TX_TIMER_CHAN_ID;
    /* feature must be enabled and the timer ring created first */
    if (!p_usw_dma_master[lchip]->pkt_tx_timer_en || !p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].p_desc)
    {
        return CTC_E_INVALID_CONFIG;
    }

    tx_enable = !CTC_FLAG_ISSET(p_pkt->tx_info.flags, CTC_PKT_FLAG_SESSION_PENDING_EN);

    /* payload must fit into the per-session buffer configured at ring create */
    if (p_pkt->skb.len > p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan].data_size-SYS_USW_PKT_HEADER_LEN)
    {
        return CTC_E_INVALID_PARAM;
    }

    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[tx_session_chan];
    if (tx_enable)
    {
        uint32* logic_addr = NULL;

        /*enable session tx*/
        if (p_usw_dma_master[lchip]->tx_session[session_id].state)
        {
            /*session already tx enable, update desc info*/
            desc_idx = p_usw_dma_master[lchip]->tx_session[session_id].desc_idx;
        }
        else
        {
            /*get free desc index, temp using session id as desc index*/
            desc_idx = session_id;
        }

        if (desc_idx >= p_dma_chan->desc_depth)
        {
            return CTC_E_NONE;
        }

        /* Allocate and fill the replacement buffer first: an allocation
         * failure here leaves the session and the hardware untouched. */
        p_mem_addr = SYS_DMA_ALLOC(lchip, (p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN), 0);
        if (NULL == p_mem_addr)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_mem_addr, 0, (p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN));
        /* low 32 bits only; the high part goes into the desc high_addr field */
        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
        sal_memcpy((uint8*)p_mem_addr, p_pkt->skb.head, p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN);

        /* quiesce the channel before touching a live descriptor */
        sys_tmm_dma_function_pause(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, 1);
        sal_task_sleep(10);

        /*free old data memory, now that the replacement is ready*/
        if (p_usw_dma_master[lchip]->tx_session[session_id].state)
        {
            logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, p_usw_dma_master[lchip]->tx_session[session_id].phy_addr);
            SYS_DMA_FREE(lchip, logic_addr);
        }

        /* use tmp variable, reduce the times of accessing dma memory */
        p_sys_desc = (ds_desc_at_t*)&(p_dma_chan->p_desc[desc_idx].desc_info);
        sal_memset(&tmp_desc, 0, sizeof(ds_desc_at_t));
        tmp_desc.mem_addr = (phy_addr>>4);
        tmp_desc.cfg_size_20_11_0 = (p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN)&0xFFF;
        tmp_desc.cfg_size_20_19_12 = ((p_pkt->skb.len + SYS_USW_PKT_HEADER_LEN)>>12)&0xFF;
        tmp_desc.u0.pkt_tx.sop = 1;
        tmp_desc.u0.pkt_tx.eop = 1;
        tmp_desc.u0.pkt_tx.dest_id = (p_pkt->lchip - lchip) & 0x3;
        sal_memcpy(p_sys_desc, &tmp_desc, 12);
        p_sys_desc->high_addr = p_usw_dma_master[lchip]->dma_high_addr;

        SYS_USW_DMA_CACHE_FLUSH(lchip, p_sys_desc, sizeof(ds_desc_at_t));
        sys_tmm_dma_function_pause(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, 0);

        if (!p_usw_dma_master[lchip]->tx_session[session_id].state)
        {
            p_usw_dma_master[lchip]->tx_session[session_id].desc_idx = desc_idx;
        }

        p_usw_dma_master[lchip]->tx_session[session_id].phy_addr = phy_addr;
    }
    else
    {
        uint32* logic_addr = NULL;

        if (!p_usw_dma_master[lchip]->tx_session[session_id].state)
        {
            return CTC_E_NONE;
        }

        sys_tmm_dma_function_pause(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, 1);
        sal_task_sleep(10);
        /*disable session tx: blank out the descriptor so the timer skips it*/
        desc_idx = p_usw_dma_master[lchip]->tx_session[session_id].desc_idx;
        p_sys_desc = (ds_desc_at_t*)&(p_dma_chan->p_desc[desc_idx].desc_info);

        p_sys_desc->mem_addr = 0;
        p_sys_desc->cfg_size_20_11_0 = 0;
        p_sys_desc->cfg_size_20_19_12 = 0;
        p_sys_desc->u0.pkt_tx.sop = 0;
        p_sys_desc->u0.pkt_tx.eop = 0;
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_sys_desc, sizeof(ds_desc_at_t));
        sys_tmm_dma_function_pause(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, 0);

        /*free data memory*/
        logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, p_usw_dma_master[lchip]->tx_session[session_id].phy_addr);
        SYS_DMA_FREE(lchip, logic_addr);
        p_usw_dma_master[lchip]->tx_session[session_id].phy_addr = 0;
    }

    p_usw_dma_master[lchip]->tx_session[session_id].state = tx_enable;

    return CTC_E_NONE;
}

/**
 @brief Copy hash-dump results from the hash-key DMA channel into caller memory.

 Walks the descriptor ring of SYS_DMA_HASHKEY_CHAN_ID starting at the channel's
 current_index, polls each descriptor's done bit (1 ms sleep per try, up to
 1000 tries), copies the dumped CpuInfoDump entries into p_data, and stops once
 p_pa->entry_count entries have been gathered.

 @param p_param     dma_dump_cb_parameter_t*, supplies entry_count (how many
                    entries the caller wants)
 @param p_entry_num out: number of entries actually written to p_data
 @param p_data      out buffer of CpuInfoDump_m entries; caller must size it
                    for at least entry_count entries
 @return CTC_E_NONE on success, CTC_E_DMA if a descriptor never completes

 NOTE(review): process_cnt is uint8, so a ring deeper than 255 descriptors
 consumed in one call would wrap the current_index advance below -- confirm
 desc_depth for this channel stays <= 255.
*/
int32
sys_at_dma_sync_hash_dump(uint8 lchip, void* p_param, uint16* p_entry_num, void* p_data )
{
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    ds_desc_at_t* p_desc = NULL;
    CpuInfoDump_m* p_cpu_info_start = NULL;  /* write cursor into p_data */
    CpuInfoDump_m* p_cpu_info_dump = NULL;   /* source: per-desc DMA data buffer */
    uint32 cur_index = 0;
    uint8 process_cnt = 0;   /* descriptors consumed this call */
    uint16 wait_cnt = 0;
    uint8 dma_done = 0;
    uint32 real_size = 0;    /* entries reported done in the current desc */
    uint32 entry_num = 0;    /* entries gathered so far */
    int32 ret = 0;
    uint8 end_flag = 0;
    uint32 vld_num = 0;
    dma_dump_cb_parameter_t* p_pa = (dma_dump_cb_parameter_t*)p_param;

    p_cpu_info_start = (CpuInfoDump_m*)p_data;
    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_HASHKEY_CHAN_ID];
    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;

    for(;; cur_index++)
    {
        dma_done = 0;
        wait_cnt = 0;
        /* ring wrap */
        if (cur_index >= p_dma_chan->desc_depth)
        {
            cur_index = 0;
        }

        p_desc = (ds_desc_at_t*)&p_base_desc[cur_index].desc_info;
        /* poll the done bit; invalidate cache first so we see the DMA write */
        do
        {
            SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(ds_desc_at_t));
            if (p_desc->done)
            {
                dma_done = 1;
                break;
            }

            sal_task_sleep(1);
            wait_cnt++;

        }while(wait_cnt < 1000);

        if (dma_done == 0)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Desc is not done!! \n");
#endif
            ret = CTC_E_DMA;
            /* dma not done,  need end current dma operate */
            goto end;
        }

        process_cnt++;

        /* get current desc real size */
        real_size = p_desc->real_size;

        p_cpu_info_dump = (CpuInfoDump_m*)p_dma_chan->p_desc_info[cur_index].data_addr;
        entry_num += real_size;

        /* process next desc */
        if (entry_num >= p_pa->entry_count)
        {
            /* last desc may hold more entries than requested: trim the copy */
            real_size -= (entry_num-p_pa->entry_count);
            *p_entry_num = p_pa->entry_count;
            end_flag = 1;
        }

        sal_memcpy((uint8*)p_cpu_info_start, (uint8*)p_cpu_info_dump, TABLE_ENTRY_SIZE(lchip, CpuInfoDump_t)*real_size);
        p_cpu_info_start = (CpuInfoDump_m*)((uint8*)p_cpu_info_start + TABLE_ENTRY_SIZE(lchip, CpuInfoDump_t) * real_size);

        if (entry_num == p_pa->entry_count)
        {
            *p_entry_num = entry_num;
            end_flag = 1;
        }

        /* clear desc so it can be reused by the hardware */
        p_desc->done = 0;
        #if (1 == SDK_WORK_PLATFORM)
        p_desc->real_size = 0;
        #endif
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(ds_desc_at_t));

        /* return the descriptor(s) to the hardware (valid-count replenish) */
        #if (0 == SDK_WORK_PLATFORM)
        {
            vld_num = 1;
            drv_ioctl_write_dma(lchip, p_dma_chan->channel_id,  (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &vld_num);
        }
        #else
        {
            uint32 cmd = 0;

            vld_num = p_dma_chan->desc_depth;
            cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
            DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num);
        }
        #endif

        if (end_flag == 1)
        {
            break;
        }
    }

end:
    /* remember where the next call should resume in the ring */
    p_dma_chan->current_index = ((p_dma_chan->current_index + process_cnt) % (p_dma_chan->desc_depth));
    return ret;

}

/**
 @brief Create or destroy the packet-tx timer channel descriptor ring.

 Create: allocates a descriptor ring of max_session entries, programs the ring
 base/high address into DmaStaticInfo for SYS_DMA_PKT_TX_TIMER_CHAN_ID, and
 arms the DmaPktTx3 hardware trigger with a per-session period derived from
 interval. Destroy: frees every per-descriptor data buffer, the ring itself,
 and resets the channel bookkeeping and all tx sessions.

 @param lchip       local chip id
 @param max_session ring depth / number of timer sessions; must be non-zero on create
 @param interval    overall tx period (ms) spread across max_session sessions
 @param pkt_len     max payload per session packet (header added internally)
 @param is_destroy  0 = create, non-zero = destroy
 @return CTC_E_NONE on success, error code otherwise
*/
int32
sys_at_dma_set_packet_timer_cfg(uint8 lchip, uint16 max_session, uint16 interval, uint16 pkt_len, uint8 is_destroy)
{
    uint32 desc_num = 0;
    sys_dma_desc_t* p_sys_desc_pad = NULL;
    ds_desc_at_t* p_desc = NULL;
    uint32 phy_addr = 0;
    uint32 cmd = 0;
    DmaStaticInfo_m static_info;
    DmaPktTx3TrigCtl_m tx_timer;
    uint32 timer_v[2] = {0};
    uint64 timer = 0;

    if (!is_destroy)
    {
        /* max_session is both the ring depth and a divisor in the trigger
         * period computation: reject 0 to avoid a zero-sized allocation and
         * a divide-by-zero */
        if (0 == max_session)
        {
            return CTC_E_INVALID_PARAM;
        }

        if (p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].p_desc)
        {
            return CTC_E_IN_USE;
        }

        /*1. process packet tx channel*/
        p_sys_desc_pad = (sys_dma_desc_t*)SYS_DMA_ALLOC(lchip, (max_session) * sizeof(sys_dma_desc_t), 0);
        if (NULL == p_sys_desc_pad)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }

        sal_memset(p_sys_desc_pad, 0, sizeof(sys_dma_desc_t)*max_session);

        /* record the interval only after the ring memory is secured, so a
         * failed create leaves no stale timer value behind */
        p_usw_dma_master[lchip]->tx_timer = interval;

        /*3. cfg static infor for dma channel:MemBase, ring depth */
        phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_sys_desc_pad);
        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, cmd, &static_info));
        SetDmaStaticInfo(V, highBase_f, &static_info, p_usw_dma_master[lchip]->dma_high_addr);
        SetDmaStaticInfo(V, ringBase_f, &static_info, (phy_addr >> 4));
        cmd = DRV_IOW(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, SYS_DMA_PKT_TX_TIMER_CHAN_ID, cmd, &static_info));

        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].p_desc = p_sys_desc_pad;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].mem_base = 0;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].channel_id = SYS_DMA_PKT_TX_TIMER_CHAN_ID;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].desc_num = max_session;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].desc_depth = max_session;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].data_size = pkt_len+SYS_USW_PKT_HEADER_LEN;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].chan_en = 1;

        /*tm cfg packet tx timer: per-session period in hardware ticks*/
        timer = (uint64)interval*1000000/DOWN_FRE_RATE/max_session; /*1ms*/
        timer_v[0] = timer&0xFFFFFFFF;
        timer_v[1] = (timer >> 32) & 0xFFFFFFFF;
        cmd = DRV_IOR(DmaPktTx3TrigCtl_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, 0, cmd, &tx_timer));
        SetDmaPktTx3TrigCtl(A, cfgPktTx3TrigNs_f, &tx_timer, timer_v);
        SetDmaPktTx3TrigCtl(V, cfgPktTx3TrigEn_f, &tx_timer, 1);
        cmd = DRV_IOW(DmaPktTx3TrigCtl_t, DRV_ENTRY_FLAG);
        (DRV_IOCTL(lchip, 0, cmd, &tx_timer));
    }
    else
    {
        uint64 phy_addr = 0;
        uint32* logic_addr = NULL;
        uint8  chan_id  = SYS_DMA_PKT_TX_TIMER_CHAN_ID;

        if (!p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].p_desc)
        {
            return CTC_E_NONE;
        }

        p_sys_desc_pad = p_usw_dma_master[lchip]->dma_chan_info[chan_id ].p_desc;
        /*clear data: free each descriptor's packet buffer*/
        for (desc_num = 0; desc_num < p_usw_dma_master[lchip]->dma_chan_info[chan_id].desc_depth; desc_num++)
        {
            p_desc = (ds_desc_at_t*)&(p_sys_desc_pad[desc_num].desc_info);
            SYS_USW_DMA_CACHE_INVALID(lchip, p_desc, sizeof(ds_desc_at_t));
            COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr,             \
                            (uint32)(p_desc->mem_addr<<4), phy_addr);
            logic_addr = SYS_DMA_PHY_TO_LOGIC(lchip, phy_addr);
            if (logic_addr)
            {
                /*table wr desc data using only one data memory, just need free once*/
                SYS_DMA_FREE(lchip, logic_addr);
            }
        }

        SYS_DMA_FREE(lchip, p_sys_desc_pad);
        p_usw_dma_master[lchip]->dma_chan_info[chan_id].p_desc = NULL;
        p_usw_dma_master[lchip]->dma_chan_info[chan_id].mem_base = 0;
        p_usw_dma_master[lchip]->dma_chan_info[chan_id].channel_id = 0;
        p_usw_dma_master[lchip]->dma_chan_info[chan_id].desc_num = 0;
        p_usw_dma_master[lchip]->dma_chan_info[chan_id].desc_depth = 0;
        p_usw_dma_master[lchip]->dma_chan_info[chan_id].data_size = 0;
        p_usw_dma_master[lchip]->tx_timer = 0;
        p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_PKT_TX_TIMER_CHAN_ID].chan_en = 0;

        sal_memset(p_usw_dma_master[lchip]->tx_session, 0, sizeof(sys_dma_timer_session_t)*SYS_PKT_MAX_TX_SESSION);
    }

    return CTC_E_NONE;
}

/**
 @brief Program the monitor-buffer sync period on the RegRd3 DMA trigger.

 @param lchip     local chip id
 @param interval  period in ms; 0 disables the trigger
 @return CTC_E_NONE on success, CTC_E_INVALID_PARAM if the converted period
         exceeds the 48-bit hardware field
*/
int32
sys_at_dma_set_monitor_buf_sync_time(uint8 lchip, uint32 interval)
{
    DmaRegRd3Ctl_m rd3_ctl;
    DmaRegRd3TrigCfg_m trig_cfg;
    uint32 ns_words[2];
    uint32 cmd = 0;
    uint64 period = (uint64)interval * 1000 * 1000 / DOWN_FRE_RATE;

    /* the trigger period field is only 48 bits wide */
    if (period >> 48)
    {
        return CTC_E_INVALID_PARAM;
    }

    SYS_DMA_INIT_CHECK(lchip);

    /* enable the periodic trigger only for a non-zero interval */
    cmd = DRV_IOR(DmaRegRd3Ctl_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &rd3_ctl));
    SetDmaRegRd3Ctl(V, cfgRegRdTrigEn_f, &rd3_ctl, (0 != interval) ? 1 : 0);
    cmd = DRV_IOW(DmaRegRd3Ctl_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &rd3_ctl));

    /* cfg dma sync buffer cnt trigger function, default: 100 ms */
    ns_words[0] = (uint32)(period & CTC_MAX_UINT32_VALUE);
    ns_words[1] = (uint32)((period >> 32) & CTC_MAX_UINT32_VALUE);
    cmd = DRV_IOR(DmaRegRd3TrigCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_cfg));
    SetDmaRegRd3TrigCfg(A, cfgRegRd3TrigNs_f, &trig_cfg, ns_words);
    cmd = DRV_IOW(DmaRegRd3TrigCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_cfg));

    return CTC_E_NONE;
}

/**
 @brief Read back the monitor-buffer sync period from the RegRd3 trigger.

 @param lchip     local chip id
 @param interval  out: period in ms (ignored when NULL)
 @return CTC_E_NONE on success
*/
int32
sys_at_dma_get_monitor_buf_sync_time(uint8 lchip, uint32* interval)
{
    DmaRegRd3TrigCfg_m trig_cfg;
    uint32 ns_words[2] = {0};
    uint32 cmd = 0;
    uint64 period = 0;

    SYS_DMA_INIT_CHECK(lchip);

    /* fetch the 64-bit nanosecond period as two hardware words */
    cmd = DRV_IOR(DmaRegRd3TrigCfg_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &trig_cfg));
    GetDmaRegRd3TrigCfg(A, cfgRegRd3TrigNs_f, &trig_cfg, ns_words);
    period = ((uint64)ns_words[1] << 32) | ns_words[0];

    if (interval)
    {
        /* inverse of the set path: hardware ticks back to milliseconds */
        *interval = period * DOWN_FRE_RATE / (1000*1000);
    }
    return CTC_E_NONE;
}

/*only at support buffer scan*/
int32
sys_at_dma_init_reg_buffer_scan(uint8 lchip, void* p_tmp_chan_info)
{
    int32 ret = 0;
    uint16 tbl_idx = 0;
    uint8 pp_num = SYS_PP_NUM(lchip);
    uint8 core_num = drv_vchip_get_core_num(lchip);
    uint16 desc_start = 0;
    uint16 desc_end = 0;
    uint16 desc_index = 0;
    uint32 cfg_addr = 0;
    void*  p_mem_addr = NULL;
    ds_desc_at_t* p_desc;
    uint32 data_size = 0;
    uint16 bmp = 0;
    uint8 pp_id = 0;
    uint8 dp_id = 0;
    uint8 struct_sz = 0;
    uint64 phy_addr = 0;
    sys_dma_desc_t* p_sys_desc_pad;
    sys_dma_desc_info_t* p_sys_desc_info;
    uint32 tbl_id_arr[] = {
        DsErmPortCntMc_t,   /*CORE*/
        DsErmPortScCntMc_t, /*CORE*/
        DsErmQueueCntMc_t,   /*CORE*/
        DsErmPortTcCntUc_t, /*DP*/
        DsIrmPortCntUc_t,   /*DP*/
        DsIrmPortCntMc_t,   /*DP*/
        DsIrmPortTcCntUc_t, /*DP*/
        DsIrmPortTcCntMc_t, /*DP*/
        DsIrmPortScCntUc_t, /*DP*/
        DsIrmPortScCntMc_t, /*DP*/
    };
    sys_dma_chan_t* p_chan_info = (sys_dma_chan_t*)p_tmp_chan_info;
    DmaRegRd3Ctl_m reg_rd_ctl;
    uint32 cmd = 0;

    p_chan_info->desc_num = 0;
    p_chan_info->data_size = 0;

    ret = sal_mutex_create(&(p_chan_info->p_mutex));
    if (ret || !(p_chan_info->p_mutex))
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No resource in ASIC \n");
        return CTC_E_NO_RESOURCE;
    }

    for (tbl_idx = 0; tbl_idx < sizeof(tbl_id_arr) / sizeof(uint32); tbl_idx++)
    {
        if (MEM_TYPE_PER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            p_chan_info->desc_num += (pp_num * 2);
        }
        else if(MEM_TYPE_PEER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            p_chan_info->desc_num += SYS_VCHIP_DUAL_CORE_MODE(lchip) ? (pp_num * 2 * 2) : (pp_num * 2);
        }
        else if (MEM_TYPE_PER_PP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            p_chan_info->desc_num += pp_num;
        }
        else if(MEM_TYPE_PEER_PP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            p_chan_info->desc_num += SYS_VCHIP_DUAL_CORE_MODE(lchip) ? (pp_num * 2) : pp_num;
        }
        else /*core*/
        {
            p_chan_info->desc_num += core_num;
        }
    }
    /* cfg desc num */
    p_sys_desc_pad = (sys_dma_desc_t *)SYS_DMA_ALLOC(lchip, (p_chan_info->desc_num) * sizeof(sys_dma_desc_t), 0);
    if (NULL == p_sys_desc_pad)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
        return CTC_E_NO_MEMORY;
    }

    p_sys_desc_info = (sys_dma_desc_info_t *)mem_malloc(MEM_DMA_MODULE, (p_chan_info->desc_num) * sizeof(sys_dma_desc_info_t));
    if (!p_sys_desc_info)
    {
        SYS_DMA_FREE(lchip, p_sys_desc_pad);
        return CTC_E_NO_MEMORY;
    }
    sal_memset(p_sys_desc_info, 0, sizeof(sys_dma_desc_info_t) * p_chan_info->desc_num);
    sal_memset(p_sys_desc_pad, 0, sizeof(sys_dma_desc_t) * p_chan_info->desc_num);
    p_chan_info->p_desc_info = p_sys_desc_info;
    p_chan_info->p_desc =  p_sys_desc_pad;
    p_chan_info->desc_depth = p_chan_info->desc_num;

    for (tbl_idx = 0; tbl_idx < sizeof(tbl_id_arr) / sizeof(uint32); tbl_idx++)
    {
        desc_start = desc_end;
        data_size = DRV_TABLE_ENTRY_SIZE(lchip, tbl_id_arr[tbl_idx]) * DRV_TABLE_MAX_INDEX(lchip, tbl_id_arr[tbl_idx]);
        cfg_addr = DRV_TABLE_DATA_BASE(lchip, tbl_id_arr[tbl_idx], 0);
        struct_sz = TABLE_ENTRY_OFFSET(lchip, tbl_id_arr[tbl_idx])>>2;

        if (MEM_TYPE_PER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            desc_end += (pp_num * 2);
        }
        else if (MEM_TYPE_PEER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            data_size /= 2;
            if (SYS_VCHIP_DUAL_CORE_MODE(lchip))
            {
                desc_end += (pp_num * 2 * 2);
            }
            else
            {
                desc_end += (pp_num * 2);
            }
        }
        else if (MEM_TYPE_PER_PP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            desc_end += pp_num;
        }
        else if (MEM_TYPE_PEER_PP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
        {
            data_size /= 2;
            if (SYS_VCHIP_DUAL_CORE_MODE(lchip))
            {
                desc_end += pp_num * 2;
            }
            else
            {
                desc_end += pp_num;
            }
        }
        else /*core*/
        {
            desc_end += core_num;
        }
        /*use burst to read*/
        cfg_addr |= 0x1;
        p_chan_info->data_size += data_size;

        for (desc_index = desc_start; desc_index < desc_end; desc_index++)
        {
            if (MEM_TYPE_PER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
            {
                pp_id = (desc_index - desc_start) >> 1;
                dp_id = (desc_index - desc_start) & 0x1;
                /*DRV_OPER_BMP_CONTINUE_PP(pp_id, continue);*/
                bmp = (1 << (6 + (pp_id / PP_NUM_PER_CORE))) | (1 << (2 + pp_id)) | (1 << (dp_id));
            }
            else if (MEM_TYPE_PEER_DP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
            {
                if (SYS_VCHIP_DUAL_CORE_MODE(lchip))
                {
                    pp_id = ((desc_index - desc_start) >> 1) % drv_vchip_get_pp_num(lchip);
                }
                else
                {
                    pp_id = ((desc_index - desc_start) >> 1);
                }
                dp_id = (desc_index - desc_start) & 0x1;
                /*DRV_OPER_BMP_CONTINUE_PP(pp_id, continue);*/
                bmp = (1 << (6 + (pp_id / PP_NUM_PER_CORE))) | (1 << (2 + pp_id)) | (1 << (dp_id));
            }
            else if (MEM_TYPE_PER_PP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
            {
                dp_id = 0;
                pp_id = (desc_index - desc_start);
                //DRV_OPER_BMP_CONTINUE_PP(pp_id, continue);
                bmp = (1 << (6 + (pp_id / PP_NUM_PER_CORE))) | (1 << (2 + pp_id));
            }
            else if (MEM_TYPE_PEER_PP == TABLE_ENTRY_TYPE(lchip, tbl_id_arr[tbl_idx]))
            {
                dp_id = 0;
                if (SYS_VCHIP_DUAL_CORE_MODE(lchip))
                {
                    pp_id = (desc_index - desc_start) % drv_vchip_get_pp_num(lchip);
                }
                else
                {
                    pp_id = (desc_index - desc_start);
                }
                //DRV_OPER_BMP_CONTINUE_PP(pp_id, continue);
                bmp = (1 << (6 + (pp_id / PP_NUM_PER_CORE))) | (1 << (2 + pp_id));
            }
            else /*core*/
            {
                pp_id = 0;
                dp_id = 0;
                bmp = 1 << (6 + (pp_id / PP_NUM_PER_CORE));
            }
            p_desc = (ds_desc_at_t*)&(p_sys_desc_pad[desc_index].desc_info);
            p_mem_addr = SYS_DMA_ALLOC(lchip, data_size, 0);
            if (NULL == p_mem_addr)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
                mem_free(p_sys_desc_info);
    			return CTC_E_NO_MEMORY;
            }
            sal_memset(p_mem_addr, 0, data_size);
            p_sys_desc_info[desc_index].data_addr = p_mem_addr;
            phy_addr = (uint32)SYS_DMA_LOGIC_TO_PHY(lchip, p_mem_addr);
            p_desc->mem_addr = (phy_addr >> 4);
            p_desc->cfg_size_20_11_0 = data_size&0xFFF;
            p_desc->cfg_size_20_19_12 = (data_size>>12)&0xFF;
            p_desc->tsAddr = cfg_addr;
            p_desc->data_struct  = struct_sz;
            p_desc->high_addr  = p_usw_dma_master[lchip]->dma_high_addr;
            p_desc->u0.reg.slice_bmp  = (bmp >> 2) & 0xF;
            p_desc->ts_shift_or_dp_core_bmp  = ((((bmp >> 6) & 0x3) <<2) | (bmp & 0x3));
            p_desc->pause  =((desc_index == 0) ? 1 : 0);
            p_sys_desc_info[desc_index].value0 = ((pp_id & 0x7) << 1) | (dp_id & 0x1);
            p_sys_desc_info[desc_index].value1 = tbl_id_arr[tbl_idx];
        }
    }

    cmd = DRV_IOR(DmaRegRd3Ctl_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));
    SetDmaRegRd2Ctl(V, cfgRegRdDescIntrEn_f, &reg_rd_ctl, 0);
    SetDmaRegRd2Ctl(V, cfgRegRdDmaIntrEn_f, &reg_rd_ctl, 1);
    SetDmaRegRd2Ctl(V, cfgRegRdIntrCnt_f, &reg_rd_ctl, p_chan_info->desc_depth);
    SetDmaRegRd2Ctl(V, cfgDescFetchMode_f, &reg_rd_ctl, 0);
    cmd = DRV_IOW(DmaRegRd3Ctl_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &reg_rd_ctl));

    p_chan_info->cb_type = SYS_DMA_CB_TYPE_BUFFER_MON_SCAN;
    sal_memcpy(&p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id], p_chan_info, sizeof(sys_dma_chan_t));
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc = p_sys_desc_pad;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].mem_base = 0;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_mutex = p_chan_info->p_mutex;
    p_usw_dma_master[lchip]->dma_chan_info[p_chan_info->channel_id].p_desc_info = (sys_dma_desc_info_t *)p_sys_desc_info;

    return CTC_E_NONE;
}

/**
 @brief Execute a batched table read/write/mask-write through the DMA batch
        channel.

 Builds one command record per entry (header + data, plus interleaved mask
 words for mask-write) in a DMA buffer, kicks the batch channel with a single
 descriptor, waits for completion, and reports per-command status back in
 p_ba->status / p_ba->error. Read results are dumped at DUMP debug level.

 @param lchip   local chip id (rebased via SYS_PP_BASE)
 @param p_batch sys_dma_batch_op_t*: op (0=read, 1=write, 2=mask-write),
                addr/entry_offset/entry_sz/entry_num, write_data/write_mask,
                core/pp/dp bitmaps, sigle (single vs burst access)
 @return CTC_E_NONE on success, error code otherwise

 Fix vs. previous revision: the total-size limit check is now computed in
 64 bits (and cmd_len widened to uint32), so huge entry_num/entry_sz values
 can no longer wrap the product, slip past the 1 MB limit, and under-allocate
 the DMA buffer.
*/
int32
sys_at_dma_batch_operation(uint8 lchip, void* p_batch)
{
    int32 ret = CTC_E_NONE;
    uint32* p_tbl_buff = NULL;
    uint32 entry_num = 0;
    uint16 idx = 0;
    uint32 vld_num = 1;
    sys_dma_chan_t* p_dma_chan = NULL;
    uint64 phy_addr = 0;
    sal_mutex_t* p_mutex = NULL;
    uint32 cmd_len = 0;  /* widened from uint16: entry_sz*2 + header may exceed 16 bits */
    sys_dma_batch_cmd_t* p_cmd = NULL;
    uint8 op_code = 0;
    uint16 req_info = 0;
    uint32 offset = 0;
    ds_desc_at_t* p_desc = NULL;
    sys_dma_batch_op_t* p_ba = (sys_dma_batch_op_t*)p_batch;
    uint16 word_idx = 0;
#if (1 == SDK_WORK_PLATFORM)
    uint32 cmd = 0;
#endif

    lchip = SYS_PP_BASE(lchip);

    SYS_DMA_INIT_CHECK(lchip);

    CTC_PTR_VALID_CHECK(p_ba);
    if (!p_ba->addr || !p_ba->entry_sz || !p_ba->entry_offset)
    {
        return CTC_E_INVALID_PARAM;
    }

    if (p_ba->op)
    {
        CTC_PTR_VALID_CHECK(p_ba->write_data);
        if (2 == p_ba->op)
        {
            CTC_PTR_VALID_CHECK(p_ba->write_mask);
        }
    }
    if (NULL == g_dal_op.dma_alloc)
    {
        return CTC_E_DRV_FAIL;
    }
    entry_num = p_ba->entry_num;
    if (p_ba->entry_num <= 1)
    {
        entry_num = 1;
    }
    /* mask-write carries data and mask interleaved, hence twice the entry size */
    cmd_len = (2 == p_ba->op)
                ? ((uint32)p_ba->entry_sz * 2 + DMA_BATCH_HDR_CMD_LEN)
                : ((uint32)p_ba->entry_sz + DMA_BATCH_HDR_CMD_LEN);

    /* total-size limit check in 64 bits so the product cannot wrap */
    if ((uint64)entry_num * cmd_len >= 0x100000)
    {
        return CTC_E_EXCEED_MAX_SIZE;
    }
    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_BATCH_CHAN_ID]);
    if (p_dma_chan->chan_en == 0)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Dma Batch Function is not enabled!!!\n");
        return CTC_E_NOT_SUPPORT;
    }
    /*alloc dma memory*/
    p_tbl_buff = SYS_DMA_ALLOC(lchip, entry_num * cmd_len, 0);
    if (NULL == p_tbl_buff)
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
        return CTC_E_NO_MEMORY;
    }

    if (p_ba->op)
    {
        /*write operation*/
        op_code = (2 == p_ba->op)
                  ? DMA_BATCH_OP_CODE_MASK_WRITE
                  : p_ba->sigle ? DMA_BATCH_OP_CODE_SIGLE_WRITE : DMA_BATCH_OP_CODE_BURST_WRITE;
    }
    else
    {
        /*read operation*/
        op_code = p_ba->sigle ? DMA_BATCH_OP_CODE_SIGLE_READ : DMA_BATCH_OP_CODE_BURST_READ;
    }
    if (!p_ba->core_bmp && !p_ba->pp_bmp && !p_ba->dp_bmp)
    {
        /* no explicit target: write fans out to all, read targets one instance */
        if (p_ba->op)
        {
            /*write all*/
            req_info = DMA_BATCH_ENCODE_CMD_REQ_INFO(0x3, 0xf, 0x3);
        }
        else
        {
            /*read one*/
            req_info = DMA_BATCH_ENCODE_CMD_REQ_INFO(0x1, 0x1, 0x1);
        }
    }
    else
    {
        req_info = DMA_BATCH_ENCODE_CMD_REQ_INFO(p_ba->core_bmp, p_ba->pp_bmp, p_ba->dp_bmp);
    }

    /*process cmd: one fixed-size record per entry*/
    for (idx = 0; idx < entry_num; idx++)
    {
        p_cmd = (sys_dma_batch_cmd_t*)((uint8*)p_tbl_buff + offset);
        p_cmd->OpCode = op_code;
        p_cmd->Status = 0;
        p_cmd->ReqInfo = req_info;
        p_cmd->DataLen = (cmd_len - sizeof(sys_dma_batch_cmd_t)) / 4 - 1;
        p_cmd->Address = p_ba->addr + idx * p_ba->entry_offset;
        if (p_ba->op == 1)
        {
            sal_memcpy(&p_cmd->data, p_ba->write_data, p_ba->entry_sz);
        }
        else if (p_ba->op == 2)
        {
            /* for mask-write, data and mask is layout as: data + mask + data + mask + data ... */
            for (word_idx = 0; word_idx < (p_ba->entry_sz >> 2); word_idx++)
            {
                ((uint32*)(p_cmd->data))[word_idx<<1] = ((uint32*)(p_ba->write_data))[word_idx];
                ((uint32*)(p_cmd->data))[(word_idx<<1) + 1] = ((uint32*)(p_ba->write_mask))[word_idx];
            }
        }
        offset += cmd_len;
    }
    p_cmd = (sys_dma_batch_cmd_t*)(p_tbl_buff);
    SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Batch CMD Header: OpCode:%d, ReqInfo:0x%X, DataLen:%d, Address:0x%X\n",
                                    p_cmd->OpCode, p_cmd->ReqInfo, p_cmd->DataLen, p_cmd->Address);
    p_mutex = p_dma_chan->p_mutex;

    DMA_LOCK(p_mutex);
    /* one descriptor covers the whole command buffer */
    phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, (void*)p_tbl_buff);
    p_desc = (ds_desc_at_t*)&(p_dma_chan->p_desc[0].desc_info);
    sal_memset(p_desc, 0, sizeof(ds_desc_at_t));
    p_desc->mem_addr = ((phy_addr & 0xFFFFFFFF) >> 4);
    p_desc->high_addr = p_usw_dma_master[lchip]->dma_high_addr;
    p_desc->cfg_size_20_11_0 = (entry_num * cmd_len)&0xFFF;
    p_desc->cfg_size_20_19_12 = ((entry_num * cmd_len)>>12)&0xFF;

    SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(ds_desc_at_t));

#if (0 == SDK_WORK_PLATFORM)
    CTC_ERROR_GOTO(drv_ioctl_write_dma(lchip, SYS_DMA_BATCH_CHAN_ID, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &vld_num), ret, error_proc);
#else
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, SYS_DMA_BATCH_CHAN_ID, DRV_CMD_PP_EN(cmd), &vld_num), ret, error_proc);
#endif

    CTC_ERROR_GOTO(_sys_at_dma_wait_desc_finish(lchip,p_desc,p_dma_chan), ret, error_proc);

    SYS_USW_DMA_CACHE_INVALID_WITH_PHY_ADDR(lchip, phy_addr, entry_num*cmd_len);

    p_ba->error = p_desc->error;
    p_ba->status[0] =  p_desc->u2.batch.batch_err_bitmap0;
    p_ba->status[1] =  p_desc->u2.batch.batch_err_bitmap1;
    if (p_ba->error || p_ba->status[0] || p_ba->status[1])
    {
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Error Happened, status: 0x%8X[0] 0x%8X[1] \n",
                                                               p_ba->status[0], p_ba->status[1]);
    }
    else if (!p_ba->op)
    {
        uint32 word_idx = 0;
        offset = 0;
        /*dump*/
        for (idx = 0; idx < entry_num; idx++)
        {
            p_cmd = (sys_dma_batch_cmd_t *)((uint8 *)p_tbl_buff + offset);
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "index: 0x%x, status: %x, addrss:0x%8X, length: %dB\n", idx, p_cmd->Status, p_cmd->Address, (p_cmd->DataLen + 1) * 4);
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "------------------------------------------------------------------\n");
            for (word_idx = 0; word_idx < p_cmd->DataLen + 1; word_idx++)
            {
                SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "0x%.8X: 0x%.8X\n", word_idx * 4 + p_cmd->Address, *((uint32 *)p_cmd->data + word_idx));
            }
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "\n");
            offset += cmd_len;
        }
    }

error_proc:
    DMA_UNLOCK(p_mutex);

    if (g_dal_op.dma_free)
    {
        SYS_DMA_FREE(lchip, p_tbl_buff);
    }
    return ret;
}

/**
 @brief Deferred (post-interrupt) processing for an "info" style DMA RX
        channel, e.g. the IPFIX export channel.

 Walks the channel's descriptor ring starting at current_index, dispatches
 every completed descriptor to the registered callback, hands the descriptor
 back to hardware, then unmasks the channel's function interrupt.  If a
 descriptor completed while the interrupt was masked, the interrupt is
 re-raised in software so that entry is not left unserviced.

 @param lchip  local chip id
 @param chan   DMA channel id (index into dma_chan_info[])

 @return CTC_E_NONE on success; CTC_E_INVALID_PARAM when a descriptor
         reports a real_size larger than the channel's configured size
*/
int32
sys_at_dma_info_func(uint8 lchip, uint8 chan)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    ds_desc_at_t* p_desc = NULL;
    uint32 cur_index = 0;
    uint32 process_cnt = 0;
    uint32 real_size = 0;
    sys_dma_info_t dma_info;
    uint64 tm_ns = 0;
    uint32 mask[SYS_DMA_INTR_VEC] = {0};
    uint32 vld_num = 0;
    uint32 cmd = 0;


#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif
    sal_memset(&dma_info, 0, sizeof(sys_dma_info_t));

    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan];

    p_base_desc = p_dma_chan->p_desc;
    cur_index = p_dma_chan->current_index;
    /* invalidate the whole ring once up-front so the CPU observes the
       'done' bits hardware wrote via DMA */
    SYS_USW_DMA_CACHE_INVALID(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*p_dma_chan->desc_depth);
    while(1)
    {
#ifndef CTC_HOT_PLUG_DIS
        SYS_DMA_INIT_CHECK(lchip);
#endif
        /* wrap around the descriptor ring */
        if (cur_index >= p_usw_dma_master[lchip]->dma_chan_info[chan].desc_depth)
        {
            cur_index = 0;
        }

        p_desc = (ds_desc_at_t*)&(p_base_desc[cur_index].desc_info);

        /* get realsize from desc */
        real_size = p_desc->real_size;

        if (!p_desc->done)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc\n", process_cnt);
#endif
            break;
        }

        if (real_size > p_dma_chan->cfg_size)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_DUMP, "Info realsize error real:%d cfg:%d\n", real_size, p_dma_chan->cfg_size);
#endif
            /* NOTE(review): this early return exits with the channel
               interrupt still masked and current_index not written back --
               confirm the channel is recovered elsewhere on this error */
            return CTC_E_INVALID_PARAM;
        }
        process_cnt++;
        dma_info.p_data = p_dma_chan->p_desc_info[cur_index].data_addr;
        dma_info.entry_num = real_size;
        dma_info.base_lchip = lchip;
        if(SYS_DMA_IPFIX_CHAN_ID == chan)
        {
            /* assemble a 64-bit nanosecond timestamp from the two
               descriptor timestamp words, then split into sec/ns */
            tm_ns = p_desc->timestamp0;
            tm_ns = (tm_ns << 32);
            tm_ns |= p_desc->u2.other.timestamp1;

            dma_info.seconds = tm_ns/1000000000;
            dma_info.nanoseconds = tm_ns - dma_info.seconds*1000000000;
        }

        SYS_DMA_CB_IN_CNT_ADD(lchip, chan);
        if (SYS_DMA_CB_TYPE_IPFIX == p_dma_chan->cb_type)/*For AT, ipfix and dot1ae is pp module*/
        {
             p_usw_dma_master[lchip]->dma_cb[p_dma_chan->cb_type](lchip, (void*)&dma_info);
        }
        else
        {
            /* non-pp callbacks are dispatched on the pp-base chip */
            p_usw_dma_master[drv_vchip_get_pp_base(lchip)]->dma_cb[p_dma_chan->cb_type](drv_vchip_get_core_pp_base(lchip), (void*)&dma_info);
        }
        SYS_DMA_CB_OUT_CNT_ADD(lchip, chan);

        /* clear descriptor */
        p_desc->done = 0;
#if (1 == SDK_WORK_PLATFORM)
        p_desc->real_size = 0;
#endif
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(ds_desc_at_t));

        /* hand the recycled descriptor back to hardware */
#if (0 == SDK_WORK_PLATFORM)
        vld_num = 1;
        drv_ioctl_write_dma(lchip, p_dma_chan->channel_id,  (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &vld_num);
#else
        vld_num = p_dma_chan->desc_depth;
        cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, p_dma_chan->channel_id, DRV_CMD_PP_EN(cmd), &vld_num);
#endif
        cur_index++;
        /* one interrupt process 1000 entry, for other channel using same sync channel to be processed in time */
        if (process_cnt >= 1000)
        {
            break;
        }
    }
#ifndef CTC_HOT_PLUG_DIS
    SYS_DMA_INIT_CHECK(lchip);
#endif
    p_dma_chan->current_index = (cur_index>=p_dma_chan->desc_depth)?(cur_index%p_dma_chan->desc_depth):cur_index;

    /* release mask channel isr */
    SYS_USW_DMA_INTR_ENABLE(lchip,  DmaCtlIntrFunc0_t, mask, chan);

    /*inval dma before read*/
    SYS_USW_DMA_CACHE_INVALID(lchip, &(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info), sizeof(ds_desc_at_t));
    p_desc = (ds_desc_at_t*)&(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info);
    if (p_desc->done)
    {
#ifdef DMA_DBG_ON
        SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "Dma Trigger interrupt chan:%d \n", chan);
#endif
        /* a descriptor completed while the interrupt was masked: re-raise
           the function interrupt in software so it is serviced */
        sal_memset(mask, 0, SYS_DMA_INTR_VEC*sizeof(uint32));
        SetDmaCtlIntrFunc0(V, dmaSupIntrVec0_f, mask, (1<<chan));
        cmd = DRV_IOW(DmaCtlIntrFunc0_t, DRV_ENTRY_FLAG);
        DRV_IOCTL(lchip, 0, DRV_CMD_PP_EN(cmd), mask);
    }

    return CTC_E_NONE;
}


/**
 @brief DMA stats function process.

 Processes completed descriptors on a stats/scan style channel (flow stats,
 reg-buffer scan, TBL_RD2, ...) under the channel mutex: each done descriptor
 is handed to the registered callback with a channel-specific extension
 payload in dma_reg.p_ext, then the whole batch of descriptors is recycled
 to hardware at once.

 @param lchip  local chip id
 @param chan   DMA channel id (index into dma_chan_info[])

 @return CTC_E_NONE
*/
int32
sys_at_dma_stats_func(uint8 lchip, uint8 chan)
{
    sys_dma_chan_t* p_dma_chan = NULL;
    sys_dma_desc_t* p_base_desc = NULL;
    sys_dma_desc_info_t* p_desc_info = NULL;
    ds_desc_at_t* p_desc = NULL;
    ds_desc_at_t tmp_desc;
    uint32 cur_index = 0;
    uint32 process_cnt = 0;
    uint32 cfg_size = 0;
    sys_dma_reg_t dma_reg;
    sys_dma_npm_data_t ext_data;
    sys_dma_sync_tbl_t sync_tbl;
    uint64 tm_ns = 0;

#ifndef CTC_HOT_PLUG_DIS
    /* init check */
    SYS_DMA_INIT_CHECK(lchip);
#endif
    p_dma_chan = &p_usw_dma_master[lchip]->dma_chan_info[chan];
    DMA_LOCK(p_dma_chan->p_mutex);
    p_base_desc = p_dma_chan->p_desc;
    p_desc_info = p_dma_chan->p_desc_info;

    /* gg dma stats process desc from index 0 to max_desc*/
    SYS_USW_DMA_CACHE_INVALID(lchip, p_dma_chan->p_desc, sizeof(sys_dma_desc_t)*p_dma_chan->desc_depth);
    for (cur_index = p_dma_chan->current_index; cur_index < p_dma_chan->desc_depth; cur_index++)
    {
        p_desc = (ds_desc_at_t*)&(p_base_desc[cur_index].desc_info);
        sal_memcpy(&tmp_desc, p_desc, 12);/* copy first three words of desc, to get 'done', 'real_size', 'cfg_size', 'memAddr' */
        /* reassemble the split 20-bit cfg_size field */
        cfg_size = tmp_desc.cfg_size_20_19_12;
        cfg_size <<= 12;
        cfg_size |= tmp_desc.cfg_size_20_11_0;
        if (0 == tmp_desc.done)
        {
#ifdef DMA_DBG_ON
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_INFO, "No desc is not done, processed %d desc\n", process_cnt);
#endif
            break;
        }

        process_cnt++;
        if (SYS_DMA_DESC_NOT_PROC_SIZE != cfg_size)
        {
            /* get current desc data memory logic address in db */
            dma_reg.p_data = p_dma_chan->p_desc_info[cur_index].data_addr;

            if(chan == SYS_DMA_FLOW_STATS_CHAN_ID)
            {
               dma_reg.p_ext = &(p_desc_info[cur_index].value0); /*uint16*/
            }
            else if (chan == SYS_DMA_REG_BUF_SCAN_CHAN_ID)
            {
                /* value0 packs pp/dp ids: bit0 = dp, bits[3:1] = pp */
                sync_tbl.pp_id = (p_desc_info[cur_index].value0 >> 1) & 0x7;
                sync_tbl.dp_id = p_desc_info[cur_index].value0 & 0x1;
                sync_tbl.first_sync = (cur_index == 0);
                sync_tbl.ts[0] = p_desc->timestamp0;
                sync_tbl.ts[1] = p_desc->u2.other.timestamp1;
                sync_tbl.tbl_id = p_desc_info[cur_index].value1;
                dma_reg.p_ext = &sync_tbl;
            }
            else if(SYS_DMA_TBL_RD2_CHAN_ID == chan)
            {
                /* NOTE(review): only time_stamp/type of ext_data are
                   populated; the remaining fields are uninitialized --
                   presumably the callback reads only these two; verify */
                tm_ns = p_desc->u2.other.timestamp1;
                tm_ns = (tm_ns << 32);
                tm_ns |= p_desc->timestamp0;
                ext_data.time_stamp = tm_ns;
                ext_data.type = cur_index;
                dma_reg.p_ext = &ext_data;
            }
            else
            {
                dma_reg.p_ext = &cur_index; /*uint32*/
            }
            SYS_DMA_CB_IN_CNT_ADD(lchip, chan);
            p_usw_dma_master[lchip]->dma_cb[ p_dma_chan->cb_type](lchip, (void*)&dma_reg);
            SYS_DMA_CB_OUT_CNT_ADD(lchip, chan);
        }
        p_desc->done = 0;
        #if(1 == SDK_WORK_PLATFORM)
        p_desc->real_size = 0;
        #endif
        SYS_USW_DMA_CACHE_FLUSH(lchip, p_desc, sizeof(ds_desc_at_t));
    }

    /* recycle the whole batch of processed descriptors in one shot */
    if(process_cnt)
    {
         SYS_USW_DMA_CLEAR_MULTI_DESC_WITH_IO(lchip, p_dma_chan, process_cnt);
         p_dma_chan->current_index = ((p_dma_chan->current_index + process_cnt) % (p_dma_chan->desc_depth));
    }

    if(p_usw_dma_master[lchip]->dma_stats_en)
    {
        sys_tsingma_dma_sync_pkt_rx_stats(lchip);
        sys_tsingma_dma_sync_pkt_tx_stats(lchip);
    }
    DMA_UNLOCK(p_dma_chan->p_mutex);

    return CTC_E_NONE;
}

/**
 @brief Read a table region from the chip via the table-read DMA channel.

 Builds one ds_desc_at_t descriptor for the whole transfer, kicks the DMA
 engine, waits for completion, then (when the SDK owns the DMA buffer)
 copies the per-entry payloads into the caller's buffer.

 user_dma_mode semantics (as visible in this function):
   0 - SDK allocates a temporary coherent buffer, copies results into
       tbl_cfg->buffer, frees the buffer before returning;
   1 - tbl_cfg->buffer is itself DMA-capable: kick and wait, no copy;
   2 - only build the descriptor and record its index in
       tbl_cfg->desc_index; no kick/wait here -- presumably the caller
       triggers and polls it later (TODO confirm against callers).

 @param lchip      local chip id (rebased to the pp-base chip internally)
 @param p_tbl_cfg  sys_dma_tbl_rw_t describing address/entry layout/buffer

 @return CTC_E_NONE on success, error code otherwise
*/
int32
sys_at_dma_read_table(uint8 lchip, void* p_tbl_cfg)
{
    int32 ret = CTC_E_NONE;
    sys_dma_tbl_rw_t* tbl_cfg = (sys_dma_tbl_rw_t*)p_tbl_cfg;
    ds_desc_at_t* p_tbl_desc = NULL;
    ds_desc_at_t tmp_desc;
    uint32* p_tbl_buff = NULL;
    uint32 words_in_chip = 0;
    uint32 words_num = 0;
    uint32 entry_num = 0;
    uint32 tbl_buffer_len = 0;
    uint32 tbl_addr = 0;
    uint16 idx = 0;
    uint16 bmp = 0;
    uint32* p_src = NULL;
    uint32* p_dst = NULL;
    uint32 vld_num = 1;
    uint32 tbl_buffer_max_size = 0xFFFFF;    /* descriptor cfg_size is 20 bits */
    sys_dma_chan_t* p_dma_chan = NULL;
    uint64 phy_addr = 0;
    sal_mutex_t* p_mutex = NULL;
#if (1 == SDK_WORK_PLATFORM)
    uint32 cmd = 0;
#endif

    lchip = SYS_PP_BASE(lchip);

    SYS_DMA_INIT_CHECK(lchip);

    /* mask bit(1:0) */
    tbl_addr = tbl_cfg->tbl_addr;

    words_num = (tbl_cfg->entry_len>>2);
    entry_num = tbl_cfg->entry_num;
    words_in_chip = tbl_cfg->entry_offset>>2;
    tbl_buffer_len = entry_num * tbl_cfg->entry_offset;

    /* check data size should smaller than desc's cfg MAX size */
    if (tbl_buffer_len > tbl_buffer_max_size)
    {
        return CTC_E_INVALID_PARAM;
    }

    if(!tbl_cfg->user_dma_mode)
    {
        if (NULL == g_dal_op.dma_alloc)
        {
            return CTC_E_DRV_FAIL;
        }
        p_tbl_buff = SYS_DMA_ALLOC(lchip, tbl_buffer_len, 0);
        if (NULL == p_tbl_buff)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            return CTC_E_NO_MEMORY;
        }
        sal_memset(p_tbl_buff, 0, tbl_buffer_len);
    }
    else
    {
        p_tbl_buff = tbl_cfg->buffer;
    }

    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_RD_CHAN_ID]);
    p_mutex = p_dma_chan->p_mutex;

    DMA_LOCK(p_mutex);
    phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, (void*)p_tbl_buff);
    p_tbl_desc = (ds_desc_at_t*)&(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info);
    /* use temp variable, reduce the times of accessing DMA desc memory */
    sal_memset(&tmp_desc, 0, sizeof(ds_desc_at_t));
    tmp_desc.mem_addr =  ((phy_addr & 0xFFFFFFFF) >> 4);
    tmp_desc.cfg_size_20_11_0 = tbl_buffer_len&0xFFF;
    tmp_desc.cfg_size_20_19_12 = (tbl_buffer_len>>12)&0xFF;
    tmp_desc.tsAddr = tbl_addr;
    tmp_desc.data_struct = ((words_num == 64)?0:words_num);
    /* an op_bmp of 0 means "all pp/dp/core targets" */
    bmp = tbl_cfg->op_bmp ? tbl_cfg->op_bmp : 0xFFF;
    tmp_desc.u0.reg.slice_bmp = (bmp >> SYS_DMA_IO_BMP_PP) & 0xf;
    tmp_desc.ts_shift_or_dp_core_bmp = ((((bmp >> 6) & 0x3) <<2) | (bmp & 0x3));
    sal_memcpy(p_tbl_desc, &tmp_desc, 16);/* copy 4 words contained fields above */
    p_tbl_desc->high_addr = p_usw_dma_master[lchip]->dma_high_addr;

    /* descriptor must be visible to hardware before the kick below */
    SYS_USW_DMA_CACHE_FLUSH(lchip, p_tbl_desc, sizeof(ds_desc_at_t));
    if (2 != tbl_cfg->user_dma_mode)
    {
#if (0 == SDK_WORK_PLATFORM)
    /* NOTE(review): return value ignored here, unlike the checked
       DRV_IOCTL in the other platform branch -- confirm intended */
    drv_ioctl_write_dma(lchip, SYS_DMA_TBL_RD_CHAN_ID, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &vld_num);
#else
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, SYS_DMA_TBL_RD_CHAN_ID, DRV_CMD_PP_EN(cmd), &vld_num),ret,error_proc);
#endif
    ret = _sys_at_dma_wait_desc_finish(lchip,p_tbl_desc,p_dma_chan);
    if(ret != CTC_E_NONE)
    {
      goto error_proc;
    }
    /* invalidate the data buffer before the CPU reads the DMA'ed result */
    SYS_USW_DMA_CACHE_INVALID_WITH_PHY_ADDR(lchip, phy_addr, entry_num*tbl_cfg->entry_offset);

    if(!tbl_cfg->user_dma_mode)
    {
        /* get read result */
        for (idx = 0; idx < entry_num; idx++)
        {
            p_src = p_tbl_buff + idx * words_in_chip;
            p_dst = tbl_cfg->buffer + idx * words_num;
            sal_memcpy(p_dst, p_src, words_num*SYS_DMA_WORD_LEN);
        }
    }
    tbl_cfg->time_stamp[0] = p_tbl_desc->timestamp0;
    tbl_cfg->time_stamp[1] = p_tbl_desc->u2.other.timestamp1;
    }
    else
    {
        tbl_cfg->desc_index = p_dma_chan->current_index;
    }
    p_dma_chan->current_index =
        ((p_dma_chan->current_index + 1) == p_dma_chan->desc_depth) ? 0 : (p_dma_chan->current_index + 1);
/* success path intentionally falls through: the temporary buffer is
   freed on both success and failure when the SDK allocated it */
error_proc:
    if ((!tbl_cfg->user_dma_mode) && g_dal_op.dma_free)
    {
        SYS_DMA_FREE(lchip, p_tbl_buff);
    }
    DMA_UNLOCK(p_mutex);
    return ret;

}

/**
 @brief Write a table region to the chip via the table-write DMA channel.

 Builds one descriptor for the whole transfer and kicks the DMA engine.
 For SDK-owned buffers (user_dma_mode == 0) completion is NOT awaited here:
 the descriptor slot is marked 'is_used' and the buffer is freed lazily the
 next time that ring slot is reused (the wait at the top of this function).
 For user_dma_mode == 1 the function waits synchronously; for other
 non-zero modes it only records the descriptor index.

 @param lchip      local chip id (rebased to the pp-base chip internally)
 @param p_tbl_cfg  sys_dma_tbl_rw_t describing address/entry layout/buffer

 @return CTC_E_NONE on success, error code otherwise
*/
int32
sys_at_dma_write_table(uint8 lchip, void* p_tbl_cfg)
{
    int32 ret = CTC_E_NONE;
    sys_dma_tbl_rw_t* tbl_cfg = p_tbl_cfg;
    ds_desc_at_t* p_tbl_desc = NULL;
    ds_desc_at_t tmp_desc;
    uint32* p_tbl_buff = NULL;
    uint32 words_in_chip = 0;
    uint32 words_num = 0;
    uint32 entry_num = 0;
    uint32 tbl_buffer_len = 0;
    uint32 tbl_addr = 0;
    uint16 idx = 0;
    uint16 bmp = 0;
    uint32* p_src = NULL;
    uint32* p_dst = NULL;
    uint32 vld_num = 0;
    uint32 tbl_buffer_max_size = 0xFFFFF;    /* descriptor cfg_size is 20 bits */
    sys_dma_chan_t* p_dma_chan = NULL;
    uint64 phy_addr = 0;
    sal_mutex_t* p_mutex = NULL;
#if (1 == SDK_WORK_PLATFORM)
    uint32 cmd = 0;
#endif

    lchip = SYS_PP_BASE(lchip);

    SYS_DMA_INIT_CHECK(lchip);

    /* mask bit(1:0) */
    tbl_addr = tbl_cfg->tbl_addr;
    /*tbl_addr &= 0xfffffffc;*/

    words_num = (tbl_cfg->entry_len / SYS_DMA_WORD_LEN);
    /* copy_mode: hardware replicates a single source entry to all targets */
    entry_num = tbl_cfg->copy_mode ? 1 : tbl_cfg->entry_num;
    words_in_chip = tbl_cfg->entry_offset>>2;

    tbl_buffer_len = tbl_cfg->entry_num * tbl_cfg->entry_offset;

    /* check data size should smaller than desc's cfg MAX size */
    if (tbl_buffer_len > tbl_buffer_max_size)
    {
        return CTC_E_INVALID_PARAM;
    }

    p_dma_chan = (sys_dma_chan_t*)&(p_usw_dma_master[lchip]->dma_chan_info[SYS_DMA_TBL_WR_CHAN_ID]);
    p_mutex = p_dma_chan->p_mutex;
    DMA_LOCK(p_mutex);

    p_tbl_desc = (ds_desc_at_t*)&(p_dma_chan->p_desc[p_dma_chan->current_index].desc_info);
    /*need to free the last dma memory*/
    /* this ring slot may still carry a previous async write: wait for it
       to finish and release the buffer that transfer owned */
    if (p_dma_chan->p_desc_check[p_dma_chan->current_index].is_used)
    {
        ret = _sys_at_dma_wait_desc_finish(lchip,p_tbl_desc,p_dma_chan);
        if(ret != CTC_E_NONE)
        {
          goto error_proc_0;
        }

        if (g_dal_op.dma_free && !tbl_cfg->user_dma_mode)
        {
            /* recover the buffer's physical address from the old descriptor */
            uint64 phy_addr;
            COMBINE_64BITS_DATA(p_usw_dma_master[lchip]->dma_high_addr,
                            p_tbl_desc->mem_addr<<4, phy_addr);
            SYS_DMA_FREE(lchip, SYS_DMA_PHY_TO_LOGIC(lchip, (phy_addr)));
        }
        p_dma_chan->p_desc_check[p_dma_chan->current_index].is_used = 0;
    }
    if(!tbl_cfg->user_dma_mode)
    {
        if (NULL == g_dal_op.dma_alloc)
        {
            ret = CTC_E_DRV_FAIL;
            goto error_proc_0;
        }
        p_tbl_buff = SYS_DMA_ALLOC(lchip, tbl_buffer_len, 0);
        if (NULL == p_tbl_buff)
        {
            SYS_DMA_DBG_OUT(CTC_DEBUG_LEVEL_ERROR, " No memory \n");
            ret = CTC_E_NO_MEMORY;
            goto error_proc_0;
        }

        /* repack caller entries to the chip's per-entry stride */
        for (idx = 0; idx < entry_num; idx++)
        {
            p_dst = p_tbl_buff + idx * words_in_chip;
            p_src = tbl_cfg->buffer + idx * words_num;

            sal_memcpy(p_dst, p_src, words_num*SYS_DMA_WORD_LEN);
        }
    }
    else
    {
        p_tbl_buff = tbl_cfg->buffer;
    }

    phy_addr = SYS_DMA_LOGIC_TO_PHY(lchip, p_tbl_buff);
    /* data must reach memory before hardware reads it */
    SYS_USW_DMA_CACHE_FLUSH_WITH_PHY_ADDR(lchip, phy_addr, tbl_buffer_len);

    sal_memset(&tmp_desc, 0, sizeof(ds_desc_at_t));
    /* use temp variable, reduce the times of accessing DMA desc memory */
    tmp_desc.mem_addr =  ((phy_addr & 0xFFFFFFFF) >> 4);
    tmp_desc.cfg_size_20_11_0 = tbl_buffer_len&0xFFF;
    tmp_desc.cfg_size_20_19_12 = (tbl_buffer_len>>12)&0xFF;
    tmp_desc.tsAddr = tbl_addr;
    tmp_desc.data_struct = ((words_num == 64)?0:words_num);
    /* an op_bmp of 0 means "all pp/dp/core targets" */
    bmp = tbl_cfg->op_bmp ? tbl_cfg->op_bmp : 0xFFF;
    tmp_desc.u0.reg.slice_bmp = (bmp >> SYS_DMA_IO_BMP_PP) & 0xf;
    tmp_desc.ts_shift_or_dp_core_bmp = ((((bmp >> 6) & 0x3) <<2) | (bmp & 0x3));
    tmp_desc.copy_entry = tbl_cfg->copy_mode;
    sal_memcpy(p_tbl_desc, &tmp_desc, 16);
    p_tbl_desc->high_addr = p_usw_dma_master[lchip]->dma_high_addr;

    SYS_USW_DMA_CACHE_FLUSH(lchip, p_tbl_desc, sizeof(ds_desc_at_t));

    /* table DMA  valid num */
    vld_num = 1;
#if (0 == SDK_WORK_PLATFORM)
    CTC_ERROR_GOTO(drv_ioctl_write_dma(lchip, SYS_DMA_TBL_WR_CHAN_ID, (uint64)p_usw_dma_master[lchip]->op_bmp<<32|DmaCtlTab_t, &vld_num), ret, error_proc);
#else
    cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
    CTC_ERROR_GOTO(DRV_IOCTL(lchip, SYS_DMA_TBL_WR_CHAN_ID, DRV_CMD_PP_EN(cmd), &vld_num), ret, error_proc);
#endif
    if(!tbl_cfg->user_dma_mode)
    {
        /* async: buffer ownership stays with the ring slot until it is
           reused (see the is_used wait/free at the top) */
        p_dma_chan->p_desc_check[p_dma_chan->current_index].is_used = 1;
    }
    else if(1 == tbl_cfg->user_dma_mode)
    {
        ret = _sys_at_dma_wait_desc_finish(lchip,p_tbl_desc,p_dma_chan);
        if(ret != CTC_E_NONE)
        {
          goto error_proc;
        }
    }

    tbl_cfg->desc_index = p_dma_chan->current_index;
   p_dma_chan->current_index =
        ((p_dma_chan->current_index + 1) == p_dma_chan->desc_depth) ? 0 : (p_dma_chan->current_index + 1);

    DMA_UNLOCK(p_mutex);
    return CTC_E_NONE;
error_proc:

    if ((!tbl_cfg->user_dma_mode) && g_dal_op.dma_free)
    {
        SYS_DMA_FREE(lchip, p_tbl_buff);
    }
error_proc_0:
    DMA_UNLOCK(p_mutex);
    return ret;

}


/**
 @brief Reset the DMA controller.

 When warmboot is enabled, the function first quiesces the packet-rx path:
 all net-rx destinations are pointed at drop, then every enabled rx channel
 has its outstanding descriptors handed back to hardware until the channel
 leaves the stall-eligible state.  Finally the DmaCtl block is put into
 reset and released again.

 @param lchip  local chip id

 @return 0 on success, error code from the register accesses otherwise
*/
int32
sys_at_dma_reset(uint8 lchip)
{
    DmaRxStallState_m rx_stall;
    MapNetTxPktDestMap_m dest_map;
    DmaStaticInfo_m chan_static;
    uint32 value = 0;
    uint32 cmd = 0;
    uint8 chan_idx = 0;
    uint8 field_step = 0;
    uint8 chan_enable = 0;

    if (!CTC_WB_ENABLE(lchip))
    {
        goto dma_rst;
    }

    /* redirect every packet-rx destination to drop so no new traffic
       lands in the rings while they are drained */
    cmd = DRV_IOR(MapNet0PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dest_map));

    for (chan_idx = 0; chan_idx < p_usw_dma_master[lchip]->packet_rx_chan_num; chan_idx++)
    {
        /* NOTE(review): field_step is never assigned a non-zero stride, so
           every iteration rewrites the chan0 fields instead of striding to
           channel N -- confirm the intended per-channel field step */
        SetMapNet0PktDestMap(V, cfgNet0Chan0DestIntf_f + field_step * chan_idx, &dest_map, 7);/* drop */
        SetMapNet0PktDestMap(V, cfgNet0Chan0DestChan_f + field_step * chan_idx, &dest_map, chan_idx);/* Dma ring ID */
    }

    cmd = DRV_IOW(MapNet0PktDestMap_t, DRV_ENTRY_FLAG);
    CTC_ERROR_RETURN(DRV_IOCTL(lchip, 0, cmd, &dest_map));

    /* recycle rx chan desc to release group stall */
    for (chan_idx = 0; chan_idx < 4; chan_idx++)
    {
        cmd = DRV_IOR(DmaStaticInfo_t, DRV_ENTRY_FLAG);
        CTC_ERROR_RETURN(DRV_IOCTL(lchip, chan_idx, DRV_CMD_PP_EN(cmd), &chan_static));
        chan_enable = GetDmaStaticInfo(V, chanEn_f, &chan_static);

        if (CTC_IS_BIT_SET(p_usw_dma_master[lchip]->dma_en_flag, chan_idx) && chan_enable)
        {
            while (1)
            {
                cmd = DRV_IOR(DmaRxStallState_t, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, 0, cmd, &rx_stall);
                value = GetDmaRxStallState(V, stallEligibleVec_f, &rx_stall);

                if (!CTC_IS_BIT_SET(value, chan_idx))
                {
                    break;
                }

                /* hand all outstanding descriptors back to hardware */
                cmd = DRV_IOR(DmaCtlTab_t, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, chan_idx, cmd, &value);
                value = p_usw_dma_master[lchip]->dma_chan_info[chan_idx].desc_depth - value;
                cmd = DRV_IOW(DmaCtlTab_t, DRV_ENTRY_FLAG);
                DRV_IOCTL(lchip, chan_idx, cmd, &value);
            }
        }
    }

dma_rst:
    /* assert DmaCtl reset */
    value = 1;
    cmd = DRV_IOW(SupResetCtl_t, SupResetCtl_cfgResetDmaCtl_f);
    DRV_FIELD_IOCTL(lchip, 0, cmd, &value);

    sal_task_sleep(1);

    /* release DmaCtl reset */
    value = 0;
    DRV_FIELD_IOCTL(lchip, 0, cmd, &value);

    return 0;
}


#endif
