/**************************************************************************
 *                                                                        *
 *         Copyright (c) 2014 by Generalplus Inc.                         *
 *                                                                        *
 *  This software is copyrighted by and is the property of Generalplus    *
 *  Inc. All rights are reserved by Generalplus Inc.                      *
 *  This software may only be used in accordance with the                 *
 *  corresponding license agreement. Any unauthorized use, duplication,   *
 *  distribution, or disclosure of this software is expressly forbidden.  *
 *                                                                        *
 *  This Copyright notice MUST not be removed or modified without prior   *
 *  written consent of Generalplus Technology Co., Ltd.                   *
 *                                                                        *
 *  Generalplus Inc. reserves the right to modify this software           *
 *  without notice.                                                       *
 *                                                                        *
 *  Generalplus Inc.                                                      *
 *  No.19, Industry E. Rd. IV, Hsinchu Science Park                       *
 *  Hsinchu City 30078, Taiwan, R.O.C.                                    *
 *                                                                        *
 **************************************************************************/
#include "drv_l1_sfr.h"
#include "drv_l1_dma.h"
#include "drv_l1_cache.h"

#if _OPERATING_SYSTEM == _OS_UCOS2
#include "os.h"
#endif
#if (defined _DRV_L1_DMA) && (_DRV_L1_DMA == 1)

/**************************************************************************
 *                           C O N S T A N T S                            *
 **************************************************************************/
/**************************************************************************
 *                              M A C R O S                               *
 **************************************************************************/
/**************************************************************************
 *               F U N C T I O N    D E C L A R A T I O N S               *
 **************************************************************************/
static INT32U		dma_get_channel(INT8U usage);

///static INT32S dma_set_channel(INT8U usage, INT8U channel);
static void			dma_free_channel(INT32U channel);

#if _OPERATING_SYSTEM != _OS_NONE
static INT32U		dma_get_queue(void);
static void			dma_free_queue(INT32U q_index);
#endif
void				dma_set_notify(INT32U channel, INT32U notify, INT32U os_q);
static INT32S		dma_device_protect(void);
static void			dma_device_unprotect(INT32S mask);

static void			dma_set_control(INT32U channel, INT32U ctrl);
static void			dma_set_source(INT32U channel, INT32U addr);
static void			dma_set_target(INT32U channel, INT32U addr);
static INT32S		dma_set_tx_count(INT32U channel, INT32U count);
static INT32S		dma_set_device(INT32U channel, INT16U device);
static INT32S		dma_set_timeout(INT32U channel, INT32U timeout);
static INT32S		dma_set_line_length(INT32U length);
static INT32S		dma_set_sprite_size(INT32U channel, INT32U size);

#if 0
static INT32S		dma_set_transparent_enable(INT32U channel);
static INT32S		dma_set_transparent_disable(INT32U channel);
static INT32S		dma_set_transparent_pattern(INT32U channel, INT16U pattern);
#endif
static INT32S		dma_transfer_extend(DMA_STRUCT *dma_struct, INT8U usage, INT32U os_q, void (*dma_user_isr) (INT8U, INT8S));

#if DRV_l1_DAC_ON_OFF
INT8U dma_to_dac_busy;
#endif

/**************************************************************************
 *                         G L O B A L    D A T A                         *
 **************************************************************************/
static INT8U		dma_init_done = FALSE;
static INT8U		dma_usage[C_DMA_CHANNEL_NUM];
static INT32U		dma_notify_variable[C_DMA_CHANNEL_NUM];

#if (_OPERATING_SYSTEM == _OS_UCOS2)
static INT32U		dma_notify_queue[C_DMA_CHANNEL_NUM];
static OS_EVENT		*dma_driver_queue[C_DMA_Q_NUM] = { NULL };
static void			*q_buffer[C_DMA_Q_NUM][C_DMA_Q_BUF_SIZE];
static INT8U		dma_q_usage[C_DMA_Q_NUM];
#elif (_OPERATING_SYSTEM == _OS_FREERTOS)
static osMessageQId dma_notify_queue[C_DMA_CHANNEL_NUM];
static osMessageQId dma_driver_queue[C_DMA_Q_NUM] = { NULL };
static INT8U		dma_q_usage[C_DMA_Q_NUM];
#endif

#if (_OPERATING_SYSTEM == _OS_UCOS2)
static OS_EVENT		*sw_dma_sem = NULL;

void SW_DMA_LOCK(void)
{
	INT8U err;

	// Block forever (timeout 0) until the driver's binary semaphore is
	// available; serializes access to the dma_usage/dma_q_usage tables.
	// NOTE(review): 'err' is not checked — OSSemPend failure is silently
	// ignored, presumably acceptable because the semaphore is created
	// once in drv_l1_dma_init() and never deleted.
	OSSemPend(sw_dma_sem, 0, &err);
}

void SW_DMA_UNLOCK(void)
{
	// Release the driver lock taken by SW_DMA_LOCK(); return value ignored.
	OSSemPost(sw_dma_sem);
}
#endif
/**
 * @brief   get dma idle channel
 * @param   usage: use method
 * @return 	channel
 */
static INT32U dma_get_channel(INT8U usage)
{
	INT32U	i;
	INT32S	mask;

	// Two-level critical section: an OS lock keeps other tasks out of the
	// dma_usage[] table, and dma_device_protect() masks the DMA IRQ so the
	// ISR (which frees channels) cannot race the scan below.
#if _OPERATING_SYSTEM != _OS_NONE					// Soft Protect for critical section
#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_LOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osSuspend();
#endif
#endif
	mask = dma_device_protect();					// Device Protect for critical section
	// Linear scan for the first free channel; claim it while protected.
	for(i = 0; i < C_DMA_CHANNEL_NUM; i++)
	{
		if(dma_usage[i] == C_DMA_NOT_UESED)
		{
			dma_usage[i] = usage;

			dma_device_unprotect(mask);
#if _OPERATING_SYSTEM != _OS_NONE
#if _OPERATING_SYSTEM == _OS_UCOS2
			SW_DMA_UNLOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
			osResume();
#endif
#endif

			// Reset DMA controller
			dma_set_control(i, C_DMA_CTRL_RESET);
			return i;
		}
	}

	dma_device_unprotect(mask);
#if _OPERATING_SYSTEM != _OS_NONE
#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_UNLOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osResume();
#endif
#endif
	// No free channel: returns C_DMA_CHANNEL_NUM — callers must treat any
	// value >= C_DMA_CHANNEL_NUM as failure.
	return i;
}

/**
 * @brief   get dma assign channel
 * @param   usage: use method
 * @param   channel: assign channel
 * @return 	result: >=0 is success, <0 is fail.
 */
#if DES_3DES_ENABLE || AES_ENABLE
static INT32S dma_set_channel(INT8U usage, INT8U channel)
{
	INT32S	ret, mask;

	// Claim one SPECIFIC channel (used by the DES/AES paths, which need a
	// fixed channel) instead of scanning like dma_get_channel().
	if(channel >= C_DMA_CHANNEL_NUM)
	{
		return STATUS_FAIL;
	}

#if _OPERATING_SYSTEM != _OS_NONE					// Soft Protect for critical section
#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_LOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osSuspend();
#endif
#endif
	mask = dma_device_protect();					// Device Protect for critical section
	if(dma_usage[channel] == C_DMA_NOT_UESED)
	{
		dma_usage[channel] = usage;
		dma_set_control(channel, C_DMA_CTRL_RESET); // Reset DMA controller
		ret = STATUS_OK;
	}
	else
	{
		// Channel already owned by someone else — caller must retry/fallback.
		ret = STATUS_FAIL;
	}

	dma_device_unprotect(mask);
#if _OPERATING_SYSTEM != _OS_NONE
#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_UNLOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osResume();
#endif
#endif
	return ret;
}

#endif
void (*dma_user_isr[C_DMA_CHANNEL_NUM]) (INT8U, INT8S);

/**
 * @brief   register a per-channel DMA completion callback
 * @param   channel: dma channel number
 * @param   user_isr: callback invoked from the DMA ISR as (channel, status)
 * @return  result: STATUS_OK on success, STATUS_FAIL on invalid channel
 */
static INT32S drv_l1_dma_callback_set(INT32U channel, void (*user_isr) (INT8U, INT8S))
{
	// BUGFIX: the original checked the range but still performed the array
	// write, so an invalid channel corrupted memory past dma_user_isr[].
	// Validate first and bail out before touching the table. Also use
	// C_DMA_CHANNEL_NUM instead of the magic number 7.
	if(channel >= C_DMA_CHANNEL_NUM)
	{
		return STATUS_FAIL;
	}

	dma_user_isr[channel] = user_isr;

	return STATUS_OK;
}

/**
 * @brief   unregister the per-channel DMA completion callback
 * @param   channel: dma channel number
 * @return  result: STATUS_OK on success, STATUS_FAIL on invalid channel
 */
static INT32S drv_l1_dma_callback_clear(INT32U channel)
{
	// BUGFIX: as in drv_l1_dma_callback_set, the original flagged an
	// out-of-range channel yet still wrote dma_user_isr[channel], an
	// out-of-bounds store. Reject invalid channels before the write.
	if(channel >= C_DMA_CHANNEL_NUM)
	{
		return STATUS_FAIL;
	}

	dma_user_isr[channel] = 0;

	return STATUS_OK;
}

// Public wrapper: release a channel and scrub its address registers.
// dma_free_channel() resets the controller, clears the pending bit and the
// notify bookkeeping; target/source are then zeroed so a stale address is
// never re-used by accident. (dma_set_target/source ignore out-of-range
// channels via their switch default, so no extra guard is needed here.)
void drv_l1_dma_free_channel(INT32U channel)
{
	dma_free_channel(channel);
	dma_set_target(channel, 0);
	dma_set_source(channel, 0);
}

/**
 * @brief   read back the current target address register of a channel
 * @param   channel: dma channel number
 * @return  target address, or 0 for an unknown channel
 */
INT32U drv_l1_dma_get_channel_target_address(INT32U channel)
{
	INT32U	addr = 0;

	switch(channel)
	{
	case C_DMA_CH0:
		addr = R_DMA0_TAR_ADDR;
		break;

	case C_DMA_CH1:
		addr = R_DMA1_TAR_ADDR;
		break;

	case C_DMA_CH2:
		addr = R_DMA2_TAR_ADDR;
		break;

	case C_DMA_CH3:
		addr = R_DMA3_TAR_ADDR;
		break;

	case C_DMA_CH4:
		addr = R_DMA4_TAR_ADDR;
		break;

	case C_DMA_CH5:
		addr = R_DMA5_TAR_ADDR;
		break;

	case C_DMA_CH6:
		addr = R_DMA6_TAR_ADDR;
		break;

	case C_DMA_CH7:
		addr = R_DMA7_TAR_ADDR;
		break;

	default:
		break;
	}

	return addr;
}

/**
 * @brief   free dma unsued channel
 * @param   channel: channel
 * @return 	none
 */
/**
 * @brief   free an unused dma channel
 * @param   channel: channel number (out-of-range values are ignored)
 * @return 	none
 */
static void dma_free_channel(INT32U channel)
{
	INT32U	pend = 0;

	if(channel >= C_DMA_CHANNEL_NUM)
	{
		return;
	}

	// Reset DMA controller
	dma_set_control(channel, C_DMA_CTRL_RESET);

	// Map the channel to its interrupt pending bit, then write it back to
	// R_DMA_INT to acknowledge/clear any outstanding interrupt.
	switch(channel)
	{
	case C_DMA_CH0:
		pend = C_DMA0_INT_PEND;
		break;
	case C_DMA_CH1:
		pend = C_DMA1_INT_PEND;
		break;
	case C_DMA_CH2:
		pend = C_DMA2_INT_PEND;
		break;
	case C_DMA_CH3:
		pend = C_DMA3_INT_PEND;
		break;
	case C_DMA_CH4:
		pend = C_DMA4_INT_PEND;
		break;
	case C_DMA_CH5:
		pend = C_DMA5_INT_PEND;
		break;
	case C_DMA_CH6:
		pend = C_DMA6_INT_PEND;
		break;
	case C_DMA_CH7:
		pend = C_DMA7_INT_PEND;
		break;
	default:
		break;
	}

	if(pend)
	{
		R_DMA_INT = pend;	// Clear pending bit
	}

	// Drop notification hooks and mark the channel available again.
	dma_notify_variable[channel] = (INT32U) NULL;

#if _OPERATING_SYSTEM != _OS_NONE
	dma_notify_queue[channel] = (INT32U) NULL;
#endif
	dma_usage[channel] = (INT8U) C_DMA_NOT_UESED;
}

/**
 * @brief   get and set dma channel queue
 * @param   none
 * @return 	queue number
 */
#if _OPERATING_SYSTEM != _OS_NONE	// Soft Protect for critical section
static INT32U dma_get_queue(void)
{
	INT32U	i;

#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_LOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osSuspend();
#endif
	// Scan the queue-usage table for a free slot; claim it under the lock,
	// then (outside the lock) flush an existing queue or lazily create one.
	for(i = 0; i < C_DMA_Q_NUM; i++)
	{
		if(!dma_q_usage[i])
		{
			dma_q_usage[i] = (INT8U) TRUE;

#if _OPERATING_SYSTEM == _OS_UCOS2
			SW_DMA_UNLOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
			osResume();
#endif
			// NOTE(review): OSQFlush is a uC/OS-II API but this call is not
			// guarded by an OS #if — under FreeRTOS this branch looks like it
			// would not build/behave; verify against the FreeRTOS port.
			if(dma_driver_queue[i])
			{
				OSQFlush(dma_driver_queue[i]);
			}
			else
			{
#if _OPERATING_SYSTEM == _OS_UCOS2
				dma_driver_queue[i] = OSQCreate(&q_buffer[i][0], C_DMA_Q_BUF_SIZE);
#elif _OPERATING_SYSTEM == _OS_FREERTOS
				osMessageQDef_t dma_q = { C_DMA_Q_BUF_SIZE, sizeof(INT32U), 0 };
				dma_driver_queue[i] = osMessageCreate(&dma_q, NULL);
#endif
			}

			return i;
		}
	}

#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_UNLOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osResume();
#endif
	// All queues busy: returns C_DMA_Q_NUM — callers must treat any value
	// >= C_DMA_Q_NUM as failure.
	return i;
}

/**
 * @brief   free dma channel queue
 * @param   q_index: queue number
 * @return 	none
 */
/**
 * @brief   release a dma driver queue slot
 * @param   q_index: queue number (out-of-range values are silently ignored)
 * @return 	none
 */
static void dma_free_queue(INT32U q_index)
{
	// Just mark the slot reusable; the underlying OS queue object is kept
	// alive and recycled by dma_get_queue().
	if(q_index < C_DMA_Q_NUM)
	{
		dma_q_usage[q_index] = (INT8U) FALSE;
	}
}

#endif

/**
 * @brief   set dma done notify
 * @param   channel: channel number
 * @param   notify: notify point
 * @param   os_q: notify queue
 * @return 	none
 */
/**
 * @brief   set dma done notify
 * @param   channel: channel number (out-of-range values are ignored)
 * @param   notify: address of an INT8S status variable the ISR writes, or 0
 * @param   os_q: notify queue handle (uCOS-II builds only)
 * @return 	none
 */
void dma_set_notify(INT32U channel, INT32U notify, INT32U os_q)
{
	// ROBUSTNESS: this is a public entry point; guard the array indexing
	// like dma_free_channel() does instead of writing out of bounds.
	if(channel >= C_DMA_CHANNEL_NUM)
	{
		return;
	}

	dma_notify_variable[channel] = notify;
#if _OPERATING_SYSTEM == _OS_UCOS2
	// NOTE(review): only the uCOS-II build stores os_q; the FreeRTOS build
	// also declares dma_notify_queue[] but never sets it here — confirm
	// whether that is intentional.
	dma_notify_queue[channel] = os_q;
#endif
}

/**
 * @brief   disable dma irq
 * @param   none
 * @return 	result: >=0 is success, <0 is fail.
 */
static INT32S dma_device_protect(void)
{
	// Mask the DMA interrupt at the VIC so the ISR cannot run while the
	// caller manipulates shared driver state. The returned "mask" is a
	// placeholder (always 0) consumed by dma_device_unprotect().
	vic_irq_disable(VIC_DMA);
	return 0;
}

/**
 * @brief   enable/disable dma irq
 * @param   mask: mask value
 * @return 	none
 */
static void dma_device_unprotect(INT32S mask)
{
	// Re-enable the DMA interrupt. 'mask' (from dma_device_protect) is
	// currently unused — the enable is unconditional.
	vic_irq_enable(VIC_DMA);
}

/**
 * @brief   report a DMA timeout for one channel (called from the DMA ISR)
 *          Writes the status variable, posts the OS queue (if any) and runs
 *          the registered user callback.
 * @param   channel: channel whose transfer timed out
 * @return  none
 */
static void handle_dma_timout(INT32S channel)
{
	if(dma_notify_variable[channel])
	{
		*((INT8S *) dma_notify_variable[channel]) = (INT8S) C_DMA_STATUS_TIMEOUT;
	}

#if _OPERATING_SYSTEM != _OS_NONE	// Soft Protect for critical section
#if _OPERATING_SYSTEM == _OS_UCOS2
	if(dma_notify_queue[channel])
	{
		if(OSQPost((OS_EVENT *) dma_notify_queue[channel], (void *) C_DMA_STATUS_TIMEOUT) != OS_NO_ERR)
		{
			DBG_PRINT("[ERROR] DMA post fail, Q=0x%X\r\n", dma_notify_queue[channel]);
		}
	}

#elif _OPERATING_SYSTEM == _OS_FREERTOS
	if(dma_notify_queue[channel])
	{
		// BUGFIX: the original posted the ADDRESS of a stack local
		// ((uint32_t)&msg) — by the time the receiver dequeued it, the
		// pointer was dangling. Post the status value itself, matching the
		// uCOS-II path. Also use a zero timeout: this runs in ISR context,
		// where blocking with osWaitForever is illegal in CMSIS-RTOS.
		osMessagePut(dma_notify_queue[channel], (uint32_t) C_DMA_STATUS_TIMEOUT, 0);
	}

#endif
#endif
	if(dma_user_isr[channel])
	{
		(*dma_user_isr[channel]) ((INT8U) channel, C_DMA_STATUS_TIMEOUT);
	}
}

/**
 * @brief   report a successful DMA completion for one channel (ISR context)
 *          Writes the status variable, posts the OS queue (if any) and runs
 *          the registered user callback.
 * @param   channel: channel whose transfer completed
 * @return  none
 */
static void handle_dma_done(INT32S channel)
{
	if(dma_notify_variable[channel])
	{
		*((INT8S *) dma_notify_variable[channel]) = (INT8S) C_DMA_STATUS_DONE;
	}

#if _OPERATING_SYSTEM != _OS_NONE	// Soft Protect for critical section
#if _OPERATING_SYSTEM == _OS_UCOS2
	if(dma_notify_queue[channel])
	{
		if(OSQPost((OS_EVENT *) dma_notify_queue[channel], (void *) C_DMA_STATUS_DONE) != OS_NO_ERR)
		{
			DBG_PRINT("[ERROR] DMA post fail, Q=0x%X\r\n", dma_notify_queue[channel]);
		}

	}

#elif _OPERATING_SYSTEM == _OS_FREERTOS
	if(dma_notify_queue[channel])
	{
		// BUGFIX: the original posted the ADDRESS of a stack local
		// ((uint32_t)&msg) — a dangling pointer once this function returned.
		// Post the status value itself (as the uCOS-II path does), and use a
		// zero timeout because blocking is not allowed in ISR context.
		osMessagePut(dma_notify_queue[channel], (uint32_t) C_DMA_STATUS_DONE, 0);
	}

#endif
#endif
	if(dma_user_isr[channel])
	{
		(*dma_user_isr[channel]) ((INT8U) channel, C_DMA_STATUS_DONE);
	}
}

/**
 * @brief   shared DMA interrupt service routine.
 *          For each channel whose pending bit is set: dispatch timeout vs.
 *          done handling, auto-release the channel unless it is marked
 *          C_DMA_OCCUPIED (long-lived owner), and acknowledge the pending
 *          bit by writing it back to R_DMA_INT. Each channel section is
 *          compiled only when C_DMA_CHANNEL_NUM covers it.
 */
void DMA_IRQHandler(void)	// Device ISR
{
	INT32U	pending;

	// Snapshot pending flags once; individual bits are cleared per channel
	// after handling so nothing is lost if a new interrupt arrives mid-ISR.
	pending = R_DMA_INT;

	#if C_DMA_CHANNEL_NUM > 0
	// DMA0
	if(pending & C_DMA0_INT_PEND)
	{	// DMA0 interrupt is pending
		// Ignore interrupts on channels nobody owns (spurious/stale).
		if(dma_usage[C_DMA_CH0] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA0_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH0);
			}
			else
			{
				handle_dma_done(C_DMA_CH0);
			}

			// One-shot users get their channel recycled automatically;
			// C_DMA_OCCUPIED owners keep it across transfers.
			if(dma_usage[C_DMA_CH0] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				// Transfer targeted the DAC FIFO: clear the busy flag so the
				// audio path can start the next buffer.
				if(R_DMA0_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH0);
			}
		}

		R_DMA_INT = C_DMA0_INT_PEND;	// Clear pending bit
	}
	#endif

	#if C_DMA_CHANNEL_NUM > 1
	// DMA1 (same pattern as DMA0)
	if(pending & C_DMA1_INT_PEND)
	{	// DMA1 interrupt is pending
		if(dma_usage[C_DMA_CH1] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA1_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH1);
			}
			else
			{
				handle_dma_done(C_DMA_CH1);
			}

			if(dma_usage[C_DMA_CH1] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				if(R_DMA1_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH1);
			}
		}

		R_DMA_INT = C_DMA1_INT_PEND;	// Clear pending bit
	}
	#endif

	#if C_DMA_CHANNEL_NUM > 2
	// DMA2 (same pattern as DMA0)
	if(pending & C_DMA2_INT_PEND)
	{	// DMA2 interrupt is pending
		if(dma_usage[C_DMA_CH2] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA2_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH2);
			}
			else
			{
				handle_dma_done(C_DMA_CH2);
			}

			if(dma_usage[C_DMA_CH2] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				if(R_DMA2_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH2);
			}
		}

		R_DMA_INT = C_DMA2_INT_PEND;	// Clear pending bit
	}
	#endif

	#if C_DMA_CHANNEL_NUM > 3
	// DMA3 (same pattern as DMA0)
	if(pending & C_DMA3_INT_PEND)
	{	// DMA3 interrupt is pending
		if(dma_usage[C_DMA_CH3] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA3_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH3);
			}
			else
			{
				handle_dma_done(C_DMA_CH3);
			}

			if(dma_usage[C_DMA_CH3] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				if(R_DMA3_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH3);
			}
		}

		R_DMA_INT = C_DMA3_INT_PEND;	// Clear pending bit
	}
	#endif

	#if C_DMA_CHANNEL_NUM > 4
	// DMA4 (same pattern as DMA0)
	if(pending & C_DMA4_INT_PEND)
	{	// DMA4 interrupt is pending
		if(dma_usage[C_DMA_CH4] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA4_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH4);
			}
			else
			{
				handle_dma_done(C_DMA_CH4);
			}

			if(dma_usage[C_DMA_CH4] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				if(R_DMA4_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH4);
			}
		}

		R_DMA_INT = C_DMA4_INT_PEND;	// Clear pending bit
	}
	#endif

	#if C_DMA_CHANNEL_NUM > 5
	// DMA5 (same pattern as DMA0)
	if(pending & C_DMA5_INT_PEND)
	{	// DMA5 interrupt is pending
		if(dma_usage[C_DMA_CH5] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA5_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH5);
			}
			else
			{
				handle_dma_done(C_DMA_CH5);
			}

			if(dma_usage[C_DMA_CH5] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				if(R_DMA5_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH5);
			}
		}

		R_DMA_INT = C_DMA5_INT_PEND;	// Clear pending bit
	}
	#endif

	#if C_DMA_CHANNEL_NUM > 6
	// DMA6 (same pattern as DMA0)
	if(pending & C_DMA6_INT_PEND)
	{	// DMA6 interrupt is pending
		if(dma_usage[C_DMA_CH6] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA6_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH6);
			}
			else
			{
				handle_dma_done(C_DMA_CH6);
			}

			if(dma_usage[C_DMA_CH6] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				if(R_DMA6_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH6);
			}
		}

		R_DMA_INT = C_DMA6_INT_PEND;	// Clear pending bit
	}
	#endif

	#if C_DMA_CHANNEL_NUM > 7
	// DMA7 (same pattern as DMA0)
	if(pending & C_DMA7_INT_PEND)
	{	// DMA7 interrupt is pending
		if(dma_usage[C_DMA_CH7] != C_DMA_NOT_UESED)
		{
			if(pending & C_DMA7_TIMEOUT)
			{
				handle_dma_timout(C_DMA_CH7);
			}
			else
			{
				handle_dma_done(C_DMA_CH7);
			}

			if(dma_usage[C_DMA_CH7] != C_DMA_OCCUPIED)
			{
				#if DRV_l1_DAC_ON_OFF
				if(R_DMA7_TAR_ADDR == (INT32U)P_DAC_CHA_DATA)
				{
					dma_to_dac_busy = 0;
				}
				#endif

				dma_free_channel(C_DMA_CH7);
			}
		}

		R_DMA_INT = C_DMA7_INT_PEND;	// Clear pending bit
	}
	#endif
}

/**
 * @brief   dma initialize
 * @param   none
 * @return 	none
 */
void drv_l1_dma_init(void)
{
	INT32U	i;

	// --- Hardware reset: put every channel's control/count/sprite/
	// transparent/misc registers into a known idle state. ---
	R_DMA0_CTRL = C_DMA_CTRL_RESET; // Software reset, this bit will auto clear after reset complete
	R_DMA0_TX_COUNT = 0x0;
	R_DMA0_SPRITE_SIZE = 0x0;
	R_DMA0_TRANSPARENT = 0x0;
	R_DMA0_MISC = 0x0;

	R_DMA1_CTRL = C_DMA_CTRL_RESET;
	R_DMA1_TX_COUNT = 0x0;
	R_DMA1_SPRITE_SIZE = 0x0;
	R_DMA1_TRANSPARENT = 0x0;
	R_DMA1_MISC = 0x0;

	R_DMA2_CTRL = C_DMA_CTRL_RESET;
	R_DMA2_TX_COUNT = 0x0;
	R_DMA2_SPRITE_SIZE = 0x0;
	R_DMA2_TRANSPARENT = 0x0;
	R_DMA2_MISC = 0x0;

	R_DMA3_CTRL = C_DMA_CTRL_RESET;
	R_DMA3_TX_COUNT = 0x0;
	R_DMA3_SPRITE_SIZE = 0x0;
	R_DMA3_TRANSPARENT = 0x0;
	R_DMA3_MISC = 0x0;

	R_DMA4_CTRL = C_DMA_CTRL_RESET;
	R_DMA4_TX_COUNT = 0x0;
	R_DMA4_SPRITE_SIZE = 0x0;
	R_DMA4_TRANSPARENT = 0x0;
	R_DMA4_MISC = 0x0;

	R_DMA5_CTRL = C_DMA_CTRL_RESET;
	R_DMA5_TX_COUNT = 0x0;
	R_DMA5_SPRITE_SIZE = 0x0;
	R_DMA5_TRANSPARENT = 0x0;
	R_DMA5_MISC = 0x0;

	R_DMA6_CTRL = C_DMA_CTRL_RESET;
	R_DMA6_TX_COUNT = 0x0;
	R_DMA6_SPRITE_SIZE = 0x0;
	R_DMA6_TRANSPARENT = 0x0;
	R_DMA6_MISC = 0x0;

	R_DMA7_CTRL = C_DMA_CTRL_RESET;
	R_DMA7_TX_COUNT = 0x0;
	R_DMA7_SPRITE_SIZE = 0x0;
	R_DMA7_TRANSPARENT = 0x0;
	R_DMA7_MISC = 0x0;

	R_DMA_LINE_LEN = 0x0;
	// Default device routing: channel n mapped to device slot n.
	R_DMA_DEVICE = 0x76543210;
	R_DMA_CEMODE = C_DMA_CE_DONT_RESET;
	R_DMA_INT = 0xFFFFFFFF;			// Clear all pending bits
	// --- Software state: all channels free, no notifications registered. ---
	for(i = 0; i < C_DMA_CHANNEL_NUM; i++)
	{
		dma_usage[i] = (INT8U) C_DMA_NOT_UESED;
		dma_notify_variable[i] = (INT32U) NULL;
#if _OPERATING_SYSTEM != _OS_NONE	// Soft Protect for critical section
#if _OPERATING_SYSTEM == _OS_UCOS2
		dma_notify_queue[i] = (INT32U) NULL;
#elif _OPERATING_SYSTEM == _OS_FREERTOS
		dma_notify_queue[i] = (osMessageQId) NULL;
#endif
#endif
	}

#if _OPERATING_SYSTEM != _OS_NONE
	// Pre-create (or flush, on re-init) the pool of driver message queues.
	for(i = 0; i < C_DMA_Q_NUM; i++)
	{
		dma_q_usage[i] = (INT8U) FALSE;
		// NOTE(review): OSQFlush is uC/OS-II API but is called here without
		// an OS #if guard — verify this path on FreeRTOS builds.
		if(dma_driver_queue[i])
		{
			OSQFlush(dma_driver_queue[i]);
		}
		else
		{
#if _OPERATING_SYSTEM == _OS_UCOS2
			dma_driver_queue[i] = OSQCreate(&q_buffer[i][0], C_DMA_Q_BUF_SIZE);
#elif _OPERATING_SYSTEM == _OS_FREERTOS
			osMessageQDef_t dma_q = { C_DMA_Q_BUF_SIZE, sizeof(INT32U), 0 };
			dma_driver_queue[i] = osMessageCreate(&dma_q, NULL);
#endif
		}
	}

#endif

#if _OPERATING_SYSTEM == _OS_UCOS2
	// Binary semaphore guarding the usage tables (see SW_DMA_LOCK/UNLOCK).
	if(sw_dma_sem == NULL)
	{
		sw_dma_sem = OSSemCreate(1);
	}
#endif

	// Hook and enable the shared DMA interrupt.
	vic_irq_register(VIC_DMA, DMA_IRQHandler);
	vic_irq_enable(VIC_DMA);

	dma_init_done = TRUE;
}

/**
 * @brief   set dma direction
 * @param   channel: dma channel number
 * @param   dir: direction, frame buffer to sprite or sprite to frame buffer
 * @return 	none
 */
static void dma_set_direction(INT32U channel, INT32U dir)
{
	// Nonzero dir sets the FB->sprite direction bit in the channel's MISC
	// register; zero clears it. Unknown channels are ignored.
	switch(channel)
	{
	case C_DMA_CH0:
		if(dir)
			R_DMA0_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA0_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	case C_DMA_CH1:
		if(dir)
			R_DMA1_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA1_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	case C_DMA_CH2:
		if(dir)
			R_DMA2_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA2_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	case C_DMA_CH3:
		if(dir)
			R_DMA3_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA3_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	case C_DMA_CH4:
		if(dir)
			R_DMA4_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA4_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	case C_DMA_CH5:
		if(dir)
			R_DMA5_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA5_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	case C_DMA_CH6:
		if(dir)
			R_DMA6_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA6_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	case C_DMA_CH7:
		if(dir)
			R_DMA7_MISC |= C_DMA_MISC_FB_TO_SPRITE;
		else
			R_DMA7_MISC &= ~C_DMA_MISC_FB_TO_SPRITE;
		break;

	default:
		break;
	}
}

/**
 * @brief   set dma control register
 * @param   channel: dma channel number
 * @param   ctrl: dma control register value
 * @return 	none
 */
/**
 * @brief   write a channel's control register
 * @param   channel: dma channel number (unknown channels are ignored)
 * @param   ctrl: raw control register value
 * @return 	none
 */
static void dma_set_control(INT32U channel, INT32U ctrl)
{
	if(channel == C_DMA_CH0)
	{
		R_DMA0_CTRL = ctrl;
	}
	else if(channel == C_DMA_CH1)
	{
		R_DMA1_CTRL = ctrl;
	}
	else if(channel == C_DMA_CH2)
	{
		R_DMA2_CTRL = ctrl;
	}
	else if(channel == C_DMA_CH3)
	{
		R_DMA3_CTRL = ctrl;
	}
	else if(channel == C_DMA_CH4)
	{
		R_DMA4_CTRL = ctrl;
	}
	else if(channel == C_DMA_CH5)
	{
		R_DMA5_CTRL = ctrl;
	}
	else if(channel == C_DMA_CH6)
	{
		R_DMA6_CTRL = ctrl;
	}
	else if(channel == C_DMA_CH7)
	{
		R_DMA7_CTRL = ctrl;
	}
}

/**
 * @brief   set dma source address
 * @param   channel: dma channel number
 * @param   addr: dma source address
 * @return 	none
 */
/**
 * @brief   write a channel's source address register
 * @param   channel: dma channel number (unknown channels are ignored)
 * @param   addr: dma source address
 * @return 	none
 */
static void dma_set_source(INT32U channel, INT32U addr)
{
	if(channel == C_DMA_CH0)
	{
		R_DMA0_SRC_ADDR = addr;
	}
	else if(channel == C_DMA_CH1)
	{
		R_DMA1_SRC_ADDR = addr;
	}
	else if(channel == C_DMA_CH2)
	{
		R_DMA2_SRC_ADDR = addr;
	}
	else if(channel == C_DMA_CH3)
	{
		R_DMA3_SRC_ADDR = addr;
	}
	else if(channel == C_DMA_CH4)
	{
		R_DMA4_SRC_ADDR = addr;
	}
	else if(channel == C_DMA_CH5)
	{
		R_DMA5_SRC_ADDR = addr;
	}
	else if(channel == C_DMA_CH6)
	{
		R_DMA6_SRC_ADDR = addr;
	}
	else if(channel == C_DMA_CH7)
	{
		R_DMA7_SRC_ADDR = addr;
	}
}

/**
 * @brief   set dma destination address
 * @param   channel: dma channel number
 * @param   addr: dma destination address
 * @return 	none
 */
/**
 * @brief   write a channel's target (destination) address register
 * @param   channel: dma channel number (unknown channels are ignored)
 * @param   addr: dma destination address
 * @return 	none
 */
static void dma_set_target(INT32U channel, INT32U addr)
{
	if(channel == C_DMA_CH0)
	{
		R_DMA0_TAR_ADDR = addr;
	}
	else if(channel == C_DMA_CH1)
	{
		R_DMA1_TAR_ADDR = addr;
	}
	else if(channel == C_DMA_CH2)
	{
		R_DMA2_TAR_ADDR = addr;
	}
	else if(channel == C_DMA_CH3)
	{
		R_DMA3_TAR_ADDR = addr;
	}
	else if(channel == C_DMA_CH4)
	{
		R_DMA4_TAR_ADDR = addr;
	}
	else if(channel == C_DMA_CH5)
	{
		R_DMA5_TAR_ADDR = addr;
	}
	else if(channel == C_DMA_CH6)
	{
		R_DMA6_TAR_ADDR = addr;
	}
	else if(channel == C_DMA_CH7)
	{
		R_DMA7_TAR_ADDR = addr;
	}
}

/**
 * @brief   program one channel's source/target/count/timeout/sprite size in
 *          a single call (combined form of the individual dma_set_* setters).
 * @param   channel: dma channel number
 * @param   saddr: source address
 * @param   taddr: target address
 * @param   xfer_cnt: transfer count (<= C_DMA_COUNT_MAX)
 * @param   timeout: timeout field value (<= C_DMA_MISC_TIMEOUT_MAX)
 * @param   sprite_sz: sprite size (<= C_DMA_SPRITE_MAX)
 * @return  0 on success, -1 on range error or unknown channel
 */
static INT32S dma_setting(INT32U channel, INT32U saddr, INT32U taddr, INT32U xfer_cnt, INT32U timeout, INT32U sprite_sz)
{
	// Validate all ranges up front so no register is half-programmed.
	if((xfer_cnt > C_DMA_COUNT_MAX) || (timeout>C_DMA_MISC_TIMEOUT_MAX) || (sprite_sz > C_DMA_SPRITE_MAX)) {
		return -1;
	}
	switch (channel) {
	case C_DMA_CH0:
		R_DMA0_SRC_ADDR = saddr;
		R_DMA0_TAR_ADDR = taddr;
		R_DMA0_TX_COUNT = xfer_cnt;
        R_DMA0_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);	// read-modify-write: only the timeout field changes
		R_DMA0_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA0_SPRITE_SIZE = sprite_sz;
		break;
	case C_DMA_CH1:
		R_DMA1_SRC_ADDR = saddr;
		R_DMA1_TAR_ADDR = taddr;
		R_DMA1_TX_COUNT = xfer_cnt;
        R_DMA1_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA1_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA1_SPRITE_SIZE = sprite_sz;
		break;
	case C_DMA_CH2:
		R_DMA2_SRC_ADDR = saddr;
		R_DMA2_TAR_ADDR = taddr;
		R_DMA2_TX_COUNT = xfer_cnt;
		R_DMA2_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA2_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA2_SPRITE_SIZE = sprite_sz;
		break;
	case C_DMA_CH3:
		R_DMA3_SRC_ADDR = saddr;
		R_DMA3_TAR_ADDR = taddr;
		R_DMA3_TX_COUNT = xfer_cnt;
		R_DMA3_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA3_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA3_SPRITE_SIZE = sprite_sz;
		break;
	case C_DMA_CH4:
		R_DMA4_SRC_ADDR = saddr;
		R_DMA4_TAR_ADDR = taddr;
		R_DMA4_TX_COUNT = xfer_cnt;
		R_DMA4_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA4_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA4_SPRITE_SIZE = sprite_sz;
		break;
	case C_DMA_CH5:
		R_DMA5_SRC_ADDR = saddr;
		R_DMA5_TAR_ADDR = taddr;
		R_DMA5_TX_COUNT = xfer_cnt;
		R_DMA5_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA5_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA5_SPRITE_SIZE = sprite_sz;
		break;
	case C_DMA_CH6:
		R_DMA6_SRC_ADDR = saddr;
		R_DMA6_TAR_ADDR = taddr;
		R_DMA6_TX_COUNT = xfer_cnt;
		R_DMA6_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA6_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA6_SPRITE_SIZE = sprite_sz;
		break;
	case C_DMA_CH7:
		R_DMA7_SRC_ADDR = saddr;
		R_DMA7_TAR_ADDR = taddr;
		R_DMA7_TX_COUNT = xfer_cnt;
		R_DMA7_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA7_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		R_DMA7_SPRITE_SIZE = sprite_sz;
		break;
	default:
		return -1;
	}

	return 0;
}


/**
 * @brief   set dma transfer length
 * @param   channel: dma channel number
 * @param   count: transfer length
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   set a channel's transfer length register
 * @param   channel: dma channel number
 * @param   count: transfer length (<= C_DMA_COUNT_MAX)
 * @return 	result: >=0 is success, <0 is fail.
 */
static INT32S dma_set_tx_count(INT32U channel, INT32U count)
{
	if(count > C_DMA_COUNT_MAX)
	{
		return -1;
	}

	if(channel == C_DMA_CH0)
	{
		R_DMA0_TX_COUNT = count;
	}
	else if(channel == C_DMA_CH1)
	{
		R_DMA1_TX_COUNT = count;
	}
	else if(channel == C_DMA_CH2)
	{
		R_DMA2_TX_COUNT = count;
	}
	else if(channel == C_DMA_CH3)
	{
		R_DMA3_TX_COUNT = count;
	}
	else if(channel == C_DMA_CH4)
	{
		R_DMA4_TX_COUNT = count;
	}
	else if(channel == C_DMA_CH5)
	{
		R_DMA5_TX_COUNT = count;
	}
	else if(channel == C_DMA_CH6)
	{
		R_DMA6_TX_COUNT = count;
	}
	else if(channel == C_DMA_CH7)
	{
		R_DMA7_TX_COUNT = count;
	}
	else
	{
		// Unknown channel: nothing written.
		return -1;
	}

	return 0;
}

/**
 * @brief   set dma transfer device number
 * @param   channel: dma channel number
 * @param   count: device number
 * @return 	result: >=0 is success, <0 is fail.
 */
static INT32S dma_set_device(INT32U channel, INT16U device)
{
	INT32U	shift = 0, bshift = 0;
	// Upper nibble of 'device' selects the CE-mode bank; the low nibble is
	// the per-channel device field written into R_DMA_DEVICE.
	INT32U	bank = device >> 4;
	INT32S	mask;

	if((channel >= C_DMA_CHANNEL_NUM) || (device > C_DMA_IO_MAX))
	{
		return -1;
	}

	// Resolve the channel's field position in R_DMA_DEVICE (shift) and its
	// bank bit in R_DMA_CEMODE (bshift).
	if(channel == C_DMA_CH0)
	{
		shift = C_DMA0_IO_SHIFT;
		bshift = C_DMA0_BANK_SHIFT;
	}
	else
	if(channel == C_DMA_CH1)
	{
		shift = C_DMA1_IO_SHIFT;
		bshift = C_DMA1_BANK_SHIFT;
	}
	else
	if(channel == C_DMA_CH2)
	{
		shift = C_DMA2_IO_SHIFT;
		bshift = C_DMA2_BANK_SHIFT;
	}
	else
	if(channel == C_DMA_CH3)
	{
		shift = C_DMA3_IO_SHIFT;
		bshift = C_DMA3_BANK_SHIFT;
	}
	else
	if(channel == C_DMA_CH4)
	{
		shift = C_DMA4_IO_SHIFT;
		bshift = C_DMA4_BANK_SHIFT;
	}
	else
	if(channel == C_DMA_CH5)
	{
		shift = C_DMA5_IO_SHIFT;
		bshift = C_DMA5_BANK_SHIFT;
	}
	else
	if(channel == C_DMA_CH6)
	{
		shift = C_DMA6_IO_SHIFT;
		bshift = C_DMA6_BANK_SHIFT;
	}
	else
	if(channel == C_DMA_CH7)
	{
		shift = C_DMA7_IO_SHIFT;
		bshift = C_DMA7_BANK_SHIFT;
	}

#if _OPERATING_SYSTEM != _OS_NONE	// Soft Protect for critical section
#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_LOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osSuspend();
#endif
#endif
	mask = dma_device_protect();	// Device Protect for critical section
	// Read-modify-write of the two shared routing registers must be atomic
	// with respect to other tasks and the DMA ISR.
	R_DMA_CEMODE &= ~(0x01 << bshift);
	R_DMA_CEMODE |= (bank << bshift);

	R_DMA_DEVICE &= ~((C_DMA_IO_MASK) << shift);
	R_DMA_DEVICE |= ((device & C_DMA_IO_MASK) << shift);

	dma_device_unprotect(mask);
#if _OPERATING_SYSTEM != _OS_NONE
#if _OPERATING_SYSTEM == _OS_UCOS2
	SW_DMA_UNLOCK();
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osResume();
#endif
#endif
	return 0;
}

/**
 * @brief   set dma timeout value
 * @param   channel: dma channel number
 * @param   timeout: timeout value
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   set dma timeout value (timeout field of the channel MISC register)
 * @param   channel: dma channel number
 * @param   timeout: timeout value (<= C_DMA_MISC_TIMEOUT_MAX)
 * @return 	result: >=0 is success, <0 is fail.
 */
static INT32S dma_set_timeout(INT32U channel, INT32U timeout)
{
	if((channel >= C_DMA_CHANNEL_NUM) || (timeout > C_DMA_MISC_TIMEOUT_MAX))
	{
		return -1;
	}

	switch(channel)
	{
	case C_DMA_CH0:
		R_DMA0_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA0_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	case C_DMA_CH1:
		R_DMA1_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA1_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	case C_DMA_CH2:
		R_DMA2_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA2_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	case C_DMA_CH3:
		R_DMA3_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA3_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	case C_DMA_CH4:
		R_DMA4_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA4_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	case C_DMA_CH5:
		R_DMA5_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA5_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	case C_DMA_CH6:
		R_DMA6_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA6_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	case C_DMA_CH7:
		R_DMA7_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);
		R_DMA7_MISC |= timeout << C_DMA_MISC_TIMEOUT_SHIFT;
		break;

	default:
		// CONSISTENCY FIX: was 'break', which fell through to 'return 0' and
		// reported success for a channel that was never programmed. The
		// range guard above makes this unreachable today, but return -1 like
		// the sibling setters (dma_set_tx_count, dma_set_sprite_size) so the
		// function stays correct if the guard or channel count ever changes.
		return -1;
	}

	return 0;
}

/**
 * @brief   set dma line length
 * @param   length: line length for LCD frame buffer
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   set the global dma line length (LCD frame-buffer stride)
 * @param   length: line length, must not exceed C_DMA_LINE_MAX
 * @return 	result: >=0 is success, <0 is fail.
 */
static INT32S dma_set_line_length(INT32U length)
{
	if(length <= C_DMA_LINE_MAX)
	{
		R_DMA_LINE_LEN = length;
		return 0;
	}

	// Out-of-range length: register untouched.
	return -1;
}

/**
 * @brief   set dma sprite size
 * @param   channel: dma channel
 * @param   size: sprite size
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   set a channel's sprite size register
 * @param   channel: dma channel
 * @param   size: sprite size (<= C_DMA_SPRITE_MAX)
 * @return 	result: >=0 is success, <0 is fail.
 */
static INT32S dma_set_sprite_size(INT32U channel, INT32U size)
{
	if(size > C_DMA_SPRITE_MAX)
	{
		return -1;
	}

	if(channel == C_DMA_CH0)
	{
		R_DMA0_SPRITE_SIZE = size;
	}
	else if(channel == C_DMA_CH1)
	{
		R_DMA1_SPRITE_SIZE = size;
	}
	else if(channel == C_DMA_CH2)
	{
		R_DMA2_SPRITE_SIZE = size;
	}
	else if(channel == C_DMA_CH3)
	{
		R_DMA3_SPRITE_SIZE = size;
	}
	else if(channel == C_DMA_CH4)
	{
		R_DMA4_SPRITE_SIZE = size;
	}
	else if(channel == C_DMA_CH5)
	{
		R_DMA5_SPRITE_SIZE = size;
	}
	else if(channel == C_DMA_CH6)
	{
		R_DMA6_SPRITE_SIZE = size;
	}
	else if(channel == C_DMA_CH7)
	{
		R_DMA7_SPRITE_SIZE = size;
	}
	else
	{
		// Unknown channel: nothing written.
		return -1;
	}

	return 0;
}

#if 0
// Enable transparent-pattern filtering on a channel (currently compiled out
// by the surrounding '#if 0'). Returns 0 on success, -1 for unknown channel.
static INT32S dma_set_transparent_enable(INT32U channel)
{
	switch(channel)
	{
	case C_DMA_CH0:
		R_DMA0_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	case C_DMA_CH1:
		R_DMA1_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	case C_DMA_CH2:
		R_DMA2_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	case C_DMA_CH3:
		R_DMA3_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	case C_DMA_CH4:
		R_DMA4_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	case C_DMA_CH5:
		R_DMA5_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	case C_DMA_CH6:
		R_DMA6_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	case C_DMA_CH7:
		R_DMA7_MISC |= C_DMA_MISC_TRANSPARENT_EN;
		break;

	default:
		return -1;
	}

	return 0;
}

// Disable transparent-pattern filtering on a channel (currently compiled out
// by the surrounding '#if 0'). Returns 0 on success, -1 for unknown channel.
static INT32S dma_set_transparent_disable(INT32U channel)
{
	switch(channel)
	{
	case C_DMA_CH0:
		R_DMA0_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	case C_DMA_CH1:
		R_DMA1_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	case C_DMA_CH2:
		R_DMA2_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	case C_DMA_CH3:
		R_DMA3_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	case C_DMA_CH4:
		R_DMA4_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	case C_DMA_CH5:
		R_DMA5_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	case C_DMA_CH6:
		R_DMA6_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	case C_DMA_CH7:
		R_DMA7_MISC &= ~(C_DMA_MISC_TRANSPARENT_EN);
		break;

	default:
		return -1;
	}

	return 0;
}

// Program the transparent-match pattern of a channel (currently compiled out
// by the surrounding '#if 0'). Returns 0 on success, -1 on bad pattern or
// unknown channel.
static INT32S dma_set_transparent_pattern(INT32U channel, INT16U pattern)
{
	if(pattern > C_DMA_TRANSPARENT_MAX)
	{
		return -1;
	}

	switch(channel)
	{
	case C_DMA_CH0:
		R_DMA0_TRANSPARENT = pattern;
		break;

	case C_DMA_CH1:
		R_DMA1_TRANSPARENT = pattern;
		break;

	case C_DMA_CH2:
		R_DMA2_TRANSPARENT = pattern;
		break;

	case C_DMA_CH3:
		R_DMA3_TRANSPARENT = pattern;
		break;

	case C_DMA_CH4:
		R_DMA4_TRANSPARENT = pattern;
		break;

	case C_DMA_CH5:
		R_DMA5_TRANSPARENT = pattern;
		break;

	case C_DMA_CH6:
		R_DMA6_TRANSPARENT = pattern;
		break;

	case C_DMA_CH7:
		R_DMA7_TRANSPARENT = pattern;
		break;

	default:
		return -1;
	}

	return 0;
}

#endif
#if AES_ENABLE
/**
 * @brief   Poll until the AES engine finishes loading the key.
 * @param   time_out: number of polls before giving up
 * @return  0 when bit4 of R_AES_LOAD_KEY clears (key loaded), -1 on timeout
 */
static INT32S aes_load_key_wait(INT32U time_out)
{
	INT32U	i;

	for(i = 0; i < time_out; i++)
	{
		if((R_AES_LOAD_KEY & 0x10) == 0)
		{
			return 0;
		}
		else
		{
			// BUGFIX: the delay counter must be volatile; the original
			// "j = j;" body has no side effect, so an optimizing compiler
			// may remove the whole delay loop and defeat the wait.
			volatile INT32U	j;
			INT32U	loopCnt = 0xf;		// [200k]0x4FF;  // [100k]0x3FF;
			for(j = 0; j < loopCnt; j++)
			{
				// busy-wait delay; volatile j forbids elimination
			}
		}
	}

	return -1;
}

#endif
/**
 * @brief   Core DMA transfer setup shared by every public transfer entry point.
 *          Validates width/alignment, classifies source and target as memory or
 *          a known IO device, optionally routes the transfer through the AES
 *          (channel 2) or DES/3DES (channel 4) engine, programs the channel
 *          registers and starts the transfer.
 * @param   dma_struct: transfer description (addresses, count, width, notify,
 *          optional aes/des settings); dma_struct->channel is written back with
 *          the channel actually used
 * @param   usage: C_DMA_NORMAL_USED or C_DMA_OCCUPIED (double-buffer mode)
 * @param   os_q: OS queue handle (as INT32U) to notify on completion, or NULL
 * @param   dma_user_isr: optional user callback invoked from the DMA ISR
 * @return  0 on success (or zero-length transfer), negative on failure
 * @note    Caller must ensure dma_struct is non-NULL; this function does not
 *          check it.
 */
static INT32S dma_transfer_extend(DMA_STRUCT *dma_struct, INT8U usage, INT32U os_q, void (*dma_user_isr) (INT8U, INT8S))
{
	INT32U	s_addr = dma_struct->s_addr;
	INT32U	t_addr = dma_struct->t_addr;
	INT32U	count = dma_struct->count;
	INT32U	channel, len, ctrl;
	INT16U	src_type, target_type;

	#if DES_3DES_ENABLE
	INT32U	des_mode = 0;
	#endif
	#if DES_3DES_ENABLE || AES_ENABLE
	INT32U	option = 0;
	#endif

	// Zero-length transfer: report DONE immediately without touching hardware
	if(!count)
	{
		if(dma_struct->notify)
		{
			*(dma_struct->notify) = C_DMA_STATUS_DONE;
		}

		return 0;
	}

	// Select access width; addresses must be aligned to the access width.
	// len is the byte length, used later for cache maintenance.
	if(dma_struct->width == DMA_DATA_WIDTH_1BYTE)
	{
		ctrl = C_DMA_CTRL_8BIT | C_DMA_CTRL_INT | C_DMA_CTRL_NORMAL_INT | C_DMA_CTRL_ENABLE;
		len = count;
	}
	else
	if(dma_struct->width == DMA_DATA_WIDTH_2BYTE)
	{
		// Both source and target address must be 2-byte alignment
		if((s_addr & 0x1) || (t_addr & 0x1))
		{
			DBG_PRINT("DMA fail 2-byte align s=0x%X,t=0x%X\r\n", s_addr, t_addr);
			return -1;
		}

		ctrl = C_DMA_CTRL_16BIT | C_DMA_CTRL_INT | C_DMA_CTRL_NORMAL_INT | C_DMA_CTRL_ENABLE;
		len = count << 1;
	}
	else
	if(dma_struct->width == DMA_DATA_WIDTH_4BYTE)
	{
		// Both source and target address must be 4-byte alignment
		if((s_addr & 0x3) || (t_addr & 0x3))
		{
			DBG_PRINT("DMA fail 4-byte align s=0x%X,t=0x%X\r\n", s_addr, t_addr);
			return -1;
		}

		ctrl = C_DMA_CTRL_32BIT | C_DMA_CTRL_INT | C_DMA_CTRL_NORMAL_INT | C_DMA_CTRL_ENABLE;
		len = count << 2;
	}
	else
	{
		DBG_PRINT("DMA fail unknow width type=0x%d\r\n", dma_struct->width);
		return -1;
	}

	// Classify the source: anything outside the IO aperture is memory,
	// otherwise the address must match a known peripheral RX data register.
	if((s_addr < C_DMA_IO_ADDR_START) || (s_addr > C_DMA_IO_ADDR_END))
	{
		src_type = C_DMA_MEMORY;
	}
	else
	{
		if(s_addr == (INT32U) P_UART0_BASE)
		{
			src_type = C_DMA_IO_UART0_RX;
		}
		else
		if(s_addr == (INT32U) P_UART1_BASE)
		{
			src_type = C_DMA_IO_UART1_RX;
		}
		else
		if(s_addr == (INT32U) P_SPI0_RX_DATA)
		{
			src_type = C_DMA_IO_SPI0_RX;
		}
		else
		if(s_addr == (INT32U) P_SPI1_RX_DATA)
		{
			src_type = C_DMA_IO_SPI1_RX;
		}
		else
		if(s_addr == (INT32U) P_SDC0_DATA_RX)
		{
			src_type = C_DMA_IO_SDC0;
		}
		else
		if(s_addr == (INT32U) P_SDC1_DATA_RX)
		{
			src_type = C_DMA_IO_SDC1;
		}
		else
		if(s_addr == (INT32U) P_ADC_ASADC_DATA)
		{
			src_type = C_DMA_IO_ADC;
		}
		else
		if(s_addr == (INT32U) P_I2SRX_DATA)
		{
			src_type = C_DMA_IO_I2S_RX;
		}
		else
		if(s_addr == (INT32U) P_I2S1RX_DATA)
		{
			src_type = C_DMA_IO_I2S1_RX;
		}
		else
		if(s_addr == (INT32U) P_I2S2RX_DATA)
		{
			src_type = C_DMA_IO_I2S2_RX;
		}
		else
		if(s_addr == (INT32U) P_I2S3RX_DATA)
		{
			src_type = C_DMA_IO_I2S3_RX;
		}
		else
		{
			// Unknow IO
			DBG_PRINT("DMA fail unknow IO s=0x%X\r\n", s_addr);
			return -4;
		}
	}

	// Classify the target the same way (TX data registers).
	if((t_addr < C_DMA_IO_ADDR_START) || (t_addr > C_DMA_IO_ADDR_END))
	{
		target_type = C_DMA_MEMORY;
	}
	else
	{
		if(src_type != C_DMA_MEMORY)
		{
			// IO to IO is not supported
			DBG_PRINT("DMA fail not support IO to IO s=0x%X,t=0x%X", s_addr, t_addr);
			return -1;
		}

		if(t_addr == (INT32U) P_UART0_BASE)
		{
			target_type = C_DMA_IO_UART0_TX;
		}
		else
		if(t_addr == (INT32U) P_UART1_BASE)
		{
			target_type = C_DMA_IO_UART1_TX;
		}
		else
		if(t_addr == (INT32U) P_SPI0_TX_DATA)
		{
			target_type = C_DMA_IO_SPI0_TX;
		}
		else
		if(t_addr == (INT32U) P_SPI1_TX_DATA)
		{
			target_type = C_DMA_IO_SPI1_TX;
		}
		else
		if(t_addr == (INT32U) P_DAC_CHA_DATA)
		{
			target_type = C_DMA_IO_DAC_CHA;
		}
		else
		if(t_addr == (INT32U) P_DAC_CHB_DATA)
		{
			target_type = C_DMA_IO_DAC_CHB;
		}
		else
		if(t_addr == (INT32U) P_SDC0_DATA_TX)
		{
			target_type = C_DMA_IO_SDC0;
		}
		else
		if(t_addr == (INT32U) P_SDC1_DATA_TX)
		{
			target_type = C_DMA_IO_SDC1;
		}
		else
		if(t_addr == (INT32U) P_I2STX_DATA)
		{
			target_type = C_DMA_IO_I2S_TX;
		}
		else
		if(t_addr == (INT32U) P_I2S1TX_DATA)
		{
			target_type = C_DMA_IO_I2S1_TX;
		}
		else
		if(t_addr == (INT32U) P_I2S2TX_DATA)
		{
			target_type = C_DMA_IO_I2S2_TX;
		}
		else
		if(t_addr == (INT32U) P_I2S3TX_DATA)
		{
			target_type = C_DMA_IO_I2S3_TX;
		}
		else
		{
			// Unknow IO
			DBG_PRINT("DMA fail unknow IO t=0x%X\r\n", t_addr);
			return -5;
		}
	}

	// Lazy driver initialization on first use
	if(!dma_init_done)
	{
		drv_l1_dma_init();
	}

#if AES_ENABLE
	// aes only support in dma2
	if(dma_struct->aes && dma_struct->aes->enable)
	{
		channel = 2;
		dma_struct->channel = 2;
		if(dma_set_channel(usage, 2) < 0)
		{
			// DMA channel is not available
			DBG_PRINT("DMA fail AES channel is not available\r\n");
			return -1;
		}

		// NOTE(review): raw register write, presumably clears the AES
		// byte/bit-reverse register before rebuilding it — confirm address.
		*((volatile INT32U *) 0xD03000B8) = 0;

		if(dma_struct->aes->func)		//Encrypt
		{
			// Build the encrypt-direction reverse/option bits (low half of R_AES_REV)
			//INT32U temp;
			if(dma_struct->aes->keyrev == 0)
			{
				option |= (1 << 4);
			}
			else
			if(dma_struct->aes->keyrev == 1)
			{
				option &= (~(1 << 4));
			}
			else
			if(dma_struct->aes->keyrev == 2)
			{
				option |= (0xF | (1 << 4));
			}

			//temp |= ((dma_struct->aes->option)&0xFFFF);
			option |= ((1 << 9) | (1 << 14));
			if(dma_struct->aes->InRev & 0x01)
			{
				option &= ~(1 << 9);
			}
			if(dma_struct->aes->InRev & 0x02)
			{
				option |= (0xF << 5);
			}

			if(dma_struct->aes->OutRev & 0x01)
			{
				option &= ~(1 << 14);
			}

			if(dma_struct->aes->OutRev & 0x02)
			{
				option |= (0xF << 10);
			}

			R_AES_REV |= option;
		}
		else
		{
			// Decrypt direction uses the mirrored bit positions in the
			// upper half (bits 16..31) of R_AES_REV
			//INT32U temp;
			if(dma_struct->aes->keyrev == 0)
			{
				option |= (1 << 20);
			}
			else
			if(dma_struct->aes->keyrev == 1)
			{
				option &= (~(1 << 20));
			}
			else
			if(dma_struct->aes->keyrev == 2)
			{
				option |= (0xF0000 | (1 << 20));
			}

			//temp |= (((dma_struct->aes->option)&0xFFFF)<<16);
			option |= ((1 << 25) | (1 << 30));
			if(dma_struct->aes->InRev & 0x01)
			{
				option &= ~(1 << 25);
			}

			if(dma_struct->aes->InRev & 0x02)
			{
				option |= (0xF << 21);
			}

			if(dma_struct->aes->OutRev & 0x01)
			{
				option &= ~(1 << 30);
			}

			if(dma_struct->aes->OutRev & 0x02)
			{
				option |= (0xF << 26);
			}

			R_AES_REV |= option;
		}

		// Load the 128-bit key, then pulse the load-key strobe (0 -> 1)
		R_AES_KEY0 = dma_struct->aes->key[0];
		R_AES_KEY1 = dma_struct->aes->key[1];
		R_AES_KEY2 = dma_struct->aes->key[2];
		R_AES_KEY3 = dma_struct->aes->key[3];
		R_AES_LOAD_KEY = 0x00;
		R_AES_LOAD_KEY = 0x01;

		if(aes_load_key_wait(5))
		{
			DBG_PRINT("DMA fail AES load key timeout\r\n");
			return -1;
		}

		// mode > 0 means a chaining mode that needs an IV (mode 0 = no IV)
		option = 0;
		if(dma_struct->aes->mode > 0)
		{
			option = (dma_struct->aes->mode << 13);
			R_AES_IV_0 = dma_struct->aes->iv[0];
			R_AES_IV_1 = dma_struct->aes->iv[1];
			R_AES_IV_2 = dma_struct->aes->iv[2];
			R_AES_IV_3 = dma_struct->aes->iv[3];

			if(dma_struct->aes->ivrev == 0)
			{
				// IV inverse
				option |= (1 << 6);
			}
			else
			if(dma_struct->aes->ivrev == 1)
			{
				// IV inverse
				option &= (~(1 << 6));
			}
			else
			if(dma_struct->aes->ivrev == 2)
			{
				// IV inverse
				option |= ((1 << 6) | (1 << 5) | (1 << 3) | (1 << 2) | (1 << 1));
			}
		}

		if(dma_struct->aes->func)
		{
			// Encrypt
			option |= 0x1000;
		}
		else
		{
			// Discrypt
			option |= 0x1100;
		}
		R_AES_LOAD_KEY = option;
	}
	else
#endif
#if DES_3DES_ENABLE
		// des, 3des only support in dma4, could not enable together
		if(dma_struct->des)
		{
			if((dma_struct->des->enable & 0x3) != 0)
			{
				channel = 4;
				dma_struct->channel = channel;
				if(dma_set_channel(usage, channel) < 0)
				{
					// DMA channel is not available
					DBG_PRINT("DMA fail DES channel is not available\r\n");
					return -1;
				}

				// enable des engine
				R_DES_CTRL = (1 << 0);

				// Descrypt
				if(dma_struct->des->func == 0)
				{
					// enable Descrypt
					R_DES_CTRL |= (1 << 1);
				}

				// MSB bit 24~31 will be 0
				R_DES_KEY0_LSB = (INT32U) (dma_struct->des->key[0]);
				R_DES_KEY0_MSB = (INT32U) (dma_struct->des->key[0] >> 32);

				// 3des
				if((dma_struct->des->enable & (1 << 1)) != 0)
				{
					R_DES_KEY1_LSB = (INT32U) (dma_struct->des->key[1]);
					R_DES_KEY1_MSB = (INT32U) (dma_struct->des->key[1] >> 32);
					R_DES_KEY2_LSB = (INT32U) (dma_struct->des->key[2]);
					R_DES_KEY2_MSB = (INT32U) (dma_struct->des->key[2] >> 32);

					// enable 3des
					R_DES_CTRL |= (1 << 2);
				}

				des_mode = dma_struct->des->mode << 16;

				// Chaining modes (non-zero) need the 64-bit IV programmed
				if(des_mode > 0)
				{
					R_DES_IV_LSB = ((INT32U *) (&(dma_struct->des->iv)))[0];
					R_DES_IV_MSB = ((INT32U *) (&(dma_struct->des->iv)))[1];
				}

				option = des_mode;

				// In Byte Rev
				if(dma_struct->des->InRev & 0x01)
				{
					option |= (1 << 4);
				}

				// In Bit Rev
				if(dma_struct->des->InRev & 0x02)
				{
					option |= (1 << 5);
				}

				//Out Byte Rev
				if(dma_struct->des->OutRev & 0x01)
				{
					option |= (1 << 6);
				}

				//Out Bit Rev
				if(dma_struct->des->OutRev & 0x02)
				{
					option |= (1 << 7);
				}

				if(dma_struct->des->keyrev & 0x01)
				{
					option |= (1 << 8);
				}

				if(dma_struct->des->keyrev & 0x02)
				{
					option |= (1 << 9);
				}

				if(dma_struct->des->ivrev & 0x01)
				{
					option |= (1 << 10);
				}

				if(dma_struct->des->ivrev & 0x02)
				{
					option |= (1 << 11);
				}

				//R_DES_CTRL |= dma_struct->des->option;
				R_DES_CTRL |= option;
			}
			else
				R_DES_CTRL &= (~(1 << 0));
		}
		else
#endif
		{
			// Plain transfer: grab any free channel
			channel = dma_get_channel(usage);
			dma_struct->channel = channel;
			if(channel >= C_DMA_CHANNEL_NUM)
			{
				// No free DMA channel is available
				DBG_PRINT("DMA fail no free DMA channel\r\n");
				return -1;
			}
		}

#if 0
	// external interrupt trigger mode.
	if(dma_struct->trigger && dma_struct->trigger->enable)
	{
		// must use Demand Transfer mode.
		if((src_type == C_DMA_MEMORY) && (target_type == C_DMA_MEMORY))
		{
			return -1;
		}

		if(dma_struct->trigger->source > 2)
		{
			return -1;
		}

		ctrl |= (0x01 << 19);		//trigger enable
		ctrl &= ~(0x0F << 20);		//trigger source, exta, eatb, extc...
		ctrl |= ((INT32U) (dma_struct->trigger->source & 0x0F) << 20);
		if(dma_struct->trigger->edge)
		{
			ctrl |= (0x01 << 24);	//rising edge
		}
		else
		{
			ctrl &= ~(0x01 << 24);	//falling edge
		}
	}

#endif
	// Register the completion notification target (flag pointer and/or OS queue)
	dma_set_notify(channel, (INT32U) dma_struct->notify, os_q);

	// Set source address
	dma_set_source(channel, s_addr);

	// Set target address
	dma_set_target(channel, t_addr);

	// Set transmit counter
	if(dma_set_tx_count(channel, count))
	{
		DBG_PRINT("DMA fail dma_set_tx_count(%d,0x%X) fail\r\n", channel, count);
		dma_free_channel(channel);
		return -1;
	}

	if(dma_set_timeout(channel, dma_struct->timeout))
	{
		DBG_PRINT("DMA fail dma_set_timeout(%d,0x%X) fail\r\n", channel, dma_struct->timeout);
		dma_free_channel(channel);
		return -1;
	}

	if(dma_set_sprite_size(channel, 0))
	{
		DBG_PRINT("DMA fail dma_set_sprite_size(%d,0) fail\r\n", channel);
		dma_free_channel(channel);
		return -1;
	}

	// Prepare control register: direction, transfer mode, and the matching
	// cache maintenance for any memory side of the transfer.
	if(src_type != C_DMA_MEMORY)
	{	// IO to memory
		ctrl |= C_DMA_CTRL_DEMAND_TRANS | C_DMA_CTRL_IO2M | C_DMA_CTRL_SRC_FIX | C_DMA_CTRL_EXTERNAL;

		if(dma_set_device(channel, src_type))
		{
			DBG_PRINT("DMA fail src dma_set_device(%d,%d) fail\r\n", channel, src_type);
			dma_free_channel(channel);
			return -1;
		}

		#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
		// Invalid target memory from cache
		cache_invalid_range(t_addr, len);
		#endif
	}
	else
	if(target_type != C_DMA_MEMORY)
	{	// Memory to IO
		ctrl |= C_DMA_CTRL_DEMAND_TRANS | C_DMA_CTRL_M2IO | C_DMA_CTRL_DEST_FIX | C_DMA_CTRL_EXTERNAL;

		if(dma_set_device(channel, target_type))
		{
			DBG_PRINT("DMA fail target dma_set_device(%d,%d) fail\r\n", channel, target_type);
			dma_free_channel(channel);
			return -1;
		}

		#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
		// Drain source memory from cache
		cache_drain_range(s_addr, len);
		#endif
	}
	else
	{	// Memory to memory
		ctrl |= C_DMA_CTRL_SINGLE_TRANS | C_DMA_CTRL_SRC_INCREASE | C_DMA_CTRL_DEST_INCREASE | C_DMA_CTRL_SOFTWARE;

		#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
		// Drain source memory and invalid target memory from cache
		cache_drain_range(s_addr, len);
		cache_invalid_range(t_addr, len);
		#endif
	}

#if AES_ENABLE
	if(dma_struct->aes)
	{
		// AES engine path always uses 4-beat bursts
		ctrl |= C_DMA_CTRL_BURST4_ACCESS;
	}
	else
#endif
	{
		// Check whether burst mode can be used
		if(usage == C_DMA_OCCUPIED || src_type == C_DMA_IO_FIR_RX || src_type == C_DMA_IO_FIR_RX2 || src_type == C_DMA_IO_FIR_TX || target_type == C_DMA_IO_FIR_TX)
		{
			ctrl |= C_DMA_CTRL_SINGLE_ACCESS;
		}
		else
		if(!(count & 0x7))
		{
			ctrl |= C_DMA_CTRL_BURST8_ACCESS;
		}
		else
		if(!(count & 0x3))
		{
			ctrl |= C_DMA_CTRL_BURST4_ACCESS;
		}
		else
		{
			ctrl |= C_DMA_CTRL_SINGLE_ACCESS;
		}
	}

	// Install or clear the per-channel user completion callback
	if(dma_user_isr != NULL)
	{
		drv_l1_dma_callback_set(dma_struct->channel, dma_user_isr);
	}
	else
	{
		drv_l1_dma_callback_clear(dma_struct->channel);
	}

	#if DRV_l1_DAC_ON_OFF
	if(target_type == C_DMA_IO_DAC_CHA)
	{
		R_DAC_CHA_CTRL |= (1 << 13);		// Enable DAC output
		R_DAC_CHB_CTRL |= (1 << 13);		// Enable DAC output
		dma_to_dac_busy = 1;
	}
	#endif

	// Start DMA now
	dma_set_control(channel, ctrl);

	#if DRV_l1_DAC_ON_OFF
	if(target_type == C_DMA_IO_DAC_CHA)
	{
		R_DAC_CHA_CTRL |= (1 << 14);		// Enable DAC FIFO empty interrupt
	}
	#endif

	/*
	if(dma_struct->des && ((dma_struct->des->enable & 0x3) != 0))
	{
		DBG_PRINT("R_DMA4_CTRL[%08x]=%08x\r\n", &R_DMA4_CTRL, R_DMA4_CTRL);							// 0xD0300200
		DBG_PRINT("R_DMA4_SRC_ADDR[%08x]=%08x\r\n", &R_DMA4_SRC_ADDR, R_DMA4_SRC_ADDR);				// 0xD0300204
		DBG_PRINT("R_DMA4_TAR_ADDR[%08x]=%08x\r\n", &R_DMA4_TAR_ADDR, R_DMA4_TAR_ADDR);				// 0xD0300208
		DBG_PRINT("R_DMA4_TX_COUNT[%08x]=%08x\r\n", &R_DMA4_TX_COUNT, R_DMA4_TX_COUNT);				// 0xD030020C
		DBG_PRINT("R_DMA4_SPRITE_SIZE[%08x]=%08x\r\n", &R_DMA4_SPRITE_SIZE, R_DMA4_SPRITE_SIZE);	// 0xD0300210
		DBG_PRINT("R_DMA4_TRANSPARENT[%08x]=%08x\r\n", &R_DMA4_TRANSPARENT, R_DMA4_TRANSPARENT);	// 0xD0300214
		DBG_PRINT("R_DMA4_MISC[%08x]=%08x\r\n", &R_DMA4_MISC, R_DMA4_MISC);    // 0xD0300218
	}
	*/
	return 0;
}

/**
 * @brief   transfer data by dma
 * @param   dma_struct: dma struct
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   transfer data by dma
 * @param   dma_struct: dma struct
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer(DMA_STRUCT *dma_struct)
{
	// Reject a NULL descriptor before handing it to the core routine
	if(dma_struct == NULL)
	{
		DBG_PRINT("DMA fail invalid dma_struct=0x%X\r\n", dma_struct);
		return -1;
	}

	// No OS queue and no user callback for the plain transfer entry point
	return dma_transfer_extend(dma_struct, C_DMA_NORMAL_USED, (INT32U) NULL, NULL);
}

/**
 * @brief   Transfer data by DMA between memory and the SD controllers
 *          (SDC0/SDC1) or memory-to-memory.
 * @param   dma_struct: dma struct (addresses, count, width, timeout, notify)
 * @param   usage: C_DMA_OCCUPIED forces single access; otherwise burst mode
 *          is chosen from the transfer count
 * @return  result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_sdio(DMA_STRUCT *dma_struct, INT8U usage)
{
	INT32U s_addr, t_addr, count;
	INT32U channel, len, ctrl;
	INT16U src_type, target_type;

	// BUGFIX: validate the pointer BEFORE dereferencing it. The original
	// read s_addr/t_addr/count from dma_struct above this check, which is
	// a NULL-pointer dereference when dma_struct is NULL.
	if (!dma_struct) {
		return -1;
	}

	s_addr = dma_struct->s_addr;
	t_addr = dma_struct->t_addr;
	count = dma_struct->count;

	// Select access width; addresses must be aligned to the access width.
	if (dma_struct->width == DMA_DATA_WIDTH_4BYTE) {
		if ((s_addr&0x3) || (t_addr&0x3)) {
			return -1;						// Both source and target address must be 4-byte alignment
		}
		ctrl = C_DMA_CTRL_32BIT | C_DMA_CTRL_INT | C_DMA_CTRL_NORMAL_INT | C_DMA_CTRL_ENABLE;
		len = count << 2;
	} else if (dma_struct->width == DMA_DATA_WIDTH_2BYTE) {
		if ((s_addr&0x1) || (t_addr&0x1)) {
			return -1;						// Both source and target address must be 2-byte alignment
		}
		ctrl = C_DMA_CTRL_16BIT | C_DMA_CTRL_INT | C_DMA_CTRL_NORMAL_INT | C_DMA_CTRL_ENABLE;
		len = count << 1;
		DBG_PRINT("s_addr %x t_addr %x width %d byte len %d\r\n",s_addr,t_addr,dma_struct->width,len);
	} else if (dma_struct->width == DMA_DATA_WIDTH_1BYTE) {
		ctrl = C_DMA_CTRL_8BIT | C_DMA_CTRL_INT | C_DMA_CTRL_NORMAL_INT | C_DMA_CTRL_ENABLE;
		len = count ;
		DBG_PRINT("s_addr %x t_addr %x width %d byte len %d\r\n",s_addr,t_addr,dma_struct->width,len);
	} else {
		return -1;
	}

	// Classify the source: memory, or one of the SD controller RX registers
	if ((s_addr<C_DMA_IO_ADDR_START) || (s_addr>C_DMA_IO_ADDR_END)) {
		src_type = C_DMA_MEMORY;
	} else {
		if (s_addr == (INT32U) P_SDC0_DATA_RX) {
			src_type = C_DMA_IO_SDC0;
		} else if (s_addr == (INT32U) P_SDC1_DATA_RX) {
			src_type = C_DMA_IO_SDC1;
		} else {
			return -4;						// unknown IO
		}
	}

	// Classify the target: memory, or one of the SD controller TX registers
	if ((t_addr<C_DMA_IO_ADDR_START) || (t_addr>C_DMA_IO_ADDR_END)) {
		target_type = C_DMA_MEMORY;
	} else {
		if (src_type != C_DMA_MEMORY) {
			return -1;						// IO to IO is not supported
		}
		if (t_addr == (INT32U) P_SDC0_DATA_TX) {
			target_type = C_DMA_IO_SDC0;
		} else if (t_addr == (INT32U) P_SDC1_DATA_TX) {
			target_type = C_DMA_IO_SDC1;
		} else {
			return -5;						// unknown IO
		}
	}

	// Lazy driver initialization on first use
	if (!dma_init_done) {
		drv_l1_dma_init();
	}

	channel = dma_get_channel(C_DMA_NORMAL_USED);
	dma_struct->channel = channel;
	if (channel >= C_DMA_CHANNEL_NUM) {
		return -1;							// No free DMA channel is available
	}

	dma_set_notify(channel, (INT32U) dma_struct->notify, 0);

	// Program source/target address, count, timeout and sprite size in one call
	dma_setting(channel, s_addr, t_addr, count, dma_struct->timeout, 0);

	// Prepare control register: direction plus matching cache maintenance
	if (src_type != C_DMA_MEMORY) {			// IO to memory
		ctrl |= C_DMA_CTRL_DEMAND_TRANS | C_DMA_CTRL_IO2M | C_DMA_CTRL_SRC_FIX | C_DMA_CTRL_EXTERNAL;

		if (dma_set_device(channel, src_type)) {
			dma_free_channel(channel);
			return -1;
		}

	#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
		// Invalid target memory from cache
		cache_invalid_range(t_addr, len);
	#endif

	} else if (target_type != C_DMA_MEMORY) {	// Memory to IO
		ctrl |= C_DMA_CTRL_DEMAND_TRANS | C_DMA_CTRL_M2IO | C_DMA_CTRL_DEST_FIX | C_DMA_CTRL_EXTERNAL;

		if (dma_set_device(channel, target_type)) {
			dma_free_channel(channel);
			return -1;
		}

	#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
		// Drain source memory from cache
		cache_drain_range(s_addr, len);
	#endif

	} else {								// Memory to memory
		ctrl |= C_DMA_CTRL_SINGLE_TRANS | C_DMA_CTRL_SRC_INCREASE | C_DMA_CTRL_DEST_INCREASE | C_DMA_CTRL_SOFTWARE;

	#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
		// Drain source memory and invalid target memory from cache
		cache_drain_range(s_addr, len);
		cache_invalid_range(t_addr, len);
	#endif
	}

	// Check whether burst mode can be used
	if (usage == C_DMA_OCCUPIED) {
		ctrl |= C_DMA_CTRL_SINGLE_ACCESS;
	} else if (!(count & 0x7)) {
		ctrl |= C_DMA_CTRL_BURST8_ACCESS;
	} else if (!(count & 0x3)) {
		ctrl |= C_DMA_CTRL_BURST4_ACCESS;
	} else {
		ctrl |= C_DMA_CTRL_SINGLE_ACCESS;
	}

	// Start DMA now
	dma_set_control(channel, ctrl);
	return 0;
}
/**
 * @brief   dma transfer data and wait transfer finish
 * @param   dma_struct: dma struct
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma transfer data and wait transfer finish
 *          (blocking: starts the transfer through dma_transfer_extend, then
 *          waits on an OS queue when running under an OS, or spins on the
 *          notify flag when _OPERATING_SYSTEM == _OS_NONE)
 * @param   dma_struct: dma struct; if dma_struct->notify is NULL, a local
 *          stack flag is substituted for the duration of the call and
 *          restored to NULL before returning
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_wait_ready(DMA_STRUCT *dma_struct)
{
	volatile INT8S	notify;
#if _OPERATING_SYSTEM != _OS_NONE
	INT32U			q_index;
	INT32S			mask;
#if _OPERATING_SYSTEM == _OS_UCOS2
	INT32S			result;
	INT8U			error;
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osEvent			result;
#endif
	if(!dma_struct)
	{
		return -1;
	}

	// Ensure there is always a notify flag the ISR can write to
	if(!(dma_struct->notify))
	{
		dma_struct->notify = &notify;
	}

	*(dma_struct->notify) = C_DMA_STATUS_WAITING;
	// Borrow a driver queue so the DMA ISR can post the completion status
	q_index = dma_get_queue();
	if(q_index >= C_DMA_Q_NUM)
	{
		return -1;
	}

	if(dma_transfer_extend(dma_struct, C_DMA_NORMAL_USED, (INT32U) dma_driver_queue[q_index], NULL))
	{
		if(dma_struct->notify == &notify)
		{				// Restore dma_struct->notify
			dma_struct->notify = NULL;
		}

		dma_free_queue(q_index);
		return -1;
	}

	// Block until the ISR posts the transfer status to the queue
#if _OPERATING_SYSTEM == _OS_UCOS2
	result = (INT32S) OSQPend(dma_driver_queue[q_index], 0, &error);
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	result = osMessageGet(dma_driver_queue[q_index], osWaitForever);
#endif
	dma_free_queue(q_index);
#if _OPERATING_SYSTEM == _OS_UCOS2
	if(error == OS_NO_ERR)
#elif _OPERATING_SYSTEM == _OS_FREERTOS
		if(result.status == osEventMessage)
#endif
		{
			if(dma_struct->notify == &notify)
			{			// Restore dma_struct->notify
				dma_struct->notify = NULL;
			}

#if _OPERATING_SYSTEM == _OS_UCOS2
			if(result == C_DMA_STATUS_DONE)
			{
				return 0;
			}

#elif _OPERATING_SYSTEM == _OS_FREERTOS
			if(result.value.v == C_DMA_STATUS_DONE)
			{
				return 0;
			}

#endif
			return -1;	// DMA timeout
		}

	// If we don't receive response from DMA, we have to reset DMA controller and free the channel by ourselves
	mask = dma_device_protect();
	dma_free_channel((INT32U) dma_struct->channel);
	dma_device_unprotect(mask);
#else
	if(dma_transfer_extend(dma_struct, C_DMA_NORMAL_USED, NULL, NULL))
	{
		return -1;
	}

#endif
	// NOTE(review): under an OS this spin is only reached on the queue-error
	// path above; with _OS_NONE it is the primary wait mechanism.
	while(*((volatile INT8S *) dma_struct->notify) == C_DMA_STATUS_WAITING);
	if(*(dma_struct->notify) == C_DMA_STATUS_DONE)
	{
		if(dma_struct->notify == &notify)
		{				// Restore dma_struct->notify
			dma_struct->notify = NULL;
		}

		return 0;
	}

	if(dma_struct->notify == &notify)
	{					// Restore dma_struct->notify
		dma_struct->notify = NULL;
	}

	return -1;			// DMA timeout
}

#if _OPERATING_SYSTEM == _OS_UCOS2

/**
 * @brief   dma transfer data with queue
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma transfer data with queue
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_with_queue(DMA_STRUCT *dma_struct, OS_EVENT *os_q)
{
	// Both the descriptor and the notification queue are mandatory
	if(dma_struct == NULL || os_q == NULL)
	{
		return -1;
	}

	return dma_transfer_extend(dma_struct, C_DMA_NORMAL_USED, (INT32U) os_q, NULL);
}

/**
 * @brief   dma transfer data with callback
 * @param   dma_struct: dma struct
 * @param   *dma_user_isr: function pointer of dma user callback isr
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma transfer data with callback
 * @param   dma_struct: dma struct
 * @param   *dma_user_isr: function pointer of dma user callback isr
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_with_callback(DMA_STRUCT *dma_struct, void (*dma_user_isr) (INT8U, INT8S))
{
	// Only the descriptor is mandatory; the callback may be NULL
	if(dma_struct == NULL)
	{
		return -1;
	}

	return dma_transfer_extend(dma_struct, C_DMA_NORMAL_USED, (INT32U) NULL, dma_user_isr);
}

/**
 * @brief   dma transfer data with queue in double buffer mode.
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma transfer data with queue in double buffer mode.
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_with_double_buf(DMA_STRUCT *dma_struct, OS_EVENT *os_q)
{
	// Double-buffer mode occupies the channel until explicitly freed
	if(dma_struct == NULL)
	{
		return -1;
	}

	return dma_transfer_extend(dma_struct, C_DMA_OCCUPIED, (INT32U) os_q, NULL);
}

/**
 * @brief   dma transfer data with queue and callback in double buffer mode.
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @param   *dma_user_isr: function pointer of dma user callback isr
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma transfer data with queue and callback in double buffer mode.
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @param   *dma_user_isr: function pointer of dma user callback isr
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_double_buf_with_callback(DMA_STRUCT *dma_struct, OS_EVENT *os_q, void (*dma_user_isr) (INT8U, INT8S))
{
	// Double-buffer mode occupies the channel until explicitly freed
	if(dma_struct == NULL)
	{
		return -1;
	}

	return dma_transfer_extend(dma_struct, C_DMA_OCCUPIED, (INT32U) os_q, dma_user_isr);
}

#else

/**
 * @brief   dma transfer data in double buffer mode.
 * @param   dma_struct: dma struct
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma transfer data in double buffer mode (no-OS build).
 * @param   dma_struct: dma struct
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_with_double_buf(DMA_STRUCT *dma_struct)
{
	if(!dma_struct)
	{
		return -1;
	}

	// CONSISTENCY FIX: the os_q parameter is INT32U, so pass (INT32U) NULL
	// like every other call site; a bare NULL is a pointer constant and may
	// trigger an implicit pointer-to-integer conversion warning/error.
	return dma_transfer_extend(dma_struct, C_DMA_OCCUPIED, (INT32U) NULL, NULL);
}

#endif

/**
 * @brief   dma transfer next data in double buffer mode.
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma transfer next data in double buffer mode: reprogram source,
 *          target and count on the channel already occupied by a previous
 *          drv_l1_dma_transfer_with_double_buf() call.
 * @param   dma_struct: dma struct (channel must already be C_DMA_OCCUPIED)
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_double_buf_set(DMA_STRUCT *dma_struct)
{
	INT32S	mask;
	INT32U	s_addr;
	INT32U	t_addr;
	INT32U	count;
	INT32U	len;
	INT16U	src_type, target_type;

	// BUGFIX: validate the pointer BEFORE reading any member from it.
	// The original initialized s_addr/t_addr/count from dma_struct above
	// this check (NULL-pointer dereference when dma_struct is NULL).
	// NOTE(review): channel is compared against C_DMA_Q_NUM, not
	// C_DMA_CHANNEL_NUM — looks intentional in the original but verify.
	if(!dma_struct || dma_struct->channel >= C_DMA_Q_NUM)
	{
		return -1;
	}

	if(dma_usage[dma_struct->channel] != C_DMA_OCCUPIED)
	{
		return -1;
	}

	s_addr = dma_struct->s_addr;
	t_addr = dma_struct->t_addr;
	count = dma_struct->count;

#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
	// Byte length for cache maintenance, derived from the access width
	if(dma_struct->width == DMA_DATA_WIDTH_1BYTE)
		len = count;
	else
	if(dma_struct->width == DMA_DATA_WIDTH_2BYTE)
		len = count << 1;
	else
	if(dma_struct->width == DMA_DATA_WIDTH_4BYTE)
		len = count << 2;
	else
		return -1;
	if((s_addr < C_DMA_IO_ADDR_START) || (s_addr > C_DMA_IO_ADDR_END))
		src_type = C_DMA_MEMORY;
	else
		src_type = C_DMA_IO_I2S_RX;		// here, purpose is find out memort or IO, so just set any IO is ok
	if((t_addr < C_DMA_IO_ADDR_START) || (t_addr > C_DMA_IO_ADDR_END))
		target_type = C_DMA_MEMORY;
	else
		target_type = C_DMA_IO_I2S_TX;	// here, purpose is find out memort or IO, so just set any IO is ok
	if(src_type != C_DMA_MEMORY)		// IO to memory
		cache_invalid_range(t_addr, len);
	else
	if(target_type != C_DMA_MEMORY)
		cache_drain_range(s_addr, len);
	else
	{
		cache_drain_range(s_addr, len);
		cache_invalid_range(t_addr, len);
	}

#endif
	dma_set_source((INT32U) dma_struct->channel, dma_struct->s_addr);
	dma_set_target((INT32U) dma_struct->channel, dma_struct->t_addr);
	if(dma_set_tx_count((INT32U) dma_struct->channel, dma_struct->count))
	{
		// Reprogramming failed: tear down the occupied channel
		mask = dma_device_protect();

		#if DRV_l1_DAC_ON_OFF
		if(dma_struct->t_addr == (INT32U) P_DAC_CHA_DATA)
		{
			dma_to_dac_busy = 0;
			// Disable DAC output
			if(!(R_DAC_CHA_FIFO & 0xF))
			{
				R_DAC_CHA_CTRL &= ~(1 << 13);
			}

			if (!(R_DAC_CHB_FIFO & 0xF))
			{
				R_DAC_CHB_CTRL &= ~(1 << 13);
			}
		}
		#endif

		dma_free_channel((INT32U) dma_struct->channel);
		dma_device_unprotect(mask);

		return -1;
	}

	#if DRV_l1_DAC_ON_OFF
	if(dma_struct->t_addr == (INT32U) P_DAC_CHA_DATA)
	{
		R_DAC_CHA_CTRL |= (1 << 14);		// Enable DAC FIFO empty interrupt
	}
	#endif

	return 0;
}

/**
 * @brief   dma free channel in double buffer mode.
 * @param   dma_struct: dma struct
 * @param   os_q: notify queue
 * @return 	result: >=0 is success, <0 is fail.
 */
/**
 * @brief   dma free channel in double buffer mode.
 * @param   dma_struct: dma struct (channel must be in C_DMA_OCCUPIED state)
 * @return 	result: >=0 is success, <0 is fail.
 */
INT32S drv_l1_dma_transfer_double_buf_free(DMA_STRUCT *dma_struct)
{
	INT32S	mask;

	// Guard clauses: need a valid descriptor whose channel is occupied
	if(dma_struct == NULL)
	{
		return -1;
	}
	if(dma_usage[dma_struct->channel] != C_DMA_OCCUPIED)
	{
		return -1;
	}

	mask = dma_device_protect();

	#if DRV_l1_DAC_ON_OFF
	if(dma_struct->t_addr == (INT32U) P_DAC_CHA_DATA)
	{
		dma_to_dac_busy = 0;
		// Shut the DAC down only once its FIFOs have drained
		if(!(R_DAC_CHA_FIFO & 0xF))
		{
			R_DAC_CHA_CTRL &= ~(1 << 14);
			R_DAC_CHA_CTRL &= ~(1 << 13);
		}
		if(!(R_DAC_CHB_FIFO & 0xF))
		{
			R_DAC_CHB_CTRL &= ~(1 << 13);
		}
	}
	#endif

	dma_free_channel((INT32U) dma_struct->channel);
	dma_device_unprotect(mask);

	return 0;
}

/**
 * @brief   Check double buffer full flag.
 * @param   channel: channel number, C_DMA_CH0 ~ C_DMA_CH7
 * @return 	1: Full, 0: Not full
 */
/**
 * @brief   Check double buffer full flag.
 * @param   channel: channel number, C_DMA_CH0 ~ C_DMA_CH7
 * @return 	1: Full, 0: Not full
 */
INT32S drv_l1_dma_dbf_status_get(INT8U channel)
{
	INT32U	ctrl;

	// Read the control register of the requested channel;
	// an invalid channel reads as 0 (flag reported as not full).
	switch(channel)
	{
	case C_DMA_CH0:
		ctrl = R_DMA0_CTRL;
		break;
	case C_DMA_CH1:
		ctrl = R_DMA1_CTRL;
		break;
	case C_DMA_CH2:
		ctrl = R_DMA2_CTRL;
		break;
	case C_DMA_CH3:
		ctrl = R_DMA3_CTRL;
		break;
	case C_DMA_CH4:
		ctrl = R_DMA4_CTRL;
		break;
	case C_DMA_CH5:
		ctrl = R_DMA5_CTRL;
		break;
	case C_DMA_CH6:
		ctrl = R_DMA6_CTRL;
		break;
	case C_DMA_CH7:
		ctrl = R_DMA7_CTRL;
		break;
	default:
		ctrl = 0;
		break;
	}

	return (ctrl & C_DMA_CTRL_DBF) ? 1 : 0;
}

/**
 * @brief   Check DMA channel status
 * @param   channel: channel number, C_DMA_CH0 ~ C_DMA_CH7
 * @return 	1: Busy, 0: Idle
 */
/**
 * @brief   Check DMA channel status
 * @param   channel: channel number, C_DMA_CH0 ~ C_DMA_CH7
 * @return 	1: Busy, 0: Idle
 */
INT32S drv_l1_dma_status_get(INT8U channel)
{
	INT32U	ctrl;

	// Read the control register of the requested channel;
	// an invalid channel reads as 0 (reported as idle).
	switch(channel)
	{
	case C_DMA_CH0:
		ctrl = R_DMA0_CTRL;
		break;
	case C_DMA_CH1:
		ctrl = R_DMA1_CTRL;
		break;
	case C_DMA_CH2:
		ctrl = R_DMA2_CTRL;
		break;
	case C_DMA_CH3:
		ctrl = R_DMA3_CTRL;
		break;
	case C_DMA_CH4:
		ctrl = R_DMA4_CTRL;
		break;
	case C_DMA_CH5:
		ctrl = R_DMA5_CTRL;
		break;
	case C_DMA_CH6:
		ctrl = R_DMA6_CTRL;
		break;
	case C_DMA_CH7:
		ctrl = R_DMA7_CTRL;
		break;
	default:
		ctrl = 0;
		break;
	}

	return (ctrl & C_DMA_CTRL_BUSY) ? 1 : 0;
}

/**
 * @brief   Fill memory by DMA
 * @param   t_addr: memory address
 * @param   value: filled value
 * @param   byte_count: data length in byte
 * @return 	0: Success, -1: Fail
 */
/**
 * @brief   Fill memory by DMA
 * @param   t_addr: memory address
 * @param   value: filled value
 * @param   byte_count: data length in byte
 * @return 	0: Success, -1: Fail
 * @note    Small fills (<16 bytes, and <128 bytes after alignment) are done
 *          by the CPU directly; larger fills use a SRC_FIX 32-bit DMA from a
 *          stack word holding the replicated value.
 */
INT32S drv_l1_dma_memory_fill(INT32U t_addr, INT8U value, INT32U byte_count)
{
	INT8U			*p8;
	INT32U			*p32;
	INT32U			ctrl, channel;
	INT32U			src_value;
	INT32S			ret;
#if _OPERATING_SYSTEM != _OS_NONE
	INT32U			q_index;
	INT32S			mask;
#if _OPERATING_SYSTEM == _OS_UCOS2
	INT8U			error;
	INT32S			result;
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osEvent			result;
#endif
#endif
	volatile INT8S	notify;

	if(!dma_init_done)
	{
		drv_l1_dma_init();
	}

	// If length is less than 16 bytes, uses CPU to set the memory directly
	if(byte_count < 16)
	{
		p8 = (INT8U *) t_addr;
		while(byte_count--)
		{
			*p8++ = value;
		}

		return 0;
	}

	// Make sure start address is 4-byte alignment
	while(t_addr & 0x3)
	{
		*((INT8U *) t_addr++) = value;
		byte_count--;
	}

	// Make sure end address is on 4-byte boundry
	while(byte_count & 0x3)
	{
		*((INT8U *) t_addr + byte_count - 1) = value;
		byte_count--;
	}

	// Replicate the byte into a 32-bit word used as the fixed DMA source
	src_value = (value << 24) | (value << 16) | (value << 8) | value;

	// If left length is less than 128 bytes, uses CPU to set the memory directly
	if(byte_count < 128)
	{
		p32 = (INT32U *) t_addr;
		while(byte_count)
		{
			*p32++ = src_value;
			byte_count -= 4;
		}

		return 0;
	}

#if _OPERATING_SYSTEM != _OS_NONE
	// Create a queue to receive DMA result
	q_index = dma_get_queue();
	if(q_index >= C_DMA_Q_NUM)
	{
		return -1;
	}

#endif

	// Get a free DMA channel
	channel = dma_get_channel(C_DMA_NORMAL_USED);
	if(channel >= C_DMA_CHANNEL_NUM)
	{
#if _OPERATING_SYSTEM != _OS_NONE
		dma_free_queue(q_index);
#endif
		return -1;	// No free DMA channel is available
	}

	// Set transmit counter
	ret = dma_set_tx_count(channel, byte_count >> 2);

	// Disable timeout function
	if(dma_set_timeout(channel, 0))
	{
		ret = -1;
	}

	// Disable skip function
	if(dma_set_sprite_size(channel, 0))
	{
		ret = -1;
	}

	// Return if any error occurs
	if(ret)
	{
#if _OPERATING_SYSTEM != _OS_NONE
		dma_free_queue(q_index);
#endif
		dma_free_channel(channel);
		return -1;
	}

	// Set DMA status to waiting transmit
	notify = C_DMA_STATUS_WAITING;
#if _OPERATING_SYSTEM != _OS_NONE
	dma_set_notify(channel, (INT32U) & notify, (INT32U) dma_driver_queue[q_index]);
#else
	dma_set_notify(channel, (INT32U) & notify, (INT32U) NULL);
#endif

	// Set source address
	dma_set_source(channel, (INT32U) & src_value);

	// Set target address
	dma_set_target(channel, t_addr);

#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
	// Drain source memory and invalid target memory from cache
	cache_drain_range((INT32U) & src_value, 4);
	cache_invalid_range(t_addr, byte_count);
#endif

	// Prepare control register: common flags plus the widest burst mode
	// the word count allows (this folds the three duplicated assignments
	// of the original into one).
	ctrl = C_DMA_CTRL_SINGLE_TRANS |
		C_DMA_CTRL_32BIT |
		C_DMA_CTRL_M2M |
		C_DMA_CTRL_INT |
		C_DMA_CTRL_SRC_FIX |
		C_DMA_CTRL_DEST_INCREASE |
		C_DMA_CTRL_NORMAL_INT |
		C_DMA_CTRL_SOFTWARE |
		C_DMA_CTRL_ENABLE;
	if(!((byte_count >> 2) & 0x7))
	{
		ctrl |= C_DMA_CTRL_BURST8_ACCESS;
	}
	else
	if(!((byte_count >> 2) & 0x3))
	{
		ctrl |= C_DMA_CTRL_BURST4_ACCESS;
	}
	else
	{
		ctrl |= C_DMA_CTRL_SINGLE_ACCESS;
	}

	// Start DMA now
	dma_set_control(channel, ctrl);

#if _OPERATING_SYSTEM != _OS_NONE
#if _OPERATING_SYSTEM == _OS_UCOS2
	result = (INT32S) OSQPend(dma_driver_queue[q_index], 10 * OS_TICKS_PER_SEC, &error);
	if(error != OS_NO_ERR && !result)
	{
		dma_free_queue(q_index);
		// BUGFIX: the original leaked the DMA channel on this error path
		// (only the queue was released); free the channel as the normal
		// path below does.
		mask = dma_device_protect();
		dma_free_channel(channel);
		dma_device_unprotect(mask);
		return -1;
	}

#elif _OPERATING_SYSTEM == _OS_FREERTOS
	// Wait until DMA finish transmitting or timeout
	result = osMessageGet(dma_driver_queue[q_index], osWaitForever);
	if((result.status != osEventMessage) || (result.value.v != C_DMA_STATUS_DONE))
	{
		dma_free_queue(q_index);
		// BUGFIX: free the channel too (see UCOS2 path above)
		mask = dma_device_protect();
		dma_free_channel(channel);
		dma_device_unprotect(mask);
		return -1;
	}

#endif
	dma_free_queue(q_index);

	// If we don't receive response from DMA, we have to reset DMA controller and free the channel by ourselves
	mask = dma_device_protect();
	dma_free_channel(channel);
	dma_device_unprotect(mask);

	if(notify == C_DMA_STATUS_DONE)
	{
		return 0;
	}

	return -1;
#else
	while(notify == C_DMA_STATUS_WAITING);
	return 0;		// DMA timeout function is not enabled, DMA transfer must be ok to reach here
#endif
}

/**
 * @brief   Copy a section of data
 * @param   s_addr: Source address
 * @param   t_addr: Destination address
 * @param   byte_count: Number of bytes to copy
 * @param   s_width: Buffer width of source buffer
 * @param   t_width: Buffer width of destination buffer
 * @param   dir: DMA transfer direction
 * @param   mode: Access-mode bits merged into the DMA control register
 * @return 	0: Success, -1: Fail
 */
INT32S dma_buffer_copy_extend(INT32U s_addr, INT32U t_addr, INT32U byte_count, INT32U s_width, INT32U t_width, INT32U dir, INT32U mode)
{
	INT32U			ctrl, channel;
	INT32S			ret;
#if _OPERATING_SYSTEM != _OS_NONE
	INT32U			q_index;
	INT32S			mask;
#if _OPERATING_SYSTEM == _OS_UCOS2
	INT8U			error;
	INT32S			result;
#elif _OPERATING_SYSTEM == _OS_FREERTOS
	osEvent			result;
#endif
#endif
	// volatile: written asynchronously via the address registered with dma_set_notify()
	volatile INT8S	notify;

	// Lazily initialize the DMA driver on first use
	if(!dma_init_done)
	{
		drv_l1_dma_init();
	}

	// Make sure address and size are 2-byte alignment
	// (transfers run in 16-bit units; source width must not exceed target width)
	// NOTE(review): s_width == 0 passes this check but is used as a divisor in the
	// cache_invalid_range() size calculation below -- confirm callers never pass 0
	if((s_addr & 0x1) || (t_addr & 0x1) || (byte_count & 0x1) || (s_width & 0x1) || (t_width & 0x1) || (s_width > t_width))
	{
		return -1;
	}

	// Create a queue to receive DMA result
#if _OPERATING_SYSTEM != _OS_NONE
	q_index = dma_get_queue();
	if(q_index >= C_DMA_Q_NUM)
	{
		return -1;
	}

#endif

	// Get a free DMA channel
	channel = dma_get_channel(C_DMA_NORMAL_USED);
	if(channel >= C_DMA_CHANNEL_NUM)
	{
#if _OPERATING_SYSTEM != _OS_NONE
		dma_free_queue(q_index);
#endif
		return -1;	// No free DMA channel is available
	}

	// Set transmit counter (count is in 16-bit units, hence byte_count >> 1)
	ret = dma_set_tx_count(channel, byte_count >> 1);

	// Disable timeout function
	if(dma_set_timeout(channel, 0))
	{
		ret = -1;
	}

	dma_set_direction(channel, dir);

	// Set destination buffer width (1 means 2 bytes)
	// NOTE(review): unlike the other per-channel setters this call takes no
	// channel argument -- verify against the dma_set_line_length() API
	if(dma_set_line_length(t_width >> 1))
	{
		ret = -1;
	}

	// Set source buffer width (1 means 2 bytes)
	if(dma_set_sprite_size(channel, s_width >> 1))
	{
		ret = -1;
	}

	// Return if any error occurs; release the queue and channel acquired above
	if(ret)
	{
#if _OPERATING_SYSTEM != _OS_NONE
		dma_free_queue(q_index);
#endif
		dma_free_channel(channel);

		return -1;
	}

	// Set DMA status to waiting transmit; the completion handler updates 'notify'
	// (and, under an RTOS, posts to the queue) through the addresses registered here
	notify = C_DMA_STATUS_WAITING;
#if _OPERATING_SYSTEM != _OS_NONE
	dma_set_notify(channel, (INT32U) & notify, (INT32U) dma_driver_queue[q_index]);
#else
	dma_set_notify(channel, (INT32U) & notify, (INT32U) NULL);
#endif

	// Set source address
	dma_set_source(channel, s_addr);

	// Set target address
	dma_set_target(channel, t_addr);

#if (defined _DRV_L1_CACHE) && (_DRV_L1_CACHE == 1)
	// Drain source memory and invalid target memory from cache
	// (target footprint scales by the destination/source width ratio in sprite mode)
	cache_drain_range(s_addr, byte_count);
	cache_invalid_range(t_addr, (byte_count / s_width) * t_width);
#endif

	// Prepare control register, burst is not supported in sprite copy mode
	ctrl = mode |
		C_DMA_CTRL_SINGLE_TRANS |
		C_DMA_CTRL_16BIT |
		C_DMA_CTRL_M2M |
		C_DMA_CTRL_INT |
		C_DMA_CTRL_SRC_INCREASE |
		C_DMA_CTRL_DEST_INCREASE |
		C_DMA_CTRL_NORMAL_INT |
		C_DMA_CTRL_SOFTWARE |
		C_DMA_CTRL_ENABLE;

	// Start DMA now
	dma_set_control(channel, ctrl);

	// Wait until DMA finish transmitting or timeout
#if _OPERATING_SYSTEM != _OS_NONE
#if _OPERATING_SYSTEM == _OS_UCOS2
	// Block up to 5 seconds for the completion message from the DMA ISR
	// NOTE(review): error paths below free the queue but not the DMA channel --
	// presumably left allocated because the hardware may still be transferring; confirm
	result = (INT32S) OSQPend(dma_driver_queue[q_index], 5 * OS_TICKS_PER_SEC, &error);
	if(error != OS_NO_ERR && !result)
	{
		dma_free_queue(q_index);
		return -1;
	}

#elif _OPERATING_SYSTEM == _OS_FREERTOS
	result = osMessageGet(dma_driver_queue[q_index], osWaitForever);
	if((result.status != osEventMessage) || (result.value.v != C_DMA_STATUS_DONE))
	{
		dma_free_queue(q_index);
		return -1;
	}

#endif
	dma_free_queue(q_index);

	// If we don't receive response from DMA, we have to reset DMA controller and free the channel by ourselves
	mask = dma_device_protect();
	dma_free_channel(channel);
	dma_device_unprotect(mask);

	if(notify == C_DMA_STATUS_DONE)
	{
		return 0;
	}

	return -1;
#else
	// No OS: busy-wait until the DMA completion handler changes 'notify'
	while(notify == C_DMA_STATUS_WAITING);
	return 0;		// DMA timeout function is not enabled, DMA transfer must be ok to reach here
#endif
}

/**
 * @brief   Copy a section of data
 * @param   s_addr: Source address
 * @param   t_addr: Destination address
 * @param   byte_count: Number of bytes to copy
 * @param   s_width: Buffer width of source buffer
 * @param   t_width: Buffer width of destination buffer
 			All parameters must be multiples of 2, and
 			s_width (source buffer width) must be <= t_width (target buffer width)
 * @return 	0: Success, -1: Fail
 */
INT32S drv_l1_dma_buffer_copy(INT32U s_addr, INT32U t_addr, INT32U byte_count, INT32U s_width, INT32U t_width)
{
	// Thin wrapper: delegate to the extended copy routine with the default
	// sprite-to-frame-buffer direction and single-access control mode.
	INT32S status;

	status = dma_buffer_copy_extend(s_addr, t_addr, byte_count, s_width, t_width,
		C_DMA_MISC_SPRITE_TO_FB, C_DMA_CTRL_SINGLE_ACCESS);

	return status;
}

/**
 * @brief   Copy memory data through DMA channel 0 (blocking)
 * @param   dest: Destination address
 * @param   src: Source address
 * @param   len: Number of bytes to copy (must be even; transfer unit is 16-bit)
 * @return 	STATUS_OK: Success, C_DMA0_TIMEOUT: DMA transfer timed out
 */
INT32S dma_transfer_data(INT32U dest, INT32U src, INT32U len)
{
#if 1
	// Program DMA channel 0 directly (bypasses the channel allocator) and
	// busy-wait for completion. len is transferred in 16-bit units, so an odd
	// trailing byte is silently dropped by len >> 1.
	R_DMA0_CTRL = C_DMA_CTRL_RESET; /* reset dma */
	R_DMA_INT = R_DMA_INT;			/* clear interrupt flag (write-back clears) */

	R_DMA0_MISC &= ~(C_DMA_MISC_TIMEOUT_MASK);			/* set time-out value */
	R_DMA0_MISC |= 0xFF << C_DMA_MISC_TIMEOUT_SHIFT;	/* 1 sec */

	/* set DMA 0 for transfering data */
	R_DMA0_SRC_ADDR = (INT32U) src;
	R_DMA0_TAR_ADDR = (INT32U) dest;
	R_DMA0_TX_COUNT = len >> 1;
	R_DMA0_CTRL = C_DMA_CTRL_SINGLE_ACCESS |
		C_DMA_CTRL_SINGLE_TRANS |
		C_DMA_CTRL_SRC_INCREASE |
		C_DMA_CTRL_DEST_INCREASE |
		C_DMA_CTRL_SOFTWARE |
		C_DMA_CTRL_16BIT |
		C_DMA_CTRL_INT |
		C_DMA_CTRL_NORMAL_INT |
		C_DMA_CTRL_ENABLE;

	/* busy-wait until the channel raises its interrupt pending flag */
	while((R_DMA_INT & C_DMA0_INT_PEND) == 0);
	if(R_DMA_INT & C_DMA0_TIMEOUT)
	{
		/* BUGFIX: acknowledge the pending/timeout flags before bailing out so
		 * the DMA0 interrupt is not left asserted after a failed transfer
		 * (the success path below already clears C_DMA0_INT_PEND) */
		R_DMA_INT = R_DMA_INT;
		return C_DMA0_TIMEOUT;
	}

	/* acknowledge the completion interrupt */
	R_DMA_INT = C_DMA0_INT_PEND;

	return STATUS_OK;
#else
	memcpy((void *) dest, (void *) src, len);
	return STATUS_OK;
#endif
}

#endif
