/***************************************************
 * sym_hash_interface_async.c
 *
 * Created on: Mar 20, 2020
 * Author: jilong.guo@china-core.com
 ***************************************************/
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/string.h>
#include <linux/spinlock.h>
#include "./INCLUDE/compate.h"
#include "./INCLUDE/pci_csec.h"
#include "./INCLUDE/desc.h"
#include "./INCLUDE/desc_constr.h"
#include "./INCLUDE/sym_perf.h"

/*
 * sym_callback_handler - dequeue callback for one symmetric request.
 *
 * Records the hardware completion status, wakes the submitting thread,
 * then waits for the submitter to copy the output buffers out (it signals
 * this by setting result.recovery to 1) before unmapping and freeing every
 * resource attached to the request, including the descriptor and sym_res
 * itself.
 *
 * @csec_priv: card private data owning the DMA mappings
 * @desc_va:   descriptor virtual address (kfree'd here)
 * @desc_pa:   descriptor bus address (unmapped here)
 * @status:    hardware completion status, 0 on success
 * @arg:       struct sym_resource_recovery * for this request
 */
void sym_callback_handler(struct csec_priv_t *csec_priv,uint32_t *desc_va,dma_addr_t desc_pa,uint32_t status, void *arg)
{
	struct sym_resource_recovery *sym_res = arg;

	sym_res->result.rst = status;

	/* Wake the submitter first; it copies the output data out. */
	complete(&sym_res->result.op_done);

	/* To check if the output data is moved out. Just for test. */
	while (atomic_read(&sym_res->result.recovery) != 1)
		cpu_relax();	/* yield while spinning instead of a hard busy loop */

	if (sym_res->result.sg_virt) {
		dma_unmap_single(csec_priv->dev, sym_res->result.sg_phy, SGMAX*16, DMA_TO_DEVICE);
		kfree(sym_res->result.sg_virt);
	}

	dma_unmap_single(csec_priv->dev, sym_res->cipher.key_addr, sym_res->cipher.key_len, DMA_BIDIRECTIONAL);
	dma_unmap_single(csec_priv->dev, sym_res->cipher.iv_addr, sym_res->cipher.iv_len, DMA_BIDIRECTIONAL);
	dma_unmap_single(csec_priv->dev, sym_res->cipher.data_addr, sym_res->cipher.data_len, DMA_BIDIRECTIONAL);

	kfree(sym_res->key_in);
	kfree(sym_res->iv_in);
	kfree(sym_res->data_in);

	/* Snoop modes carry a second (class-2) cipher context. */
	if (sym_res->algs_type == ACLASS_SNOOP_SP) {
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.key_addr, sym_res->cipher_class2.key_len, DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.iv_addr, sym_res->cipher_class2.iv_len, DMA_BIDIRECTIONAL);
		kfree(sym_res->key2_in);
		kfree(sym_res->iv2_in);
	}
	if (sym_res->algs_type == ACLASS_SNOOP) {
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.key_addr, sym_res->cipher_class2.key_len, DMA_BIDIRECTIONAL);
		/* ICV buffer mapped with a fixed 80-byte length by the submitter. */
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.data_addr, 80, DMA_BIDIRECTIONAL);
		kfree(sym_res->key2_in);
		kfree(sym_res->iv2_in);
	}

	/* %p, not %08x: desc_va is a pointer and may be wider than 32 bits. */
	csec_debug(KERN_INFO "descriptor virtual address: %p\n", desc_va);
	dma_unmap_single(csec_priv->dev, desc_pa, MAX_CSEC_DESCSIZE, DMA_TO_DEVICE);
	kfree(desc_va);

#ifdef ASYN_MODE_BENCHMARK
	complete(&(sym_res->mparm->c));
#endif

	kfree(sym_res);
	if (status != 0)
		csec_error(KERN_ERR "%s: dequeue status error!\n", __func__);

	csec_debug(KERN_INFO "%s done!\n", __func__);
}

int  new_interface_do_asyn_recovery(struct csec_priv_t *csec_priv,struct cipher_core *cipher,unsigned char cmd_nr,struct sym_resource_recovery *sym_res)
{
	dma_addr_t desc_phy_addr;
	dma_addr_t sg_phy_addr;
	uint32_t *desc;
	int status;
	void *sg_virt;

	desc = kmalloc(MAX_CSEC_DESCSIZE,GFP_KERNEL|SYS_DMA);
	if(!desc )
	{
		csec_error(KERN_ERR "cdev_csec_do: desc kzalloc error\n");
		return -ENOMEM;
	}

	if(cmd_nr == ACLASS_CIPHER)
	{		
		if(cipher->data_len + 128 <SIZE4KI)
		{
			 inline_cnstr_jobdesc_cipher_core(desc,cipher);
		}
		else
		{
			inline_cnstr_jobdesc_cipher_sg_pre(csec_priv,desc,cipher,&sg_virt,&sg_phy_addr);
			cipher->data_addr = sg_phy_addr;	
			inline_cnstr_jobdesc_cipher_sg_core(csec_priv,desc,cipher);
			sym_res->result.sg_virt = sg_virt;
			sym_res->result.sg_phy = sg_phy_addr;
		}
	}
	else if(cmd_nr == ACLASS_HASH)
	{
		inline_cnstr_jobdesc_hash_core(desc,cipher);
	}
	else if(cmd_nr == ACLASS_SNOOP)
	{

		 inline_cnstr_jobdesc_snoop_core(desc,cipher);
	}
	else if(cmd_nr == ACLASS_SNOOP_SP)
	{

		 inline_cnstr_jobdesc_snoop_sp(desc,cipher);
	}

	desc_phy_addr = dma_map_single(csec_priv->dev,(void *)desc,MAX_CSEC_DESCSIZE, DMA_TO_DEVICE);	

	csec_debug(KERN_INFO "desc fill ok\n");

	sym_res->algs_type = cmd_nr;
	if(cmd_nr == ACLASS_HASH || cmd_nr == ACLASS_CIPHER) {
		memcpy(&sym_res->cipher, cipher, sizeof(struct cipher_core));
	}else if (cmd_nr == ACLASS_SNOOP || cmd_nr == ACLASS_SNOOP_SP) {
		memcpy(&sym_res->cipher, cipher, sizeof(struct cipher_core));
		memcpy(&sym_res->cipher_class2, (struct cipher_core *)&cipher[1], sizeof(struct cipher_core));
	}

	do
	{
		//status = cards_enqueue(csec_priv,  desc, desc_phy_addr,(void *)sym_callback_handler,(void *)sym_res);
		status = cards_enqueue(csec_priv,  desc, desc_phy_addr, sym_res->callback, (void *)sym_res);
		if(status)
		{
			if(CDEV_INVL)
			{
				//wait_event_timeout(csec_priv->ccore_cards->dq_done,0,CDEV_INVL);
				set_current_state(TASK_INTERRUPTIBLE);
				schedule_timeout(CDEV_INVL);
				csec_debug2(KERN_INFO "ce0\n");
			}
		}
	}while(status==-EBUSY);
		
	return 0;
}

/*
 * sm1_cbc_encrypt_newapi_asyn_recovery - SM1-CBC encrypt @inlen bytes via
 * the card, using the asynchronous resource-recovery path.
 *
 * @in:    plaintext input, @inlen bytes
 * @out:   ciphertext output, @inlen bytes
 * @inlen: data length in bytes
 * @key:   32-byte SM1 key
 * @iv:    16-byte IV, updated in place with the final IV
 *
 * On success the bounce buffers, DMA mappings and sym_res are released by
 * sym_callback_handler once result.recovery is set here.
 * Returns 0 on success, -ENOMEM / -EAGAIN / -1 on failure.
 */
int sm1_cbc_encrypt_newapi_asyn_recovery(u8 *in,u8 *out,int inlen,u8 *key,u8 *iv)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher;
	struct sym_resource_recovery *sym_res;
	u8 *key_in,*key_in_raw,*iv_in,*iv_in_raw,*data_in,*data_in_raw;
	int ret;
	dma_addr_t data_ori;

	cipher.key_len = 32;
	cipher.data_len = inlen;
	cipher.iv_len = 16;
	cipher.alg = SM1;
	cipher.type = CBC;
	cipher.as = INITFINAL;
	cipher.opt = ENC;

	/* DATA_MARGIN-aligned DMA bounce buffers; *_raw keeps the kfree pointer. */
	key_in_raw =(u8 *) kmalloc(cipher.key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!key_in_raw)
		return -ENOMEM;
	key_in =(u8 *) (((size_t)key_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv_in_raw =(u8 *) kmalloc(cipher.iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!iv_in_raw)
		goto err_free_key;
	iv_in = (u8 *) (((size_t)iv_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher.data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!data_in_raw)
		goto err_free_iv;
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res)
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		goto err_free_data;
	}

	csec_priv = cards_enqueue_pre(ccore_cards);

	memcpy(key_in,key,cipher.key_len);
	memcpy(iv_in,iv,cipher.iv_len);
	memcpy(data_in,in,cipher.data_len);

	cipher.key_addr = dma_map_single(csec_priv->dev,(void *)key_in,cipher.key_len, DMA_BIDIRECTIONAL);
	cipher.iv_addr = dma_map_single(csec_priv->dev,(void *)iv_in,cipher.iv_len, DMA_BIDIRECTIONAL);
	cipher.data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher.data_len, DMA_BIDIRECTIONAL);
	/* data_addr may be rewritten by the SG path; keep the original mapping. */
	data_ori = cipher.data_addr;

	sym_res->key_in = key_in_raw;
	sym_res->iv_in = iv_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,&cipher,ACLASS_CIPHER,sym_res);

	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher.key_addr,cipher.key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,data_ori,cipher.data_len,DMA_BIDIRECTIONAL);
		kfree(sym_res);	/* no callback will run; free it here too */
		goto err_free_data;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		/* NOTE(review): the callback may still fire later and free sym_res
		 * and the buffers, so nothing can safely be freed here. */
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}

	/* Poll until the device has visibly written the ciphertext back. */
	do{
		dma_sync_single_for_cpu(csec_priv->dev,data_ori,cipher.data_len,DMA_BIDIRECTIONAL);
		dma_sync_single_for_cpu(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp(data_in,in,cipher.iv_len);
	}while(!ret);

	memcpy(out,data_in,cipher.data_len);
	memcpy(iv,iv_in,cipher.iv_len);

	/* Output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;

err_free_data:
	kfree(data_in_raw);
err_free_iv:
	kfree(iv_in_raw);
err_free_key:
	kfree(key_in_raw);
	return (ret == -ENOMEM || 1) ? -ENOMEM : -ENOMEM;
}

/*
 * sm1_cbc_decrypt_newapi_asyn_recovery - SM1-CBC decrypt @inlen bytes via
 * the card, using the asynchronous resource-recovery path.
 *
 * @in:    ciphertext input, @inlen bytes
 * @out:   plaintext output, @inlen bytes
 * @inlen: data length in bytes
 * @key:   32-byte SM1 key
 * @iv:    16-byte IV, updated in place with the final IV
 *
 * On success the bounce buffers, DMA mappings and sym_res are released by
 * sym_callback_handler once result.recovery is set here.
 * Returns 0 on success, -ENOMEM / -EAGAIN / -1 on failure.
 */
int sm1_cbc_decrypt_newapi_asyn_recovery(u8 *in,u8 *out,int inlen,u8 *key,u8 *iv)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher;
	struct sym_resource_recovery *sym_res;
	u8 *key_in,*key_in_raw,*iv_in,*iv_in_raw,*data_in,*data_in_raw;
	int ret;
	dma_addr_t data_ori;

	cipher.key_len = 32;
	cipher.data_len = inlen;
	cipher.iv_len = 16;
	cipher.alg = SM1;
	cipher.type = CBC;
	cipher.as = INITFINAL;
	cipher.opt = DEC;

	/* DATA_MARGIN-aligned DMA bounce buffers; *_raw keeps the kfree pointer. */
	key_in_raw =(u8 *) kmalloc(cipher.key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!key_in_raw)
		return -ENOMEM;
	key_in =(u8 *) (((size_t)key_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv_in_raw =(u8 *) kmalloc(cipher.iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!iv_in_raw)
		goto err_free_key;
	iv_in = (u8 *) (((size_t)iv_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher.data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!data_in_raw)
		goto err_free_iv;
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res)
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		goto err_free_data;
	}

	csec_priv = cards_enqueue_pre(ccore_cards);

	memcpy(key_in,key,cipher.key_len);
	memcpy(iv_in,iv,cipher.iv_len);
	memcpy(data_in,in,cipher.data_len);

	cipher.key_addr = dma_map_single(csec_priv->dev,(void *)key_in,cipher.key_len, DMA_BIDIRECTIONAL);
	cipher.iv_addr = dma_map_single(csec_priv->dev,(void *)iv_in,cipher.iv_len, DMA_BIDIRECTIONAL);
	cipher.data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher.data_len, DMA_BIDIRECTIONAL);
	/* data_addr may be rewritten by the SG path; keep the original mapping. */
	data_ori = cipher.data_addr;

	sym_res->key_in = key_in_raw;
	sym_res->iv_in = iv_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,&cipher,ACLASS_CIPHER,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher.key_addr,cipher.key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,data_ori,cipher.data_len,DMA_BIDIRECTIONAL);
		kfree(sym_res);	/* no callback will run; free it here too */
		goto err_free_data;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		/* NOTE(review): the callback may still fire later and free sym_res
		 * and the buffers, so nothing can safely be freed here. */
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}

	/* Poll until the device has visibly written the plaintext back. */
	do{
		dma_sync_single_for_cpu(csec_priv->dev,data_ori,cipher.data_len,DMA_BIDIRECTIONAL);
		dma_sync_single_for_cpu(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp(data_in,in,cipher.iv_len);
	}while(!ret);

	memcpy(out,data_in,cipher.data_len);
	memcpy(iv,iv_in,cipher.iv_len);

	/* Output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;

err_free_data:
	kfree(data_in_raw);
err_free_iv:
	kfree(iv_in_raw);
err_free_key:
	kfree(key_in_raw);
	return -ENOMEM;
}
/*
 * sm3_hmac_newapi_asyn_recovery - compute HMAC-SM3 over @input via the card.
 *
 * @key:    HMAC key, @keylen bytes
 * @keylen: key length in bytes
 * @input:  message, @inlen bytes
 * @inlen:  message length in bytes
 * @output: 32-byte digest output (delivered through the IV buffer)
 *
 * On success the bounce buffers, DMA mappings and sym_res are released by
 * sym_callback_handler once result.recovery is set here.
 * Returns 0 on success, -ENOMEM / -EAGAIN / -1 on failure.
 */
int sm3_hmac_newapi_asyn_recovery(u8 *key,int keylen,u8 *input,int inlen,u8 *output)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher;
	struct sym_resource_recovery *sym_res;
	u8 *key_in,*key_in_raw,*iv_in,*iv_in_raw,*data_in,*data_in_raw;
	int ret;

	cipher.key_len = keylen;
	cipher.data_len = inlen;
	cipher.iv_len = 32;	/* SM3 digest size; hardware writes the result here */
	cipher.alg = SM3;
	cipher.type = T_HMAC;
	cipher.as = INITFINAL;
	cipher.opt = ENC;

	/* DATA_MARGIN-aligned DMA bounce buffers; *_raw keeps the kfree pointer. */
	key_in_raw =(u8 *) kmalloc(cipher.key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!key_in_raw)
		return -ENOMEM;
	key_in =(u8 *) (((size_t)key_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv_in_raw =(u8 *) kmalloc(cipher.iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!iv_in_raw)
		goto err_free_key;
	iv_in = (u8 *) (((size_t)iv_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher.data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!data_in_raw)
		goto err_free_iv;
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res)
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		goto err_free_data;
	}

	csec_priv = cards_enqueue_pre(ccore_cards);

	memcpy(key_in,key,cipher.key_len);
	memcpy(data_in,input,cipher.data_len);
	/* Zeroed so the poll loop below can detect the digest being written. */
	memset(iv_in,0,cipher.iv_len);

	cipher.key_addr = dma_map_single(csec_priv->dev,(void *)key_in,cipher.key_len, DMA_BIDIRECTIONAL);
	cipher.iv_addr = dma_map_single(csec_priv->dev,(void *)iv_in,cipher.iv_len, DMA_BIDIRECTIONAL);
	cipher.data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher.data_len, DMA_BIDIRECTIONAL);

	sym_res->key_in = key_in_raw;
	sym_res->iv_in = iv_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,&cipher,ACLASS_HASH,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher.key_addr,cipher.key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher.data_addr,cipher.data_len,DMA_BIDIRECTIONAL);
		kfree(sym_res);	/* no callback will run; free it here too */
		goto err_free_data;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		/* NOTE(review): the callback may still fire later and free sym_res
		 * and the buffers, so nothing can safely be freed here. */
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}

	/* Poll until the digest buffer is no longer all-zero. */
	do{
		dma_sync_single_for_cpu(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp_with0(iv_in,cipher.iv_len);
	}while(!ret);

	memcpy(output,iv_in,cipher.iv_len);

	/* Output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;

err_free_data:
	kfree(data_in_raw);
err_free_iv:
	kfree(iv_in_raw);
err_free_key:
	kfree(key_in_raw);
	return -ENOMEM;
}

/*
 * sym_callback_handler_key_in_ram - dequeue callback for requests whose key
 * lives in on-card RAM.
 *
 * Identical to sym_callback_handler() except that the key was never mapped
 * or allocated by the host, so key_addr is not unmapped and key_in is not
 * freed.
 *
 * @csec_priv: card private data owning the DMA mappings
 * @desc_va:   descriptor virtual address (kfree'd here)
 * @desc_pa:   descriptor bus address (unmapped here)
 * @status:    hardware completion status, 0 on success
 * @arg:       struct sym_resource_recovery * for this request
 */
void sym_callback_handler_key_in_ram(struct csec_priv_t *csec_priv,uint32_t *desc_va,
				dma_addr_t desc_pa,uint32_t status, void *arg)
{
	struct sym_resource_recovery *sym_res = arg;

	sym_res->result.rst = status;

	/* Wake the submitter first; it copies the output data out. */
	complete(&sym_res->result.op_done);

	/* To check if the output data is moved out. Just for test. */
	while (atomic_read(&sym_res->result.recovery) != 1)
		cpu_relax();	/* yield while spinning instead of a hard busy loop */

	if (sym_res->result.sg_virt) {
		dma_unmap_single(csec_priv->dev, sym_res->result.sg_phy, SGMAX*16, DMA_TO_DEVICE);
		kfree(sym_res->result.sg_virt);
	}

	/* Key is in card RAM: no host-side key mapping or buffer to release. */
	dma_unmap_single(csec_priv->dev, sym_res->cipher.iv_addr, sym_res->cipher.iv_len, DMA_BIDIRECTIONAL);
	dma_unmap_single(csec_priv->dev, sym_res->cipher.data_addr, sym_res->cipher.data_len, DMA_BIDIRECTIONAL);

	kfree(sym_res->iv_in);
	kfree(sym_res->data_in);

	/* Snoop modes carry a second (class-2) cipher context. */
	if (sym_res->algs_type == ACLASS_SNOOP_SP) {
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.key_addr, sym_res->cipher_class2.key_len, DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.iv_addr, sym_res->cipher_class2.iv_len, DMA_BIDIRECTIONAL);
		kfree(sym_res->key2_in);
		kfree(sym_res->iv2_in);
	}
	if (sym_res->algs_type == ACLASS_SNOOP) {
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.key_addr, sym_res->cipher_class2.key_len, DMA_BIDIRECTIONAL);
		/* ICV buffer mapped with a fixed 80-byte length by the submitter. */
		dma_unmap_single(csec_priv->dev, sym_res->cipher_class2.data_addr, 80, DMA_BIDIRECTIONAL);
		kfree(sym_res->key2_in);
		kfree(sym_res->iv2_in);
	}

	/* %p, not %08x: desc_va is a pointer and may be wider than 32 bits. */
	csec_debug(KERN_INFO "descriptor virtual address: %p\n", desc_va);
	dma_unmap_single(csec_priv->dev, desc_pa, MAX_CSEC_DESCSIZE, DMA_TO_DEVICE);
	kfree(desc_va);

#ifdef ASYN_MODE_BENCHMARK
	complete(&(sym_res->mparm->c));
#endif

	kfree(sym_res);
	if (status != 0)
		csec_error(KERN_ERR "%s: dequeue status error!\n", __func__);

	csec_debug(KERN_INFO "%s done!\n", __func__);
}


/*
 * sm4_ecb_encrypt_newapi_asyn_recovery_key_in_ram - SM4-ECB encrypt with the
 * key already resident in on-card RAM at @key_addr.
 *
 * @in:       plaintext input, @inlen bytes
 * @out:      ciphertext output, @inlen bytes
 * @inlen:    data length in bytes
 * @key_addr: card-RAM address of the 16-byte key (CARD_IHADDR is OR'd in)
 * @iv:       16-byte IV buffer (unused by ECB hardware path but copied back)
 *
 * Uses sym_callback_handler_key_in_ram, which does not unmap/free the key.
 * Returns 0 on success, -ENOMEM / -EAGAIN / -1 on failure.
 */
int sm4_ecb_encrypt_newapi_asyn_recovery_key_in_ram(u8 *in,u8 *out,int inlen,dma_addr_t key_addr,u8 *iv)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher;
	struct sym_resource_recovery *sym_res;
	u8 *iv_in,*iv_in_raw,*data_in,*data_in_raw;
	int ret;
	dma_addr_t data_ori;

	/* Tag the address as an internal-RAM (card) address. */
	key_addr = (key_addr|(CARD_IHADDR<<32));

	cipher.key_len = 16;
	cipher.data_len = inlen;
	cipher.iv_len = 16;
	cipher.alg = SM4;
	cipher.type = ECB;
	cipher.as = INITFINAL;
	cipher.opt = ENC;

	/* DATA_MARGIN-aligned DMA bounce buffers; *_raw keeps the kfree pointer. */
	iv_in_raw =(u8 *) kmalloc(cipher.iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!iv_in_raw)
		return -ENOMEM;
	iv_in = (u8 *) (((size_t)iv_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher.data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!data_in_raw)
		goto err_free_iv;
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res)
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		goto err_free_data;
	}

	csec_priv = cards_enqueue_pre(ccore_cards);

	memcpy(iv_in,iv,cipher.iv_len);
	memcpy(data_in,in,cipher.data_len);

	cipher.key_addr = key_addr;
	cipher.iv_addr = dma_map_single(csec_priv->dev,(void *)iv_in,cipher.iv_len, DMA_BIDIRECTIONAL);
	cipher.data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher.data_len, DMA_BIDIRECTIONAL);

	/* data_addr may be rewritten by the SG path; keep the original mapping. */
	data_ori = cipher.data_addr;
	/* Stash the card address for reference only; the key_in_ram callback
	 * never dereferences or frees key_in. */
	sym_res->key_in = (u8 *)(uintptr_t)key_addr;

	sym_res->iv_in = iv_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler_key_in_ram;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,&cipher,ACLASS_CIPHER,sym_res);

	if(ret == -ENOMEM) {
		/* key stays mapped in card RAM; only iv/data were host-mapped */
		dma_unmap_single(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,data_ori,cipher.data_len,DMA_BIDIRECTIONAL);
		kfree(sym_res);	/* no callback will run; free it here too */
		goto err_free_data;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		/* NOTE(review): the callback may still fire later and free sym_res
		 * and the buffers, so nothing can safely be freed here. */
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}

	/* Poll until the device has visibly written the ciphertext back. */
	do{
		dma_sync_single_for_cpu(csec_priv->dev,data_ori,cipher.data_len,DMA_BIDIRECTIONAL);
		dma_sync_single_for_cpu(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp(data_in,in,cipher.iv_len);
	}while(!ret);

	memcpy(out,data_in,cipher.data_len);
	memcpy(iv,iv_in,cipher.iv_len);

	/* Output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;

err_free_data:
	kfree(data_in_raw);
err_free_iv:
	kfree(iv_in_raw);
	return -ENOMEM;
}


int sym_newapi_speed_do_asyn_recovery(void *_mparm)
{
	struct parm *mparm = _mparm;
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher;
	struct sym_resource_recovery *sym_res;
	u8 *key_in,*key_in_raw,*iv_in,*iv_in_raw,*data_in,*data_in_raw;
	u8 data_tmp[4];
	int ret;
	dma_addr_t data_ori;
	
	cipher.key_len = mkeysize;
	cipher.data_len = mdatasize;
	cipher.iv_len = mivsize;
	cipher.alg = malg;
	cipher.type = mtype;
	cipher.as = INITFINAL;
	cipher.opt = mopt;
	key_in_raw =(u8 *) kmalloc(cipher.key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key_in =(u8 *) (((size_t)key_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv_in_raw =(u8 *) kmalloc(cipher.iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv_in = (u8 *) (((size_t)iv_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher.data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));
	
	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res )
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		return -ENOMEM;
	}
	
	csec_priv = cards_enqueue_pre(ccore_cards);

	get_random_bytes(key_in,mkeysize);
	get_random_bytes(iv_in,mivsize);
	get_random_bytes(data_in,mdatasize);

	cipher.key_addr = dma_map_single(csec_priv->dev,(void *)key_in,cipher.key_len, DMA_BIDIRECTIONAL);
	cipher.iv_addr = dma_map_single(csec_priv->dev,(void *)iv_in,cipher.iv_len, DMA_BIDIRECTIONAL);
	cipher.data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher.data_len, DMA_BIDIRECTIONAL);
	data_ori = cipher.data_addr;

	sym_res->mparm = mparm;
	sym_res->key_in = key_in_raw;
	sym_res->iv_in = iv_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	
	/* Set 'recovery' as 1 to bypass the checking in callback.*/
	atomic_set(&sym_res->result.recovery, 1);
	init_completion(&sym_res->result.op_done);

	memcpy(data_tmp,data_in+mdatasize-4,4);
	//printk(KERN_ERR "1 mparm is %llx\n",mparm);
	wait_for_completion_interruptible(&(mparm->w));
	
	/* Start to record time.*/
	getnstimeofday(&sym_res->tv);

	ret = new_interface_do_asyn_recovery(csec_priv,&cipher,ACLASS_CIPHER,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev, cipher.key_addr, cipher.key_len, DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev, cipher.iv_addr, cipher.iv_len, DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev, cipher.data_addr, cipher.data_len, DMA_BIDIRECTIONAL);
		kfree(key_in_raw);
		kfree(iv_in_raw);
		kfree(data_in_raw);

		kfree(sym_res);
		return ret;
	}
#if 0
	do{
		dma_sync_single_for_cpu(csec_priv->dev,data_ori,cipher.data_len,DMA_BIDIRECTIONAL);
		dma_sync_single_for_cpu(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp(data_tmp,data_in+mdatasize-4,4);
	}while(!ret);
#endif
	//printk(KERN_ERR "3 mparm is %llx\n",mparm);
#ifndef ASYN_MODE_BENCHMARK
	complete(&(mparm->c));
#endif

	//printk(KERN_ERR "4 mparm is %llx\n",mparm);

	return 0;
}
/*
 * hash_newapi_speed_do_asyn_recovery - benchmark worker: submit one hash
 * request with random data, synchronized with the harness via @_mparm.
 *
 * Parameters (mkeysize/mdatasize/mivsize/malg/mtype/mopt) are module-level
 * benchmark globals.  recovery is pre-set to 1 so sym_callback_handler can
 * clean everything up immediately; this thread never reads the digest.
 *
 * @_mparm: struct parm * benchmark synchronization block
 * Returns 0 on success or a negative errno.
 */
int hash_newapi_speed_do_asyn_recovery(void *_mparm)
{
	struct parm *mparm = _mparm;
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher;
	struct sym_resource_recovery *sym_res;
	u8 *key_in,*key_in_raw,*iv_in,*iv_in_raw,*data_in,*data_in_raw;
	u8 data_tmp[128];
	int ret;

	cipher.key_len = mkeysize;
	cipher.data_len = mdatasize;
	cipher.iv_len = mivsize;
	cipher.alg = malg;
	cipher.type = mtype;
	cipher.as = INITFINAL;
	cipher.opt = mopt;

	/* DATA_MARGIN-aligned DMA bounce buffers; *_raw keeps the kfree pointer. */
	key_in_raw =(u8 *) kmalloc(cipher.key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!key_in_raw)
		return -ENOMEM;
	key_in =(u8 *) (((size_t)key_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv_in_raw =(u8 *) kmalloc(cipher.iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!iv_in_raw)
		goto err_free_key;
	iv_in = (u8 *) (((size_t)iv_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher.data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	if(!data_in_raw)
		goto err_free_iv;
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res)
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		goto err_free_data;
	}

	csec_priv = cards_enqueue_pre(ccore_cards);

	get_random_bytes(key_in,mkeysize);
	get_random_bytes(iv_in,mivsize);
	get_random_bytes(data_in,mdatasize);

	cipher.key_addr = dma_map_single(csec_priv->dev,(void *)key_in,cipher.key_len, DMA_BIDIRECTIONAL);
	cipher.iv_addr = dma_map_single(csec_priv->dev,(void *)iv_in,cipher.iv_len, DMA_BIDIRECTIONAL);
	cipher.data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher.data_len, DMA_BIDIRECTIONAL);

	sym_res->mparm = mparm;
	sym_res->key_in = key_in_raw;
	sym_res->iv_in = iv_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;

	/* Set 'recovery' as 1 to bypass the checking in callback.*/
	atomic_set(&sym_res->result.recovery, 1);
	init_completion(&sym_res->result.op_done);

	/* Snapshot of the IV tail, kept for the (disabled) readback check. */
	memcpy(data_tmp,iv_in+mivsize-4,4);
	/* Block until the harness releases all workers simultaneously. */
	wait_for_completion_interruptible(&(mparm->w));

	ret = new_interface_do_asyn_recovery(csec_priv,&cipher,ACLASS_HASH,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher.key_addr,cipher.key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher.data_addr,cipher.data_len,DMA_BIDIRECTIONAL);
		kfree(sym_res);
		goto err_free_data;
	}
#if 0
	do{
		dma_sync_single_for_cpu(csec_priv->dev,cipher.iv_addr,cipher.iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp(data_tmp,iv_in+mivsize-4,4);
	}while(!ret);
#endif
#ifndef ASYN_MODE_BENCHMARK
	complete(&(mparm->c));
#endif

	return 0;

err_free_data:
	kfree(data_in_raw);
err_free_iv:
	kfree(iv_in_raw);
err_free_key:
	kfree(key_in_raw);
	return -ENOMEM;
}
int snoop_newapi_speed_do_asyn(void *_mparm)
{
	struct parm *mparm = _mparm;
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher[2],*cipher_class1,*cipher_class2;
	struct sym_resource_recovery *sym_res;
	u8 *key1_in,*key1_in_raw,*key2_in,*key2_in_raw,*iv_in,*iv_in_raw,*data_in,*data_in_raw,*icv,*icv_raw;
	u8 data_tmp[4];

	int ret;

	cipher_class1 = cipher;
	cipher_class2 = &(cipher[1]);
	
	cipher_class1->key_len = mkeysize;
	cipher_class1->data_len = mdatasize;
	cipher_class1->iv_len = mivsize;
	cipher_class1->alg = malg;
	cipher_class1->type = mtype;
	cipher_class1->as = INITFINAL;
	cipher_class1->opt = mopt;

	cipher_class2->key_len = mkeysize2;
	cipher_class2->alg = malg2;
	cipher_class2->type = mtype2;
	cipher_class2->as = INITFINAL;
	cipher_class2->opt = mopt2;
	
	key1_in_raw =(u8 *) kmalloc(cipher_class1->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key1_in =(u8 *) (((size_t)key1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	key2_in_raw =(u8 *) kmalloc(cipher_class2->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key2_in =(u8 *) (((size_t)key2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv_in_raw =(u8 *) kmalloc(cipher_class1->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv_in = (u8 *) (((size_t)iv_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher_class1->data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	icv_raw =(u8 *) kmalloc(80+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	icv = (u8 *) (((size_t)icv_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));
	
	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res )
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		return -ENOMEM;
	}
	
	csec_priv = cards_enqueue_pre(ccore_cards);

	memset(iv_in,0,cipher_class1->iv_len);
	memset(icv,0,80);
	memcpy(data_tmp,data_in+mdatasize-4,4);

	cipher_class1->key_addr = dma_map_single(csec_priv->dev,(void *)key1_in,cipher_class1->key_len, DMA_BIDIRECTIONAL);
	cipher_class2->key_addr = dma_map_single(csec_priv->dev,(void *)key2_in,cipher_class2->key_len, DMA_BIDIRECTIONAL);
	cipher_class1->iv_addr = dma_map_single(csec_priv->dev,(void *)iv_in,cipher_class1->iv_len, DMA_BIDIRECTIONAL);
	cipher_class1->data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher_class1->data_len, DMA_BIDIRECTIONAL);
	cipher_class2->data_addr = dma_map_single(csec_priv->dev,(void *)icv,80, DMA_BIDIRECTIONAL);

	sym_res->mparm = mparm;
	sym_res->key_in = key1_in_raw;
	sym_res->key2_in = key2_in_raw;
	sym_res->iv_in = iv_in_raw;
	sym_res->iv2_in = icv_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	/* Set 'recovery' as 1 to bypass the checking in callback.*/
	atomic_set(&sym_res->result.recovery, 1);
	init_completion(&sym_res->result.op_done);

	wait_for_completion(&(mparm->w));
	
	ret = new_interface_do_asyn_recovery(csec_priv,cipher,ACLASS_SNOOP,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher_class1->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class1->iv_addr,cipher_class1->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class1->data_addr,cipher_class1->data_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->data_addr,80,DMA_BIDIRECTIONAL);
		kfree(key1_in_raw);
		kfree(key2_in_raw);
		kfree(iv_in_raw);
		kfree(data_in_raw);
		kfree(icv_raw);

		kfree(sym_res);
		return ret;
	}
	//printk(KERN_ERR "3 mparm is %llx\n",mparm);
#ifndef ASYN_MODE_BENCHMARK
	complete(&(mparm->c));
#endif
	return 0;	
}

int snoop_sp_sm4sm3_enc_asyn(void)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher[2],*cipher_class1,*cipher_class2;
	struct sym_resource_recovery *sym_res;
	u8 *key1_in,*key1_in_raw,*key2_in,*key2_in_raw,*iv1_in,*iv1_in_raw,*iv2_in,*iv2_in_raw,*data_in,*data_in_raw;
	u8 data_tmp[4];

	int ret;

	int cdatel = 0x100;
	int hdatel = 0x105;

	cipher_class1 = cipher;
	cipher_class2 = &(cipher[1]);
	
	cipher_class1->key_len = 16;
	cipher_class1->data_len = cdatel;
	cipher_class1->iv_len = 16;
	cipher_class1->alg = SM4;
	cipher_class1->type = CBC;
	cipher_class1->as = INITFINAL;
	cipher_class1->opt = ENC;

	cipher_class2->key_len = 32;
	cipher_class2->data_len = hdatel;
	cipher_class2->iv_len = 32;
	cipher_class2->alg = H_SM3;
	cipher_class2->type = T_HMAC;
	cipher_class2->as = INITFINAL;
	cipher_class2->opt = ENC;
	
	key1_in_raw =(u8 *) kmalloc(cipher_class1->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key1_in =(u8 *) (((size_t)key1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	key2_in_raw =(u8 *) kmalloc(cipher_class2->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key2_in =(u8 *) (((size_t)key2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv1_in_raw =(u8 *) kmalloc(cipher_class1->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv1_in = (u8 *) (((size_t)iv1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv2_in_raw =(u8 *) kmalloc(cipher_class2->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv2_in = (u8 *) (((size_t)iv2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher_class2->data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res )
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		return -ENOMEM;
	}
	
	csec_priv = cards_enqueue_pre(ccore_cards);

	memset(iv2_in,0,cipher_class2->iv_len);

	memset(key1_in,0x5a,cipher_class1->key_len);
	memset(key2_in,0x5a,cipher_class2->key_len);
	memset(iv1_in,0x5a,cipher_class1->iv_len);
	memset(data_in,0x5a,cipher_class2->data_len);
	memcpy(data_tmp,data_in+hdatel-4,4);

	cipher_class1->key_addr = dma_map_single(csec_priv->dev,(void *)key1_in,cipher_class1->key_len, DMA_BIDIRECTIONAL);
	cipher_class2->key_addr = dma_map_single(csec_priv->dev,(void *)key2_in,cipher_class2->key_len, DMA_BIDIRECTIONAL);
	cipher_class1->iv_addr = dma_map_single(csec_priv->dev,(void *)iv1_in,cipher_class1->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->iv_addr = dma_map_single(csec_priv->dev,(void *)iv2_in,cipher_class2->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher_class2->data_len, DMA_BIDIRECTIONAL);
	cipher_class1->data_addr = cipher_class2->data_addr+hdatel-cdatel;

	sym_res->key_in = key1_in_raw;
	sym_res->key2_in = key2_in_raw;
	sym_res->iv_in = iv1_in_raw;
	sym_res->iv2_in = iv2_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,cipher,ACLASS_SNOOP_SP,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher_class1->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class1->iv_addr,cipher_class1->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->data_addr,cipher_class2->data_len,DMA_BIDIRECTIONAL);

		kfree(key1_in_raw);
		kfree(key2_in_raw);
		kfree(iv1_in_raw);
		kfree(iv2_in_raw);
		kfree(data_in_raw);

		kfree(sym_res);
		return ret;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}

	do{
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp_with0(iv2_in,4);
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class1->data_addr,cipher_class1->data_len,DMA_BIDIRECTIONAL);
		ret = (ret && memcmp(data_tmp,data_in+hdatel-4,4));
	}while(!ret);

	if(memcmp(snoop_sm4sm3enc_sm4_ctrl_data, data_in, cipher_class2->data_len))
		printk("snoop sm4 encrypt test failed!\n");	
	else
		printk("snoop sm4 encrypt test successfully!\n");	

	if(memcmp(snoop_sm4sm3enc_sm3_ctrl_data, iv2_in, cipher_class2->iv_len))
		printk("snoop sm3 encrypt test failed!\n");	
	else
		printk("snoop sm3 encrypt test successfully!\n");	
	
	/* Once output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;
}

int snoop_sp_sm4sm3_dec_asyn(void)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher[2],*cipher_class1,*cipher_class2;
	struct sym_resource_recovery *sym_res;
	u8 *key1_in,*key1_in_raw,*key2_in,*key2_in_raw,*iv1_in,*iv1_in_raw,*iv2_in,*iv2_in_raw,*data_in,*data_in_raw;
	u8 data_tmp[4];

	int ret;

	int cdatel = 0x100;
	int hdatel = 0x105;

	cipher_class1 = cipher;
	cipher_class2 = &(cipher[1]);
	
	cipher_class1->key_len = 16;
	cipher_class1->data_len = cdatel;
	cipher_class1->iv_len = 16;
	cipher_class1->alg = SM4;
	cipher_class1->type = CBC;
	cipher_class1->as = INITFINAL;
	cipher_class1->opt = DEC;

	cipher_class2->key_len = 32;
	cipher_class2->data_len = hdatel;
	cipher_class2->iv_len = 32;
	cipher_class2->alg = H_SM3;
	cipher_class2->type = T_HMAC;
	cipher_class2->as = INITFINAL;
	cipher_class2->opt = DEC;
	
	key1_in_raw =(u8 *) kmalloc(cipher_class1->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key1_in =(u8 *) (((size_t)key1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	key2_in_raw =(u8 *) kmalloc(cipher_class2->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key2_in =(u8 *) (((size_t)key2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv1_in_raw =(u8 *) kmalloc(cipher_class1->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv1_in = (u8 *) (((size_t)iv1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv2_in_raw =(u8 *) kmalloc(cipher_class2->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv2_in = (u8 *) (((size_t)iv2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher_class2->data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));
	
	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res )
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		return -ENOMEM;
	}
	
	csec_priv = cards_enqueue_pre(ccore_cards);

	memset(iv2_in,0,cipher_class2->iv_len);

	memset(key1_in,0x5a,cipher_class1->key_len);
	memset(key2_in,0x5a,cipher_class2->key_len);
	memset(iv1_in,0x5a,cipher_class1->iv_len);
	memset(data_in,0x5a,cipher_class2->data_len);
	memcpy(data_tmp,data_in+hdatel-4,4);

	cipher_class1->key_addr = dma_map_single(csec_priv->dev,(void *)key1_in,cipher_class1->key_len, DMA_BIDIRECTIONAL);
	cipher_class2->key_addr = dma_map_single(csec_priv->dev,(void *)key2_in,cipher_class2->key_len, DMA_BIDIRECTIONAL);
	cipher_class1->iv_addr = dma_map_single(csec_priv->dev,(void *)iv1_in,cipher_class1->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->iv_addr = dma_map_single(csec_priv->dev,(void *)iv2_in,cipher_class2->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher_class2->data_len, DMA_BIDIRECTIONAL);
	cipher_class1->data_addr = cipher_class2->data_addr+hdatel-cdatel;

	sym_res->key_in = key1_in_raw;
	sym_res->key2_in = key2_in_raw;
	sym_res->iv_in = iv1_in_raw;
	sym_res->iv2_in = iv2_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,cipher,ACLASS_SNOOP_SP,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher_class1->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class1->iv_addr,cipher_class1->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->data_addr,cipher_class2->data_len,DMA_BIDIRECTIONAL);

		kfree(key1_in_raw);
		kfree(key2_in_raw);
		kfree(iv1_in_raw);
		kfree(iv2_in_raw);
		kfree(data_in_raw);

		kfree(sym_res);
		return ret;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}


	do{
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp_with0(iv2_in,4);
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class1->data_addr,cipher_class1->data_len,DMA_BIDIRECTIONAL);
		ret = (ret && memcmp(data_tmp,data_in+hdatel-4,4));
	}while(!ret);

	if(memcmp(snoop_sm4sm3dec_sm4_ctrl_data, data_in, cipher_class2->data_len))
		printk("snoop sm4 decrypt test failed!\n");	
	else
		printk("snoop sm4 decrypt test successfully!\n");	

	if(memcmp(snoop_sm3dec_ctrl_data, iv2_in, cipher_class2->iv_len))
		printk("snoop sm3 decrypt test failed!\n");	
	else
		printk("snoop sm3 decrypt test successfully!\n");	
	
	/* Once output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;

}

int snoop_sp_newapi_speed_do_asyn(void *_mparm)
{
	struct parm *mparm = _mparm;
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher[2],*cipher_class1,*cipher_class2;
	struct sym_resource_recovery *sym_res;
	u8 *key1_in,*key1_in_raw,*key2_in,*key2_in_raw,*iv1_in,*iv1_in_raw,*iv2_in,*iv2_in_raw,*data_in,*data_in_raw;
	u8 data_tmp[4];

	int ret;

	int headlen = 16;

	cipher_class1 = cipher;
	cipher_class2 = &(cipher[1]);
	
	cipher_class1->key_len = mkeysize;
	cipher_class1->data_len = mdatasize;
	cipher_class1->iv_len = mivsize;
	cipher_class1->alg = malg;
	cipher_class1->type = mtype;
	cipher_class1->as = INITFINAL;
	cipher_class1->opt = mopt;

	cipher_class2->key_len = mkeysize2;
	cipher_class2->data_len = mdatasize+headlen;
	cipher_class2->iv_len = mivsize2;
	cipher_class2->alg = malg2;
	cipher_class2->type = mtype2;
	cipher_class2->as = INITFINAL;
	cipher_class2->opt = mopt2;
	
	key1_in_raw =(u8 *) kmalloc(cipher_class1->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key1_in =(u8 *) (((size_t)key1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	key2_in_raw =(u8 *) kmalloc(cipher_class2->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key2_in =(u8 *) (((size_t)key2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv1_in_raw =(u8 *) kmalloc(cipher_class1->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv1_in = (u8 *) (((size_t)iv1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv2_in_raw =(u8 *) kmalloc(cipher_class2->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv2_in = (u8 *) (((size_t)iv2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher_class2->data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));
	
	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res )
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		return -ENOMEM;
	}
	
	csec_priv = cards_enqueue_pre(ccore_cards);

	memset(iv2_in,0,cipher_class2->iv_len);
	memcpy(data_tmp,data_in+cipher_class2->data_len-4,4);

	cipher_class1->key_addr = dma_map_single(csec_priv->dev,(void *)key1_in,cipher_class1->key_len, DMA_BIDIRECTIONAL);
	cipher_class2->key_addr = dma_map_single(csec_priv->dev,(void *)key2_in,cipher_class2->key_len, DMA_BIDIRECTIONAL);
	cipher_class1->iv_addr = dma_map_single(csec_priv->dev,(void *)iv1_in,cipher_class1->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->iv_addr = dma_map_single(csec_priv->dev,(void *)iv2_in,cipher_class2->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher_class2->data_len, DMA_BIDIRECTIONAL);
	cipher_class1->data_addr = cipher_class2->data_addr + headlen;

	sym_res->mparm = mparm;
	sym_res->key_in = key1_in_raw;
	sym_res->key2_in = key2_in_raw;
	sym_res->iv_in = iv1_in_raw;
	sym_res->iv2_in = iv2_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	/* Set 'recovery' as 1 to bypass the checking in callback.*/
	atomic_set(&sym_res->result.recovery, 1);
	init_completion(&sym_res->result.op_done);

	wait_for_completion(&(mparm->w));
	

	ret = new_interface_do_asyn_recovery(csec_priv,cipher,ACLASS_SNOOP_SP,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher_class1->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class1->iv_addr,cipher_class1->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->data_addr,cipher_class2->data_len,DMA_BIDIRECTIONAL);
		
		kfree(key1_in_raw);
		kfree(key2_in_raw);
		kfree(iv1_in_raw);
		kfree(iv2_in_raw);
		kfree(data_in_raw);

		kfree(sym_res);
		return ret;
	}
		
	//printk(KERN_ERR "3 mparm is %llx\n",mparm);
#ifndef ASYN_MODE_BENCHMARK
	complete(&(mparm->c));
#endif
	return 0;
}

int snoop_sp_sm1sm3_enc_asyn(void)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher[2],*cipher_class1,*cipher_class2;
	struct sym_resource_recovery *sym_res;
	u8 *key1_in,*key1_in_raw,*key2_in,*key2_in_raw,*iv1_in,*iv1_in_raw,*iv2_in,*iv2_in_raw,*data_in,*data_in_raw;
	u8 data_tmp[4];

	int ret;

	int cdatel = 0x100;
	int hdatel = 0x105;

	cipher_class1 = cipher;
	cipher_class2 = &(cipher[1]);
	
	cipher_class1->key_len = 32;
	cipher_class1->data_len = cdatel;
	cipher_class1->iv_len = 16;
	cipher_class1->alg = SM1;
	cipher_class1->type = CBC;
	cipher_class1->as = INITFINAL;
	cipher_class1->opt = ENC;

	cipher_class2->key_len = 32;
	cipher_class2->data_len = hdatel;
	cipher_class2->iv_len = 32;
	cipher_class2->alg = H_SM3;
	cipher_class2->type = T_HMAC;
	cipher_class2->as = INITFINAL;
	cipher_class2->opt = ENC;
	
	key1_in_raw =(u8 *) kmalloc(cipher_class1->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key1_in =(u8 *) (((size_t)key1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	key2_in_raw =(u8 *) kmalloc(cipher_class2->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key2_in =(u8 *) (((size_t)key2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv1_in_raw =(u8 *) kmalloc(cipher_class1->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv1_in = (u8 *) (((size_t)iv1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv2_in_raw =(u8 *) kmalloc(cipher_class2->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv2_in = (u8 *) (((size_t)iv2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher_class2->data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res )
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		return -ENOMEM;
	}
	
	csec_priv = cards_enqueue_pre(ccore_cards);

	memset(iv2_in,0,cipher_class2->iv_len);

	memset(key1_in,0x5a,cipher_class1->key_len);
	memset(key2_in,0x5a,cipher_class2->key_len);
	memset(iv1_in,0x5a,cipher_class1->iv_len);
	memset(data_in,0x5a,cipher_class2->data_len);
	memcpy(data_tmp,data_in+hdatel-4,4);

	cipher_class1->key_addr = dma_map_single(csec_priv->dev,(void *)key1_in,cipher_class1->key_len, DMA_BIDIRECTIONAL);
	cipher_class2->key_addr = dma_map_single(csec_priv->dev,(void *)key2_in,cipher_class2->key_len, DMA_BIDIRECTIONAL);
	cipher_class1->iv_addr = dma_map_single(csec_priv->dev,(void *)iv1_in,cipher_class1->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->iv_addr = dma_map_single(csec_priv->dev,(void *)iv2_in,cipher_class2->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher_class2->data_len, DMA_BIDIRECTIONAL);
	cipher_class1->data_addr = cipher_class2->data_addr+hdatel-cdatel;

	sym_res->key_in = key1_in_raw;
	sym_res->key2_in = key2_in_raw;
	sym_res->iv_in = iv1_in_raw;
	sym_res->iv2_in = iv2_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,cipher,ACLASS_SNOOP_SP,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher_class1->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class1->iv_addr,cipher_class1->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->data_addr,cipher_class2->data_len,DMA_BIDIRECTIONAL);

		kfree(key1_in_raw);
		kfree(key2_in_raw);
		kfree(iv1_in_raw);
		kfree(iv2_in_raw);
		kfree(data_in_raw);

		kfree(sym_res);
		return ret;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}


	do{
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp_with0(iv2_in,4);
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class1->data_addr,cipher_class1->data_len,DMA_BIDIRECTIONAL);
		ret = (ret && memcmp(data_tmp,data_in+hdatel-4,4));
	}while(!ret);

	if(memcmp(snoop_sm1sm3enc_sm1_ctrl_data, data_in, cipher_class2->data_len))
		printk("snoop sm1 encrypt test failed!\n");	
	else
		printk("snoop sm1 encrypt test successfully!\n");	

	if(memcmp(snoop_sm1sm3enc_sm3_ctrl_data, iv2_in, cipher_class2->iv_len))
		printk("snoop sm3 encrypt test failed!\n");	
	else
		printk("snoop sm3 encrypt test successfully!\n");	
	
	/* Once output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;
}



int snoop_sp_sm1sm3_dec_asyn(void)
{
	struct csec_priv_t *csec_priv;
	struct ccore_cards_t *ccore_cards = get_ccore_cards();
	struct cipher_core  cipher[2],*cipher_class1,*cipher_class2;
	struct sym_resource_recovery *sym_res;
	u8 *key1_in,*key1_in_raw,*key2_in,*key2_in_raw,*iv1_in,*iv1_in_raw,*iv2_in,*iv2_in_raw,*data_in,*data_in_raw;
	u8 data_tmp[4];

	int ret;

	int cdatel = 0x100;
	int hdatel = 0x105;

	cipher_class1 = cipher;
	cipher_class2 = &(cipher[1]);
	
	cipher_class1->key_len = 32;
	cipher_class1->data_len = cdatel;
	cipher_class1->iv_len = 16;
	cipher_class1->alg = SM1;
	cipher_class1->type = CBC;
	cipher_class1->as = INITFINAL;
	cipher_class1->opt = DEC;

	cipher_class2->key_len = 32;
	cipher_class2->data_len = hdatel;
	cipher_class2->iv_len = 32;
	cipher_class2->alg = H_SM3;
	cipher_class2->type = T_HMAC;
	cipher_class2->as = INITFINAL;
	cipher_class2->opt = DEC;
	
	key1_in_raw =(u8 *) kmalloc(cipher_class1->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key1_in =(u8 *) (((size_t)key1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	key2_in_raw =(u8 *) kmalloc(cipher_class2->key_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	key2_in =(u8 *) (((size_t)key2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv1_in_raw =(u8 *) kmalloc(cipher_class1->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv1_in = (u8 *) (((size_t)iv1_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	iv2_in_raw =(u8 *) kmalloc(cipher_class2->iv_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	iv2_in = (u8 *) (((size_t)iv2_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));

	data_in_raw =(u8 *) kmalloc(cipher_class2->data_len+DATA_MARGIN,GFP_KERNEL|SYS_DMA);
	data_in = (u8 *) (((size_t)data_in_raw+DATA_MARGIN)&(~(DATA_MARGIN-1)));
	
	sym_res = kzalloc(sizeof(struct sym_resource_recovery),GFP_KERNEL);
	if(!sym_res )
	{
		csec_error(KERN_ERR "%s: sym_res kzalloc error\n", __func__);
		return -ENOMEM;
	}
	
	csec_priv = cards_enqueue_pre(ccore_cards);

	memset(iv2_in,0,cipher_class2->iv_len);

	memset(key1_in,0x5a,cipher_class1->key_len);
	memset(key2_in,0x5a,cipher_class2->key_len);
	memset(iv1_in,0x5a,cipher_class1->iv_len);
	memset(data_in,0x5a,cipher_class2->data_len);
	memcpy(data_tmp,data_in+hdatel-4,4);

	cipher_class1->key_addr = dma_map_single(csec_priv->dev,(void *)key1_in,cipher_class1->key_len, DMA_BIDIRECTIONAL);
	cipher_class2->key_addr = dma_map_single(csec_priv->dev,(void *)key2_in,cipher_class2->key_len, DMA_BIDIRECTIONAL);
	cipher_class1->iv_addr = dma_map_single(csec_priv->dev,(void *)iv1_in,cipher_class1->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->iv_addr = dma_map_single(csec_priv->dev,(void *)iv2_in,cipher_class2->iv_len, DMA_BIDIRECTIONAL);
	cipher_class2->data_addr = dma_map_single(csec_priv->dev,(void *)data_in,cipher_class2->data_len, DMA_BIDIRECTIONAL);
	cipher_class1->data_addr = cipher_class2->data_addr+hdatel-cdatel;

	sym_res->key_in = key1_in_raw;
	sym_res->key2_in = key2_in_raw;
	sym_res->iv_in = iv1_in_raw;
	sym_res->iv2_in = iv2_in_raw;
	sym_res->data_in = data_in_raw;
	sym_res->callback = (void *)sym_callback_handler;
	init_completion(&sym_res->result.op_done);

	ret = new_interface_do_asyn_recovery(csec_priv,cipher,ACLASS_SNOOP_SP,sym_res);
	if(ret == -ENOMEM) {
		dma_unmap_single(csec_priv->dev,cipher_class1->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->key_addr,cipher_class1->key_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class1->iv_addr,cipher_class1->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		dma_unmap_single(csec_priv->dev,cipher_class2->data_addr,cipher_class2->data_len,DMA_BIDIRECTIONAL);

		kfree(key1_in_raw);
		kfree(key2_in_raw);
		kfree(iv1_in_raw);
		kfree(iv2_in_raw);
		kfree(data_in_raw);

		kfree(sym_res);
		return ret;
	}

	if(!wait_for_completion_timeout(&sym_res->result.op_done, CDEV_INVL*20)){
		csec_error("wait_for_completion_timeout\n");
		return -EAGAIN;
	}

	do{
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class2->iv_addr,cipher_class2->iv_len,DMA_BIDIRECTIONAL);
		ret = memcmp_with0(iv2_in,4);
		dma_sync_single_for_cpu(csec_priv->dev,cipher_class1->data_addr,cipher_class1->data_len,DMA_BIDIRECTIONAL);
		ret = (ret && memcmp(data_tmp,data_in+hdatel-4,4));
	}while(!ret);

	if(memcmp(snoop_sm1sm3dec_sm1_ctrl_data, data_in, cipher_class2->data_len))
		printk("snoop sm1 decrypt test failed!\n");	
	else
		printk("snoop sm1 decrypt test successfully!\n");	

	if(memcmp(snoop_sm3dec_ctrl_data, iv2_in, cipher_class2->iv_len))
		printk("snoop sm3 decrypt test failed!\n");	
	else
		printk("snoop sm3 decrypt test successfully!\n");	
	
	/* Once output data is moved out, setup the 'recovery' flag to do the work of recovery resources. */
	atomic_set(&sym_res->result.recovery, 1);

	if(sym_res->result.rst!=0)
		return -1;
	else
		return 0;

}
