#include <linux/slab.h>
#include <linux/dma-mapping.h>

#include "fh_drv_cipher.h"
#include "hw_rpu.h"
#include "fh_error_mpi.h"



/*
 * Append MD-style padding (0x80, zeros, big-endian bit length) to the
 * tail bytes buffered in hwCtrl->remain, in software.
 *
 * On return hwCtrl->remain_len is the padded length (1 or 2 blocks),
 * ready to be fed to the hardware as the final chunk.
 *
 * Returns 0 on success, -1 if remain_len is out of range.
 *
 * NOTE(review): callers currently ignore the return value -- verify they
 * can never pass remain_len > HASH_BLOCK_SIZE.
 */
static int fh_cesa_hash_padding_sw(DRV_HASH_HDLR_S *hwCtrl)
{
	u8 *remain = (u8 *)hwCtrl->remain;
	u32 remain_len = hwCtrl->remain_len;
	u64 total_bitlen;
	u32 pad_end;

	if (remain_len > HASH_BLOCK_SIZE)
		return -1;

	/* Zero the rest of the two-block padding buffer. */
	memset(remain + remain_len, 0, 2*HASH_BLOCK_SIZE - remain_len);

	/* Mandatory 0x80 marker byte right after the message tail. */
	remain[remain_len + 0] = 0x80;

	/*
	 * SHA-1/SHA-256/SM3 require the message length in bits as a 64-bit
	 * big-endian field.  The original code truncated this to 32 bits,
	 * producing wrong digests for messages >= 512 MiB.
	 */
	total_bitlen = (u64)hwCtrl->total_len << 3;

	/*
	 * The 0x80 byte plus the 8-byte length field must fit after the
	 * data; otherwise the padding spills into a second block.
	 */
	if (remain_len < HASH_BLOCK_SIZE - 8)
		pad_end = HASH_BLOCK_SIZE;
	else
		pad_end = 2*HASH_BLOCK_SIZE;

	hwCtrl->remain_len = pad_end;
	/* Byte-wise stores convert to big-endian regardless of CPU order. */
	remain[pad_end-8] = (u8)(total_bitlen>>56);
	remain[pad_end-7] = (u8)(total_bitlen>>48);
	remain[pad_end-6] = (u8)(total_bitlen>>40);
	remain[pad_end-5] = (u8)(total_bitlen>>32);
	remain[pad_end-4] = (u8)(total_bitlen>>24);
	remain[pad_end-3] = (u8)(total_bitlen>>16);
	remain[pad_end-2] = (u8)(total_bitlen>>8);
	remain[pad_end-1] = (u8)(total_bitlen>>0);

	return 0;
}

/*
 * Claim the shared CESA engine for this hash handler and validate the
 * INIT -> UPDATE -> FINAL stage transition requested by @Stage.
 *
 * If another handler owned the engine, it is flagged is_break so it will
 * restore its own context before its next operation.  If *this* handler
 * was previously interrupted (is_break), the engine instruction set is
 * reprogrammed and the saved intermediate digest in hwCtrl->state is
 * reloaded as the IV before continuing.
 *
 * Returns FH_SUCCESS or an FH_ERR_CIPHER_FAILED_* code.
 */
static FH_S32 HashHdlr_BreakCheck_InitCheck(
		DRV_HASH_HDLR_S *hwCtrl,
		DRV_HASH_STAGE_E Stage)
{
	CESA_RES_S *cesa = (CESA_RES_S *)hwCtrl->priv;
	void __iomem *regbase = hwCtrl->regs;

	if (cesa->curr_hdlr != (void *)hwCtrl) {
		if (cesa->curr_hdlr != NULL) {
			/* Mark the previous owner as interrupted. */
			switch (cesa->st) {
			case RPU_ST_AES128_SH256_RSA2048_4_AES128:
			case RPU_ST_DES:
			case RPU_ST_TDES:
			case RPU_ST_SM2_SM3_SM4_4_SM4:
				((DRV_CIPHER_HDLR_S *)
				(cesa->curr_hdlr))->is_break = true;
				break;
			case RPU_ST_AES128_SH256_RSA2048_4_SH256:
			case RPU_ST_SM2_SM3_SM4_4_SM3:
			case RPU_ST_SH1:
				((DRV_HASH_HDLR_S *)
				(cesa->curr_hdlr))->is_break = true;
				break;
			case RPU_ST_AES128_SH256_RSA2048_4_RSA2048:
			case RPU_ST_SM2_SM3_SM4_4_SM2_VERIFY:
			case RPU_ST_SM2_SM3_SM4_4_SM2_SIGN:
				/*
				 * NOTE(review): asymmetric contexts are not
				 * flagged as interrupted here; the original
				 * author left "???" -- confirm this is safe.
				 */
				break;

			default:
				return FH_ERR_CIPHER_FAILED_CHECKST;
			}
		}

		cesa->curr_hdlr = (void *)hwCtrl;
	}

	/* INIT restarts the stage machine from any previous stage. */
	if (Stage == HASH_STAGE_INIT) {
		hwCtrl->is_break = false;
		switch (hwCtrl->stage) {
		case HASH_STAGE_INIT:
		case HASH_STAGE_UPDATE:
		case HASH_STAGE_FINAL:
			hwCtrl->stage = HASH_STAGE_INIT;
			break;
		default:
			return FH_ERR_CIPHER_FAILED_CHECKSTAGE;
		}
	}

	if (hwCtrl->is_break) {
		/*
		 * Resuming an interrupted handler: only reached for UPDATE
		 * or FINAL (INIT clears is_break above).
		 */
		switch (Stage) {
		case HASH_STAGE_UPDATE:
			if (hwCtrl->stage == HASH_STAGE_INIT
				|| hwCtrl->stage == HASH_STAGE_UPDATE)
				hwCtrl->stage = HASH_STAGE_UPDATE;
			else
				return FH_ERR_CIPHER_FAILED_CHECKSTAGE;
			break;
		case HASH_STAGE_FINAL:
			if (hwCtrl->stage == HASH_STAGE_UPDATE)
				hwCtrl->stage = HASH_STAGE_FINAL;
			else
				return FH_ERR_CIPHER_FAILED_CHECKSTAGE;
			break;
		default:
			return FH_ERR_CIPHER_FAILED_CHECKSTAGE;
		}

		/* Re-select this handler's instruction set on the engine. */
		if (hwCtrl->st != rpu_st_switch(regbase, cesa->st, hwCtrl->st))
			return FH_ERR_CIPHER_FAILED_CONFIGINS;

		cesa->st = hwCtrl->st;

		/*
		 * Reload the saved intermediate digest as the IV.
		 * BUGFIX: the SHA-1 and SHA-256 setup calls were swapped in
		 * the original code (SHA1 called rpu_sha256_iv_setup_imm and
		 * vice versa), corrupting the resumed state.
		 */
		switch (hwCtrl->algo) {
		case RPU_ALGO_SHA1:
			rpu_sha1_iv_setup_imm(
				regbase,
				(uint8_t *)hwCtrl->state);
			break;
		case RPU_ALGO_SHA256:
			rpu_sha256_iv_setup_imm(
				regbase,
				(uint8_t *)hwCtrl->state);
			break;
		case RPU_ALGO_SM3:
			rpu_sm3_iv_setup_imm(
				regbase,
				(uint8_t *)hwCtrl->state);
			break;
		default:
			return FH_ERR_CIPHER_FAILED_CHECKALG;
		}

		hwCtrl->is_init = true;
		hwCtrl->is_break = false;

	} else {
		/* Normal (uninterrupted) stage bookkeeping. */
		switch (Stage) {
		case HASH_STAGE_INIT:
			hwCtrl->stage = HASH_STAGE_INIT;
			hwCtrl->is_init = true;
			break;
		case HASH_STAGE_UPDATE:
			if (hwCtrl->stage == HASH_STAGE_INIT) {
				hwCtrl->stage = HASH_STAGE_UPDATE;
				hwCtrl->is_init = true;
			} else if (hwCtrl->stage == HASH_STAGE_UPDATE) {
				hwCtrl->stage = HASH_STAGE_UPDATE;
				hwCtrl->is_init = false;
			} else
				return FH_ERR_CIPHER_FAILED_CHECKSTAGE;
			break;
		case HASH_STAGE_FINAL:
			if (hwCtrl->stage == HASH_STAGE_UPDATE) {
				hwCtrl->stage = HASH_STAGE_FINAL;
				hwCtrl->is_init = false;
			} else
				return FH_ERR_CIPHER_FAILED_CHECKSTAGE;
			break;
		default:
			return FH_ERR_CIPHER_FAILED_CHECKSTAGE;
		}

	}

	return FH_SUCCESS;
}




/*
 * Allocate and initialize a hash handle for the requested algorithm.
 *
 * @pHdlr:   out - receives the new handle (0 on failure)
 * @p_priv:  CESA_RES_S engine resources
 * @alg:     SHA1/SHA256/SM3, plain or HMAC variant
 * @hmackey: optional HMAC key, HASH_BLOCK_SIZE bytes (may be NULL)
 *
 * Returns FH_SUCCESS or an FH_ERR_CIPHER_* code.  On failure the handle
 * is freed and *pHdlr is set to 0 on ALL error paths (the original code
 * left *pHdlr pointing at freed memory for the invalid-alg and
 * config-failure paths).
 */
FH_S32 FH_DRV_CIPHER_CalcHashInit(
		FH_HANDLE * pHdlr,
		FH_VOID *p_priv,
		FH_UNF_CIPHER_HASH_TYPE_E alg,
		FH_U8 *hmackey)
{
	CESA_RES_S *cesa = (CESA_RES_S *)p_priv;
	DRV_HASH_HDLR_S *hwCtrl;
	FH_S32 ret = FH_SUCCESS;

	if (pHdlr == NULL)
		return FH_ERR_CIPHER_INVALID_POINT;

	*pHdlr = (FH_HANDLE)kzalloc(sizeof(DRV_HASH_HDLR_S), GFP_KERNEL);
	if (*pHdlr == 0)
		return FH_ERR_CIPHER_FAILED_GETHANDLE;

	hwCtrl = (DRV_HASH_HDLR_S *)(*pHdlr);
	hwCtrl->priv = p_priv;
	hwCtrl->regs = cesa->regs + RPU_REG_OFFSET;

	mutex_lock(&cesa->lock);

	ret = HashHdlr_BreakCheck_InitCheck(hwCtrl, HASH_STAGE_INIT);
	if (ret)
		goto init_done;

	if (hmackey)
		memcpy(hwCtrl->hmackey, hmackey, HASH_BLOCK_SIZE);

	/* Select engine instruction set and record the algorithm. */
	switch (alg) {
	case FH_UNF_CIPHER_HASH_TYPE_SHA1:
	case FH_UNF_CIPHER_HASH_TYPE_HMAC_SHA1:
		hwCtrl->st = rpu_st_switch(
						hwCtrl->regs,
						cesa->st,
						RPU_ST_SH1);
		hwCtrl->algo = RPU_ALGO_SHA1;
		if (alg == FH_UNF_CIPHER_HASH_TYPE_HMAC_SHA1)
			hwCtrl->is_hmac = true;
		break;
	case FH_UNF_CIPHER_HASH_TYPE_SHA256:
	case FH_UNF_CIPHER_HASH_TYPE_HMAC_SHA256:
		hwCtrl->st = rpu_st_switch(
				hwCtrl->regs,
				cesa->st,
				RPU_ST_AES128_SH256_RSA2048_4_SH256);
		hwCtrl->algo = RPU_ALGO_SHA256;
		if (alg == FH_UNF_CIPHER_HASH_TYPE_HMAC_SHA256)
			hwCtrl->is_hmac = true;
		break;
	case FH_UNF_CIPHER_HASH_TYPE_SM3:
	case FH_UNF_CIPHER_HASH_TYPE_HMAC_SM3:
		hwCtrl->st = rpu_st_switch(
						hwCtrl->regs,
						cesa->st,
						RPU_ST_SM2_SM3_SM4_4_SM3);
		hwCtrl->algo = RPU_ALGO_SM3;
		if (alg == FH_UNF_CIPHER_HASH_TYPE_HMAC_SM3)
			hwCtrl->is_hmac = true;
		break;
	default:
		ret = FH_ERR_CIPHER_INVALID_PARA;
		goto init_done;
	}

	if (hwCtrl->st)
		cesa->st = hwCtrl->st;
	else
		ret = FH_ERR_CIPHER_FAILED_CONFIGINS;

init_done:
	mutex_unlock(&cesa->lock);
	if (ret) {
		/* Never hand the caller a dangling handle. */
		kfree(hwCtrl);
		*pHdlr = 0;
	}
	return ret;
}

/*
 * Feed @msglen bytes of @msg into an ongoing hash computation.
 *
 * Partial blocks are buffered in hwCtrl->remain; only whole
 * HASH_BLOCK_SIZE multiples are pushed to the hardware.  The running
 * total length is accumulated for the final padding.
 *
 * NOTE(review): on error the handle is kfree'd here, so the caller must
 * not call Final after a failed Update -- confirm callers honor this.
 *
 * Returns FH_SUCCESS or a negative/FH_ERR_CIPHER_* code.
 */
FH_S32 FH_DRV_CIPHER_CalcHashUpdate(FH_HANDLE Hdlr, FH_U8 *msg, FH_U32 msglen)
{
	DRV_HASH_HDLR_S *hwCtrl = (DRV_HASH_HDLR_S *)Hdlr;
	CESA_RES_S *cesa = (CESA_RES_S *)hwCtrl->priv;
	void __iomem *regbase = hwCtrl->regs;
	FH_S32 ret = FH_SUCCESS;
	FH_S32 (*rpu_hash_crypto_irq)(
		void __iomem *regbase,
		const FH_U8 in[],
		FH_U32 in_len,
		FH_U8 out[],
		bool is_init);
	FH_S32 (*rpu_hash_iv_get_imm)(
		void __iomem *regbase,
		FH_U8 iv[]);

	FH_U32 cut_length = 0;
	FH_U32 payload_length = 0;
	FH_U32 inputlen = msglen;
	/*
	 * BUGFIX: keep the address returned by dma_map_single() for the
	 * later dma_unmap_single().  The original code advanced
	 * phy_msg_addr by cut_length and then unmapped the advanced
	 * address, which violates the DMA API.
	 */
	FH_U32 phy_msg_base =
		dma_map_single(
			cesa->dev,
			msg,
			inputlen,
			DMA_BIDIRECTIONAL);
	FH_U32 phy_msg_addr = phy_msg_base;
	FH_U32 phy_remain_addr =
		dma_map_single(
			cesa->dev,
			hwCtrl->remain,
			HASH_BLOCK_SIZE*2,
			DMA_BIDIRECTIONAL);
	FH_U32 phy_dgst_addr =
		dma_map_single(
			cesa->dev,
			hwCtrl->state,
			32,
			DMA_BIDIRECTIONAL);

	mutex_lock(&cesa->lock);

	switch (hwCtrl->algo) {
	case RPU_ALGO_SHA1:
		rpu_hash_crypto_irq = rpu_sha1_crypto_irq;
		rpu_hash_iv_get_imm = rpu_sha1_iv_get_imm;
		break;
	case RPU_ALGO_SHA256:
		rpu_hash_crypto_irq = rpu_sha256_crypto_irq;
		rpu_hash_iv_get_imm = rpu_sha256_iv_get_imm;
		break;
	case RPU_ALGO_SM3:
		rpu_hash_crypto_irq = rpu_sm3_crypto_irq;
		rpu_hash_iv_get_imm = rpu_sm3_iv_get_imm;
		break;
	default:
		ret = FH_ERR_CIPHER_FAILED_CHECKALG;
		goto update_done;
	}

	dma_sync_single_for_device(
		cesa->dev,
		phy_msg_addr,
		inputlen,
		DMA_BIDIRECTIONAL);

	/* Top up the buffered partial block from the head of @msg. */
	cut_length = hwCtrl->remain_len ?
		(HASH_BLOCK_SIZE - hwCtrl->remain_len) : 0;
	cut_length = (msglen > cut_length) ? cut_length : msglen;

	memcpy((FH_U8 *)hwCtrl->remain + hwCtrl->remain_len, msg, cut_length);
	hwCtrl->remain_len += cut_length;

	msglen -= cut_length;
	msg += cut_length;
	phy_msg_addr += cut_length;

	if (hwCtrl->remain_len == 0) {
		/* Previous data was a multiple of HASH_BLOCK_SIZE. */
	} else if (hwCtrl->remain_len < HASH_BLOCK_SIZE) {
		/*
		 * Still not a full block even with the new data, so the
		 * whole message must have been consumed above.
		 */
		if (msglen) {
			ret = -777;
			goto update_done;
		}
	} else if (hwCtrl->remain_len == HASH_BLOCK_SIZE) {
		/* Partial block completed: push it through the engine. */
		dma_sync_single_for_device(
			cesa->dev,
			phy_remain_addr,
			HASH_BLOCK_SIZE*2,
			DMA_BIDIRECTIONAL);
		rpu_hash_crypto_irq(
			regbase,
			(FH_U8 *)phy_remain_addr,
			hwCtrl->remain_len,
			(FH_U8 *)phy_dgst_addr,
			hwCtrl->is_init);
		wait_for_completion(&cesa->completion);
		reinit_completion(&cesa->completion);
		hwCtrl->remain_len = 0;
		hwCtrl->is_init = false;
		rpu_hash_iv_get_imm(regbase, (FH_U8 *)hwCtrl->state);
	}

	if (msglen) {
		/* Hash the whole-block body; stash the tail for later. */
		hwCtrl->remain_len = msglen & 0x3F;
		payload_length = msglen - hwCtrl->remain_len;
		memcpy(hwCtrl->remain, msg+payload_length, hwCtrl->remain_len);
		if (payload_length) {
			rpu_hash_crypto_irq(
				regbase,
				(FH_U8 *)phy_msg_addr,
				payload_length,
				(FH_U8 *)phy_dgst_addr,
				hwCtrl->is_init);
			wait_for_completion(&cesa->completion);
			reinit_completion(&cesa->completion);
			hwCtrl->is_init = false;
			rpu_hash_iv_get_imm(regbase, (FH_U8 *)hwCtrl->state);
		}
	}

update_done:
	/* Only count bytes that were actually accepted. */
	if (ret == FH_SUCCESS)
		hwCtrl->total_len += inputlen;
	dma_unmap_single(
		cesa->dev,
		phy_msg_base,
		inputlen,
		DMA_BIDIRECTIONAL);
	dma_unmap_single(
		cesa->dev,
		phy_remain_addr,
		HASH_BLOCK_SIZE*2,
		DMA_BIDIRECTIONAL);
	dma_unmap_single(
		cesa->dev,
		phy_dgst_addr,
		32,
		DMA_BIDIRECTIONAL);
	mutex_unlock(&cesa->lock);
	if (ret)
		kfree(hwCtrl);
	return ret;
}

/*
 * Finish the hash: pad the buffered tail, run the final hardware pass,
 * and copy the digest out.
 *
 * @dgst:    out - digest bytes (20 for SHA-1, 32 for SHA-256/SM3)
 * @dgstlen: out - digest length in bytes
 *
 * The handle is always freed before returning (except on NULL output
 * pointers, which are rejected up front), so the caller must not reuse
 * Hdlr afterwards.
 */
FH_S32 FH_DRV_CIPHER_CalcHashFinal(FH_HANDLE Hdlr, FH_U8 *dgst, FH_U32 *dgstlen)
{
	DRV_HASH_HDLR_S *hwCtrl = (DRV_HASH_HDLR_S *)Hdlr;
	CESA_RES_S *cesa;
	void __iomem *regbase;
	FH_S32 ret = FH_SUCCESS;
	FH_S32 (*rpu_hash_crypto_irq)(
		void __iomem *regbase,
		const FH_U8 in[],
		FH_U32 in_len,
		FH_U8 out[],
		bool is_init);
	FH_U32 phy_remain_addr;
	FH_U32 phy_dgst_addr;

	/* Reject NULL outputs before touching anything (was a crash). */
	if (hwCtrl == NULL || dgst == NULL || dgstlen == NULL)
		return FH_ERR_CIPHER_INVALID_POINT;

	cesa = (CESA_RES_S *)hwCtrl->priv;
	regbase = hwCtrl->regs;

	phy_remain_addr =
		dma_map_single(
			cesa->dev,
			hwCtrl->remain,
			HASH_BLOCK_SIZE*2,
			DMA_BIDIRECTIONAL);
	phy_dgst_addr =
		dma_map_single(
			cesa->dev,
			hwCtrl->state,
			32,
			DMA_BIDIRECTIONAL);

	mutex_lock(&cesa->lock);

	switch (hwCtrl->algo) {
	case RPU_ALGO_SHA1:
		rpu_hash_crypto_irq = rpu_sha1_crypto_irq;
		*dgstlen = 20;
		break;
	case RPU_ALGO_SHA256:
		rpu_hash_crypto_irq = rpu_sha256_crypto_irq;
		*dgstlen = 32;
		break;
	case RPU_ALGO_SM3:
		rpu_hash_crypto_irq = rpu_sm3_crypto_irq;
		*dgstlen = 32;
		break;
	default:
		ret = FH_ERR_CIPHER_FAILED_CHECKALG;
		goto final_done;
	}

	/* BUGFIX: padding failure was silently ignored. */
	if (fh_cesa_hash_padding_sw(hwCtrl)) {
		ret = FH_ERR_CIPHER_INVALID_PARA;
		goto final_done;
	}

	dma_sync_single_for_device(
		cesa->dev,
		phy_remain_addr,
		HASH_BLOCK_SIZE*2,
		DMA_BIDIRECTIONAL);
	rpu_hash_crypto_irq(
		regbase,
		(uint8_t *)phy_remain_addr,
		hwCtrl->remain_len,
		(uint8_t *)phy_dgst_addr,
		hwCtrl->is_init);
	wait_for_completion(&cesa->completion);
	reinit_completion(&cesa->completion);
	dma_sync_single_for_cpu(
		cesa->dev,
		phy_dgst_addr,
		*dgstlen,
		DMA_BIDIRECTIONAL);
	memcpy(dgst, hwCtrl->state, *dgstlen);

final_done:
	dma_unmap_single(
		cesa->dev,
		phy_remain_addr,
		HASH_BLOCK_SIZE*2,
		DMA_BIDIRECTIONAL);
	dma_unmap_single(
		cesa->dev,
		phy_dgst_addr,
		32,
		DMA_BIDIRECTIONAL);
	mutex_unlock(&cesa->lock);
	kfree(hwCtrl);
	return ret;
}


/*
 * One-shot hash of a physically-contiguous message: init + update +
 * final in a single call.
 *
 * @MsgPhyAddr: physical address of the message (DMA-able by the engine)
 * @msgLen:     message length in bytes
 * @dgst:       out - digest bytes
 * @dgstlen:    out - digest length (20 for SHA-1, 32 for SHA-256/SM3)
 *
 * The whole-block body is fed to the engine directly from MsgPhyAddr;
 * the tail (plus padding) goes through the handler's remain buffer.
 */
FH_S32 FH_DRV_CIPHER_CalcHash(
		FH_VOID *p_priv,
		FH_UNF_CIPHER_HASH_TYPE_E alg,
		FH_U32 MsgPhyAddr,
		FH_U32 msgLen,
		FH_U8 *dgst,
		FH_U32 *dgstlen)
{
	DRV_HASH_HDLR_S *hwCtrl;
	CESA_RES_S *cesa;
	void __iomem *regbase;
	FH_S32 ret = FH_SUCCESS;
	FH_S32 (*rpu_hash_crypto_irq)(
		void __iomem *regbase,
		const FH_U8 in[],
		FH_U32 in_len,
		FH_U8 out[],
		bool is_init);

	FH_U32 phy_remain_addr;
	FH_U32 phy_dgst_addr;
	FH_U32 payload_length;
	FH_U8 *vmsg;

	/* Reject NULL outputs before allocating anything (was a crash). */
	if (dgst == NULL || dgstlen == NULL)
		return FH_ERR_CIPHER_INVALID_POINT;

	hwCtrl = kzalloc(sizeof(DRV_HASH_HDLR_S), GFP_KERNEL);
	if (hwCtrl == NULL)
		return FH_ERR_CIPHER_FAILED_GETHANDLE;

	hwCtrl->priv = p_priv;
	cesa = (CESA_RES_S *)hwCtrl->priv;
	hwCtrl->regs = cesa->regs + RPU_REG_OFFSET;
	regbase = hwCtrl->regs;

	phy_remain_addr = dma_map_single(
						cesa->dev,
						hwCtrl->remain,
						HASH_BLOCK_SIZE*2,
						DMA_BIDIRECTIONAL);
	phy_dgst_addr = dma_map_single(
						cesa->dev,
						hwCtrl->state,
						32,
						DMA_BIDIRECTIONAL);

	/* BUGFIX: ioremap failure was unchecked and led to a NULL deref. */
	vmsg = ioremap(MsgPhyAddr, msgLen);
	if (vmsg == NULL) {
		dma_unmap_single(
			cesa->dev,
			phy_remain_addr,
			HASH_BLOCK_SIZE*2,
			DMA_BIDIRECTIONAL);
		dma_unmap_single(
			cesa->dev,
			phy_dgst_addr,
			32,
			DMA_BIDIRECTIONAL);
		kfree(hwCtrl);
		return FH_ERR_CIPHER_INVALID_POINT;
	}

	mutex_lock(&cesa->lock);

	//========================Init==============================
	ret = HashHdlr_BreakCheck_InitCheck(hwCtrl, HASH_STAGE_INIT);
	if (ret)
		goto dgst_done;

	switch (alg) {
	case FH_UNF_CIPHER_HASH_TYPE_SHA1:
		hwCtrl->st = rpu_st_switch(hwCtrl->regs, cesa->st, RPU_ST_SH1);
		hwCtrl->algo = RPU_ALGO_SHA1;
		rpu_hash_crypto_irq = rpu_sha1_crypto_irq;
		*dgstlen = 20;
		break;
	case FH_UNF_CIPHER_HASH_TYPE_SHA256:
		hwCtrl->st = rpu_st_switch(
				hwCtrl->regs,
				cesa->st,
				RPU_ST_AES128_SH256_RSA2048_4_SH256);
		hwCtrl->algo = RPU_ALGO_SHA256;
		rpu_hash_crypto_irq = rpu_sha256_crypto_irq;
		*dgstlen = 32;
		break;
	case FH_UNF_CIPHER_HASH_TYPE_SM3:
		hwCtrl->st = rpu_st_switch(
						hwCtrl->regs,
						cesa->st,
						RPU_ST_SM2_SM3_SM4_4_SM3);
		hwCtrl->algo = RPU_ALGO_SM3;
		rpu_hash_crypto_irq = rpu_sm3_crypto_irq;
		*dgstlen = 32;
		break;
	default:
		ret = FH_ERR_CIPHER_INVALID_PARA;
		goto dgst_done;
	}

	if (hwCtrl->st) {
		cesa->st = hwCtrl->st;
	} else {
		ret = FH_ERR_CIPHER_FAILED_CONFIGINS;
		goto dgst_done;
	}

	//========================Update==============================
	/*
	 * Split the message: whole blocks stream from MsgPhyAddr; the tail
	 * (forced to a full block when msgLen is an exact multiple, so the
	 * padding path always has the last block in 'remain') is copied out
	 * through the ioremap'd view.
	 */
	hwCtrl->remain_len = msgLen & 0x3F;
	if (msgLen && hwCtrl->remain_len == 0)
		hwCtrl->remain_len = HASH_BLOCK_SIZE;

	payload_length = msgLen - hwCtrl->remain_len;
	memcpy(hwCtrl->remain, vmsg+payload_length, hwCtrl->remain_len);

	if (payload_length) {
		rpu_hash_crypto_irq(
			regbase,
			(uint8_t *)MsgPhyAddr,
			payload_length,
			(uint8_t *)phy_dgst_addr,
			hwCtrl->is_init);
		wait_for_completion(&cesa->completion);
		reinit_completion(&cesa->completion);
		hwCtrl->is_init = false;
	}

	//========================Final==============================
	hwCtrl->total_len = msgLen;
	/* BUGFIX: padding failure was silently ignored. */
	if (fh_cesa_hash_padding_sw(hwCtrl)) {
		ret = FH_ERR_CIPHER_INVALID_PARA;
		goto dgst_done;
	}

	dma_sync_single_for_device(
		cesa->dev,
		phy_remain_addr,
		HASH_BLOCK_SIZE*2,
		DMA_BIDIRECTIONAL);
	rpu_hash_crypto_irq(
		regbase,
		(uint8_t *)phy_remain_addr,
		hwCtrl->remain_len,
		(uint8_t *)phy_dgst_addr,
		hwCtrl->is_init);
	wait_for_completion(&cesa->completion);
	reinit_completion(&cesa->completion);
	dma_sync_single_for_cpu(
		cesa->dev,
		phy_dgst_addr,
		*dgstlen,
		DMA_BIDIRECTIONAL);
	memcpy(dgst, hwCtrl->state, *dgstlen);

dgst_done:
	iounmap(vmsg);
	dma_unmap_single(
		cesa->dev,
		phy_remain_addr,
		HASH_BLOCK_SIZE*2,
		DMA_BIDIRECTIONAL);
	dma_unmap_single(
		cesa->dev,
		phy_dgst_addr,
		32,
		DMA_BIDIRECTIONAL);
	mutex_unlock(&cesa->lock);
	kfree(hwCtrl);
	return ret;
}

/*
 * Query a hash handle: report its algorithm (plain or HMAC variant)
 * through @alg and expose a pointer to its stored HMAC key through
 * @hmackey.  Either out-parameter may be NULL to skip it.
 *
 * Returns FH_SUCCESS, FH_ERR_CIPHER_INVALID_POINT for a NULL handle,
 * or FH_ERR_CIPHER_FAILED_CHECKALG for an unknown algorithm.
 */
FH_S32 FH_DRV_CIPHER_GetHashInfo(
			FH_HANDLE Hdlr,
			FH_UNF_CIPHER_HASH_TYPE_E *alg,
			FH_U8 **hmackey)
{
	DRV_HASH_HDLR_S *hwCtrl = (DRV_HASH_HDLR_S *)Hdlr;

	/* Validate the handle like the other entry points do. */
	if (hwCtrl == NULL)
		return FH_ERR_CIPHER_INVALID_POINT;

	if (alg) {
		switch (hwCtrl->algo) {
		case RPU_ALGO_SHA1:
			if (hwCtrl->is_hmac)
				*alg = FH_UNF_CIPHER_HASH_TYPE_HMAC_SHA1;
			else
				*alg = FH_UNF_CIPHER_HASH_TYPE_SHA1;
			break;
		case RPU_ALGO_SHA256:
			if (hwCtrl->is_hmac)
				*alg = FH_UNF_CIPHER_HASH_TYPE_HMAC_SHA256;
			else
				*alg = FH_UNF_CIPHER_HASH_TYPE_SHA256;
			break;
		case RPU_ALGO_SM3:
			if (hwCtrl->is_hmac)
				*alg = FH_UNF_CIPHER_HASH_TYPE_HMAC_SM3;
			else
				*alg = FH_UNF_CIPHER_HASH_TYPE_SM3;
			break;
		default:
			return FH_ERR_CIPHER_FAILED_CHECKALG;
		}
	}

	if (hmackey)
		*hmackey = hwCtrl->hmackey;

	return FH_SUCCESS;
}

