/*
 * Copyright (c) 2022 listenai Intelligent Technology (anhui) Co., Ltd.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
#define DT_DRV_COMPAT listenai_csk_crypto

#include <string.h>

#include <zephyr/init.h>
#include <zephyr/kernel.h>
#include <zephyr/device.h>
#include <zephyr/sys/__assert.h>
#include "zephyr/crypto/crypto.h"

#include "Driver_CRYPTO.h"
#include "crypto.h"

#include <zephyr/logging/log.h>
LOG_MODULE_REGISTER(crypto_csk6, LOG_LEVEL_DBG);


/* Accessors for this driver's per-instance data/config slots on a device. */
#define DEV_DATA(dev) ((struct crypto_csk6_data *const)(dev)->data)
#define DEV_CFG(dev) ((const struct crypto_csk6_config *const)(dev)->config)
/* Opaque CMSIS-style crypto driver handle stored in the data struct. */
#define DEV_HANDLER(dev) ((void *)(DEV_DATA(dev)->handle))

/* AES block size in bytes */
#define AES_BLOCK_SIZE	(16)

/* Capabilities reported by query_hw_caps(): raw keys, separate in/out
 * buffers, synchronous operation only, IV passed separately (no prefix).
 */
#define CSK6_CRYP_SUPPORT (CAP_RAW_KEY | CAP_SEPARATE_IO_BUFS | CAP_SYNC_OPS | \
		      CAP_NO_IV_PREFIX)

/* Per-instance configuration; currently empty (no devicetree properties). */
struct crypto_csk6_config {

};

/* Per-instance runtime state. */
struct crypto_csk6_data {
    /* CMSIS-style driver handle, set from CRYPTO0() in init */
    void *handle; 

	/* event bits set from ISR context by CRYPTO_EventCallback; polled by
	 * the busy-wait loops in the encrypt/decrypt/hash paths
	 */
	volatile uint32_t event;

	/* NOTE(review): initialized in init but never taken/given anywhere in
	 * this file — sessions are not actually serialized yet; confirm intent.
	 */
	struct k_sem device_sem;
	struct k_sem session_sem;
};

static void crypto_csk6_isr(const void *arg)
{
	struct device *dev = (struct device *)arg;
	struct crypto_csk6_data *data = DEV_DATA(dev);
	CRYPTO_RESOURCES *crypto_resources = data->handle;

	if(crypto_resources->irq_handler != NULL)
	{
		crypto_resources->irq_handler();
	}

}

/* Vendor-driver completion callback: latch the event bits so the polling
 * loops in the encrypt/decrypt/hash paths can observe completion.
 */
static void CRYPTO_EventCallback(uint32_t event, void *workspace)
{
	const struct device *dev = workspace;

	DEV_DATA(dev)->event |= event;
}

/*
 * Pad a message for SHA-256 per FIPS 180-4: copy the message, append the
 * mandatory 0x80 marker, zero-fill, and store the 64-bit big-endian bit
 * length in the final 8 bytes, so the total is a multiple of 64 bytes.
 *
 * m      message to pad
 * len    message length in bytes
 * r_len  out parameter: padded length in bytes
 *
 * Returns a k_malloc'd buffer (caller must k_free), or NULL on allocation
 * failure.
 *
 * Fixes over the original:
 *  - the length-field loop computed `0xff << i * 8` in (signed) int, which
 *    is undefined behavior for i >= 4 (shift >= width) and overflows at
 *    i == 3; the length is now widened to uint64_t before shifting;
 *  - `len * 8` is computed in 64 bits so large messages don't wrap;
 *  - byte-by-byte copy replaced with memcpy.
 */
static void* Hash256_Padding(char* m, uint32_t len, int *r_len)
{
	int blocks = len / 64 + 1;

	/* One extra block when the 0x80 marker plus the 8-byte length field
	 * does not fit after the message in the last block (len % 64 >= 56).
	 */
	if ((len % 64) >= 56) {
		blocks += 1;
	}

	*r_len = blocks * 64 * sizeof(unsigned char);

	unsigned char *s = k_malloc(*r_len);

	if (s == NULL) {
		return NULL;
	}

	memset(s, 0, *r_len);
	memcpy(s, m, len);

	/* Mandatory '1' bit immediately after the message. */
	s[len] = 0x80;

	/* 64-bit big-endian message length in bits, in the last 8 bytes. */
	uint64_t bit_len = (uint64_t)len * 8;

	for (int i = 0; i < 8; i++) {
		s[blocks * 64 - 1 - i] = (unsigned char)(bit_len >> (8 * i));
	}

	return s;
}


/*
 * One-shot SHA-256 over in_buf (despite the "decrypt" name, this hashes).
 * Only finish == true is supported — this engine path has no incremental
 * update, so a non-final call is rejected with -EINVAL.
 *
 * Pads the input (heap copy), starts the hardware hash, then busy-waits on
 * the DONE event set from ISR context by CRYPTO_EventCallback.
 * Returns 0 on success, -ENOMEM if the padding buffer allocation fails.
 */
static int csk6_hash_do_decrypt(struct hash_ctx *ctx, uint8_t *in_buf, int in_len, uint8_t *out_buf,
				bool finish)
{
	if(finish == 0){
		return -EINVAL;
	}
	const struct device *dev = ctx->device;
	struct crypto_csk6_data *data = DEV_DATA(dev);
	/* in_len is rewritten to the padded length by Hash256_Padding */
	volatile uint32_t* source_addr = (uint32_t*)Hash256_Padding((char*)in_buf, in_len, &in_len);

	if(source_addr == NULL){
		LOG_ERR("Failed to malloc memory \n");
		return -ENOMEM;
	}
	CRYPTO_Hash(DEV_HANDLER(dev), (uint32_t *)source_addr, in_len, (uint32_t *)out_buf, 0);

	/* Spin until the ISR reports completion, then clear for the next op. */
	while (!(data->event & CSK_CRYPTO_EVENT_DONE));
	data->event = 0;
	k_free((void*)source_addr);
	return 0;
}

/* hash_hndlr entry point: run a one-shot SHA-256 over the packet and mark
 * the input as consumed on success.
 */
static int crypto_csk6_hase_encrypt(struct hash_ctx *ctx, struct hash_pkt *pkt, bool finish)
{
	int rc = csk6_hash_do_decrypt(ctx, pkt->in_buf, pkt->in_len,
				      pkt->out_buf, finish);

	if (rc == 0) {
		pkt->in_len = 0;
	}

	return rc;
}

/*
 * Run an AES decrypt operation and busy-wait for hardware completion.
 *
 * Fixes over the original:
 *  - an unsupported cipher_mode now returns -EINVAL instead of falling
 *    through to a busy-wait on an event that will never be signalled;
 *  - the DONE event is cleared both before starting and after completion,
 *    so a stale flag from a previous operation cannot end the wait early.
 */
static int csk6_do_decrypt(struct cipher_ctx *ctx, uint8_t *in_buf, int in_len,
		      uint8_t *out_buf)
{
	const struct device *dev = ctx->device;
	struct crypto_csk6_data *data = DEV_DATA(dev);

	data->event = 0;

	switch (ctx->ops.cipher_mode) {
	case CRYPTO_CIPHER_MODE_ECB:
		CRYPTO_ECB_Decrypt(DEV_HANDLER(dev), (uint32_t *)in_buf, in_len,
				   (uint32_t *)out_buf);
		break;

	case CRYPTO_CIPHER_MODE_CBC:
		CRYPTO_CBC_Decrypt(DEV_HANDLER(dev), (uint32_t *)in_buf, in_len,
				   (uint32_t *)out_buf);
		break;

	default:
		LOG_ERR("Err decrypt mode %d\n", ctx->ops.cipher_mode);
		return -EINVAL;
	}

	/* Poll for the DONE event set from ISR context. */
	while (!(data->event & CSK_CRYPTO_EVENT_DONE)) {
	}
	data->event = 0;

	return 0;
}

/*
 * Run an AES encrypt operation and busy-wait for hardware completion.
 *
 * Fixes over the original:
 *  - an unsupported cipher_mode now returns -EINVAL instead of falling
 *    through to a busy-wait on an event that will never be signalled;
 *  - the DONE event is cleared both before starting and after completion,
 *    so a stale flag from a previous operation cannot end the wait early.
 */
static int csk6_do_encrypt(struct cipher_ctx *ctx, uint8_t *in_buf, int in_len,
		      uint8_t *out_buf)
{
	const struct device *dev = ctx->device;
	struct crypto_csk6_data *data = DEV_DATA(dev);

	data->event = 0;

	switch (ctx->ops.cipher_mode) {
	case CRYPTO_CIPHER_MODE_ECB:
		CRYPTO_ECB_Encrypt(DEV_HANDLER(dev), (uint32_t *)in_buf, in_len,
				   (uint32_t *)out_buf);
		break;

	case CRYPTO_CIPHER_MODE_CBC:
		CRYPTO_CBC_Encrypt(DEV_HANDLER(dev), (uint32_t *)in_buf, in_len,
				   (uint32_t *)out_buf);
		break;

	default:
		LOG_ERR("Err encrypt mode %d\n", ctx->ops.cipher_mode);
		return -EINVAL;
	}

	/* Poll for the DONE event set from ISR context. */
	while (!(data->event & CSK_CRYPTO_EVENT_DONE)) {
	}
	data->event = 0;

	return 0;
}

/* block_crypt_hndlr for ECB encryption. */
static int crypto_csk6_ecb_encrypt(struct cipher_ctx *ctx,
				    struct cipher_pkt *pkt)
{
	int rc = csk6_do_encrypt(ctx, pkt->in_buf, pkt->in_len, pkt->out_buf);

	if (rc == 0) {
		/* Block ciphers produce exactly as much output as input. */
		pkt->out_len = pkt->in_len;
	}

	return rc;
}

/* cbc_crypt_hndlr for CBC encryption: program the IV, then encrypt. */
static int crypto_csk6_cbc_encrypt(struct cipher_ctx *ctx,
				    struct cipher_pkt *pkt, uint8_t *iv)
{
	const struct device *dev = ctx->device;
	int rc;

	/* Load the caller-supplied IV into the engine before starting. */
	CRYPTO_Control(DEV_HANDLER(dev), CSK_CRYPTO_SET_AES_IV, (uint32_t)iv);

	rc = csk6_do_encrypt(ctx, pkt->in_buf, pkt->in_len, pkt->out_buf);
	if (rc == 0) {
		pkt->out_len = pkt->in_len;
	}

	return rc;
}

/* cbc_crypt_hndlr for CBC decryption: program the IV, then decrypt. */
static int crypto_csk6_cbc_decrypt(struct cipher_ctx *ctx,
				    struct cipher_pkt *pkt, uint8_t *iv)
{
	const struct device *dev = ctx->device;
	int rc;

	/* Load the caller-supplied IV into the engine before starting. */
	CRYPTO_Control(DEV_HANDLER(dev), CSK_CRYPTO_SET_AES_IV, (uint32_t)iv);

	rc = csk6_do_decrypt(ctx, pkt->in_buf, pkt->in_len, pkt->out_buf);
	if (rc == 0) {
		pkt->out_len = pkt->in_len;
	}

	return rc;
}

/* block_crypt_hndlr for ECB decryption. */
static int crypto_csk6_ecb_decrypt(struct cipher_ctx *ctx,
				    struct cipher_pkt *pkt)
{
	int rc = csk6_do_decrypt(ctx, pkt->in_buf, pkt->in_len, pkt->out_buf);

	if (rc == 0) {
		/* Block ciphers produce exactly as much output as input. */
		pkt->out_len = pkt->in_len;
	}

	return rc;
}


static int crypto_csk6_session_setup(const struct device *dev,
				      struct cipher_ctx *ctx,
				      enum cipher_algo algo,
				      enum cipher_mode mode,
				      enum cipher_op op_type)
{
	if(ctx->keylen != 16u)
	{
		LOG_ERR("Unsupported keylen \n");
		return -EINVAL;
	}

    if (op_type == CRYPTO_CIPHER_OP_ENCRYPT)
    {
		switch (mode) 
        {
            case CRYPTO_CIPHER_MODE_ECB:
                ctx->ops.block_crypt_hndlr = crypto_csk6_ecb_encrypt;
                break;
            case CRYPTO_CIPHER_MODE_CBC:
                ctx->ops.cbc_crypt_hndlr = crypto_csk6_cbc_encrypt;
                break;
            default:
                return -EINVAL;
		}
	} else{
		switch (mode) 
        {
            case CRYPTO_CIPHER_MODE_ECB:
                ctx->ops.block_crypt_hndlr = crypto_csk6_ecb_decrypt;
                break;            
            case CRYPTO_CIPHER_MODE_CBC:
                ctx->ops.cbc_crypt_hndlr = crypto_csk6_cbc_decrypt;
                break;
            default:
                return -EINVAL;
		}

	}

	CRYPTO_PowerControl(DEV_HANDLER(dev), CSK_POWER_FULL);
    CRYPTO_Control(DEV_HANDLER(dev), CSK_CRYPTO_AES_KEY_MODE_USER, (uint32_t)ctx->key.bit_stream);

    return 0;
}

static int crypto_csk6_session_free(const struct device *dev,
				     struct cipher_ctx *ctx)
{
	struct crypto_csk6_data *data = DEV_DATA(dev);
	CRYPTO_PowerControl(DEV_HANDLER(dev), CSK_POWER_OFF);
	data->event = 0;
    return 0;
}

/* Report hardware capability flags (raw key, separate buffers, sync-only,
 * no IV prefix).
 */
static int crypto_csk6_query_caps(const struct device *dev)
{
	ARG_UNUSED(dev);

	return CSK6_CRYP_SUPPORT;
}

/* Async cipher completion is not supported — only CAP_SYNC_OPS is offered. */
static int crtpto_csk6_async_callback_set(const struct device *dev, cipher_completion_cb cb)
{
	ARG_UNUSED(dev);
	ARG_UNUSED(cb);

	return -EINVAL;
}
/*
 * Begin a hash session. Only one-shot SHA-256 is implemented; other
 * algorithms are rejected with -EINVAL. Clears any stale completion event,
 * installs the hash handler and powers up the engine.
 */
static int crypto_csk6_hash_session_step(const struct device *dev, struct hash_ctx *ctx,
					 enum hash_algo algo)
{
	if(algo != CRYPTO_HASH_ALGO_SHA256) {
		LOG_ERR("Unsupported this mode \n");
		return -EINVAL;
	}

	struct crypto_csk6_data *data = DEV_DATA(dev);
	data->event = 0;
	ctx->hash_hndlr = crypto_csk6_hase_encrypt;
	CRYPTO_PowerControl(DEV_HANDLER(dev), CSK_POWER_FULL);
	/* NOTE(review): CSK_CRYPTO_AES_KEY_MODE_Pos looks like a bit-position
	 * macro, not a control command (cipher setup uses
	 * CSK_CRYPTO_AES_KEY_MODE_USER) — verify against Driver_CRYPTO.h.
	 */
	CRYPTO_Control(DEV_HANDLER(dev), CSK_CRYPTO_AES_KEY_MODE_Pos, 0);

	return 0;
}
static int crypto_csk6_hash_session_free(const struct device *dev, struct hash_ctx *ctx)
{
	struct crypto_csk6_data *data = DEV_DATA(dev);
	CRYPTO_PowerControl(DEV_HANDLER(dev), CSK_POWER_OFF);
	data->event = 0;
	return 0;
}

/* Async hash completion is not supported — only CAP_SYNC_OPS is offered. */
static int crtpto_csk6_hash_async_callback_set(const struct device *dev, hash_completion_cb cb)
{
	ARG_UNUSED(dev);
	ARG_UNUSED(cb);

	return -EINVAL;
}
/*
 * Device init: bind the vendor driver handle, initialize (currently unused)
 * semaphores, register the completion callback and hook up the IRQ.
 * Always returns 0.
 */
static int crypto_csk6_init(const struct device *dev)
{
    struct crypto_csk6_data *data = DEV_DATA(dev);

    /* CRYPTO0() yields the vendor driver's resource handle for instance 0 */
    data->handle = CRYPTO0();

	k_sem_init(&data->device_sem, 1, 1);
	k_sem_init(&data->session_sem, 1, 1);

    /* dev is passed back as 'workspace' to CRYPTO_EventCallback */
    CRYPTO_Initialize(DEV_HANDLER(dev), CRYPTO_EventCallback, (void*)dev);

    IRQ_CONNECT(DT_INST_IRQN(0), DT_INST_IRQ(0, priority),
		    crypto_csk6_isr, DEVICE_DT_INST_GET(0), 0);

	irq_enable(DT_INST_IRQN(0));

	return 0;
}

static struct crypto_driver_api crypto_enc_funcs = {
	.cipher_begin_session = crypto_csk6_session_setup,
	.cipher_free_session = crypto_csk6_session_free,
	.cipher_async_callback_set = crtpto_csk6_async_callback_set,
	.query_hw_caps = crypto_csk6_query_caps,
	.hash_begin_session = crypto_csk6_hash_session_step,
	.hash_free_session = crypto_csk6_hash_session_free,
	.hash_async_callback_set = crtpto_csk6_hash_async_callback_set,
};

/* Instance 0 runtime state (zero-initialized; handle filled in at init). */
static struct crypto_csk6_data crypto_csk6_dev_data = {

};

/* Instance 0 configuration (no properties yet). */
static struct crypto_csk6_config crypto_csk6_dev_config = {

};

/* Register device instance 0 after kernel startup. */
DEVICE_DT_INST_DEFINE(0, crypto_csk6_init, NULL,
		    &crypto_csk6_dev_data,
		    &crypto_csk6_dev_config, POST_KERNEL,
		    CONFIG_CRYPTO_INIT_PRIORITY, (void *)&crypto_enc_funcs);
