import os
import struct
import threading
from dataclasses import dataclass
from os import PathLike
from pathlib import Path
from typing import AnyStr, Tuple, Dict

import plyvel
from cachetools import LRUCache

from storage.blobstore.base import AbstractBlobStore
from storage.blobstore.compression import (
    CompressMethod,
    decompress_blob, compress_blob,
)
from storage.blobstore.crypto import (
    EncryptionAlgorithm,
    decrypt_blob, encrypt_blob,
)

# On-disk blob layout: a fixed-size big-endian header followed by the
# (compressed, then encrypted) payload.  Fields, in order:
#   H  version  - header format version (written as 1 by _flush_to_disk)
#   H  alg      - EncryptionAlgorithm value
#   H  comp     - CompressMethod value
#   L  size     - payload length in bytes (after compression/encryption)
#   16s nonce_iv - cipher nonce/IV; struct zero-pads the empty bytes used
#                  in plaintext mode
BLOB_HEADER_FMT = '>HHHL16s'  # version,alg,comp,size,nonce_iv
BLOB_HEADER_SIZE = struct.calcsize(BLOB_HEADER_FMT)


@dataclass
class BlobHeader:
    """Parsed form of the fixed-size header stored in front of every blob.

    Mirrors BLOB_HEADER_FMT field-for-field; produced by
    PlyvelBlobStore._read_raw_blob and consumed when decoding a stored value.
    """

    version: int                 # header format version (1 as currently written)
    algo: EncryptionAlgorithm    # NOTE(review): _read_raw_blob fills this with the raw int from struct.unpack — confirm enum coercion where it matters
    comp: CompressMethod         # same caveat as `algo`
    length: int                  # payload length in bytes after compression/encryption
    nonce_iv: bytes              # 16-byte nonce/IV (zero-padded when plaintext)


class PlyvelBlobStore(AbstractBlobStore):
    """Blob store backed by a LevelDB database (via plyvel).

    Each value is compressed, then encrypted, and stored behind a fixed-size
    binary header (see BLOB_HEADER_FMT).  Writes are buffered in memory and
    committed in a single LevelDB write batch once ``batch`` entries have
    accumulated, or when :meth:`flush` is called.  Decoded reads are kept in
    an LRU cache.
    """

    def __init__(self,
                 path: AnyStr | PathLike,
                 *,
                 encrypt: EncryptionAlgorithm | int = EncryptionAlgorithm.AES128CTR,
                 secret: bytes | None = None,
                 compress: CompressMethod | int = CompressMethod.ZSTD,
                 batch: int = 100,
                 cache_size: int = 1024):
        """Open (or create) the LevelDB database at ``path``.

        :param path: directory of the LevelDB database; created if missing.
        :param encrypt: encryption algorithm (enum member or its int value).
        :param secret: key material for encrypt_blob/decrypt_blob; may be
            None (e.g. for EncryptionAlgorithm.Plaintext).
        :param compress: compression method (enum member or its int value).
        :param batch: number of buffered puts that triggers a disk flush.
        :param cache_size: max number of decoded blobs held in the LRU cache.
        """
        self._path = Path(path)
        self._db = plyvel.DB(str(self._path), create_if_missing=True)
        self._encrypt_alg = EncryptionAlgorithm(encrypt)
        self._secret = secret
        self._compress_method = CompressMethod(compress)
        self._cache = LRUCache(cache_size)
        # Guards the write buffer, the cache, and batched flushes.  Reentrant
        # because put_blob holds it while calling _flush_to_disk.
        self._lock = threading.RLock()

        # Pending writes: raw (uncompressed, unencrypted) data not yet on disk.
        self._put_blob_buffer_size = batch
        self._put_blob_buffer: Dict[bytes, bytes] = dict()

    def _read_raw_blob(self, key: bytes) -> Tuple[BlobHeader | None, bytes | None]:
        """Fetch ``key`` from LevelDB and split it into (header, raw payload).

        Returns ``(None, None)`` when the key does not exist.  The payload is
        still compressed/encrypted; the caller must decode it.

        :raises ValueError: if the stored value is too short to hold a header.
        """
        data: bytes = self._db.get(key)
        if data is None:
            return None, None
        if len(data) <= BLOB_HEADER_SIZE:
            raise ValueError(f'expect blob length > {BLOB_HEADER_SIZE} bytes, got {len(data)} bytes')

        version, algo, comp, length, nonce_iv = struct.unpack(BLOB_HEADER_FMT, data[:BLOB_HEADER_SIZE])
        # Coerce the raw ints back to their enums so hdr matches the
        # BlobHeader annotations (both enum constructors accept int values,
        # as __init__ already relies on).
        hdr = BlobHeader(version=version,
                         algo=EncryptionAlgorithm(algo),
                         comp=CompressMethod(comp),
                         length=length,
                         nonce_iv=nonce_iv)
        return hdr, data[BLOB_HEADER_SIZE:]

    def get_blob(self, key: bytes) -> bytes | None:
        """Return the decoded blob for ``key``, or None if it does not exist.

        Lookup order: LRU cache, then the in-memory write buffer, then disk.
        Membership tests (not truthiness) are used so an empty blob (b'')
        still counts as a hit.
        """
        with self._lock:
            try:
                return self._cache[key]
            except KeyError:
                pass
            if key in self._put_blob_buffer:
                return self._put_blob_buffer[key]

        hdr, payload = self._read_raw_blob(key)
        if hdr is None or payload is None:
            return None

        # Decode in the reverse order of _flush_to_disk: decrypt, then decompress.
        payload = decrypt_blob(hdr.algo, self._secret, hdr.nonce_iv, payload)
        payload = decompress_blob(hdr.comp, payload)
        with self._lock:
            self._cache[key] = payload
        return payload

    def put_blob(self, key: bytes, data: bytes):
        """Buffer ``data`` under ``key``; flush the batch to disk once full.

        Bug fixes vs. the previous implementation:
        - the incoming entry is ALWAYS stored (previously a put arriving with
          a full buffer only triggered a flush and the data was dropped);
        - the read cache is updated so a later get_blob on an overwritten
          key does not return a stale value.
        """
        with self._lock:
            self._put_blob_buffer[key] = data
            self._cache[key] = data
            if len(self._put_blob_buffer) >= self._put_blob_buffer_size:
                self._flush_to_disk()

    def _flush_to_disk(self):
        """Encode every buffered entry and commit them in one write batch.

        Each value is compressed, then encrypted, and prefixed with a packed
        header (BLOB_HEADER_FMT).  A fresh 16-byte nonce/IV is drawn per blob
        unless the store runs in plaintext mode, in which case struct
        zero-pads the empty nonce.
        """
        with self._lock:
            with self._db.write_batch() as wb:
                for key, raw in self._put_blob_buffer.items():
                    nonce_iv = b''
                    if self._encrypt_alg != EncryptionAlgorithm.Plaintext:
                        nonce_iv = os.urandom(16)

                    payload = compress_blob(self._compress_method, raw)
                    payload = encrypt_blob(self._encrypt_alg, self._secret, nonce_iv, payload)
                    header = struct.pack(BLOB_HEADER_FMT,
                                         1,  # header format version
                                         self._encrypt_alg,
                                         self._compress_method,
                                         len(payload),
                                         nonce_iv)
                    wb.put(key, header + payload)
            self._put_blob_buffer = dict()

    def flush(self):
        """Force all buffered writes to disk immediately."""
        self._flush_to_disk()
