import dataclasses
import math
import struct
import typing
from pathlib import Path

from typing_extensions import Buffer

from storage.blobstore.base import AbstractBlobStore
from storage.hash_algorithms import farm128
from storage.utils import common_progress, mmap_view


@dataclasses.dataclass
class MetaFileHeader:
    """
    MetaFile header

    +---------+---------+------------+
    | magic   | version | block-size |
    +---------+---------+------------+
    | char[4] | uint16  | uint32     |
    +---------+---------+------------+
    """
    MAGIC: typing.ClassVar[bytes] = b'DEDP'
    FMT: typing.ClassVar[str] = '>4sHL'
    SIZE: typing.ClassVar[int] = struct.calcsize(FMT)

    magic: bytes
    version: int
    block_size: int

    @classmethod
    def from_bytes(cls, data: Buffer) -> 'MetaFileHeader':
        magic, version, block_size = struct.unpack(cls.FMT, data)
        return cls(magic=magic, version=version, block_size=block_size)

    def to_bytes(self) -> bytes:
        return struct.pack(self.FMT, self.magic, self.version, self.block_size)


@dataclasses.dataclass
class MetaFileBlock:
    """
    MetaFile block

    +----------+-----------+
    | digest   | collision |
    +----------+-----------+
    | char[16] | uint16    |
    +----------+-----------+
    """

    FMT: typing.ClassVar[str] = '>16sH'
    SIZE: typing.ClassVar[int] = struct.calcsize(FMT)
    COLLISION_MAXIMUM: typing.ClassVar[int] = 0xFFFF

    digest: bytes
    collision: int

    @classmethod
    def from_bytes(cls, data: Buffer) -> 'MetaFileBlock':
        digest, collision = struct.unpack(cls.FMT, data)
        return cls(digest=digest, collision=collision)

    def to_bytes(self) -> bytes:
        return struct.pack(self.FMT, self.digest, self.collision)


def dedup_file(src: Path, dest: Path, block_size: int, blob_store: AbstractBlobStore, *,
               dry: bool = False, show_progress: bool = False):
    """
    Split ``src`` into ``block_size``-byte blocks, store each unique block in
    ``blob_store`` keyed by its serialized :class:`MetaFileBlock`, and write a
    metafile (header + one block record per source block) to ``dest/src.name``.

    :param src: source file to deduplicate.
    :param dest: existing directory the metafile is written into.
    :param block_size: size of each deduplication block in bytes.
    :param blob_store: backing store for block contents.
    :param dry: NOTE(review): accepted but never honored in this body — confirm intended.
    :param show_progress: render a progress bar while processing.
    :raises Exception: if ``dest`` is not a directory, or a digest exhausts
        all ``COLLISION_MAXIMUM`` collision slots.
    """
    # Fail fast before allocating progress machinery or touching dest.
    if not dest.is_dir():
        raise Exception(f'{dest} is not a directory')

    src_size = src.stat().st_size  # stat once; used for both progress total and block count
    progress = common_progress(show_progress)
    task = None
    if show_progress:
        task = progress.add_task('[bold]dedup', total=src_size)

    total_blocks = math.ceil(src_size / block_size)
    dst = dest / src.name
    dst_size = MetaFileHeader.SIZE + total_blocks * MetaFileBlock.SIZE
    with (progress,
          mmap_view(src, readonly=True) as src_view,
          mmap_view(dst, readonly=False, truncate=dst_size) as dst_view):
        # Walk the source in block_size strides while advancing through the
        # metafile body one fixed-size record at a time.
        for src_from, dst_from in zip(range(0, len(src_view), block_size),
                                      range(MetaFileHeader.SIZE, dst_size, MetaFileBlock.SIZE)):
            src_to = src_from + block_size
            dst_to = dst_from + MetaFileBlock.SIZE

            collision = 0
            with src_view[src_from:src_to] as data:
                digest = farm128(bytes(data))
                while collision < MetaFileBlock.COLLISION_MAXIMUM:
                    key = MetaFileBlock(digest=digest, collision=collision).to_bytes()
                    blob = blob_store.get_blob(key)
                    if blob is None:
                        # Unseen content: store it, then reference it.
                        blob_store.put_blob(key, bytes(data))
                        dst_view[dst_from:dst_to] = key
                        break
                    if blob == data:
                        # Identical block already stored: just reference it.
                        dst_view[dst_from:dst_to] = key
                        break
                    # Same digest, different content: try the next collision slot.
                    collision += 1

                if collision >= MetaFileBlock.COLLISION_MAXIMUM:
                    raise Exception('too many hash collision occurred')
                if show_progress:
                    # Advance by the actual bytes consumed so the final
                    # partial block does not overshoot the total.
                    progress.update(task, advance=len(data))

        # Header is written last, after every block record has landed.
        header = MetaFileHeader(magic=MetaFileHeader.MAGIC, version=1, block_size=block_size)
        dst_view[:MetaFileHeader.SIZE] = header.to_bytes()


def restore_file(src: Path, dest: Path, blob_store: AbstractBlobStore, *, dry: bool = False,
                 show_progress: bool = False):
    """
    Reassemble the original file from the metafile ``src``, writing the result
    to ``dest/src.name`` by looking each block record's key up in ``blob_store``.

    :param src: metafile produced by :func:`dedup_file`.
    :param dest: directory the restored file is written into.
    :param blob_store: store holding the block contents.
    :param dry: NOTE(review): accepted but never honored in this body — confirm intended.
    :param show_progress: NOTE(review): accepted but never honored in this body — confirm intended.
    :raises Exception: on a bad magic, an unsupported version, or a missing blob.
    """
    with mmap_view(src, readonly=True) as src_view, (dest / src.name).open('wb') as df:
        # Parse the header through the dataclass instead of a raw struct.unpack,
        # keeping the format knowledge in one place (MetaFileHeader).
        header = MetaFileHeader.from_bytes(src_view[:MetaFileHeader.SIZE])
        if header.magic != MetaFileHeader.MAGIC:
            raise Exception('invalid magic')

        if header.version != 1:
            raise Exception('invalid version')

        # Each fixed-size record in the body is itself the blob-store key.
        for src_from in range(MetaFileHeader.SIZE, len(src_view), MetaFileBlock.SIZE):
            src_to = src_from + MetaFileBlock.SIZE
            with src_view[src_from:src_to] as key_data:
                data = blob_store.get_blob(bytes(key_data))
                if data is None:
                    raise Exception(f'missing blob {key_data.hex()}')
            df.write(data)
