import warnings
from collections.abc import Iterable, MutableMapping
from functools import cached_property
from itertools import count
from typing import Callable

from zkl_pyutils_fsspec import FsLike, resolve_fs

from zkl_aiutils_datasets.formats.directory import ChainedDirectoryDatasets, ChainedDirectoryDatasetsWriter
from zkl_aiutils_datasets.processing import KnownSizeChainedDatasets
from .bytes import BytesDataset, BytesDatasetWriter


class ChunkedBytesDataset(ChainedDirectoryDatasets[bytes, BytesDataset]):
    """Chained view over a directory of byte-dataset chunks.

    Aggregates per-chunk size information from ``self.children``
    (one ``BytesDataset`` per chunk directory).
    """

    @cached_property
    def samples_bytes_n(self) -> Iterable[int]:
        # Chain each chunk's per-sample size sequence into one
        # known-size iterable spanning the whole dataset.
        per_chunk_sizes = [child.samples_bytes_n for child in self.children]
        return KnownSizeChainedDatasets(per_chunk_sizes)

    @cached_property
    def total_bytes_n(self) -> int:
        # Total payload size is simply the sum over all chunks.
        return sum(child.total_bytes_n for child in self.children)


class ChunkedBytesDatasetWriter(ChainedDirectoryDatasetsWriter[bytes, BytesDatasetWriter]):
    """Writer that splits a stream of byte samples into size-limited chunk
    sub-datasets named ``chunk_<i>``.

    Whenever the chunk index gains a digit (e.g. 9 -> 10, 99 -> 100), all
    previously written chunk directories are renamed with additional leading
    zeros (``chunk_0`` -> ``chunk_00``) so that lexicographic and numeric
    ordering of chunk names stay in agreement.
    """

    def __init__(self,
        fs: FsLike, *,
        chunk_size: int = 500 * (2 ** 20),  # 500MB
        loader: Callable | type | None = ChunkedBytesDataset,
    ):
        """
        :param fs: filesystem (or fs-like spec) rooted at the dataset dir.
        :param chunk_size: per-chunk byte budget passed to each child writer.
        :param loader: dataset class/factory handed to the parent writer.
        """
        fs = resolve_fs(fs)
        names = self._iter_chunk_names()

        def child_factory(child_path):
            # Each chunk is an independent bytes dataset capped at chunk_size.
            return BytesDatasetWriter(child_path, size_limit=chunk_size)

        super().__init__(fs, names, child_factory, loader=loader)
        self._chunk_size = chunk_size

    def _iter_chunk_names(self):
        """Yield an endless stream of chunk directory names.

        Lazy on purpose: the generator is created before ``super().__init__``
        runs, but nothing executes until the parent pulls the first name, by
        which point ``self._fs`` / ``self._metadata`` exist.
        """
        for chunk_i in count():
            self._on_new_chunk(chunk_i)
            # str(chunk_i) always has exactly the current maximum digit
            # count, matching the padding applied by _on_new_chunk.
            yield f"chunk_{chunk_i}"

    def _on_new_chunk(self, chunk_i: int):
        """Re-pad all earlier chunk names when ``chunk_i`` gains a digit.

        No-op unless ``chunk_i`` has more digits than ``chunk_i - 1``; in
        that case every previous chunk directory (and its metadata entry,
        if tracked) is renamed from the old zero-padded width to the new one.
        """
        if chunk_i <= 0:
            return

        new_digits_n = len(str(chunk_i))
        old_digits_n = len(str(chunk_i - 1))
        if old_digits_n >= new_digits_n:
            return  # digit count unchanged; existing names already align

        # Loop-invariant: resolve the metadata children mapping once.
        metadata_children = self._metadata.get('children')
        if metadata_children is not None:
            assert isinstance(metadata_children, MutableMapping)

        for previous_chunk_i in range(chunk_i):
            src_chunk_dir_name = f"chunk_{previous_chunk_i:0{old_digits_n}d}"
            dst_chunk_dir_name = f"chunk_{previous_chunk_i:0{new_digits_n}d}"
            self._fs.mv(src_chunk_dir_name, dst_chunk_dir_name, recursive=True)
            if metadata_children is not None:
                metadata_children[dst_chunk_dir_name] = \
                    metadata_children.pop(src_chunk_dir_name)

    def write(self, sample: bytes):
        """Write one sample, warning when it alone exceeds the chunk size."""
        if len(sample) > self._chunk_size:
            warnings.warn(
                f"Found a large sample with size={len(sample)}, "
                f"which is larger than chunk_size={self._chunk_size}. "
                f"Consider setting a larger chunk_size.")
        super().write(sample)
