import json
import os


from glob import glob
from typing import Any, Dict, List, Optional

import numpy as np
import torch
from PIL import Image
from torch.utils.data import Dataset


def _default_image_loader(path: str) -> torch.Tensor:
    """Load the image at ``path`` as a float CHW tensor scaled to [0, 1]."""
    rgb = Image.open(path).convert("RGB")
    chw = torch.from_numpy(np.array(rgb)).permute(2, 0, 1)
    return chw.float() / 255.0


def _to_tensor(array: np.ndarray) -> torch.Tensor:
    return torch.from_numpy(array).float()


class DistillCacheDataset(Dataset):
    """Dataset that reads cached teacher outputs from a directory.

    Expected folder layout per sample (each in its own subdirectory):

        sample_dir/
            deapth.npy (or depth.npy)
            point_cloud.npy
            frame_attention_frame_block_*.npy
            global_attention_global_block_*.npy
            image_paths.json
            meta.json (optional)

    The dataset loads image sequences referenced by ``image_paths.json``. If the JSON
    contains relative paths, they are resolved against ``sample_dir``.
    """

    # Filename prefixes identifying cached attention matrices; the integer that
    # follows the prefix in the filename is the layer index.
    FRAME_PREFIX = "frame_attention_frame_block_"
    GLOBAL_PREFIX = "global_attention_global_block_"

    def __init__(
        self,
        cache_dir: str,
        frame_layers: Optional[List[int]] = None,
        global_layers: Optional[List[int]] = None,
        image_transform=None,
        image_loader=_default_image_loader,
    ) -> None:
        """Scan ``cache_dir`` for sample folders and configure loading.

        Args:
            cache_dir: Root directory searched recursively for sample folders.
            frame_layers: Layer indices to keep for frame attention; ``None`` or
                an empty sequence keeps every cached layer.
            global_layers: Same filter applied to global attention.
            image_transform: Optional callable applied to each loaded image tensor.
            image_loader: Callable mapping an image path to a ``torch.Tensor``.

        Raises:
            FileNotFoundError: If no valid sample directory exists under
                ``cache_dir`` (propagated from ``_scan_sample_dirs``).
        """
        super().__init__()
        self.cache_dir = cache_dir
        self.sample_dirs = self._scan_sample_dirs(cache_dir)
        # NOTE: an empty list behaves like None (no filtering) — preserved for
        # backward compatibility with existing callers.
        self.frame_layers = set(frame_layers or [])
        self.global_layers = set(global_layers or [])
        self.image_transform = image_transform
        self.image_loader = image_loader

    @staticmethod
    def _scan_sample_dirs(cache_dir: str) -> List[str]:
        """Return the sorted list of directories that look like cached samples.

        A directory qualifies when it contains ``image_paths.json`` and at
        least one ``.npy`` file.
        """
        entries: List[str] = []
        for root, dirs, files in os.walk(cache_dir):
            if not files:
                continue
            if "image_paths.json" not in files:
                continue
            if not any(fname.endswith(".npy") for fname in files):
                continue
            entries.append(root)
        if not entries:
            raise FileNotFoundError(
                f"No sample directories found under {cache_dir}. Expected nested folders like"
                " <class>/<sequence>/<iter_x>/ containing image_paths.json and teacher .npy files."
            )
        # Sort for a deterministic sample order across runs / filesystems.
        entries.sort()
        return entries

    def __len__(self) -> int:
        return len(self.sample_dirs)

    def __getitem__(self, idx: int) -> Dict[str, Any]:
        """Load one cached sample: images, depth, points, attention, meta."""
        sample_dir = self.sample_dirs[idx]
        sample_name = os.path.basename(sample_dir.rstrip(os.sep))
        result: Dict[str, Any] = {"sample_dir": sample_dir, "name": sample_name, "prefix": sample_name}

        # Images: only stacked into a (T, C, H, W) tensor when at least one
        # path is listed; otherwise the "images" key is omitted entirely.
        image_paths = self._load_image_paths(sample_dir)
        images = []
        for path in image_paths:
            tensor = self.image_loader(path)
            if self.image_transform is not None:
                tensor = self.image_transform(tensor)
            images.append(tensor)
        if images:
            result["images"] = torch.stack(images, dim=0)
        result["image_paths"] = image_paths

        # Depth / point cloud (both optional; "deapth.npy" kept first for
        # compatibility with caches written with the misspelled name).
        depth = self._load_optional(sample_dir, ["deapth.npy", "depth.npy"])
        if depth is not None:
            result["depth"] = _to_tensor(depth)

        points = self._load_optional(sample_dir, ["point_cloud.npy", "points.npy"])
        if points is not None:
            result["points"] = _to_tensor(points)

        # Attention matrices, keyed by layer index.
        result["frame_attn"] = self._load_attention(sample_dir, self.FRAME_PREFIX, self.frame_layers)
        result["global_attn"] = self._load_attention(sample_dir, self.GLOBAL_PREFIX, self.global_layers)

        # Meta information (optional).
        meta_path = os.path.join(sample_dir, "meta.json")
        if os.path.exists(meta_path):
            with open(meta_path, "r", encoding="utf-8") as f:
                result["meta"] = json.load(f)

        return result

    def _load_attention(self, sample_dir: str, prefix: str, wanted_layers: set) -> Dict[int, torch.Tensor]:
        """Load every ``<prefix><layer>.npy`` file in ``sample_dir``.

        Uses ``os.listdir`` rather than ``glob`` so that sample directories
        containing glob metacharacters (``[``, ``]``, ``*``, ``?``) are still
        matched correctly. When ``wanted_layers`` is non-empty, only those
        layer indices are kept.
        """
        attn: Dict[int, torch.Tensor] = {}
        for fname in sorted(os.listdir(sample_dir)):
            if not (fname.startswith(prefix) and fname.endswith(".npy")):
                continue
            path = os.path.join(sample_dir, fname)
            layer = self._parse_layer_idx(path, prefix=prefix)
            if wanted_layers and layer not in wanted_layers:
                continue
            attn[layer] = _to_tensor(np.load(path))
        return attn

    def _load_image_paths(self, sample_dir: str) -> List[str]:
        """Read ``image_paths.json`` and resolve relative entries against ``sample_dir``.

        Accepts either a bare list of paths or a dict with an ``image_paths``
        key. Returns an empty list when the file is missing.
        """
        meta_path = os.path.join(sample_dir, "image_paths.json")
        if not os.path.exists(meta_path):
            return []
        with open(meta_path, "r", encoding="utf-8") as f:
            data = json.load(f)
        if isinstance(data, dict):
            paths = data.get("image_paths", [])
        else:
            paths = data
        resolved = []
        for path in paths:
            if not os.path.isabs(path):
                path = os.path.join(sample_dir, path)
            resolved.append(path)
        return resolved

    @staticmethod
    def _parse_layer_idx(path: str, prefix: str) -> int:
        """Extract the integer layer index from a cached attention filename.

        Only strips ``prefix`` when it actually starts the basename (a plain
        ``str.replace`` would remove the prefix anywhere in the name), then
        keeps the digits of the stem, so "23", "23.npy" and "23_extra.npy"
        all parse to 23.

        Raises:
            ValueError: If no digits remain after stripping.
        """
        name = os.path.basename(path)
        if name.startswith(prefix):
            name = name[len(prefix):]
        name = name.split(".")[0]
        # e.g. "23" or "23_extra"
        layer_str = ''.join(ch for ch in name if ch.isdigit())
        if not layer_str:
            raise ValueError(f"Unable to parse layer index from {path}")
        return int(layer_str)

    @staticmethod
    def _load_optional(sample_dir: str, filenames: List[str]) -> Optional[np.ndarray]:
        """Return the first existing ``.npy`` file among ``filenames``, else ``None``."""
        for fname in filenames:
            path = os.path.join(sample_dir, fname)
            if os.path.exists(path):
                return np.load(path)
        return None
