from abc import abstractmethod
from glob import glob
from multiprocessing import Pool
import os
from typing import List, Optional, Tuple, Union
import warnings
import cv2
import torch
from torch.utils.data import Dataset
import torchvision.transforms.functional as TF
from PIL import Image, ImageDraw
import numpy as np
from scipy.ndimage import distance_transform_edt
import xml.etree.ElementTree as ET
from scipy.ndimage.measurements import find_objects
from skimage.morphology import binary_erosion
import scipy.io as sio
from stardist import star_dist
import cv2


def edt_prob(lbl_img: np.ndarray) -> np.ndarray:
    """Perform EDT on each labeled object and normalize.

    For every labeled object (label ids 1..N) the Euclidean distance
    transform is computed inside the object and divided by the object's
    maximum distance, yielding a float32 "probability" map in [0, 1] that
    peaks at each object's center and falls to 0 at its boundary.
    Background stays 0.  (Same approach as StarDist's ``edt_prob``.)
    """

    def grow(sl, interior):
        # Expand an object's bounding-box slice by 1 px on every side that
        # does not already touch the image border, so the EDT sees a
        # background margin around the object.
        return tuple(
            slice(s.start - int(w[0]), s.stop + int(w[1])) for s, w in zip(sl, interior)
        )

    def shrink(interior):
        # Inverse of grow(): a slice that strips the 1 px margin again
        # (None keeps the full extent on sides that were not grown).
        return tuple(slice(int(w[0]), (-1 if w[1] else None)) for w in interior)

    # A constant, fully-foreground image has no background pixels, so its
    # EDT is ill-defined; pad with background to make it well-defined.
    constant_img = lbl_img.min() == lbl_img.max() and lbl_img.flat[0] > 0
    if constant_img:
        lbl_img = np.pad(lbl_img, ((1, 1),) * lbl_img.ndim, mode="constant")
        warnings.warn(
            "EDT of constant label image is ill-defined. (Assuming background around it.)"
        )

    objects = find_objects(lbl_img)  # one bounding-box slice per label id (None if id absent)
    prob = np.zeros(lbl_img.shape, np.float32)
    for i, sl in enumerate(objects, 1):
        if sl is None:
            continue
        # For each axis: (bbox start is inside image, bbox stop is inside image).
        interior = [(s.start > 0, s.stop < sz) for s, sz in zip(sl, lbl_img.shape)]
        shrink_slice = shrink(interior)
        grown_mask = lbl_img[grow(sl, interior)] == i
        mask = grown_mask[shrink_slice]
        edt = distance_transform_edt(grown_mask)[shrink_slice][mask]
        # Per-object normalization; epsilon guards against division by zero.
        prob[sl][mask] = edt / (np.max(edt) + 1e-10)
    if constant_img:
        # Undo the background padding added above.
        prob = prob[(slice(1, -1),) * lbl_img.ndim].copy()
    return prob


def gen_dist_label(label_path_or_label: Union[str, torch.Tensor],
                    dist_path: Optional[str]=None) -> Optional[torch.Tensor]:
    """Compute the normalized per-object EDT map for an instance label image.

    ``label_path_or_label`` is either a path to a saved label tensor or the
    label tensor itself.  When ``dist_path`` is given, the map is saved there
    as float16 and ``None`` is returned; otherwise the float32 tensor is
    returned directly.
    """
    if isinstance(label_path_or_label, str):
        label = torch.load(label_path_or_label)
    else:
        label = label_path_or_label
    dist = torch.from_numpy(edt_prob(label.long().cpu().numpy()))
    if dist_path is None:
        return dist.float()
    torch.save(dist.to(torch.float16), dist_path)
    return None


class BaseDataset(Dataset):
    """Common scaffolding for the nucleus-segmentation datasets in this file.

    Subclasses implement the four path/loading hooks below; this base class
    handles optional in-memory caching, optional transform application, and
    one-off generation of cached distance (EDT) maps for training labels.
    """

    def __init__(
        self,
        base_dir: str,
        mode: Union[str, int] = "train",
        with_cls: bool = False,
        transform=None,
        load_from_memory: bool = True,
        re_gen: bool = False,
        save_dist_label: bool = True,
    ):
        super().__init__()
        # Assign attributes BEFORE calling the path loaders: subclass
        # implementations of load_*_paths() read self.base_dir / self.mode.
        # (Previously base_dir was set afterwards, which only worked because
        # every subclass pre-set it in its own __init__.)
        self.base_dir = base_dir
        self.mode = mode
        self.with_cls = with_cls
        self.transform = transform
        self.load_from_memory = load_from_memory
        self.image_paths = self.load_image_paths()
        self.label_paths = self.load_label_paths()
        self.dist_paths = self.load_dist_paths()
        self.len = len(self.image_paths)
        # Regenerate cached EDT maps when requested or when any is missing.
        if save_dist_label and mode == 'train' and (
            re_gen or not all(map(os.path.exists, self.dist_paths))
        ):
            self.gen_dist()

        self.load_data = (
            self.load_data_from_memory
            if self.load_from_memory
            else self.load_data_from_disk
        )

    def __getitem__(self, index):
        """Return one sample, applying the transform to all tensors if set."""
        image, *label = self.load_data(index)
        if self.transform is not None:
            image, *label = self.transform(image, *label)
        return image, *label

    def __len__(self):
        return self.len

    def load_data_from_memory(self, index: int):
        """Serve samples from an in-memory cache, filling it lazily on first use."""
        if not hasattr(self, "cache"):
            print('Load data into memory...')
            self.cache = [self.load_data_from_disk(i) for i in range(self.len)]
            print('Load data into memory finished')
        return self.cache[index]

    def gen_dist(self) -> None:
        """Precompute and cache a normalized EDT map next to each training label."""
        os.makedirs(os.path.join(self.base_dir, "train", "Dists"), exist_ok=True)
        for label_path in self.label_paths:
            gen_dist_label(label_path, label_path.replace("Labels", "Dists"))

    @abstractmethod
    def load_image_paths(self) -> List[str]:
        """Return the list of image file paths for the current mode."""
        raise NotImplementedError

    @abstractmethod
    def load_label_paths(self) -> List[str]:
        """Return label paths aligned index-for-index with image_paths."""
        raise NotImplementedError

    @abstractmethod
    def load_dist_paths(self) -> List[str]:
        """Return cached-EDT paths aligned with image_paths (may be empty)."""
        raise NotImplementedError

    @abstractmethod
    def load_data_from_disk(self, index: int):
        """Load one sample (image plus label tensors) from disk."""
        raise NotImplementedError


class MoNuSeg(BaseDataset):
    """MoNuSeg nuclei-segmentation dataset.

    Expects ``<base_dir>/{train,test}/Tissue Images/*.tif`` plus the official
    Aperio-style XML annotations under ``Annotations``.  On first use the XML
    polygons are rasterized into instance-label tensors saved under
    ``Labels`` (one ``.pt`` file per image).
    """

    def __init__(
        self,
        base_dir: str,
        re_gen: bool = False,
        mode: Union[str, int] = "train",
        transform=None,
        load_from_memory: bool = True,
        n_rays: int = 4,
        **kwargs,
    ):
        assert os.path.exists(base_dir), f"Path {base_dir} does not exist"
        self.base_dir = base_dir
        self.n_rays = n_rays
        # Rasterize the XML annotations once; later runs reuse saved labels.
        if not os.path.exists(os.path.join(base_dir, "train", "Labels")):
            self.first_load()
        BaseDataset.__init__(
            self,
            base_dir=base_dir,
            mode=mode,
            with_cls=False,
            transform=transform,
            load_from_memory=load_from_memory,
            re_gen=re_gen,
        )

    def first_load(self) -> None:
        """One-off conversion of every XML annotation into a saved label tensor."""
        print("First load, this may take a while...")
        pairs = []
        for split in ("train", "test"):
            os.makedirs(os.path.join(self.base_dir, split, "Labels"), exist_ok=True)
            ann_paths = glob(os.path.join(self.base_dir, split, "Annotations/*.xml"))
            pairs += [
                (path, path.replace("Annotations", "Labels").replace("xml", "pt"))
                for path in ann_paths
            ]
        # os.cpu_count() may return None, and small machines may have <= 2
        # cores; clamp to at least one worker instead of crashing.
        workers = max(1, (os.cpu_count() or 1) - 2)
        with Pool(workers) as pool:
            pool.starmap(self._ann_2_label, iterable=pairs)

    def _ann_2_label(self, ann_path: str, label_path: str) -> None:
        """Rasterize one XML annotation file into an int16 instance-label map.

        BUGFIX: the original definition lacked ``self``, so calling it as a
        bound method through ``Pool.starmap`` passed three arguments to a
        two-parameter function and raised ``TypeError``.
        """
        tree = ET.parse(ann_path)
        regions = tree.getroot().find("Annotation").find("Regions")
        width, height = 1000, 1000  # MoNuSeg tiles are 1000x1000 px
        labels = np.zeros((height, width), dtype=np.int16)

        def rasterize(region) -> np.ndarray:
            # Draw one nucleus polygon onto a blank single-channel canvas.
            canvas = Image.new("L", (width, height))
            draw = ImageDraw.Draw(canvas)
            xy = [
                (float(vertex.get("X")), float(vertex.get("Y")))
                for vertex in region.find("Vertices")
            ]
            draw.polygon(xy, fill=1, outline=1)
            return np.array(canvas)

        for idx, region in enumerate(regions.findall("Region")):
            labels[rasterize(region) != 0] = idx + 1  # instance ids start at 1
        torch.save(torch.from_numpy(labels), label_path)

    def load_image_paths(self):
        return glob(os.path.join(self.base_dir, self.mode, "Tissue Images", "*.tif"))

    def load_label_paths(self):
        return [
            path.replace("Tissue Images", "Labels").replace("tif", "pt")
            for path in self.image_paths
        ]

    def load_dist_paths(self):
        return [
            path.replace("Tissue Images", "Dists").replace("tif", "pt")
            for path in self.image_paths
        ]

    def load_data_from_disk(self, index: int):
        """Return (image, binary/instance label, EDT map, star distances)."""
        image = TF.to_tensor(Image.open(self.image_paths[index]))
        label = torch.load(self.label_paths[index])
        if self.mode == "train":
            # Star-convex distances per pixel, channels-first (n_rays, H, W).
            distances = torch.from_numpy(
                np.transpose(star_dist(label.cpu().numpy(), self.n_rays), (2, 0, 1))
            )
            dist = torch.load(self.dist_paths[index])
            label = label.bool()  # training uses a foreground mask
        else:
            # Evaluation keeps the instance labels; placeholders elsewhere.
            distances = torch.zeros_like(label)
            dist = torch.zeros_like(label)
        return image, label.long(), dist.float(), distances

class CoNSeP(BaseDataset):
    """CoNSeP nuclei dataset stored as ``Images/*.png`` + HoVer-Net ``.mat`` labels."""

    def __init__(
        self,
        base_dir: str,
        re_gen: bool = False,
        mode: Union[str, int] = "train",
        transform=None,
        load_from_memory: bool = True,
        n_rays: int = 4,
        **kwargs,
    ):
        assert os.path.exists(base_dir), f"Path {base_dir} does not exist"
        self.base_dir = base_dir
        self.n_rays = n_rays
        BaseDataset.__init__(
            self,
            base_dir=base_dir,
            mode=mode,
            with_cls=False,
            transform=transform,
            load_from_memory=load_from_memory,
            re_gen=re_gen,
            save_dist_label=True,
        )

    def _ann_2_label(self, ann_path):
        # HoVer-Net style .mat files keep the instance map under "inst_map".
        return sio.loadmat(ann_path)["inst_map"].astype("int32")

    def load_data_from_disk(self, index: int):
        """Return (image, binary/instance label, EDT map, star distances)."""
        rgb = Image.open(self.image_paths[index]).convert("RGB")
        image = TF.to_tensor(rgb)
        label = torch.from_numpy(self._ann_2_label(self.label_paths[index]))
        if self.mode == "train":
            rays = star_dist(label.cpu().numpy(), self.n_rays)
            distances = torch.from_numpy(np.transpose(rays, (2, 0, 1)))
            dist = torch.load(self.dist_paths[index]).float()
            label = label.bool()
        else:
            distances = torch.zeros_like(label)
            dist = torch.zeros_like(label)
        return image, label.long(), dist, distances

    def load_image_paths(self):
        return glob(os.path.join(self.base_dir, self.mode, "Images", "*.png"))

    def load_label_paths(self):
        return [
            p.replace("Images", "Labels").replace("png", "mat")
            for p in self.image_paths
        ]

    def load_dist_paths(self):
        return [
            p.replace("Images", "Dists").replace("png", "pt")
            for p in self.image_paths
        ]

    def gen_dist(self) -> None:
        """Cache normalized EDT maps; labels come from .mat, not saved tensors."""
        os.makedirs(os.path.join(self.base_dir, "train", "Dists"), exist_ok=True)
        for label_path in self.label_paths:
            target = label_path.replace("Labels", "Dists").replace('mat', 'pt')
            label = torch.from_numpy(self._ann_2_label(label_path))
            gen_dist_label(label, target)


class CPM17(CoNSeP):
    """CPM-17 dataset.

    Shares the CoNSeP on-disk layout (``Images``/``Labels``/``Dists``) and
    loading logic, so everything except construction is inherited.
    """

    def __init__(
        self,
        base_dir: str,
        re_gen: bool = False,
        mode: Union[str, int] = "train",
        transform=None,
        load_from_memory: bool = True,
        n_rays: int = 4,
        **kwargs,
    ):
        assert os.path.exists(base_dir), f"Path {base_dir} does not exist"
        self.base_dir = base_dir
        super().__init__(
            base_dir=base_dir,
            re_gen=re_gen,
            mode=mode,
            transform=transform,
            load_from_memory=load_from_memory,
            n_rays=n_rays,
        )


class Lizard(BaseDataset):
    """Lizard dataset, read per cross-validation fold as listed in ``info.csv``.

    Requires a ``fold_id`` keyword argument selecting the fold.  Distance
    (EDT) maps are computed on the fly in :meth:`load_data_from_disk`, so no
    ``Dists`` cache is created (``save_dist_label=False``).
    """

    def __init__(
        self,
        base_dir: str,
        mode: str = "train",
        with_cls: bool = False,
        transform=None,
        load_from_memory: bool = True,
        re_gen: bool = False,
        n_rays: int = 4,
        **kwargs,
    ):
        self.fold_id = int(kwargs["fold_id"])  # required: which CV fold to load
        self.base_dir = base_dir
        self.n_rays = n_rays
        self._read_info()
        # Last two positional args: re_gen=False, save_dist_label=False.
        BaseDataset.__init__(
            self,
            base_dir,
            mode,
            with_cls,
            transform,
            load_from_memory,
            False,
            False,
        )

    def _read_info(self) -> None:
        """Collect file stems belonging to self.fold_id from Lizard_Labels/info.csv."""
        self.file_names = []
        with open(os.path.join(self.base_dir, "Lizard_Labels", "info.csv"), "r") as f:
            f.readline()  # skip the CSV header line
            for line in f.readlines():
                fields = line.strip().split(",")
                # Column layout: name, ..., fold id in column 2.
                if int(fields[2]) == self.fold_id:
                    self.file_names.append(fields[0])

    def load_image_paths(self) -> List[str]:
        return [os.path.join(self.base_dir, "Lizard_Images", name + '.png') for name in self.file_names]

    def load_label_paths(self) -> List[str]:
        return [path.replace("Lizard_Images", "Lizard_Labels/Labels").replace("png", "mat") for path in self.image_paths]

    def load_dist_paths(self) -> List[str]:
        # No cached EDT maps for Lizard; they are generated on the fly.
        return []

    def _get_label(self, label_path: str) -> Tuple[torch.Tensor, torch.Tensor]:
        """Load one .mat label file and return (instance_map, class_map)."""
        label = sio.loadmat(label_path)
        inst_map = torch.from_numpy(label["inst_map"]).long()
        # atleast_1d guards the single-nucleus case, where squeeze() would
        # collapse the class vector to a 0-d scalar and break the list
        # concatenation below with a TypeError.
        classes = np.atleast_1d(np.squeeze(label["class"])).tolist()
        classes = [0] + classes  # index 0 = background class
        classes = torch.tensor(classes).long()
        # Map each instance id to its class id pixel-wise.
        clss_map = classes[inst_map]
        return inst_map, clss_map

    def load_data_from_disk(self, index: int):
        """Return (image, instance label[, class label], EDT map)."""
        image = TF.to_tensor(Image.open(self.image_paths[index]))
        inst_label, cls_label = self._get_label(self.label_paths[index])
        # EDT maps are computed per sample; evaluation gets a placeholder.
        dist = gen_dist_label(inst_label) if self.mode == 'train' else torch.zeros_like(inst_label)
        if self.with_cls:
            return image, inst_label.long(), cls_label, dist.float()
        return image, inst_label.long(), dist.float()


class Pannuke(BaseDataset):
    
    def __init__(
        self,
        base_dir: str,
        mode: str = "train",
        with_cls: bool = False,
        transform=None,
        load_from_memory: bool = True,
        re_gen: bool = False,
        **kwargs
    ):
        self.fold_id = int(kwargs["fold_id"])
        self.base_dir = base_dir
        self._read_info()
        BaseDataset.__init__(
            self,
            base_dir,
            mode,
            with_cls,
            transform,
            load_from_memory,
            False,
            False,
        )
