import torch
from torch.utils.data import Sampler, SubsetRandomSampler

import bisect
from typing import Iterator, List, Sized


class CurriculumLearningSampler(Sampler[int]):
    r"""A Curriculum Learning Sampler with Human Designed Graph Difficulty Measurer and Training Scheduler.
        https://arxiv.org/pdf/2010.13166.pdf

    Samples easy items first and progressively mixes in harder difficulty
    levels each time the running sample count reaches a ``learning_pace``
    milestone.  An item's difficulty is its ``'label'`` value, bucketed by
    ``thresholds``; items whose label is at or beyond the largest threshold
    are excluded entirely.

    Args:
        data_source (Dataset): dataset to sample from; each item must be
            indexable with ``['label']`` yielding a numeric difficulty score.
        thresholds: difficulty cut points (sorted internally).  They define
            ``len(thresholds)`` difficulty levels.
        learning_pace: sample counts at which the next level is unlocked;
            must contain exactly ``len(thresholds) - 1`` entries.
        proportion: fraction of the weight that the previously hardest level
            keeps when a new level is unlocked; the new level receives the
            remaining ``1 - proportion``.

    Raises:
        ValueError: if ``len(thresholds) != len(learning_pace) + 1`` or
            ``proportion`` is outside ``[0, 1]``.
    """

    data_source: Sized

    def __init__(self, data_source: Sized, thresholds: List[int], learning_pace: List[int], proportion: float = 0.9) -> None:
        # Validate up front so a bad call leaves no half-initialized object.
        if len(thresholds) != len(learning_pace) + 1:
            raise ValueError(
                f"expected len(learning_pace) == len(thresholds) - 1, "
                f"got len(learning_pace)={len(learning_pace)} and len(thresholds)={len(thresholds)}"
            )
        if not 0.0 <= proportion <= 1.0:
            raise ValueError(f"proportion must be in [0, 1], got {proportion}")
        self.data_source = data_source
        self.thresholds = sorted(thresholds)
        self.learning_pace = sorted(learning_pace)
        self.proportion = proportion
        # One slot per difficulty level plus a running total in slot [-1].
        self.counter = [0] * (len(thresholds) + 1)
        self.length = [0] * (len(thresholds) + 1)
        self.generator = torch.Generator()
        self.samplers: List[Iterator[int]] = []
        self.weights: List[float] = [1.0]
        self.sampling = torch.distributions.Categorical(torch.Tensor(self.weights))
        self.develop_curriculum()

    def develop_curriculum(self) -> None:
        """Bucket dataset indices by difficulty and clamp infeasible milestones."""
        threshold_num = len(self.thresholds)
        # _indices_list[k] holds the dataset indices of difficulty level k;
        # retained so each epoch can rebuild fresh per-level random iterators.
        self._indices_list: List[List[int]] = [[] for _ in range(threshold_num)]
        for i in range(len(self.data_source)):
            level = bisect.bisect_left(self.thresholds, self.data_source[i]['label'])
            if level != threshold_num:  # labels beyond the last threshold are dropped
                self._indices_list[level].append(i)
                self.length[level] += 1
                self.length[-1] += 1
        # A milestone larger than the number of samples available in the
        # already-unlocked levels could never be hit exactly; clamp it down.
        for i in range(threshold_num - 1):
            available = sum(self.length[:i + 1])
            if self.learning_pace[i] > available:
                self.learning_pace[i] = available
        self._reset_epoch()

    def _reset_epoch(self) -> None:
        """Restore per-epoch state so the sampler can be iterated repeatedly."""
        self.counter = [0] * len(self.counter)
        self.weights = [1.0]
        self.samplers = [
            iter(SubsetRandomSampler(indices, self.generator))
            for indices in self._indices_list
        ]
        self._rebuild_sampling()

    def _rebuild_sampling(self) -> None:
        """Refresh the categorical distribution after any weight change."""
        self.sampling = torch.distributions.Categorical(torch.Tensor(self.weights))

    def _drain_exhausted(self) -> bool:
        """Move weight off every exhausted level onto the earliest active one.

        Prevents an exhausted (or empty) level from keeping probability mass
        and being sampled again, which would blow up ``next()`` on a spent
        iterator.  Returns True when any weight changed, in which case the
        caller must rebuild the sampling distribution.
        """
        active = [i for i in range(len(self.weights)) if self.counter[i] < self.length[i]]
        if not active:
            # Nothing to receive the weight yet; the next unlocked level will
            # absorb it (the clamped milestones guarantee one is due now).
            return False
        changed = False
        for i in range(len(self.weights)):
            if self.weights[i] != 0 and self.counter[i] >= self.length[i]:
                # Prefer the easiest still-active level, mirroring the
                # original "fall back to easier levels" policy.
                self.weights[active[0]] += self.weights[i]
                self.weights[i] = 0
                changed = True
        return changed

    def __iter__(self) -> Iterator[int]:
        self._reset_epoch()
        while self.counter[-1] < self.length[-1]:
            # Unlock every milestone equal to the current sample count; the
            # inner while also honors duplicate milestone values.
            idx = bisect.bisect_left(self.learning_pace, self.counter[-1])
            unlocked = False
            while idx < len(self.learning_pace) and self.learning_pace[idx] == self.counter[-1]:
                kept = self.weights[-1] * self.proportion
                # The previously hardest level keeps `proportion` of its
                # weight; the newly unlocked level receives the remainder.
                self.weights.append(self.weights[-1] - kept)
                self.weights[-2] = kept
                unlocked = True
                idx += 1
            if unlocked:
                self._drain_exhausted()  # a re-weighted level may already be empty
                self._rebuild_sampling()
            level = self.sampling.sample().item()
            yield next(self.samplers[level])
            self.counter[level] += 1
            self.counter[-1] += 1
            if self.counter[level] == self.length[level] and self._drain_exhausted():
                self._rebuild_sampling()

    def __len__(self) -> int:
        # Total number of samplable items (labels beyond the last threshold excluded).
        return self.length[-1]
