from collections import Iterator, Callable
from numbers import Integral
from numpy.random import RandomState
from collections_abc import Indexable
from dataset import Dataset, IgnoredIndexException
from preloaded_map import preloaded_map
from collections import Iterable, Mapping, Sequence
import numpy as np
from itertools import tee


# Type alias: a "data generator" in this module is simply an iterator.
# Subclasses (PreloadedDataGenerator, BatchedDataGenerator) implement
# __next__ themselves and inherit __iter__ from the Iterator ABC mixin.
DataGenerator = Iterator


def _generate_index(nb_samples, shuffle, rng=None):
    assert isinstance(nb_samples, Integral) and nb_samples > 0
    assert not shuffle or (shuffle and isinstance(rng, RandomState))

    while True:
        if shuffle:
            sample_idx = rng.permutation(nb_samples)
        else:
            sample_idx = range(nb_samples)
        for i in sample_idx:
            yield i


# class _getitem(Callable):
#     def __init__(self, indexable):
#         assert isinstance(indexable, Indexable)
#         self._indexable = indexable
#
#     def __call__(self, index):
#         return self._indexable[index]


def _getitem(index):
    """Worker-side fetch: index into the dataset stored in this module's
    globals by ``_initializer`` (one copy per pool worker process).

    Kept as a plain module-level function — presumably so it stays picklable
    for the multiprocessing pool; see the commented-out Callable-class
    variant above, which appears to be the discarded alternative.
    """
    return globals()['_dataset'][index]


def _initializer(dataset_, initializer, *initargs):
    if initializer is not None:
        initializer(*initargs)
    globals()['_dataset'] = dataset_


class PreloadedDataGenerator(DataGenerator):
    """Endless iterator over items of ``dataset``, fetched ahead of time by a
    pool of worker processes via ``preloaded_map``.

    Each worker is bootstrapped by ``_initializer``, which first runs the
    user-supplied ``initializer(*initargs)`` hook and then stores the dataset
    in the worker's module globals for ``_getitem`` to index.
    """

    def __init__(self, dataset, shuffle, rng=None, preloaded_len=None, processes=None, initializer=None, initargs=(),**pool_kwargs):
        assert isinstance(dataset, Dataset)
        self._dataset = dataset
        # Endless index stream: sequential, or re-permuted each pass when
        # shuffle is true (see _generate_index).
        self._indexgen = _generate_index(len(dataset), shuffle, rng)
        # preloaded_len / processes / pool_kwargs are forwarded verbatim to
        # preloaded_map, which owns the worker pool.
        self._datagen = preloaded_map(
            _getitem, self._indexgen, preloaded_len=preloaded_len, processes=processes, initializer=_initializer,
            initargs=(dataset, initializer) + initargs, **pool_kwargs
        )

    def __next__(self):
        # NOTE(review): when the underlying map raises IgnoredIndexException,
        # control falls through to an implicit `return None`, so consumers see
        # a None entry rather than having the index silently skipped. Confirm
        # this is intended — a retry loop may have been what was meant.
        try:
            item = next(self._datagen)
        except IgnoredIndexException:
            pass
        else:
            return item


def preloaded_data_generator(dataset, *more, shuffle, rng=None, preloaded_len=None, processes=None, initializer=None, initargs=(),**pool_kwargs):
    """Build one preloaded-map generator per dataset, all driven by copies of
    the *same* index stream, so corresponding items of the datasets stay
    aligned even when shuffling.

    All datasets must have equal length. Returns a tuple of generators, one
    per dataset, in argument order.
    """
    datasets = (dataset,) + more
    assert all(isinstance(d, Dataset) for d in datasets)
    nb_samples = len(dataset)
    assert all(len(d) == nb_samples for d in datasets)
    # tee() duplicates a single endless index generator so every dataset
    # consumes identical index sequences.
    index_streams = tee(_generate_index(nb_samples, shuffle, rng), len(datasets))
    return tuple(
        preloaded_map(
            _getitem, stream, preloaded_len=preloaded_len, processes=processes, initializer=_initializer,
            initargs=(ds, initializer) + initargs, **pool_kwargs
        )
        for ds, stream in zip(datasets, index_streams)
    )


class BatchedDataGenerator(DataGenerator):
    """Iterator that groups items from ``iterable`` into batches of exactly
    ``batchsize`` and concatenates them leaf-wise with numpy.

    ``hierarchy`` describes the nesting structure of each item:
      * ``None``     — the item is itself array-like; batches are
        ``np.concatenate``'d along axis 0.
      * a Sequence   — the item is a sequence; each position is concatenated
        recursively, and the result is rebuilt with the hierarchy's own type.
      * a Mapping    — the item is a mapping; each key's values are
        concatenated recursively, result rebuilt with the mapping's type.

    Raises StopIteration once the source cannot supply a full batch, and
    ValueError (at construction) for an unsupported hierarchy node.
    """

    def __init__(self, iterable, batchsize, hierarchy=None):
        assert isinstance(iterable, Iterable)
        assert isinstance(batchsize, Integral) and batchsize > 0
        assert BatchedDataGenerator._check_hierarchy(hierarchy)

        self._iterator = iter(iterable)
        self._batchsize = batchsize
        self._hierarchy = hierarchy

    def __next__(self):
        # BUG FIX: the original did `for _ in self._batchsize`, iterating an
        # int (TypeError). Also, an explicit loop (not a genexp passed to
        # tuple()) lets StopIteration from the exhausted source propagate as
        # this iterator's own StopIteration instead of being converted to
        # RuntimeError under PEP 479.
        items = []
        for _ in range(self._batchsize):
            items.append(next(self._iterator))
        return BatchedDataGenerator._concatenate(self._hierarchy, tuple(items))

    @staticmethod
    def _concatenate(hierarchy, items):
        """Recursively concatenate ``items`` (one element per sample)
        following ``hierarchy``; see the class docstring for the scheme."""
        if hierarchy is None:
            return np.concatenate(items, axis=0)
        elif isinstance(hierarchy, Sequence):
            return type(hierarchy)(BatchedDataGenerator._concatenate(h, i) for h, i in zip(hierarchy, zip(*items)))
        elif isinstance(hierarchy, Mapping):
            return type(hierarchy)(
                (k, BatchedDataGenerator._concatenate(hierarchy[k], tuple(i[k] for i in items)))
                for k in hierarchy.keys()
            )
        else:
            # BUG FIX: the original *returned* the ValueError instance
            # instead of raising it, so bad hierarchies went unnoticed.
            raise ValueError(hierarchy)

    @staticmethod
    def _check_hierarchy(hierarchy):
        """Validate a hierarchy spec: ``None`` leaves nested inside
        Sequences/Mappings. Raises ValueError on any other node."""
        if hierarchy is None:
            return True
        elif isinstance(hierarchy, (str, bytes)):
            # Strings are Sequences of one-character strings; without this
            # guard a str node would recurse forever (RecursionError).
            raise ValueError(hierarchy)
        elif isinstance(hierarchy, Sequence):
            return all(BatchedDataGenerator._check_hierarchy(h) for h in hierarchy)
        elif isinstance(hierarchy, Mapping):
            return all(BatchedDataGenerator._check_hierarchy(h) for h in hierarchy.values())
        else:
            raise ValueError(hierarchy)
