import torch as th
import numpy as np
from types import SimpleNamespace as SN

class EpisodeBatch:
    """Batch of episodes stored as pre-allocated tensors.

    Fields declared in ``scheme`` live in one of two stores:

    * ``data.transition_data`` -- per-timestep fields, shaped
      ``(batch_size, max_seq_length, [group_size,] *vshape)``.
    * ``data.episode_data`` -- fields flagged ``episode_const`` (constant
      across an episode), shaped ``(batch_size, [group_size,] *vshape)``.

    ``scheme`` maps field name -> {"vshape", "dtype", "group",
    "episode_const"}; ``groups`` maps a group name to its replication count
    (presumably the number of agents -- confirm against callers).
    ``preprocess`` maps field name -> (new_field_name, [transforms]); the
    transformed field is materialised alongside the raw one on update.
    """

    def __init__(self, scheme, groups, batch_size, max_seq_length, data=None, preprocess=None, device="cpu"):
        self.scheme = scheme.copy()
        self.groups = groups
        self.batch_size = batch_size
        self.max_seq_length = max_seq_length
        self.preprocess = {} if preprocess is None else preprocess
        self.device = device

        if data is not None:
            # Re-use existing storage (used when slicing an existing batch).
            self.data = data
        else:
            self.data = SN()
            self.data.transition_data = {}
            self.data.episode_data = {}
            self._setup_data(self.scheme, self.groups, batch_size, max_seq_length, self.preprocess)

    def _convert_to_tensor(self, v, dtype):
        """Coerce ``v`` (tensor / ndarray / list / scalar) into a tensor of
        ``dtype`` on ``self.device``."""
        # Handle a non-empty list of tensors BEFORE trying np.array(), which
        # may build an object array (or fail for device tensors) and send the
        # value down the wrong conversion path.
        if isinstance(v, list) and v and all(isinstance(x, th.Tensor) for x in v):
            return th.stack(v).to(device=self.device, dtype=dtype)

        if isinstance(v, list):
            try:
                v = np.array(v)
            except Exception:
                # Fall through: th.as_tensor below gets a final chance.
                pass

        if isinstance(v, np.ndarray):
            return th.as_tensor(v, dtype=dtype, device=self.device)

        if isinstance(v, th.Tensor):
            # clone().detach() so stored data never aliases caller tensors
            # or keeps autograd history alive.
            return v.clone().detach().to(device=self.device, dtype=dtype)

        return th.as_tensor(v, dtype=dtype, device=self.device)

    def _setup_data(self, scheme, groups, batch_size, max_seq_length, preprocess):
        """Extend the scheme with preprocessed fields and allocate zeroed
        storage for every field."""
        if preprocess is not None:
            for k in preprocess:
                assert k in scheme
                new_k = preprocess[k][0]
                transforms = preprocess[k][1]
                vshape = self.scheme[k]["vshape"]
                dtype = self.scheme[k]["dtype"]
                # Each transform may change the output shape/dtype; chain them.
                for transform in transforms:
                    vshape, dtype = transform.infer_output_info(vshape, dtype)
                self.scheme[new_k] = {"vshape": vshape, "dtype": dtype}
                # Derived field inherits grouping / episode-constness.
                if "group" in self.scheme[k]:
                    self.scheme[new_k]["group"] = self.scheme[k]["group"]
                if "episode_const" in self.scheme[k]:
                    self.scheme[new_k]["episode_const"] = self.scheme[k]["episode_const"]

        # "filled" marks which (batch, t) slots hold real data.
        scheme["filled"] = {"vshape": (1,), "dtype": th.long}

        for field_key, field_info in scheme.items():
            vshape = field_info["vshape"]
            episode_const = field_info.get("episode_const", False)
            group = field_info.get("group", None)
            dtype = field_info.get("dtype", th.float32)

            if isinstance(vshape, int):
                vshape = (vshape,)
            # Grouped fields get an extra leading axis of size groups[group].
            shape = (groups[group], *vshape) if group else vshape

            if episode_const:
                self.data.episode_data[field_key] = th.zeros((batch_size, *shape), dtype=dtype, device=self.device)
            else:
                self.data.transition_data[field_key] = th.zeros((batch_size, max_seq_length, *shape), dtype=dtype, device=self.device)

    def update(self, data, bs=slice(None), ts=slice(None), mark_filled=True):
        """Write ``data`` (field name -> value) into slots ``[bs, ts]``.

        ``mark_filled`` sets the "filled" flag for the targeted slots; it is
        applied at most once per call, on the first transition field seen.
        Raises ``KeyError`` for unknown field names.
        """
        slices = self._parse_slices((bs, ts))
        for k, v in data.items():
            if k in self.data.transition_data:
                target = self.data.transition_data
                if mark_filled:
                    target["filled"][slices] = 1
                    mark_filled = False
                _slices = slices
            elif k in self.data.episode_data:
                target = self.data.episode_data
                # Episode-constant fields have no time axis: batch slice only.
                _slices = slices[0]
            else:
                raise KeyError(f"{k} not found in transition or episode data")

            dtype = self.scheme[k].get("dtype", th.float32)
            v = self._convert_to_tensor(v, dtype)
            self._check_safe_view(v, target[k][_slices])
            target[k][_slices] = v.view_as(target[k][_slices])

            # Keep the derived (preprocessed) field in sync with the raw one.
            if k in self.preprocess:
                new_k = self.preprocess[k][0]
                v = target[k][_slices]
                for transform in self.preprocess[k][1]:
                    v = transform.transform(v)
                target[new_k][_slices] = v.view_as(target[new_k][_slices])

    def _check_safe_view(self, v, dest):
        """Raise ValueError unless ``v`` can be viewed as ``dest``'s shape by
        only inserting/removing size-1 axes (right-aligned comparison)."""
        idx = len(v.shape) - 1
        for s in dest.shape[::-1]:
            if v.shape[idx] != s:
                if s != 1:
                    raise ValueError(f"Unsafe reshape of {v.shape} to {dest.shape}")
            else:
                idx -= 1

    def _parse_slices(self, items):
        """Normalise indexing into a (batch_index, time_index) tuple with all
        ints widened to length-1 slices (so sliced dims are kept)."""
        # A lone batch index gets a full time slice appended. th.Tensor covers
        # index tensors without touching the deprecated th.LongTensor /
        # th.cuda.LongTensor typed-tensor classes.
        if isinstance(items, (slice, int, list, np.ndarray, th.Tensor)):
            items = (items, slice(None))
        # Fancy indexing over time would break the contiguous-sequence layout.
        if isinstance(items[1], list):
            raise IndexError("Indexing across Time must be contiguous")
        return tuple(slice(i, i+1) if isinstance(i, int) else i for i in items)

    def _new_data_sn(self):
        """Fresh empty data namespace."""
        return SN(transition_data={}, episode_data={})

    def __getitem__(self, item):
        """Select by field name, tuple of field names, or (batch, time) index.

        Name lookups return the stored tensor directly; index lookups return a
        new EpisodeBatch whose tensors are views into this one.
        """
        if isinstance(item, str):
            # Explicit membership tests: tensors must not be used in boolean
            # contexts (multi-element Tensor.__bool__ raises), and unknown
            # keys should fail loudly rather than return None.
            if item in self.data.episode_data:
                return self.data.episode_data[item]
            if item in self.data.transition_data:
                return self.data.transition_data[item]
            raise KeyError(f"Unrecognised key {item}")
        elif isinstance(item, tuple) and all(isinstance(i, str) for i in item):
            new_data = self._new_data_sn()
            for key in item:
                if key in self.data.transition_data:
                    new_data.transition_data[key] = self.data.transition_data[key]
                elif key in self.data.episode_data:
                    new_data.episode_data[key] = self.data.episode_data[key]
                else:
                    raise KeyError(f"Unrecognised key {key}")
            # Restrict the scheme/groups to the selected fields only.
            new_scheme = {k: self.scheme[k] for k in item}
            new_groups = {self.scheme[k]["group"]: self.groups[self.scheme[k]["group"]] for k in item if "group" in self.scheme[k]}
            return EpisodeBatch(new_scheme, new_groups, self.batch_size, self.max_seq_length, data=new_data, device=self.device)
        else:
            item = self._parse_slices(item)
            new_data = self._new_data_sn()
            for k, v in self.data.transition_data.items():
                new_data.transition_data[k] = v[item]
            for k, v in self.data.episode_data.items():
                # Episode-constant fields only take the batch index.
                new_data.episode_data[k] = v[item[0]]
            return EpisodeBatch(self.scheme, self.groups, self._get_num_items(item[0], self.batch_size), self._get_num_items(item[1], self.max_seq_length), data=new_data, device=self.device)

    def _get_num_items(self, indexing_item, max_size):
        """Number of elements an indexing item selects out of ``max_size``."""
        # th.Tensor included so tensor-indexed sub-batches report a correct
        # batch_size (parallels the th.Tensor case in _parse_slices).
        if isinstance(indexing_item, (list, np.ndarray, th.Tensor)):
            return len(indexing_item)
        elif isinstance(indexing_item, slice):
            r = indexing_item.indices(max_size)
            return 1 + (r[1] - r[0] - 1) // r[2]

    def max_t_filled(self):
        """Length (in timesteps) of the longest filled episode in the batch."""
        return th.sum(self.data.transition_data["filled"], 1).max(0)[0]

    def __repr__(self):
        return f"EpisodeBatch. Batch Size:{self.batch_size} Max_seq_len:{self.max_seq_length} Keys:{self.scheme.keys()} Groups:{self.groups.keys()}"


class ReplayBuffer(EpisodeBatch):
    """FIFO episode store built on top of EpisodeBatch.

    New episodes are written at ``buffer_index`` and wrap around once the
    buffer is full, overwriting the oldest entries.
    """

    def __init__(self, scheme, groups, buffer_size, max_seq_length, preprocess=None, device="cpu"):
        super().__init__(scheme, groups, buffer_size, max_seq_length, preprocess=preprocess, device=device)
        self.buffer_size = buffer_size      # alias of batch_size for readability
        self.buffer_index = 0               # next write position
        self.episodes_in_buffer = 0         # number of slots holding valid data

    def insert_episode_batch(self, ep_batch):
        """Copy ``ep_batch`` into the buffer, splitting it at the wrap point."""
        n_eps = ep_batch.batch_size
        if self.buffer_index + n_eps > self.buffer_size:
            # Not enough room before the end: fill the tail, then recurse
            # with the remainder, which lands at index 0.
            room = self.buffer_size - self.buffer_index
            self.insert_episode_batch(ep_batch[0:room, :])
            self.insert_episode_batch(ep_batch[room:, :])
            return
        dest = slice(self.buffer_index, self.buffer_index + n_eps)
        self.update(ep_batch.data.transition_data, dest, slice(0, ep_batch.max_seq_length), mark_filled=False)
        self.update(ep_batch.data.episode_data, dest)
        self.buffer_index = (self.buffer_index + n_eps) % self.buffer_size
        self.episodes_in_buffer = max(self.episodes_in_buffer, self.buffer_index)

    def can_sample(self, batch_size):
        """True once at least ``batch_size`` episodes have been stored."""
        return batch_size <= self.episodes_in_buffer

    def sample(self, batch_size):
        """Return ``batch_size`` distinct episodes drawn uniformly at random."""
        assert self.can_sample(batch_size)
        chosen = np.random.choice(self.episodes_in_buffer, batch_size, replace=False)
        return self[chosen]

    def __repr__(self):
        return f"ReplayBuffer. {self.episodes_in_buffer}/{self.buffer_size} episodes. Keys:{self.scheme.keys()} Groups:{self.groups.keys()}"
