from typing import Iterator

import numpy as np
import torch
from zkl_aiutils_datasets import Dataset, MappedDataset, ZippedDataset

class DelayRepeatSequence(Dataset):
    def __init__(self, *,
        vocab_tokens_n: int,
        random_seed: int | None = None,
    ):
        if not vocab_tokens_n >= 2:
            raise ValueError(f"Expected vocab_tokens_n>=2, got {vocab_tokens_n=}")

        self._vocab_tokens_n = vocab_tokens_n
        self._random_seed = np.random.randint(0, 2 ** 32) \
            if random_seed is None else random_seed

    def __iter__(self) -> Iterator[tuple[int, int, int]]:
        rng = np.random.default_rng(self._random_seed)

        token_empty = 0
        token_invoke = 1
        data_tokens_n = (self._vocab_tokens_n - 2) // 2
        make_input_token = lambda data_token: data_token + 2
        make_output_token = lambda data_token: data_token + data_tokens_n + 2

        tid_sleep = 0
        tid_input = 1
        tid_output = 2

        # first input
        data_token = int(rng.choice(data_tokens_n))
        token = make_input_token(data_token)
        yield token, token_empty, tid_input

        # randomly choose action
        while True:
            match int(rng.choice(3)):
                case 0:  # sleep
                    yield token_empty, token_empty, tid_sleep
                case 1:  # input
                    data_token = int(rng.choice(data_tokens_n))
                    token = make_input_token(data_token)
                    yield token, token_empty, tid_input
                case 2:  # output
                    token = make_output_token(data_token)
                    yield token_invoke, token, tid_output

def make_delay_repeat_dataset_for_training(*,
    vocab_tokens_n: int,
    random_seed: int | None = None,
    batch_samples_n: int | None = None,
    device: torch.device | str | None = None,
) -> Dataset[tuple[torch.Tensor, torch.Tensor, torch.Tensor]]:
    """Build a delay-repeat dataset yielding ``(x, y, t)`` int64 tensors.

    Args:
        vocab_tokens_n: vocabulary size passed to ``DelayRepeatSequence``.
        random_seed: seed for reproducibility; ``None`` means nondeterministic.
        batch_samples_n: if given, each item is a batch of this many
            independent sequences stacked along dim 0; otherwise each item
            is a single step. The trailing ``unsqueeze(-1)`` adds a feature
            dimension of size 1 in both cases.
        device: device for the produced tensors.
    """
    if batch_samples_n is None:
        dataset = DelayRepeatSequence(
            vocab_tokens_n=vocab_tokens_n,
            random_seed=random_seed)  # (x, y, t)
    else:
        # Derive per-sample seeds from `random_seed` so the batched dataset
        # is reproducible. (Previously the seeds were drawn from the
        # unseeded global np.random state, silently discarding
        # `random_seed` on this path.)
        if random_seed is None:
            samples_random_seed: list[int | None] = [None] * batch_samples_n
        else:
            seed_rng = np.random.default_rng(random_seed)
            samples_random_seed = [
                int(s) for s in seed_rng.integers(0, 2 ** 32, batch_samples_n)]
        samples_sequences = tuple(DelayRepeatSequence(
            vocab_tokens_n=vocab_tokens_n,
            random_seed=sample_random_seed
        ) for sample_random_seed in samples_random_seed)
        dataset = ZippedDataset(samples_sequences)
        # Transpose a batch of (x, y, t) steps into (xs, ys, ts) tuples.
        dataset = MappedDataset(dataset, lambda x: tuple(zip(*x)))  # (xs, ys, ts)

    # int64 numpy arrays first, then torch tensors with a trailing
    # singleton feature dimension, on the requested device.
    dataset = MappedDataset(dataset, lambda x: (
        np.asarray(x[0], dtype=np.int64),
        np.asarray(x[1], dtype=np.int64),
        np.asarray(x[2], dtype=np.int64)))
    dataset = MappedDataset(dataset, lambda x: (
        torch.asarray(x[0], dtype=torch.int64, device=device).unsqueeze(-1),
        torch.asarray(x[1], dtype=torch.int64, device=device).unsqueeze(-1),
        torch.asarray(x[2], dtype=torch.int64, device=device).unsqueeze(-1)))
    return dataset  # Dataset[(x,y,t)]
