from dataclasses import dataclass
from typing import Literal

import torch

from .nbptt_transformer_layer import NbpttTransformerLayer, NbpttTransformerLayerHparams


@dataclass(kw_only=True)
class NbpttTransformerHparams:
    """Hyper-parameters for NbpttTransformer; all fields are keyword-only.

    ``layers_n`` controls how many NbpttTransformerLayer modules are stacked;
    the remaining sizes are forwarded into each layer's hparams (see
    NbpttTransformer.__init__ for which layer receives i_size/o_size).
    """
    # Input width consumed by the first layer; None presumably defers the
    # choice to NbpttTransformerLayer -- TODO confirm against that class.
    i_size: int | None = None
    layers_n: int   # number of stacked NbpttTransformerLayer modules (may be 0)
    memory_n: int   # forwarded verbatim to every layer
    queries_n: int  # forwarded verbatim to every layer
    groups_n: int   # forwarded verbatim to every layer
    qk_size: int    # forwarded verbatim to every layer
    v_size: int     # forwarded verbatim to every layer
    m_size: int     # inter-layer width: inner layers read and emit m_size
    h_size: int     # forwarded verbatim to every layer
    o_size: int     # output width produced by the final layer


class NbpttTransformer(torch.nn.Module):
    """Stack of ``layers_n`` NbpttTransformerLayer modules applied in order.

    Width wiring: the first layer consumes ``hparams.i_size``, every inner
    layer consumes and produces ``hparams.m_size``, and the last layer
    produces ``hparams.o_size``.  With a single layer that collapses to
    ``i_size -> o_size``; with zero layers ``forward`` returns its input
    unchanged.
    """

    def __init__(self,
        hparams: NbpttTransformerHparams,
        dtype: torch.dtype | None = None,
        device: torch.device | None = None,
    ):
        """Build the layer stack.

        Args:
            hparams: shared sizes for the stack; see NbpttTransformerHparams.
            dtype: parameter dtype forwarded to each layer (None = default).
            device: parameter device forwarded to each layer (None = default).
        """
        super().__init__()
        self.hparams = hparams

        def make_layer(layer_idx: int) -> NbpttTransformerLayer:
            # Only the endpoints differ between layers: the first layer reads
            # the raw input width, and the last layer writes the output width;
            # everything in between is m_size -> m_size so layers compose.
            # (With layers_n == 1 the single layer is both first and last,
            # i.e. i_size -> o_size, matching the original special case.)
            i_size = hparams.i_size if layer_idx == 0 else hparams.m_size
            o_size = (hparams.o_size if layer_idx == hparams.layers_n - 1
                      else hparams.m_size)
            return NbpttTransformerLayer(
                hparams=NbpttTransformerLayerHparams(
                    i_size=i_size,
                    memory_n=hparams.memory_n,
                    queries_n=hparams.queries_n,
                    groups_n=hparams.groups_n,
                    qk_size=hparams.qk_size,
                    v_size=hparams.v_size,
                    m_size=hparams.m_size,
                    h_size=hparams.h_size,
                    o_size=o_size),
                dtype=dtype, device=device)

        # One construction path for 0, 1, or N layers; the previous
        # three-way branch triplicated this call with only i_size/o_size
        # varying, which invited drift between the copies.
        self.layers = torch.nn.ModuleList(
            [make_layer(idx) for idx in range(hparams.layers_n)])

    def forward(self,
        emb: torch.Tensor, *,
        reset_mask: torch.Tensor | Literal['all'] | None = None,
    ) -> torch.Tensor:
        """Thread ``emb`` through each layer in order.

        Args:
            emb: input embeddings; shape contract is whatever
                NbpttTransformerLayer expects -- TODO confirm there.
            reset_mask: forwarded verbatim to every layer ('all', a mask
                tensor, or None).

        Returns:
            The output of the last layer, or ``emb`` unchanged when the
            stack is empty.
        """
        for layer in self.layers:
            emb = layer(emb=emb, reset_mask=reset_mask)
        return emb
