from collections.abc import Iterable
from dataclasses import dataclass
from typing import Callable

import torch
from zkl_ptutils_neural import QkvInjector

from .transformer_layer import Llama3TransformerLayer, Llama3TransformerLayerHparams


@dataclass(kw_only=True)
class Llama3TransformerHparams:
    """Hyperparameters for a :class:`Llama3Transformer` stack.

    All per-layer sizes are shared by every layer in the stack (each layer
    below is built from the same values).
    """
    # Number of stacked transformer layers.
    layers_n: int
    # Number of query heads per layer (GQA: queries_n >= groups_n presumably — confirm in layer impl).
    queries_n: int
    # Number of key/value groups per layer (grouped-query attention).
    groups_n: int
    # Per-head query/key dimension.
    qk_size: int
    # Per-head value dimension.
    v_size: int
    # MLP hidden ("intermediate") dimension.
    m_size: int
    # Model/embedding ("hidden") dimension.
    h_size: int


class Llama3Transformer(torch.nn.Module):
    """Stack of identical Llama-3-style transformer layers.

    Builds ``hparams.layers_n`` :class:`Llama3TransformerLayer` instances
    (sharing the same per-layer hyperparameters, each with its own weights)
    and threads token embeddings through them sequentially in ``forward``.
    """

    def __init__(self, *,
        hparams: Llama3TransformerHparams,
        dtype: torch.dtype | None = None,
        device: torch.device | None = None,
    ):
        super().__init__()
        # Every layer is constructed from the same per-layer hparams; only the
        # learned parameters differ between layers.
        self.layers = torch.nn.ModuleList([
            Llama3TransformerLayer(
                hparams=Llama3TransformerLayerHparams(
                    queries_n=hparams.queries_n,
                    groups_n=hparams.groups_n,
                    qk_size=hparams.qk_size,
                    v_size=hparams.v_size,
                    h_size=hparams.h_size,
                    m_size=hparams.m_size),
                dtype=dtype, device=device)
            for _ in range(hparams.layers_n)])

    def forward(self, *,
        tokens_emb: torch.Tensor,
        tokens_pos_emb: torch.Tensor | None,
        tokens_mask: torch.Tensor | None = None,
        layers_extra_tokens_kv: Iterable[tuple[torch.Tensor, torch.Tensor] | None] | None = None,
        layers_extra_tokens_mask: Iterable[torch.Tensor | None] | None = None,
        qkv_injector_factory: Callable[[QkvInjector | None], QkvInjector | None] | None = None,
    ) -> tuple[torch.Tensor, tuple[tuple[torch.Tensor, torch.Tensor], ...]]:
        """Run the token embeddings through every layer in order.

        Args:
            tokens_emb: input token embeddings; each layer's output becomes the
                next layer's input.
            tokens_pos_emb: positional embeddings shared by all layers, or None.
            tokens_mask: optional attention mask shared by all layers.
            layers_extra_tokens_kv: optional per-layer extra key/value pairs;
                when provided, must yield exactly one entry (or None) per layer.
            layers_extra_tokens_mask: optional per-layer masks for the extra
                tokens; same one-entry-per-layer requirement.
            qkv_injector_factory: optional factory forwarded unchanged to every
                layer.

        Returns:
            A pair of the final embeddings and a tuple holding each layer's
            (key, value) tensors, in layer order.

        Raises:
            ValueError: if a provided per-layer iterable does not yield exactly
                one entry per layer (via ``zip(..., strict=True)``).
        """
        if layers_extra_tokens_kv is None:
            layers_extra_tokens_kv = [None] * len(self.layers)
        if layers_extra_tokens_mask is None:
            layers_extra_tokens_mask = [None] * len(self.layers)

        layers_tokens_kv = []
        # strict=True: a wrong-length caller-supplied iterable previously made
        # zip truncate silently, skipping trailing layers; now it raises.
        for layer, extra_tokens_kv, extra_tokens_mask \
            in zip(self.layers, layers_extra_tokens_kv, layers_extra_tokens_mask, strict=True):
            assert isinstance(layer, Llama3TransformerLayer)
            # Invoke the module itself (not .forward) so registered
            # forward/pre-forward hooks are honored.
            tokens_emb, tokens_kv = layer(
                tokens_emb=tokens_emb,
                tokens_pos_emb=tokens_pos_emb,
                tokens_mask=tokens_mask,
                extra_tokens_kv=extra_tokens_kv,
                extra_tokens_mask=extra_tokens_mask,
                qkv_injector_factory=qkv_injector_factory)
            layers_tokens_kv.append(tokens_kv)
        return tokens_emb, tuple(layers_tokens_kv)
