from dataclasses import dataclass
from typing import Iterable

import torch
from zkl_llmpt_llama3 import Llama3Transformer


@dataclass
class Llama3SynergySubTransformerOutput:
    """Result of one Llama3SynergySubTransformer forward pass.

    Mirrors the two values returned by ``Llama3Transformer.forward``
    (see the forward call in Llama3SynergySubTransformer below), wrapped
    in a dataclass so callers access them by name instead of position.
    """

    # Output token embeddings from the wrapped transformer.
    tokens_emb: torch.Tensor
    # One (key, value) tensor pair per transformer layer — presumably the
    # attention KV tensors for reuse by a later pass; verify against the
    # Llama3Transformer implementation.
    layers_tokens_kv: tuple[tuple[torch.Tensor, torch.Tensor], ...]


class Llama3SynergySubTransformer(torch.nn.Module):
    """Thin adapter around a ``Llama3Transformer``.

    Delegates the forward pass to the wrapped transformer and repackages
    its positional return values into a named
    ``Llama3SynergySubTransformerOutput``.
    """

    def __init__(self, transformer: Llama3Transformer):
        """Store the transformer to delegate to; registers it as a submodule."""
        super().__init__()
        self.transformer = transformer

    def forward(self, *,
        tokens_emb: torch.Tensor,
        tokens_pos_emb: torch.Tensor | None = None,
        tokens_mask: torch.Tensor | None = None,
        layers_extra_tokens_kv: Iterable[tuple[torch.Tensor, torch.Tensor] | None] | None = None,
    ) -> Llama3SynergySubTransformerOutput:
        """Run the wrapped transformer and wrap its outputs.

        NOTE: ``tokens_mask`` is accepted for interface compatibility but
        deliberately NOT forwarded — the original implementation skips it
        for performance reasons.
        """
        delegated = self.transformer.forward(
            tokens_emb=tokens_emb,
            tokens_pos_emb=tokens_pos_emb,
            layers_extra_tokens_kv=layers_extra_tokens_kv)
        out_emb, out_layers_kv = delegated
        return Llama3SynergySubTransformerOutput(
            tokens_emb=out_emb,
            layers_tokens_kv=out_layers_kv)
