from torch import nn
import Rope
from RMSNormal import RMSNormal
from Attention import Attention
from Expert import Expert


class TransformerDecoder(nn.Module):
    """A stack of pre-norm Transformer decoder layers sharing one rotary table.

    Builds ``num_layers`` identical :class:`TransformerLayer` modules and
    precomputes the rotary-embedding frequencies once, registered as a
    non-persistent buffer so it follows the module's device/dtype moves
    but is excluded from the state dict.
    """

    def __init__(
        self,
        num_layers,   # number of stacked decoder layers
        input_dim,    # model (embedding) dimension
        hide_dim,     # hidden width of the expert feed-forward networks
        n_q_heads,    # number of query heads
        n_kv_heads,   # number of key/value heads
        num_experts,  # experts per mixture-of-experts block
        top_k,        # experts routed per token
        max_len,      # maximum sequence length covered by the rotary table
    ):
        super().__init__()

        layers = (
            TransformerLayer(
                input_dim, hide_dim, n_q_heads, n_kv_heads, num_experts, top_k
            )
            for _ in range(num_layers)
        )
        self._layers = nn.ModuleList(layers)

        # Rotary frequencies depend only on the per-head dimension and the
        # maximum length, so compute them a single time up front.
        head_dim = input_dim // n_q_heads
        self.register_buffer(
            "freq_cis",
            Rope.precompute_freqs_cis(head_dim, max_len),
            persistent=False,
        )

    def forward(self, x):
        """Pass ``x`` through every decoder layer in order and return the result."""
        hidden = x
        for layer in self._layers:
            hidden = layer(hidden, self.freq_cis)
        return hidden


class TransformerLayer(nn.Module):
    """One pre-norm Transformer block.

    Self-attention followed by a mixture-of-experts feed-forward network,
    each sub-block preceded by RMS normalization and wrapped in a residual
    connection.
    """

    def __init__(self, input_dim, hide_dim, n_q_heads, n_kv_heads, num_experts, top_k):
        super().__init__()

        # Attention sub-block: normalize, then attend.
        self._att_norm = RMSNormal(input_dim)
        self._att_layer = Attention(input_dim, n_q_heads, n_kv_heads)
        # Feed-forward sub-block: normalize, then route through the experts.
        self._ffn_norm = RMSNormal(input_dim)
        self._ffn_layer = Expert(num_experts, top_k, input_dim, hide_dim)

    def forward(self, x, freq_cis):
        # Pre-norm attention with a residual connection.
        hidden = x + self._att_layer(self._att_norm(x), freq_cis)
        # Pre-norm expert feed-forward with a residual connection.
        return hidden + self._ffn_layer(self._ffn_norm(hidden))


if __name__ == "__main__":
    # Smoke test: build a tiny decoder and print its module tree.
    decoder = TransformerDecoder(
        num_layers=3,    # number of decoder layers
        input_dim=2,     # model dimension
        hide_dim=2,      # expert hidden dimension
        n_q_heads=2,
        n_kv_heads=2,
        num_experts=8,
        top_k=1,
        max_len=105,
    )
    print(decoder)
