import torch
from zkl_aiutils_neural import RMSNorm


class EmbeddingMatcher(torch.nn.Module):
    """Scores a query embedding against every row of an embedding table.

    The query is RMS-normalized, then dotted with the embedding weight
    matrix, and the resulting logits are scaled by ``4 / sqrt(emb_size)``.
    """

    def __init__(self, embedding: torch.nn.Embedding):
        """
        :param embedding: the embedding table whose rows are matched against;
            its weight's dtype/device also configure the internal RMSNorm.
        """
        super().__init__()
        self.embedding = embedding
        self.rms_norm = RMSNorm(
            self.embedding.embedding_dim,
            dtype=self.embedding.weight.dtype,
            device=self.embedding.weight.device)

    def forward(self, emb: torch.Tensor) -> torch.Tensor:
        """
        :param emb: shape=[..., emb_size]
        :return: logits: shape=[..., embs_n]
        """
        emb = self.rms_norm(emb)
        # [..., emb_size]

        vocab_emb = self.embedding.weight
        # [embs_n, emb_size]

        logits = torch.matmul(emb, vocab_emb.T)
        # [..., embs_n]

        # The scale is a constant Python scalar: 4 / sqrt(emb_size).
        # Computing it as a float avoids allocating a tensor on every
        # forward call (the previous asarray/rsqrt dance).
        # NOTE(review): the 4.0 factor looks like a fixed logit temperature —
        # origin not visible from this file; confirm before changing.
        scale = vocab_emb.shape[-1] ** -0.5 * 4.0

        return logits * scale
        # [..., embs_n]
