import torch
from torch import nn

import torch.nn.functional as F

import numpy as np

# Dummy input batch: 10 sequences, 16 tokens each, embedding dim 32.
x = torch.rand(10, 16, 32)


class self_att(nn.Module):
    """Single-head scaled dot-product self-attention.

    Projects the input into query/key (dim 64) and value (dim 128) spaces,
    then returns ``softmax(Q @ K^T / sqrt(d)) @ V``.

    Input:  (batch, seq, 32)
    Output: (batch, seq, 128)
    """

    def __init__(self) -> None:
        super().__init__()
        self.q = nn.Linear(32, 64)
        self.k = nn.Linear(32, 64)
        self.v = nn.Linear(32, 128)

    def forward(self, x):
        query = self.q(x)
        keys = self.k(x)
        value = self.v(x)
        # Scale by sqrt(d_q) (= sqrt(64) here, and d_q == d_k) to keep
        # score magnitudes stable.
        scores = torch.matmul(query, keys.transpose(-2, -1)) / query.size(-1) ** 0.5
        # BUG FIX: softmax must normalize over the key axis (dim=-1) so each
        # query's attention weights sum to 1; the original used dim=0, which
        # normalized across the batch dimension.
        attn = F.softmax(scores, dim=-1)
        return torch.matmul(attn, value)

class multi_head_att(nn.Module):
    """Multi-head attention: runs several heads in parallel and linearly
    projects their concatenated outputs.

    Args:
        self_att: zero-argument callable returning one head (an nn.Module)
            whose forward maps (batch, seq, embed) -> (batch, seq, v_size).
        v_size: feature size produced by ONE head.
        out_size: feature size of the final projection.
        num_heads: number of parallel heads (default 3, the original count).
    """

    def __init__(self, self_att: nn.Module, v_size, out_size, num_heads=3) -> None:
        super().__init__()

        self.heads = nn.ModuleList([self_att() for _ in range(num_heads)])

        # BUG FIX: heads are concatenated on the FEATURE axis (dim=-1), so
        # the projection consumes num_heads * v_size features.  The original
        # concatenated on dim=1 (the sequence axis), which silently tripled
        # the sequence length instead of widening the representation.
        self.fc = nn.Linear(v_size * num_heads, out_size)

    def forward(self, x):
        head_res = torch.cat([head(x) for head in self.heads], dim=-1)
        return self.fc(head_res)


# Build a 3-head attention network over the dummy batch and show the
# resulting output shape.
net = multi_head_att(self_att, 128, 512)
out = net(x)
print(out.size())