import os.path

import torch
import shutil

# --- Single-head dot-product attention (demo with random weights) ---
b = 2                # batch size
s = 32               # sequence length
embedding_dim = 128  # input embedding dim
k_size = 96          # query/key projection dim
v_size = 64          # value projection dim

# Random, untrained projection matrices.
w_q = torch.rand(embedding_dim, k_size)
w_k = torch.rand(embedding_dim, k_size)
w_v = torch.rand(embedding_dim, v_size)

x = torch.rand(b, s, embedding_dim)

q = torch.matmul(x, w_q)  # (b, s, k_size)
k = torch.matmul(x, w_k)  # (b, s, k_size)
v = torch.matmul(x, w_v)  # (b, s, v_size)

# Raw attention scores: every query dotted with every key.
# NOTE(review): no 1/sqrt(k_size) scaling is applied here; scaled
# dot-product attention would divide by sqrt(k_size) — kept as-is to
# preserve the values this script prints and dumps.
score = torch.matmul(q, k.transpose(1, 2))  # (b, s, s)

# torch.softmax is already numerically stable (it subtracts the row max
# internally), and softmax is shift-invariant, so the previous manual
# `score - score.max(...)` step was redundant and has been removed.
attention_weight = torch.softmax(score, dim=-1)  # (b, s, s), rows sum to 1

attention_out = torch.matmul(attention_weight, v)  # (b, s, v_size)

print(f'attention_out={attention_out}')

# Recreate the dump directory from scratch so stale tensors never linger.
dump_dir = 'dump/single'
if os.path.exists(dump_dir):
    shutil.rmtree(dump_dir)
# makedirs (not mkdir): also creates the parent 'dump/' on a first run,
# where os.mkdir would raise FileNotFoundError.
os.makedirs(dump_dir)

torch.save(q, os.path.join(dump_dir, 'q.bin'))
torch.save(k, os.path.join(dump_dir, 'k.bin'))
torch.save(v, os.path.join(dump_dir, 'v.bin'))
torch.save(attention_out, os.path.join(dump_dir, 'single_out0.bin'))


