import sys

import torch
from sympy.geometry.entity import scale

# Import helper functions from an external sibling directory
sys.path.append('../01_test')
import utils



# Toy self-attention setup: a batch of 2 sequences, 10 tokens each,
# with 128-dim embeddings projected to 96-dim keys/queries and 64-dim values.
b = 2
s = 10
embedding_dim = 128
k_size = 96
v_size = 64

# Random projection matrices for queries, keys, and values.
w_q = torch.rand(embedding_dim, k_size)
w_k = torch.rand(embedding_dim, k_size)
w_v = torch.rand(embedding_dim, v_size)

# Random input embeddings: (b, s, embedding_dim).
x = torch.rand(b, s, embedding_dim)

# Project the input into query/key/value spaces.
q = x @ w_q  # (b, s, k_size)
k = x @ w_k  # (b, s, k_size)
v = x @ w_v  # (b, s, v_size)

# Scaled dot-product scores, scaled by sqrt(d_k) as in standard attention.
scale = q.size(-1) ** 0.5
score = (q @ k.transpose(1, 2)) / scale  # (b, s, s)

# Causal (lower-triangular) mask: position i may attend only to j <= i.
# NOTE(review): the original used torch.triu, which lets every token attend
# to *future* positions only — tril is the conventional causal mask; confirm
# against the intent of the demo.
mask = torch.tril(torch.ones(s, s, dtype=torch.bool)).unsqueeze(0)  # (1, s, s)
print(f'mask={mask}')

# BUG FIX: masking by multiplication (score * mask) does not mask at all —
# a zeroed score still receives exp(0) = 1 weight after softmax. Setting
# masked positions to -inf makes softmax assign them exactly zero probability.
# (Every row keeps at least its diagonal entry, so no row is all -inf.)
score = score.masked_fill(~mask, float('-inf'))
print(f'score={score}')

# Softmax via torch, subtracting the row max first for numerical stability
# (softmax is shift-invariant, so this does not change the result).
attention_weight = torch.softmax(score - torch.max(score, dim=-1, keepdim=True)[0], dim=-1)
attention_out = torch.matmul(attention_weight, v)
print(f'attention_out={attention_out}')

# Manually-implemented softmax from the helper module, for comparison
# against torch's built-in result computed above.
attention_weight2 = utils.safe_softmax(score)
attention_out2 = attention_weight2 @ v
print(f'attention_out={attention_out2}')

# The two attention outputs should agree within a small tolerance.
outputs_match = torch.allclose(attention_out2, attention_out, atol=0.001, rtol=0.001)
if outputs_match:
    print(f'==============')
else:
    print(f'!!!!!!!!!!!!!!!')


