import torch
from sentence_transformers import SentenceTransformer

# list() on a string splits it into individual characters, so each Chinese
# character of the question is encoded as its own "sentence" (9 characters here).
sentences = list("中国的首都在哪里？")
# NOTE(review): requires a locally downloaded checkpoint at this path.
model = SentenceTransformer('./data/BAAI/bge-small-zh')
# One L2-normalized embedding per character.
embeddings = model.encode(sentences, normalize_embeddings=True)
# embeddings.shape (9, 512)  9: seq_len  512: embedding_dims
# (512 assumes bge-small-zh's output dimension — confirm against the model card)

# Treat the per-character embeddings as one sequence: (1, seq_len, embedding_dims).
X = torch.from_numpy(embeddings).unsqueeze(0)
batch_size = X.shape[0]
embedding_dims = X.shape[-1]

hidden_dims = 512
# Projection weights for Q/K/V and the output projection.
# NOTE(review): carrying a leading batch dimension on the weights is unusual —
# projection weights are normally shared across the batch (shape
# (embedding_dims, hidden_dims)); with batch_size == 1 the result is identical.
w_q = torch.randn((batch_size, embedding_dims, hidden_dims), requires_grad=True)
w_k = torch.randn((batch_size, embedding_dims, hidden_dims), requires_grad=True)
w_v = torch.randn((batch_size, embedding_dims, hidden_dims), requires_grad=True)
w_o = torch.randn((batch_size, hidden_dims, embedding_dims), requires_grad=True)
Q = X @ w_q  # (batch, seq_len, hidden_dims)
K = X @ w_k
V = X @ w_v

# Scaled dot-product attention: softmax(Q K^T / sqrt(d_k)) V.
# The scale factor is sqrt(d_k) — the dimension of the key/query projections
# (hidden_dims) — not the input embedding dimension. The two coincide here
# (both 512), but using embedding_dims would be wrong whenever
# hidden_dims != embedding_dims.
Z = torch.softmax(Q @ K.transpose(1, 2) / hidden_dims ** 0.5, dim=-1) @ V
O = Z @ w_o  # project back: (batch, seq_len, embedding_dims)
print(O.shape)
