import torch
from torch.nn.functional import softmax

# Worked example of (unscaled) dot-product self-attention on a tiny,
# hand-crafted input: three tokens, each a 4-dimensional embedding,
# projected into 3-dimensional key / query / value spaces.

# Token embeddings, one row per token.
x = torch.tensor(
    [
        [1.0, 0.0, 1.0, 0.0],
        [0.0, 2.0, 0.0, 2.0],
        [1.0, 1.0, 1.0, 1.0],
    ]
)

# Projection matrices (4 -> 3) for keys, queries and values.
w_key = torch.tensor(
    [
        [0.0, 0.0, 1.0],
        [1.0, 1.0, 0.0],
        [0.0, 1.0, 0.0],
        [1.0, 1.0, 0.0],
    ]
)
w_query = torch.tensor(
    [
        [1.0, 0.0, 1.0],
        [1.0, 0.0, 0.0],
        [0.0, 0.0, 1.0],
        [0.0, 1.0, 1.0],
    ]
)
w_value = torch.tensor(
    [
        [0.0, 2.0, 0.0],
        [0.0, 3.0, 0.0],
        [1.0, 0.0, 3.0],
        [1.0, 1.0, 0.0],
    ]
)

# Project every token embedding into key / query / value space.
keys = torch.matmul(x, w_key)
querys = torch.matmul(x, w_query)
values = torch.matmul(x, w_value)

# Raw attention scores: each query dotted against every key.
# NOTE: this demo deliberately skips the 1/sqrt(d_k) scaling used in
# full scaled dot-product attention.
attn_scores = torch.matmul(querys, keys.t())
# Normalize each row of scores into a probability distribution.
attn_scores_softmax = softmax(attn_scores, dim=-1)

# weighted_values[i, j] is token i's value vector scaled by how much
# token j attends to token i (an explicit broadcasted outer product,
# kept for illustration).
weighted_values = values.unsqueeze(1) * attn_scores_softmax.t().unsqueeze(2)
# Summing over the attended-to axis gives each token's output vector;
# numerically this equals attn_scores_softmax @ values.
outputs = weighted_values.sum(dim=0)

# Script entry point: dump every intermediate tensor so the attention
# computation above can be followed step by step.
if __name__ == '__main__':
    labelled = [
        ("Weights for key: \n", w_key),
        ("Weights for query: \n", w_query),
        ("Weights for value: \n", w_value),
        ("Keys: \n", keys),
        ("Querys: \n", querys),
        ("Values: \n", values),
    ]
    for label, tensor in labelled:
        print(label, tensor)
    for tensor in (attn_scores, attn_scores_softmax, weighted_values, outputs):
        print(tensor)