from math import sqrt

import torch
import torch.nn.functional as F
from torch import nn
from transformers import AutoConfig, AutoTokenizer

# --- Tokenization and token embeddings -------------------------------------
# Tokenize one sample sentence with the BERT tokenizer, then map the token
# ids to dense vectors via a randomly-initialised embedding layer, printing
# every intermediate result for inspection.
model_ckpt = "bert-base-uncased"
tokenizer = AutoTokenizer.from_pretrained(model_ckpt)

text = "time flies like an arrow"
# add_special_tokens=False: no [CLS]/[SEP], so we only see the 5 word tokens.
inputs = tokenizer(text, return_tensors="pt", add_special_tokens=False)
print(inputs)
print("\n")
print(inputs.input_ids)

print("-" * 100)
print("\n")

# Pull vocab/hidden sizes from the checkpoint config; the embedding weights
# themselves are random (we are not loading pretrained weights here).
config = AutoConfig.from_pretrained(model_ckpt)
token_emb = nn.Embedding(config.vocab_size, config.hidden_size)
print(token_emb)
print("\n")

# Look up an embedding vector for every token id: (1, seq_len, hidden_size).
input_embeds = token_emb(inputs.input_ids)
print(input_embeds)
print("\n")

# --- Scaled dot-product self-attention -------------------------------------
# Self-attention: queries, keys and values all come from the same embeddings.
Q = K = V = input_embeds


def scaled_dot_product_attention(query, key, value):
    """Compute scaled dot-product attention.

    Args:
        query: tensor of shape (batch, seq_len, dim).
        key: tensor of shape (batch, seq_len, dim).
        value: tensor of shape (batch, seq_len, dim).

    Returns:
        (scores, weights, outputs) where
        scores  -- raw similarity matrix Q·Kᵀ/√dim, shape (batch, seq, seq);
        weights -- row-wise softmax of scores (each row sums to 1);
        outputs -- weighted sum of values, shape (batch, seq, dim).
    """
    dim_k = key.size(-1)
    # Divide by √dim so the dot products don't grow with the hidden size,
    # which would saturate the softmax.
    scores = torch.bmm(query, key.transpose(1, 2)) / sqrt(dim_k)
    weights = F.softmax(scores, dim=-1)
    outputs = torch.bmm(weights, value)
    return scores, weights, outputs


scores, weights, atten_outputs = scaled_dot_product_attention(Q, K, V)
print(scores)
print("\n")
print(scores.size())
print("--" * 50, "\n")

# Sanity check: softmax rows sum to 1.
print(weights.sum(dim=-1))
print("--" * 50, "\n")

print(atten_outputs.shape)
print("--" * 50, "\n")
