import torch

def precompute_freqs_cis(dim, seqlen, theta = 10000.0):
    """Build the (seqlen, dim // 2) matrix of RoPE rotation angles.

    Entry [p, i] equals p / theta**(2i / dim): token position p times the
    inverse frequency assigned to channel pair i.

    Args:
        dim: embedding dimension (channels are consumed in adjacent pairs).
        seqlen: number of token positions to precompute.
        theta: RoPE base; larger values give slower-rotating channel pairs.

    Returns:
        float32 tensor of shape (seqlen, dim // 2) holding the angles.
    """
    # One inverse frequency per channel pair; the slice guards against odd `dim`.
    inv_freq = 1.0 / (theta ** (torch.arange(0, dim, 2)[: (dim // 2)].float() / dim))
    # Token positions 0 .. seqlen - 1.
    positions = torch.arange(seqlen)
    # Outer product: row p is p * inv_freq.
    return torch.outer(positions, inv_freq).float()

embedding_dim = 8
sequence_length = 5

# Token embedding: all ones, so the positional signal is easy to read off in the output.
token_embedding = torch.ones((sequence_length, embedding_dim))

# (sequence_length, embedding_dim // 2) matrix of angles: position * inverse frequency.
freqs = precompute_freqs_cis(embedding_dim, sequence_length)

# Classic sinusoidal (absolute) positional encoding:
# even channels get sin(angle), odd channels get cos(angle).
pe = torch.zeros(sequence_length, embedding_dim)
pe[:, 0::2] = torch.sin(freqs)
pe[:, 1::2] = torch.cos(freqs)

# Absolute positional encoding is *added* to the token embedding.
pe_out = token_embedding + pe
print(pe_out)

# Rotary positional embedding (RoPE):
# turn each angle into a unit complex number e^{i*angle} (magnitude 1, phase = angle) ...
freqs_cis = torch.polar(torch.ones_like(freqs), freqs)
# ... reinterpret adjacent embedding channels as (real, imag) components ...
token_embedding_cis = torch.view_as_complex(token_embedding.reshape(sequence_length,-1,2))
# ... rotate each pair by complex multiplication, then flatten back to a real (seq, dim) tensor.
rope_out = torch.view_as_real(token_embedding_cis * freqs_cis).flatten(1)
print(rope_out)