import numpy as np

import torch


def getPositionEncoding(seq_len, dim, n=10000):
    """Return sinusoidal positional encodings as a NumPy array.

    Builds the classic Transformer positional-encoding table:
        PE[pos, 2i]   = sin(pos / n**(2i/dim))
        PE[pos, 2i+1] = cos(pos / n**(2i/dim))

    Args:
        seq_len: number of positions (rows).
        dim: encoding dimension (columns). For odd ``dim`` the last
            column is left at zero, matching the original loop version.
        n: base of the frequency geometric progression (default 10000).

    Returns:
        np.ndarray of shape (seq_len, dim), dtype float64.
    """
    PE = np.zeros(shape=(seq_len, dim))
    half = dim // 2
    # Broadcast (seq_len, 1) positions against (half,) frequencies instead
    # of a Python-level double loop — same values, vectorized in C.
    position = np.arange(seq_len)[:, np.newaxis]
    denominator = np.power(n, 2 * np.arange(half) / dim)
    angles = position / denominator
    # Explicit stop index keeps the slice width equal to `half` even when
    # dim is odd (the original loop never touched the final odd column).
    PE[:, 0:2 * half:2] = np.sin(angles)
    PE[:, 1:2 * half + 1:2] = np.cos(angles)
    return PE


def getPositionTensorEncoding(seq_len, dim, n=10000):
    """Return sinusoidal positional encodings as a torch.Tensor.

    Torch counterpart of ``getPositionEncoding``:
        PE[pos, 2i]   = sin(pos / n**(2i/dim))
        PE[pos, 2i+1] = cos(pos / n**(2i/dim))

    Args:
        seq_len: number of positions (rows).
        dim: encoding dimension (columns); assumed even — the even/odd
            slice assignment below requires matching widths.
        n: base of the frequency geometric progression (default 10000).
            Bug fix: previously this parameter was accepted but ignored
            (10000 was hard-coded), so non-default ``n`` had no effect.

    Returns:
        torch.Tensor of shape (seq_len, dim), dtype float32.
    """
    position = torch.arange(0, seq_len).float().unsqueeze(1)  # (seq_len, 1)
    _2i = torch.arange(0, dim, 2).float()                     # (dim/2,)
    # Compute the divisor once (was duplicated) and use `n`, not 10000.
    divisor = n ** (_2i / dim)
    # Fill sin/cos into interleaved even/odd columns.
    position_encoding = torch.zeros((seq_len, dim))
    position_encoding[:, 0::2] = torch.sin(position / divisor)
    position_encoding[:, 1::2] = torch.cos(position / divisor)

    return position_encoding


if __name__ == "__main__":
    # Guarded entry point: previously these ran on import as module-level
    # side effects. Demo/smoke-check comparing the two implementations on
    # the same (20, 10) table.
    np_value = getPositionEncoding(20, 10)
    tensor_value = getPositionTensorEncoding(20, 10)

    print(np_value)
    print(tensor_value)
