# -*- coding: utf-8 -*-

import torch
import numpy as np
import torch.nn as nn


class PositionalEncoding(nn.Module):
    """Fixed (non-learned) sinusoidal positional encoding, as in "Attention
    Is All You Need".

    Adds a precomputed sin/cos position table to token embeddings. The table
    is registered as a non-persistent buffer so it follows the module across
    ``.to(device)`` / ``.cuda()`` calls (the original code kept it as a plain
    CPU attribute, which crashed on GPU inputs) while leaving the state_dict
    unchanged, so existing checkpoints still load.
    """

    def __init__(self, d_hid, n_position):
        """
        :param d_hid: embedding dimension E of the incoming tokens
        :param n_position: maximum sequence length the table covers
        """
        super(PositionalEncoding, self).__init__()
        self.d_hid = d_hid
        self.n_position = n_position
        # pos_table: [1, n_position, E] (torch.float). persistent=False keeps
        # it out of the state_dict (backward compatible with old checkpoints)
        # while still moving it with the module's device/dtype.
        self.register_buffer(
            "pos_table",
            self._get_sinusoid_encoding_table(self.n_position, self.d_hid),
            persistent=False,
        )

    @staticmethod
    def _get_sinusoid_encoding_table(n_position, d_hid):
        """Build the sinusoid position encoding table with pure torch ops.

        angle[pos, j] = pos / 10000^(2*(j//2)/d_hid); even dims get sin,
        odd dims get cos.

        :return: tensor of shape [1, n_position, d_hid] (torch.float32)
        """
        # [n_position, 1] column of positions 0..n_position-1
        position = torch.arange(n_position, dtype=torch.float32).unsqueeze(1)
        # [d_hid] per-dimension denominators; 2*(j//2) pairs dims (0,1), (2,3), ...
        dim_idx = torch.arange(d_hid)
        denom = torch.pow(10000.0, (2 * (dim_idx // 2)) / d_hid)

        table = position / denom  # broadcast -> [n_position, d_hid]
        table[:, 0::2] = torch.sin(table[:, 0::2])  # dim 2i
        table[:, 1::2] = torch.cos(table[:, 1::2])  # dim 2i+1

        # [n_position, d_hid] --> [1, n_position, d_hid] for batch broadcast
        return table.unsqueeze(0)

    def forward(self, token_embedding):
        """Add positional encodings to a batch of token embeddings.

        :param token_embedding: [B, L, E] (torch.float), L <= n_position
        :return: [B, L, E] tensor: token_embedding + pos_table[:, :L, :]
        """
        # The table is a constant buffer (never trained); detach() keeps any
        # autograd graph from reaching it, matching the original behavior.
        return token_embedding + self.pos_table[:, :token_embedding.size(1), :].detach()
