import math
import torch
from torch import nn


class Time_Embedding(nn.Module):
    """Additive fixed sinusoidal embedding, precomputed once at construction.

    Builds a buffer ``te`` of shape ``(1, 1, hidden_size, seq)`` following the
    sinusoidal scheme of Vaswani et al. ("Attention Is All You Need"):
    even columns hold ``sin(pos * freq)``, odd columns ``cos(pos * freq)``,
    with geometrically decaying frequencies ``exp(-2j * ln(10000) / seq)``.

    NOTE(review): axis convention is unusual — ``position`` indexes the
    *hidden* dimension and frequencies vary along ``seq``. Preserved as-is
    because the buffer shape ``(1, 1, hidden_size, seq)`` is what callers
    broadcast against; confirm this layout matches the input tensors.

    Args:
        hidden_size: size of the third (feature) dimension of the input.
        seq: size of the last (sequence) dimension of the input.
    """

    def __init__(self, hidden_size, seq):
        super(Time_Embedding, self).__init__()
        self.hidden_size = hidden_size
        self.seq = seq

        te = torch.zeros(self.hidden_size, self.seq)
        position = torch.arange(start=0, end=self.hidden_size).unsqueeze(dim=1).float()
        # Negative exponent gives the standard geometric frequency decay
        # (original code was missing the minus sign, yielding frequencies
        # growing from 1 to 10000 instead of shrinking to 1/10000).
        div_term = torch.exp(
            -torch.arange(start=0, end=self.seq, step=2).float() / self.seq * math.log(10000.0)
        )
        angles = position * div_term  # (hidden_size, ceil(seq / 2))
        te[:, 0::2] = torch.sin(angles)
        # For odd seq there is one fewer odd column than even column;
        # truncate so the assignment shapes match instead of raising.
        n_odd = te[:, 1::2].size(1)
        te[:, 1::2] = torch.cos(angles[:, :n_odd])

        # (1, 1, hidden_size, seq) — broadcasts over batch and channel dims.
        te = te.unsqueeze(dim=0).unsqueeze(dim=0)
        # register_buffer ensures te follows the module across .to()/.cuda()
        # and is saved in state_dict without being a trainable parameter.
        self.register_buffer('te', te)

    def forward(self, x):
        """Add the precomputed embedding to ``x`` (broadcast over leading dims).

        Args:
            x: tensor whose trailing dims are ``(hidden_size, seq)``,
               e.g. ``(batch, channels, hidden_size, seq)``.

        Returns:
            ``x + te``, same shape as ``x``.
        """
        # Buffer already lives on the module's device; .to(x.device) kept
        # for safety with inputs on a different device than the module.
        return x + self.te.to(x.device)