import imp


import torch
from torch.nn import Module
from torch.nn import Linear, ReLU
class Activate(Module):
    """Attention-style activation unit.

    Scores each item in a clicked-item sequence against a target item via a
    two-layer MLP, then returns the attention-weighted sum of the sequence.

    Parameters
    ----------
    item_dim : int
        Feature dimension of items; both linear layers map item_dim -> item_dim.
    """

    def __init__(self, item_dim):
        # BUG FIX: original used ``super(Activate).__init__()``, which calls
        # __init__ on the unbound super proxy itself and never runs
        # Module.__init__ — the subsequent submodule assignments would raise
        # "cannot assign module before Module.__init__() call".
        super().__init__()
        self.layer1 = Linear(item_dim, item_dim)
        self.layer2 = Linear(item_dim, item_dim)
        self.relu = ReLU()

    def forward(self, input_item, clicked_sequence):
        """Compute the attention-weighted aggregation of ``clicked_sequence``.

        Parameters
        ----------
        input_item : torch.Tensor
            Target item representation. Shape must be matmul-compatible with
            the transformed sequence — assumed by the caller; TODO confirm.
        clicked_sequence : torch.Tensor
            Sequence of clicked-item representations (last dim = item_dim,
            presumably — verify against caller).

        Returns
        -------
        torch.Tensor
            Attention-weighted sum over dim=1 of the scored sequence.
        """
        seq = self.layer1(clicked_sequence)
        seq = self.relu(seq)
        # BUG FIX: original repeated ``self.layer1(clicked_sequence)`` here,
        # discarding the ReLU output and leaving self.layer2 unused; the
        # two-layer scorer clearly intends layer2 applied to the ReLU output.
        seq = self.layer2(seq)
        # Attention scores between the target item and the transformed
        # sequence; resulting shape must broadcast against clicked_sequence.
        att = torch.matmul(input_item, seq)
        out = att * clicked_sequence
        out = torch.sum(out, dim=1)
        return out

