import torch
from torch.nn import Module
from torch.nn import Linear, Softmax
class DynamicWeightLayer(Module):
    """SENet-style field re-weighting layer.

    Squeezes each input field tensor to a per-sample scalar (mean over
    its feature dimension), passes the squeezed vector through two
    linear layers each followed by a softmax over the field axis to
    produce one weight per field, then rescales every field by its
    learned per-sample weight.

    Args:
        in_feature: number of input fields (the length of the list
            passed to ``forward``); both linear layers map
            ``in_feature -> in_feature``.
    """

    def __init__(self, in_feature):
        # BUG FIX: the original called super(DynamicWeightLayer).__init__(),
        # which omits ``self`` and never runs Module.__init__, crashing on
        # the first submodule assignment below.
        super().__init__()
        self.layer1 = Linear(in_feature, in_feature)
        self.layer2 = Linear(in_feature, in_feature)
        # BUG FIX: Softmax's argument is ``dim``, not a feature count;
        # normalize across the field axis (dim=1).
        self.softmax = Softmax(dim=1)

    def forward(self, input):
        """Re-weight each field tensor.

        Args:
            input: list of ``in_feature`` tensors, each of shape
                (batch, dim_i) — dims may differ per field.

        Returns:
            List of tensors with the same shapes as the inputs, each
            field multiplied by its per-sample weight.
        """
        # Squeeze: per-sample mean of each field -> (batch, field_num).
        # BUG FIX: the original torch.sum(input_field) reduced the whole
        # batch to one scalar; reduce per sample (dim=1) instead. Using
        # torch.stack also keeps z on the input's device/dtype and in the
        # autograd graph (the original torch.zeros buffer was CPU-only).
        z = torch.stack(
            [field.sum(dim=1) / field.shape[1] for field in input], dim=1
        )

        # Excitation: two linear layers, each softmax-normalized over fields.
        z = self.softmax(self.layer1(z))
        z = self.softmax(self.layer2(z))

        # BUG FIX: z[:, i] has shape (batch,), which does not broadcast
        # against (batch, dim); add a trailing axis before multiplying.
        return [field * z[:, i].unsqueeze(1) for i, field in enumerate(input)]

    # Backward-compatible alias: the method was originally misspelled
    # ``farward``; keep that name callable for existing callers.
    farward = forward

