from abc import abstractmethod

import torch
import torch.nn as nn


class BasicReducer(nn.Module):
    """Base class for modules that collapse a list of tensors into one.

    Subclasses implement ``_forward`` to do the actual reduction; dropout
    is then applied to the combined result.

    Args:
        dropout: dropout ratio passed to ``nn.Dropout``.
    Input:
        n * [*, dim]
    Output:
        [*, dim]
    """

    def __init__(self, dropout=0.):
        super().__init__()
        self.dropout = nn.Dropout(dropout)

    def forward(self, inputs):
        # Reduce first, then regularize the combined representation.
        return self.dropout(self._forward(inputs))

    @abstractmethod
    def _forward(self, inputs):
        """Combine the list of tensors into a single tensor."""
        raise NotImplementedError


class MeanReducer(BasicReducer):
    """Reduce a list of tensors by element-wise averaging.

    Input:
        n * [*, dim]
    Output:
        [*, dim]
    """

    def _forward(self, inputs):
        # Stack along a new penultimate axis, then average it away.
        stacked = torch.stack(inputs, dim=-2)  # [*, n, dim]
        return stacked.mean(dim=-2)  # [*, dim]


class SumReducer(BasicReducer):
    """Reduce a list of tensors by element-wise summation.

    Input:
        n * [*, dim]
    Output:
        [*, dim]
    """

    def _forward(self, inputs):
        # Stack along a new penultimate axis, then sum it away.
        stacked = torch.stack(inputs, dim=-2)  # [*, n, dim]
        return stacked.sum(dim=-2)  # [*, dim]




class MlpReducer(BasicReducer):
    """Reduce by concatenating the n inputs and projecting back to ``dim``
    with a single linear layer followed by a non-linearity.

    Args:
        n: number of input tensors to reduce.
        dim: feature dimension of each input (and of the output).
        bias: whether the linear projection uses a bias term.
        dropout: dropout ratio applied to the reduced output.
        activation: activation name; one of "relu", "tanh", "sigmoid".
    Input:
        n * [*, dim]
    Output:
        [*, dim]
    Raises:
        ValueError: if ``activation`` is not a supported name.
    """

    # Supported activations. Previously the ``activation`` argument was
    # silently ignored and ReLU was always used; "relu" stays the default,
    # so existing callers are unaffected.
    _ACTIVATIONS = {
        "relu": nn.ReLU,
        "tanh": nn.Tanh,
        "sigmoid": nn.Sigmoid,
    }

    def __init__(self, n, dim, bias=True, dropout=0., activation="relu"):
        super().__init__(dropout)
        try:
            act_cls = self._ACTIVATIONS[activation]
        except KeyError:
            raise ValueError(
                f"unsupported activation {activation!r}; "
                f"expected one of {sorted(self._ACTIVATIONS)}"
            )
        self.mlp = nn.Sequential(
            nn.Linear(n * dim, dim, bias=bias),
            act_cls(),
        )

    def _forward(self, inputs):
        inputs = torch.cat(inputs, dim=-1)  # [*, n*dim]
        return self.mlp(inputs)
