import torch
import torch.nn as nn
import plugin


@plugin.register_plugin("model", "FFN")
class FeedForwardNetwork(nn.Module):
    """Multi-layer feed-forward network assembled from ``LinearUnit`` blocks.

    Every hidden transition is a ``LinearUnit`` (linear + optional batchnorm
    + activation + dropout); the final projection is a plain ``nn.Linear``
    with no normalization or activation.

    Args:
        ffn_configs: configuration dict. Must contain "dims", a sequence of
            at least two layer sizes ``[in, hidden..., out]``. All remaining
            keys (e.g. "batchnorm", "act", "dropout") are forwarded to each
            ``LinearUnit``.

    Raises:
        ValueError: if ``ffn_configs["dims"]`` has fewer than two entries
            (the original code would fail later with an opaque IndexError).
    """

    def __init__(self, ffn_configs: dict):
        super().__init__()
        dims = ffn_configs["dims"]
        if len(dims) < 2:
            raise ValueError(
                f'ffn_configs["dims"] needs at least 2 entries, got {dims!r}'
            )
        self.config = ffn_configs
        # Hidden layers: consecutive (in, out) pairs, excluding the final pair.
        layers = [
            LinearUnit(in_feat, out_feat, **ffn_configs)
            for in_feat, out_feat in zip(dims[:-2], dims[1:-1])
        ]
        # Output projection: bare linear, no norm/activation/dropout.
        layers.append(nn.Linear(dims[-2], dims[-1]))
        self.net = nn.Sequential(*layers)

    def forward(self, x: torch.Tensor):
        """Apply the network: [..., dims[0]] -> [..., dims[-1]]."""
        return self.net(x)


@plugin.register_plugin("model", "LinearUnit")
class LinearUnit(nn.Module):
    """Linear layer followed by optional batchnorm, activation, and dropout.

    Args:
        in_feat: input feature size.
        out_feat: output feature size.
        batchnorm: if True, apply ``nn.BatchNorm1d`` after the linear layer.
        act: activation name resolved through the "act" plugin registry.
        dropout: dropout probability applied after the activation.
        *args, **kwargs: ignored; lets callers splat a whole config dict.
    """

    def __init__(
        self,
        in_feat: int,
        out_feat: int,
        batchnorm: bool = True,
        act: str = "relu",
        dropout: float = 0.1,
        *args,
        **kwargs
    ):
        super().__init__()
        self.fc = nn.Linear(in_feat, out_feat)
        self.batchnorm = batchnorm
        if batchnorm:
            # Only created when enabled; forward guards on self.batchnorm.
            self.bn = nn.BatchNorm1d(out_feat)
        self.act = plugin.get_plugin("act", act)()
        self.dropout = nn.Dropout(dropout)

    def forward(self, x: torch.Tensor):
        """Run fc -> (optional batchnorm) -> activation -> dropout.

        Args:
            x: [batch_size, feat] or [batch_size, seq_len, feat].
        """
        out = self.fc(x)
        if self.batchnorm:
            if out.dim() == 2:
                out = self.bn(out)
            elif out.dim() == 3:
                # BatchNorm1d normalizes dim 1; features are last here, so
                # swap them into place and back again.
                out = self.bn(out.transpose(1, 2)).transpose(1, 2)
        return self.dropout(self.act(out))

