import torch.nn as nn


class Fwd(nn.Module):
    """Fully-connected feed-forward net: (Linear -> ReLU)* hidden layers,
    then a Linear -> Sigmoid output head, so outputs lie in (0, 1).

    Args:
        fnn_size: Sequence of layer widths ``[in, h1, ..., out]``; needs at
            least two entries (input and output width).
        learning_rate: Stored as ``self.lr``; unused inside this module —
            presumably read by an external optimizer setup (TODO confirm).
    """

    def __init__(self, fnn_size, learning_rate=0.0001):
        super().__init__()  # zero-arg super: idiomatic Python 3
        if len(fnn_size) < 2:
            raise ValueError(
                "fnn_size needs at least 2 entries (input and output width), "
                f"got {len(fnn_size)}"
            )
        self.fnn_size = fnn_size
        self.lr = learning_rate
        self.fnn = self.build_fnn()

    def build_fnn(self):
        """Build the ``nn.Sequential`` stack described by ``self.fnn_size``.

        Returns:
            nn.Sequential: hidden Linear+ReLU pairs followed by the
            Linear+Sigmoid output layer.
        """
        size = self.fnn_size
        layers = []
        # Hidden layers: every adjacent width pair except the final one.
        for i in range(len(size) - 2):
            layers.append(nn.Linear(size[i], size[i + 1]))
            layers.append(nn.ReLU())
        # Output head: last Linear squashed through Sigmoid.
        layers.append(nn.Linear(size[-2], size[-1]))
        layers.append(nn.Sigmoid())

        return nn.Sequential(*layers)

    def forward(self, x):
        """Run ``x`` (shape ``(batch, fnn_size[0])``) through the network."""
        return self.fnn(x)


class Inv(nn.Module):
    """Fully-connected feed-forward net: (Linear -> ReLU)* hidden layers,
    then a Linear -> Sigmoid output head, so outputs lie in (0, 1).

    Structurally identical to ``Fwd``; the two differ only in the layer-width
    lists they are built from (forward vs. inverse model).

    Args:
        inn_size: Sequence of layer widths ``[in, h1, ..., out]``; needs at
            least two entries (input and output width).
        learning_rate: Stored as ``self.lr``; unused inside this module —
            presumably read by an external optimizer setup (TODO confirm).
    """

    def __init__(self, inn_size, learning_rate=0.0001):
        super().__init__()  # zero-arg super: idiomatic Python 3
        if len(inn_size) < 2:
            raise ValueError(
                "inn_size needs at least 2 entries (input and output width), "
                f"got {len(inn_size)}"
            )
        self.inn_size = inn_size
        self.lr = learning_rate
        self.inn = self.build_inn()

    def build_inn(self):
        """Build the ``nn.Sequential`` stack described by ``self.inn_size``.

        Returns:
            nn.Sequential: hidden Linear+ReLU pairs followed by the
            Linear+Sigmoid output layer.
        """
        size = self.inn_size
        layers = []
        # Hidden layers: every adjacent width pair except the final one.
        for i in range(len(size) - 2):
            layers.append(nn.Linear(size[i], size[i + 1]))
            layers.append(nn.ReLU())
        # Output head: last Linear squashed through Sigmoid.
        layers.append(nn.Linear(size[-2], size[-1]))
        layers.append(nn.Sigmoid())

        return nn.Sequential(*layers)

    def forward(self, x):
        """Run ``x`` (shape ``(batch, inn_size[0])``) through the network."""
        return self.inn(x)
