import torch
import torch.nn as nn
from models.activator.mish import Mish

class BasicBlock(nn.Module):
    """Fully connected residual block.

    Computes: dropout -> fc1 -> act, then a two-layer residual branch
    (fc2 -> act -> fc3 -> act) whose output is added back to the fc1
    activation, i.e. ``out = act(fc1(drop(x))) + act(fc3(act(fc2(...))))``.

    Args:
        n_hidden_1: input feature dimension.
        n_hidden_2: output (and internal hidden) feature dimension.
        activator: activation module applied after every linear layer.
            Defaults to a fresh ``Mish()`` per instance. (``None`` instead
            of ``Mish()`` as the default avoids constructing one shared
            instance at import time — the mutable-default-argument pitfall.)
        dropout_p: dropout probability applied to the block input.
    """

    def __init__(self, n_hidden_1, n_hidden_2, activator=None, dropout_p=0.5):
        super().__init__()
        self.fc1 = nn.Linear(n_hidden_1, n_hidden_2)
        self.fc2 = nn.Linear(n_hidden_2, n_hidden_2)
        self.fc3 = nn.Linear(n_hidden_2, n_hidden_2)
        # Instantiate the default lazily so each block owns its own
        # activation module instead of all blocks sharing one object.
        self.activator = Mish() if activator is None else activator
        self.dropout = nn.Dropout(p=dropout_p)

    def forward(self, x):
        """Map ``(batch, n_hidden_1)`` to ``(batch, n_hidden_2)``."""
        x = self.dropout(x)
        x = self.activator(self.fc1(x))

        # Skip connection starts after fc1, where the width first
        # matches the residual branch's output width.
        residual = x

        out = self.activator(self.fc2(x))
        out = self.activator(self.fc3(out))

        return residual + out

class DNN(nn.Module):
    """Residual MLP classifier: a stack of ``BasicBlock``s followed by a
    linear classification head.

    Args:
        in_dim: input feature dimension.
        num_class: number of output classes (size of the final linear layer).
        hidden_list: widths of the successive hidden blocks. Defaults to
            ``[4096, 2048, 1024, 1024, 1024, 512]``.
        dropout_p_list: per-block dropout probabilities; must have at least
            ``len(hidden_list)`` entries. Defaults to
            ``[0.5, 0.5, 0.4, 0.4, 0.4, 0.2]``.

    Raises:
        ValueError: if ``dropout_p_list`` is shorter than ``hidden_list``.
    """

    def __init__(self, in_dim, num_class=3, hidden_list=None, dropout_p_list=None):
        super().__init__()
        # None-defaults instead of list literals: the old defaults were
        # mutated in place (hidden_list.insert(0, in_dim)), so a second
        # DNN() built with defaults silently got a different, growing
        # architecture — the classic mutable-default-argument bug.
        if hidden_list is None:
            hidden_list = [4096, 2048, 1024, 1024, 1024, 512]
        if dropout_p_list is None:
            dropout_p_list = [0.5, 0.5, 0.4, 0.4, 0.4, 0.2]
        if len(dropout_p_list) < len(hidden_list):
            raise ValueError(
                "dropout_p_list must have at least as many entries as hidden_list"
            )
        # Fresh list: never mutate the caller's (or the default) list.
        dims = [in_dim] + list(hidden_list)
        self.residual_mlp = nn.Sequential(*[
            BasicBlock(dims[i], dims[i + 1], dropout_p=dropout_p_list[i])
            for i in range(len(dims) - 1)
        ])
        self.fc = nn.Linear(dims[-1], num_class)

    def forward(self, x):
        """Map ``(batch, in_dim)`` to class logits ``(batch, num_class)``."""
        x = self.residual_mlp(x)
        x = self.fc(x)
        return x

if __name__ == '__main__':
    # Smoke test: forward one zero batch through the network.
    model = DNN(20000, 3)
    model.eval()  # disable dropout so the smoke output is deterministic
    inputs = torch.zeros((8, 20000))  # renamed from `input`: don't shadow the builtin
    output = model(inputs)
    print(output)