import torch
import torch.nn as nn
from models.activator.mish import Mish
class DNN(nn.Module):
    """Five-layer fully connected classifier.

    Architecture: input dropout -> 4 x (Linear -> activation -> Dropout)
    -> final Linear producing raw class scores (logits). Softmax is
    intentionally omitted (see the commented-out line below); presumably
    the loss applies it — confirm against the training code.

    Args:
        in_dim: Size of the input feature vector.
        n_hidden_1: Width of the first hidden layer.
        n_hidden_2: Width of the second hidden layer.
        n_hidden_3: Width of the third and fourth hidden layers.
        num_class: Number of output classes (logit dimension).
        dropout_p: Dropout probability for the hidden layers.
        input_dropout_p: Dropout probability applied to the raw input.
            Kept as a separate parameter (default 0.4, the previous
            hard-coded value) so it no longer silently ignores
            ``dropout_p``.
        activation: Optional activation module to use between layers.
            Defaults to the project-local ``Mish()``.
            NOTE(review): torch ships ``nn.Mish`` since 1.9 — the local
            implementation may be replaceable; confirm before swapping.
    """

    def __init__(self, in_dim,
                 n_hidden_1=8192, n_hidden_2=2048, n_hidden_3=1024,
                 num_class=3, dropout_p=0.4, input_dropout_p=0.4,
                 activation=None):
        super().__init__()
        # Attribute names are kept identical to the original so existing
        # checkpoints (state_dict keys) still load.
        self.layer1 = nn.Linear(in_dim, n_hidden_1)
        self.layer2 = nn.Linear(n_hidden_1, n_hidden_2)
        self.layer3 = nn.Linear(n_hidden_2, n_hidden_3)
        self.layer4 = nn.Linear(n_hidden_3, n_hidden_3)
        self.layer5 = nn.Linear(n_hidden_3, num_class)
        self.activator = activation if activation is not None else Mish()
        self.dropout0 = nn.Dropout(p=input_dropout_p)
        self.dropout1 = nn.Dropout(p=dropout_p)
        self.dropout2 = nn.Dropout(p=dropout_p)
        self.dropout3 = nn.Dropout(p=dropout_p)
        self.dropout4 = nn.Dropout(p=dropout_p)
        # self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        """Run the network.

        Args:
            x: Input tensor; last dimension must equal ``in_dim``.

        Returns:
            Tensor of raw logits with last dimension ``num_class``.
        """
        x = self.dropout0(x)
        # Each hidden stage is Linear -> activation -> Dropout.
        stages = (
            (self.layer1, self.dropout1),
            (self.layer2, self.dropout2),
            (self.layer3, self.dropout3),
            (self.layer4, self.dropout4),
        )
        for layer, drop in stages:
            x = drop(self.activator(layer(x)))
        x = self.layer5(x)
        # x = self.softmax(x)
        return x