import torch
import torch.nn as nn
from models.activator.mish import Mish
from models.dnn0 import DNN


# from models.dnn_residual import DNN
# from models.dnn_1 import DNN
class DNNs(nn.Module):
    """Ensemble of five identically configured ``DNN`` classifiers.

    The forward pass runs each member on the same input, softmaxes each
    member's logits, and averages the five probability distributions.

    Args:
        pth_path_list: Optional list of (at least five) checkpoint paths,
            one per member, loaded in order into dnn0..dnn4. ``None``
            leaves the members randomly initialized.
    """

    def __init__(self, pth_path_list=None):
        super().__init__()
        # Named attributes (dnn0..dnn4) are kept so existing callers that
        # reference e.g. `self.dnn0` directly continue to work.
        # NOTE(review): 28169 presumably matches the feature dimension of
        # the dataset — confirm against the data pipeline.
        for i in range(5):
            setattr(self, f"dnn{i}", DNN(28169, 4096, 512, 3, dropout_p=0.4))
        self.softmax = nn.Softmax(dim=1)
        if pth_path_list is not None:
            # BUG FIX: the original loop enumerated only dnn0..dnn3, so
            # dnn4 silently kept its random initialization even though its
            # (untrained) output was still averaged into the ensemble.
            members = [self.dnn0, self.dnn1, self.dnn2, self.dnn3, self.dnn4]
            for dnn, pth_path in zip(members, pth_path_list):
                dnn.load_state_dict(torch.load(pth_path))

    def forward(self, x):
        """Run all members on ``x``.

        Returns:
            Tuple ``(output, x1_0, x1_1, x1_2, x1_3, x1_4)`` where
            ``output`` is the mean of the five softmaxed member outputs and
            each ``x1_i`` is the second value returned by member ``i``
            (an intermediate activation, per the DNN implementation).
        """
        members = [self.dnn0, self.dnn1, self.dnn2, self.dnn3, self.dnn4]
        # Each member returns (logits, intermediate); keep both streams.
        results = [dnn(x) for dnn in members]
        probs = [self.softmax(logits) for logits, _ in results]
        # Average the probability distributions (not the raw logits).
        output = sum(probs) / len(probs)
        intermediates = [hidden for _, hidden in results]
        return (output, *intermediates)
