import torch
import torch.nn as nn


class DNN(nn.Module):
    """Feed-forward classifier: a stack of Linear -> (BatchNorm1d) -> ReLU layers."""

    def __init__(self, inputs_dim, hidden_units=(300, 300, 128), use_bn=True):
        """
        :param inputs_dim: input feature dimension; if <= 0, the first entry of
            ``hidden_units`` is treated as the input dimension instead
        :param hidden_units: sizes of the successive hidden layers (non-empty)
        :param use_bn: whether to apply BatchNorm1d after each linear layer
        :raises ValueError: if ``hidden_units`` is empty
        """
        super(DNN, self).__init__()
        self.use_bn = use_bn
        if len(hidden_units) == 0:
            raise ValueError("hidden_units is empty!!")
        # Prepend the input dimension so consecutive pairs define each layer's
        # (fan_in, fan_out); a non-positive inputs_dim means hidden_units[0]
        # already is the input dimension.
        if inputs_dim > 0:
            dims = [inputs_dim] + list(hidden_units)
        else:
            dims = list(hidden_units)
        layer_sizes = list(zip(dims[:-1], dims[1:]))

        self.linears = nn.ModuleList(
            nn.Linear(fan_in, fan_out) for fan_in, fan_out in layer_sizes
        )

        if self.use_bn:
            self.bn = nn.ModuleList(
                nn.BatchNorm1d(fan_out) for _, fan_out in layer_sizes
            )

        self.activation_layers = nn.ModuleList(
            nn.ReLU(inplace=True) for _ in layer_sizes
        )

        # Small-std Gaussian init on weights only; biases keep nn.Linear's default.
        for name, tensor in self.linears.named_parameters():
            if 'weight' in name:
                nn.init.normal_(tensor, mean=0, std=0.0001)

    def forward(self, inputs):
        """Apply each Linear -> (BatchNorm1d) -> ReLU stage in order.

        :param inputs: tensor of shape (batch, inputs_dim)
        :return: tensor of shape (batch, hidden_units[-1]), non-negative (ReLU output)
        """
        deep_input = inputs
        for i, linear in enumerate(self.linears):
            fc = linear(deep_input)
            if self.use_bn:
                fc = self.bn[i](fc)
            deep_input = self.activation_layers[i](fc)
        return deep_input
