# -*- coding: utf-8 -*-
"""
# @file name  : bn_and_initialize.py
# @author     : TingsongYu https://github.com/TingsongYu
# @date       : 2019-11-01
# @brief      : 对比bn与权值初始化，对向量x均方误差的影响
"""
import torch
import torch.nn as nn

from tools.common_tools import set_seed


class MLP(nn.Module):
    """
    Stack of linear layers, each optionally followed by BatchNorm, used to
    compare the effect of BN vs. weight initialization on the std of the
    activations as depth grows.
    """

    def __init__(self, neural_num, layers=100):
        """

        :param neural_num: dimensionality of the input features
        :param layers: number of linear layers
        """
        super(MLP, self).__init__()
        # Linear layers (bias disabled so only the weights drive the statistics)
        self.linears = nn.ModuleList([nn.Linear(neural_num, neural_num, bias=False) for _ in range(layers)])
        # One BN layer paired with each linear layer
        self.bns = nn.ModuleList([nn.BatchNorm1d(neural_num) for _ in range(layers)])
        # input feature dimension
        self.neural_num = neural_num

    def forward(self, x, use_BN):
        """

        :param use_BN: whether to apply BatchNorm after each linear layer
        :param x: input tensor, assumed shape (batch, neural_num) — required by BatchNorm1d
        :return: activations after the last processed layer
        """
        # Each linear layer is optionally followed by BN for normalization.
        # BN sits after the affine layer and before the activation.
        # Fix: iterate the paired layers with the idiomatic
        # enumerate(zip(...)) instead of zip(enumerate(...), ...).
        for i, (linear, bn) in enumerate(zip(self.linears, self.bns)):
            x = linear(x)
            if use_BN:
                x = bn(x)
            x = torch.relu(x)

            # Stop as soon as the activation statistics degenerate to NaN
            if torch.isnan(x.std()):
                print("output is nan in {} layers".format(i))
                break

            # Report the std of each layer's output
            print("layers:{}, std:{}".format(i, x.std().item()))

        return x

    def initialize(self):
        """
            Apply a weight-initialization scheme to every linear layer.
        :return:
        """
        for m in self.modules():
            if isinstance(m, nn.Linear):
                # method 1
                # nn.init.normal_(m.weight.data, std=1)    # normal: mean=0, std=1

                # method 2: Kaiming initialization, suited to ReLU networks
                nn.init.kaiming_normal_(m.weight.data)


if __name__ == '__main__':
    set_seed(1)  # make the run reproducible

    feature_dim = 256  # input feature dimension
    n_layers = 100     # number of linear layers
    n_samples = 16     # samples per batch

    # Build the network and a random input batch.
    net = MLP(feature_dim, n_layers)
    inputs = torch.randn((n_samples, feature_dim))

    # Toggle BatchNorm on/off for the comparison.
    use_bn = True

    # Without BN, fall back to Kaiming-normal weight initialization.
    if not use_bn:
        net.initialize()

    output = net(inputs, use_bn)

    # Comparing both settings shows that with BN the activations of x stay
    # stable without any carefully tuned weight initialization.
    print(output)
