# -*- coding: utf-8 -*-
"""
# @file name  : bn_and_initialize.py
# @author     : TingsongYu https://github.com/TingsongYu
# @date       : 2019-11-01
# @brief      :
BN的三个方法
    1、nn.BatchNorm1d：每个特征维度为一个一维数组
    2、nn.BatchNorm2d：每个特征维度为一个二维数组
    3、nn.BatchNorm3d：每个特征维度是一个三维数组
主要属性：
    1、running_mean：均值
        running_mean=(1 - momentum) * pre_running_mean + momentum * cur_mean
    2、running_var：方差
        running_var=(1 - momentum) * pre_running_var + momentum * cur_var
    3、weight：affine transform的γ
    4、bias：affine transform的β
"""
import torch
import torch.nn as nn
from tools.common_tools import set_seed


def BatchNorm_1d_test():
    """Demonstrate nn.BatchNorm1d and its running-statistics update rule.

    Builds a (3, 5, 1) input (batch, features, feature_dim) where feature
    channel k holds the constant value k + 1, runs two training-mode forward
    passes, and prints BN's running_mean / running_var after each pass.
    In parallel it tracks the statistics of the second channel by hand
    (its batch mean is 2, batch variance 0) using the same update rule:
        running = (1 - momentum) * running + momentum * batch_stat
    so the printed hand-tracked values can be compared against BN's buffers.
    """
    n_batch = 3       # mini-batch size
    n_feats = 5       # feature channels per sample
    mom = 0.3         # BN momentum
    feat_dim = 1      # length of each 1-D feature

    # Channel k of every sample is the constant k + 1.
    unit = torch.ones(feat_dim)
    sample = torch.stack([unit * (k + 1) for k in range(n_feats)], dim=0)  # 2D
    batch = torch.stack([sample] * n_batch, dim=0)  # 3D: (3, 5, 1)

    print("input data:\n{} shape is {}".format(batch, batch.shape))

    bn = nn.BatchNorm1d(num_features=n_feats, momentum=mom)

    # Hand-tracked stats start from BN's defaults: mean 0, variance 1.
    tracked_mean, tracked_var = 0, 1

    # Two iterations so the exponential-moving-average update is visible.
    for step in range(2):
        normalized = bn(batch)

        print("iteration:{}, running mean: {} ".format(step, bn.running_mean))
        print("iteration:{}, running var:{} ".format(step, bn.running_var))
        print("经过BN后的数据：{}".format(normalized))

        # Second channel of this batch: mean 2, variance 0 (constant data).
        batch_mean, batch_var = 2, 0

        # Apply BN's update rule by hand for comparison with the buffers above.
        tracked_mean = (1 - mom) * tracked_mean + mom * batch_mean
        tracked_var = (1 - mom) * tracked_var + mom * batch_var

        print("iteration:{}, 第二个特征的running mean: {} ".format(step, tracked_mean))
        print("iteration:{}, 第二个特征的running var:{}".format(step, tracked_var))


def BatchNorm_2d_test():
    """Demonstrate nn.BatchNorm2d parameter and buffer shapes.

    Builds a (3, 6, 2, 2) input (batch, channels, H, W), runs two
    training-mode forward passes, and prints the shapes of the BN buffers
    (running_mean, running_var) and learnable affine parameters (weight = γ,
    bias = β).  All four have shape (num_features,): one scalar per channel.
    """
    batch_size = 3
    num_features = 6
    momentum = 0.3

    features_shape = (2, 2)  # spatial size (H, W) of each channel

    feature_map = torch.ones(features_shape)  # 2D: one (H, W) channel
    # Channel i holds the constant value i + 1.
    feature_maps = torch.stack([feature_map * (i + 1) for i in range(num_features)], dim=0)  # 3D
    feature_maps_bs = torch.stack([feature_maps for i in range(batch_size)], dim=0)  # 4D

    print("input data:\n{} shape is {}".format(feature_maps_bs, feature_maps_bs.shape))

    bn = nn.BatchNorm2d(num_features=num_features, momentum=momentum)

    for i in range(2):
        # Forward pass is needed only for its side effect of updating the
        # running statistics; the normalized output itself is not used here.
        bn(feature_maps_bs)

        print("\niter:{}, running_mean.shape: {}".format(i, bn.running_mean.shape))
        print("iter:{}, running_var.shape: {}".format(i, bn.running_var.shape))

        print("iter:{}, weight.shape: {}".format(i, bn.weight.shape))
        print("iter:{}, bias.shape: {}".format(i, bn.bias.shape))


def BatchNorm_3d_test():
    """Demonstrate nn.BatchNorm3d parameter and buffer shapes.

    Builds a (3, 4, 2, 2, 3) input (batch, channels, D, H, W), runs two
    training-mode forward passes, and prints the shapes of the BN buffers
    (running_mean, running_var) and learnable affine parameters (weight = γ,
    bias = β).  All four have shape (num_features,): one scalar per channel.
    """
    batch_size = 3
    num_features = 4
    momentum = 0.3

    features_shape = (2, 2, 3)  # volumetric size (D, H, W) of each channel

    feature = torch.ones(features_shape)  # 3D: one (D, H, W) channel
    # Channel i holds the constant value i + 1.
    feature_map = torch.stack([feature * (i + 1) for i in range(num_features)], dim=0)  # 4D
    feature_maps = torch.stack([feature_map for i in range(batch_size)], dim=0)  # 5D

    print("input data:\n{} shape is {}".format(feature_maps, feature_maps.shape))

    bn = nn.BatchNorm3d(num_features=num_features, momentum=momentum)

    for i in range(2):
        # Forward pass is needed only for its side effect of updating the
        # running statistics; the normalized output itself is not used here.
        bn(feature_maps)

        print("\niter:{}, running_mean.shape: {}".format(i, bn.running_mean.shape))
        print("iter:{}, running_var.shape: {}".format(i, bn.running_var.shape))

        print("iter:{}, weight.shape: {}".format(i, bn.weight.shape))
        print("iter:{}, bias.shape: {}".format(i, bn.bias.shape))


if __name__ == '__main__':
    # Fix the RNG seed so the demo output is reproducible across runs.
    set_seed(1)

    # Run the BatchNorm1d, BatchNorm2d and BatchNorm3d demos in order.
    for demo in (BatchNorm_1d_test, BatchNorm_2d_test, BatchNorm_3d_test):
        demo()
