# LayerNorm demo: statistics are computed per sample, keeping the batch dim B independent.
import torch
import torch.nn as nn

from einops import rearrange, reduce

# Fix the RNG so every run of the demo produces the same tensor.
torch.random.manual_seed(0)

x = torch.randn(32, 3, 224, 224)  # [B, C, H, W]

# --- optional experiment: shift the mean of a few samples (uncomment) ---
# x[0] = x[0] - x[0].mean() + 400.
# x[1] = x[1] - x[1].mean() + 90.
# x[2] = x[2] - x[2].mean() + 7.
# print(50 * '=')
# for i in range(4):
#     print(x[i].mean())
# print(50 * '=')

# --- optional experiment: shift the mean of each channel (uncomment) ---
# x[:, 0, :, :] = x[:, 0, :, :] - x[:, 0, :, :].mean() + 400.
# x[:, 1, :, :] = x[:, 1, :, :] - x[:, 1, :, :].mean() + 90.
# x[:, 2, :, :] = x[:, 2, :, :] - x[:, 2, :, :].mean() + 7.
# print(50 * '=')
# for i in range(3):
#     print(x[:, i, :, :].mean())
# print(50 * '=')

# Unpack the dimensions once; the rest of the script reuses them.
b, c, h, w = x.size()
print(x)

# --- PyTorch reference implementation ---
# Normalize over the trailing [C, H, W] dims, i.e. over each whole sample.
# elementwise_affine=False disables the learnable gamma/beta mapping.
ln = nn.LayerNorm(normalized_shape=[c, h, w], eps=1e-12, elementwise_affine=False)
y = ln(x)
print(y)

# Manual re-implementation with einops for readability: flatten each sample's
# (C, H, W) values into one axis and reduce over it per batch element.
print(50 * '-')
x_ = rearrange(x, 'b c h w -> (h w c) b')
mean = rearrange(x_.mean(dim=0), 'b -> b 1 1 1')
print(mean)
# nn.LayerNorm divides by sqrt(biased population variance + eps).
# torch's .std() defaults to Bessel's correction (N-1), which caused the
# small ~1e-5 mismatch against the reference; use unbiased=False and the
# same eps (1e-12) as the nn.LayerNorm constructed above.
var = rearrange(x_.var(dim=0, unbiased=False), 'b -> b 1 1 1')
std = torch.sqrt(var + 1e-12)
print(std)
y_ = (x - mean) / std
print(y_)

print(30 * '=', 'y', 30 * '=')
# Per-channel means of the PyTorch result: LN does not center channels,
# so these remain clearly non-zero.
for i in range(c):
    print(y[:, i, :, :].mean())
print(50 * '-')
# Per-sample means: LN centers each sample, so these are ~0.
# (Original looped over y_.shape[0] while indexing y; use y consistently.)
for i in range(y.shape[0]):
    print(y[i].mean())

print(30 * '=', 'y_', 30 * '=')
# Same two checks for the manual implementation.
for i in range(c):
    print(y_[:, i, :, :].mean())
print(50 * '-')
for i in range(y_.shape[0]):
    print(y_[i].mean())

# Maximum elementwise deviation between PyTorch and the manual version.
print('diff={}'.format(torch.abs(y - y_).max()))
# observed: diff on the order of 1e-5 (depends on the std estimator / eps used)

# LayerNorm: every sample has (near-)zero mean, but means still differ
# substantially across channels.