import numpy as np
import matplotlib.pyplot as plt 

def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-x)).

    Numerically stable form: the naive ``1/(1+np.exp(-x))`` overflows in
    ``np.exp`` for large negative ``x`` (emits a RuntimeWarning and computes
    through ``inf``).  Using ``exp(-|x|)`` keeps the exponent non-positive,
    so ``z`` is always in (0, 1] and no overflow can occur.

    Parameters
    ----------
    x : array_like
        Input values (scalars are returned as 0-d ndarrays).

    Returns
    -------
    numpy.ndarray
        Values in the open interval (0, 1), same shape as ``x``.
    """
    x = np.asarray(x, dtype=float)
    z = np.exp(-np.abs(x))
    # For x >= 0: 1/(1+exp(-x));  for x < 0: exp(x)/(1+exp(x)) — same value,
    # but each branch only ever exponentiates a non-positive number.
    return np.where(x >= 0, 1.0 / (1.0 + z), z / (1.0 + z))

def relu(x):
    """Rectified linear unit: element-wise max(x, 0).

    Clamps every negative entry to zero and passes non-negative
    entries through unchanged.
    """
    # Clipping from below at 0 with no upper bound is equivalent
    # to np.maximum(0, x).
    return np.clip(x, 0, None)

input_data = np.random.randn(1000, 100)  # 1000 samples, 100 features each
node_num = 100                 # number of nodes (neurons) per hidden layer
hidden_layer_size = 5          # the network has 5 hidden layers
activations = {}               # layer index -> activation array

# Forward pass: each layer's output becomes the next layer's input.
x = input_data
for layer in range(hidden_layer_size):
    # Weight-initialization experiments — switch the active line to compare:
    # w = np.random.randn(node_num, node_num) * 1
    # w = np.random.randn(node_num, node_num) * 0.01

    # Xavier initialization (pairs with the sigmoid activation):
    # w = np.random.randn(node_num, node_num) * np.sqrt(1.0 / node_num)

    # He initialization (pairs with the ReLU activation):
    w = np.random.randn(node_num, node_num) * np.sqrt(2.0 / node_num)

    # Activation function (swap in sigmoid when using Xavier init above):
    # z = sigmoid(x @ w)
    z = relu(x @ w)

    activations[layer] = z
    x = z  # feed this layer's output into the next layer

# Plot a histogram of the activation values of each hidden layer,
# one subplot per layer, side by side for comparison.
for i,a in activations.items():
    plt.subplot(1,len(activations),i+1)
    plt.title(str(i+1)+"-layer")
    # Hide y-axis tick labels on all but the first subplot to reduce clutter.
    if i !=0: plt.yticks([],[])

    # 30 bins over [0, 1].
    # NOTE(review): range=(0,1) silently drops activations > 1, which He-init
    # ReLU layers do produce — consider removing `range` (or widening it)
    # when the ReLU branch is active; confirm against the intended figure.
    plt.hist(a.flatten(), 30, range=(0,1))
plt.show()