import numpy as np
import matplotlib.pyplot as plt 
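
# Experiment: visualize how the distribution of activation values in each
# hidden layer of a 5-layer fully connected network changes with the scale
# of the random weight initialization and the choice of activation function.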

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def ReLU(x):
    return np.maximum(0, x)

def tanh(x):
    return np.tanh(x)

input_data = np.random.randn(1000, 100)  # 1000 data samples
node_num = 100  # number of nodes (neurons) in each hidden layer
hidden_layer_size = 5  # 5 hidden layers
activations = {}  # the activation results are stored here

x = input_data  # input to the first layer; replaced by each layer's output inside the loop

for i in range(hidden_layer_size):
    if i != 0:
        x = activations[i-1]

    # set the initial weight values
    # w = np.random.randn(node_num, node_num) * 1
    # w = np.random.randn(node_num, node_num) * 0.01
    # w = np.random.randn(node_num, node_num) * np.sqrt(1.0 / node_num)  # "Xavier" initialization, suited to sigmoid/tanh
    w = np.random.randn(node_num, node_num) * np.sqrt(2.0 / node_num)  # "He" initialization, suited to ReLU

    a = np.dot(x, w)
    # apply the activation function
    # z = sigmoid(a)
    z = ReLU(a)
    activations[i] = z

# plot a histogram of each layer's activation values
for i, a in activations.items():
    plt.subplot(1, len(activations), i+1)
    plt.title(str(i+1) + "-layer")
    if i != 0:
        plt.yticks([], [])
    plt.hist(a.flatten(), 30, range=(0, 1))

plt.show()
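
# A minimal optional sketch (not part of the original experiment): print the
# standard deviation of each layer's activations so the spread can also be
# compared numerically rather than only by inspecting the histograms.
for i, a in activations.items():
    print("layer %d: std = %.4f" % (i+1, np.std(a)))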

