import numpy as np
import matplotlib.pyplot as plt

def sigmoid(x):
    """Logistic sigmoid: squash *x* elementwise into the open interval (0, 1)."""
    neg_exp = np.exp(-x)
    return 1.0 / (1.0 + neg_exp)

def ReLU(x):
    """Rectified linear unit: elementwise max(x, 0)."""
    return np.maximum(x, 0)

def tanh(x):
    """Hyperbolic tangent: squash *x* elementwise into (-1, 1)."""
    squashed = np.tanh(x)
    return squashed

# Push a batch of 1000 random 100-dim samples through a 5-layer network
# and record each layer's post-activation output, so we can inspect how
# the chosen weight initialization affects the activation distributions.
input_data = np.random.randn(1000, 100)
node_num = 100           # neurons per hidden layer
hidden_layer_size = 5    # number of hidden layers
activations = {}         # layer index -> activation array of shape (1000, 100)

x = input_data
for layer in range(hidden_layer_size):
    if layer > 0:
        x = activations[layer - 1]

    # Change the initial weight scale to experiment:
    # w = np.random.randn(node_num,node_num) * 1
    # w = np.random.randn(node_num,node_num) * 0.01
    # Xavier initialization: std = sqrt(1 / fan_in).
    w = np.random.randn(node_num, node_num) * np.sqrt(1.0 / node_num)
    a = x.dot(w)

    # Swap in a different activation function (ReLU, tanh) to experiment.
    z = sigmoid(a)

    activations[layer] = z

# Draw one histogram per layer showing how that layer's activations are
# distributed over (0, 1).
num_layers = len(activations)
for idx, values in activations.items():
    plt.subplot(1, num_layers, idx + 1)
    plt.title(f"{idx + 1}-layer")
    if idx > 0:
        # Hide y tick labels on all but the first subplot to reduce clutter.
        plt.yticks([], [])
    plt.hist(values.flatten(), 30, range=(0, 1))
plt.show()
