# Sigmoid function
import numpy as np
import matplotlib.pyplot as plt
# Sigmoid: squashes any real input into the open interval (0, 1).
x = np.linspace(-10, 10, 50)
y_sigmoid = 1 / (1 + np.exp(-x))  # fix typo: 'sigmod' -> 'sigmoid'
# Pass a label so plt.legend() has an artist to show; without one,
# matplotlib warns "No artists with labels found" and draws nothing.
plt.plot(x, y_sigmoid, label='sigmoid')
plt.title('sigmoid')
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()
# --------------------------------------------------------------------------#
# ReLU function: elementwise max(0, x).
x = np.linspace(-10, 10, 50)
y_relu = np.maximum(0, x)
# Pass a label so plt.legend() has an artist to show; without one,
# matplotlib warns "No artists with labels found" and draws nothing.
plt.plot(x, y_relu, label='relu')
plt.title('relu')
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()
# --------------------------------------------------------------------------#
# Softmax function
import numpy as np
def softmax(vector):
    """Return the softmax of *vector*: non-negative weights summing to 1.

    Subtracts the maximum element before exponentiating so that large
    inputs (e.g. 1000) do not overflow ``np.exp`` into inf/nan; the
    shift cancels out in the ratio, so the result is mathematically
    identical to exp(v) / sum(exp(v)).

    Parameters:
        vector: array-like of real numbers.

    Returns:
        np.ndarray of the same shape with entries in (0, 1] summing to 1.
    """
    shifted = vector - np.max(vector)  # max becomes 0, so exp never overflows
    exp_vector = np.exp(shifted)
    return exp_vector / np.sum(exp_vector)
# Sample vector
sample = np.array([1.0, 2.0, 3.0, 4.0, 1.0])
# Compute its softmax
probabilities = softmax(sample)
# Print the softmax result
print("Softmax结果:", probabilities)
# Verify the probabilities sum to 1
total = np.sum(probabilities)
print("Softmax结果相加为:", total)
# --------------------------------------------------------------------------#
# Hyperbolic tangent function.
# Use np.tanh instead of the hand-rolled (e^x - e^-x)/(e^x + e^-x):
# the explicit form overflows np.exp for |x| beyond ~710 and produces
# nan, while np.tanh is stable for all inputs (identical results here).
x = np.linspace(-10, 10, 50)
tanh = np.tanh(x)
# Pass a label so plt.legend() has an artist to show; without one,
# matplotlib warns "No artists with labels found" and draws nothing.
plt.plot(x, tanh, label='tanh')
plt.title('tanh')
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()