import matplotlib.pyplot as plt
import numpy as np
import torch
from torch.nn import functional as F


# 定义激活函数
def sigmoid(x):
    """Numerically stable logistic sigmoid: 1 / (1 + exp(-x)).

    The naive form overflows (RuntimeWarning) when exp(-x) is huge for
    large negative x. Exponentiating only -|x| keeps the argument <= 0,
    so exp never overflows; both branches below are algebraically equal
    to 1 / (1 + exp(-x)).
    """
    z = np.exp(-np.abs(x))
    # x >= 0: 1/(1+exp(-x));  x < 0: exp(x)/(1+exp(x)) — same value, safe exp.
    return np.where(x >= 0, 1.0 / (1.0 + z), z / (1.0 + z))


def tanh(x):
    """Hyperbolic tangent activation: squashes inputs into (-1, 1)."""
    return np.tanh(x)


def relu(x):
    """Rectified linear unit: elementwise max(x, 0)."""
    return np.maximum(x, 0)


def leaky_relu(x, alpha=0.01):
    """Leaky ReLU: identity for positive x, slope `alpha` elsewhere."""
    leaked = alpha * x
    # Select elementwise: keep x where positive, otherwise the scaled leak.
    return np.where(x > 0, x, leaked)


def elu(x, alpha=1.0):
    """Exponential linear unit: x for x > 0, alpha * (exp(x) - 1) otherwise.

    Fixes over the naive form:
    - np.where evaluates BOTH branches, so exp(x) would overflow
      (RuntimeWarning) for large positive x; clamping the exponent with
      np.minimum(x, 0) is harmless because that branch is only selected
      where x <= 0.
    - np.expm1 is more accurate than exp(x) - 1 for x near 0.
    """
    return np.where(x > 0, x, alpha * np.expm1(np.minimum(x, 0)))


def swish(x):
    """Swish (SiLU) activation: the input gated by its own sigmoid."""
    gate = 1.0 / (1.0 + np.exp(-x))
    return x * gate


def softmax(x, axis=-1):
    """Softmax along `axis` (default: last), returning probabilities.

    Generalized from the 1-D-only original: the max-shift and the
    normalization use keepdims along `axis`, so batched (2-D and higher)
    inputs work row-wise. For 1-D input the result is unchanged.

    Args:
        x: array-like of logits.
        axis: axis along which probabilities are normalized.

    Returns:
        Array of the same shape as `x`, non-negative, summing to 1 along `axis`.
    """
    x = np.asarray(x)
    # Subtract the per-axis max before exp to avoid overflow; it cancels
    # in the ratio, so the result is unchanged.
    exp_x = np.exp(x - np.max(x, axis=axis, keepdims=True))
    return exp_x / np.sum(exp_x, axis=axis, keepdims=True)


# --- Demo: plot all activation curves plus a softmax probability bar chart ---

# Example logits for the softmax bar chart (kept separate from the plot
# range below; the original reused `x` for both purposes).
logits = np.array([2.0, 1.0, 0.1])
probabilities = softmax(logits)
labels = ['Class 1', 'Class 2', 'Class 3']

# Input range for the activation curves.
x = np.linspace(-10, 10, 100)

plt.figure(figsize=(16, 8))

# One subplot per activation. The softmax curve uses the local `softmax`
# defined above instead of torch's F.softmax — same values, no need to
# round-trip through a tensor.
curves = [
    (sigmoid, "Sigmoid", "blue"),
    (tanh, "Tanh", "orange"),
    (relu, "ReLU", "green"),
    (leaky_relu, "Leaky ReLU", "red"),
    (elu, "ELU", "purple"),
    (swish, "Swish", "brown"),
    (softmax, "Softmax", "black"),
]
for position, (activation, title, color) in enumerate(curves, start=1):
    plt.subplot(2, 4, position)
    plt.plot(x, activation(x), label=title, color=color)
    plt.title(title)
    plt.grid(True)

# Bar chart of the example softmax distribution.
plt.subplot(2, 4, 8)
plt.bar(labels, probabilities, color=['blue', 'orange', 'green'])
plt.title('Softmax Output: Probability Distribution')
plt.ylabel('Probability')
plt.ylim(0, 1)

# Lay out the grid BEFORE saving, otherwise the saved image has
# overlapping titles/labels (tight_layout was previously commented out
# and placed after savefig).
plt.tight_layout()

# Save the figure to a file (optional), then display it.
plt.savefig('softmax_output.png')
plt.show()
