# Simulated-data regression demo.
# MLPRegressor docs: https://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPRegressor.html#sklearn.neural_network.MLPRegressor
import numpy as np
import matplotlib.pyplot as plt

from sklearn.neural_network import MLPRegressor

# Build the synthetic data set: a noisy sine wave sampled on [-3.14, 3.14].
X = np.linspace(-3.14, 3.14, 400)
noise = 0.3 * np.random.rand(X.size)  # uniform noise in [0, 0.3)
y = np.sin(X) + noise

# Step 1: visualize the raw samples as a scatter plot.
plt.scatter(X, y, color='blue', label='Original Data')
plt.xlabel('X')
plt.ylabel('y')
plt.title('Scatter Plot of Original Data')
plt.legend()
plt.show()

# Step 2: Train BP (MLP) neural networks with different activation functions
# and produce predictions for each.
#
# scikit-learn estimators require a 2-D feature matrix of shape
# (n_samples, n_features); the 1-D X must be reshaped before fitting,
# otherwise fit() raises a ValueError.
X_2d = X.reshape(-1, 1)

# Model with a single 2-node hidden layer, ReLU activation.
model_relu = MLPRegressor(hidden_layer_sizes=(2,), activation='relu', solver='adam', max_iter=1000)
model_relu.fit(X_2d, y)
y_pred_relu = model_relu.predict(X_2d)

# Model with a single 2-node hidden layer, tanh activation.
model_tanh = MLPRegressor(hidden_layer_sizes=(2,), activation='tanh', solver='adam', max_iter=1000)
model_tanh.fit(X_2d, y)
y_pred_tanh = model_tanh.predict(X_2d)

# Model with a single 2-node hidden layer, sigmoid activation.
# NOTE: MLPRegressor's name for the sigmoid activation is 'logistic';
# passing 'sigmoid' is rejected with a ValueError.
model_sigmoid = MLPRegressor(hidden_layer_sizes=(2,), activation='logistic', solver='adam', max_iter=1000)
model_sigmoid.fit(X_2d, y)
y_pred_sigmoid = model_sigmoid.predict(X_2d)

# Step 3: overlay each model's fitted curve on the Step-1 scatter plot
# so the activation functions can be compared visually.
plt.figure()
plt.scatter(X, y, color='blue', label='Original Data')

# (predictions, line color, legend label) for each trained model.
curve_specs = [
    (y_pred_relu, 'red', 'ReLU Regression'),
    (y_pred_tanh, 'green', 'Tanh Regression'),
    (y_pred_sigmoid, 'magenta', 'Sigmoid Regression'),
]
for pred, line_color, line_label in curve_specs:
    plt.plot(X, pred, color=line_color, label=line_label)

plt.title('Regression Curves')
plt.xlabel('X')
plt.ylabel('y')
plt.legend()
plt.show()
