import numpy as np
import matplotlib.pyplot as plt
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, ConstantKernel as C

# 1. Generate synthetic data
np.random.seed(42)
X = np.sort(5 * np.random.rand(30, 1), axis=0)  # 30 input locations in [0, 5), sorted for clean plotting
y = np.sin(X).ravel() + np.random.normal(0, 0.2, X.shape[0])  # ground truth sin(x) + Gaussian noise (std 0.2)

# Dense grid of test points for the predictive mean / uncertainty curves
X_test = np.linspace(0, 5, 1000).reshape(-1, 1)

# 2. Configure the Gaussian process regression model
# Product of a constant kernel (signal variance) and an RBF kernel (length scale);
# both hyperparameters are optimized within the (1e-4, 1e1) bounds.
kernel = C(1.0, (1e-4, 1e1)) * RBF(1.0, (1e-4, 1e1))
# alpha adds a constant to the kernel diagonal, acting as observation-noise
# variance / regularization (data were generated with noise std 0.2).
gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=10, alpha=0.1)

# 3. Fit: hyperparameters are tuned by maximizing the log marginal likelihood,
# restarted 10 times from random initializations to avoid local optima.
gp.fit(X, y)

# 4. Predictive mean and standard deviation on the test grid
y_pred, sigma = gp.predict(X_test, return_std=True)

# 5. Training-set residuals. Only the mean is needed here, so skip
# return_std=True and the extra covariance computation it triggers.
y_train_pred = gp.predict(X)
residuals = y - y_train_pred

# 6. Plot: prediction with uncertainty band on top, residual diagnostics below.
fig, (ax_fit, ax_res) = plt.subplots(2, 1, figsize=(12, 8))

# Top panel: observed data, GP predictive mean, and the ±2σ band.
ax_fit.plot(X_test, y_pred, 'b-', label='Prediction (mean)', lw=2)
ax_fit.fill_between(X_test.ravel(), y_pred - 2*sigma, y_pred + 2*sigma,
                    color='lightgray', alpha=0.5,
                    label='Confidence interval (±2σ)')
ax_fit.scatter(X, y, c='r', s=50, zorder=10, label='Observed data')
ax_fit.set_title('Gaussian Process Regression with Confidence Interval')
ax_fit.set_xlabel('Input')
ax_fit.set_ylabel('Output')
ax_fit.legend(loc='upper left')

# Bottom panel: training residuals scattered around the zero reference line.
ax_res.scatter(X, residuals, c='purple', s=50, zorder=10)
ax_res.axhline(y=0, color='k', linestyle='--', lw=2)
ax_res.set_title('Residuals Analysis')
ax_res.set_xlabel('Input')
ax_res.set_ylabel('Residuals (True - Predicted)')

fig.tight_layout()
plt.show()