import numpy as np
import matplotlib.pyplot as plt

# Global Matplotlib configuration
plt.rcParams.update({
    'font.sans-serif': ['KaiTi'],   # CJK-capable font for axis/label text
    'mathtext.fontset': 'stix',     # font set used for math text
    'axes.unicode_minus': False,    # render minus signs correctly with CJK fonts
})

# ---- Synthetic data -------------------------------------------------
# Linear model y = 4 + 3x + Gaussian noise, 100 samples with x in [0, 2).
np.random.seed(1)
X = 2 * np.random.rand(100, 1)
y = 4 + 3 * X + np.random.randn(100, 1)

# Design matrix: prepend a column of ones so theta[0] acts as the intercept.
X_b = np.hstack([np.ones((100, 1)), X])

# ---- Hyperparameters ------------------------------------------------
learning_rate = 0.1   # gradient-descent step size
n_iterations = 1000   # number of full-batch updates
lambda_reg = 0.1      # regularization strength (shared by L1 and L2 runs)

# ==================== Baseline: unregularized gradient descent ====================
theta_original = np.random.randn(2, 1)   # random init: [intercept, slope]
loss_history_original = []

for _ in range(n_iterations):
    # Full-batch MSE gradient: (1/m) * X^T (X theta - y)
    residual = X_b.dot(theta_original) - y
    gradients = (1 / len(X_b)) * X_b.T.dot(residual)
    theta_original = theta_original - learning_rate * gradients

    # Track the MSE measured after this update.
    loss_history_original.append(np.mean((X_b.dot(theta_original) - y) ** 2))

# ==================== Gradient descent with L2 (ridge) regularization ====================
# Penalty: (lambda/2) * sum(theta_j^2) over the SLOPE coefficients only.
# The intercept theta[0] is excluded from the penalty — regularizing it
# would bias the fitted mean, and standard ridge practice leaves it free.
theta_l2 = np.random.randn(2, 1)
loss_history_l2 = []

for iteration in range(n_iterations):
    # Penalty gradient is lambda * theta, with the intercept's entry zeroed.
    penalty_grad = theta_l2.copy()
    penalty_grad[0, 0] = 0.0
    gradients = (1 / len(X_b)) * X_b.T.dot(X_b.dot(theta_l2) - y) + lambda_reg * penalty_grad
    theta_l2 = theta_l2 - learning_rate * gradients

    # Objective tracked after the update: MSE + (lambda/2) * sum(slope^2)
    loss = np.mean((X_b.dot(theta_l2) - y) ** 2) + lambda_reg * np.sum(theta_l2[1:] ** 2) / 2
    loss_history_l2.append(loss)

# ==================== Gradient descent with L1 (lasso) regularization ====================
# Penalty: lambda * sum(|theta_j|) over the SLOPE coefficients only; the
# intercept is excluded, as in standard lasso practice.  np.sign provides
# the subgradient of |.| (it returns 0 at exactly 0).
theta_l1 = np.random.randn(2, 1)
loss_history_l1 = []

for iteration in range(n_iterations):
    penalty_grad = np.sign(theta_l1)
    penalty_grad[0, 0] = 0.0  # do not penalize the intercept
    gradients = (1 / len(X_b)) * X_b.T.dot(X_b.dot(theta_l1) - y) + lambda_reg * penalty_grad
    theta_l1 = theta_l1 - learning_rate * gradients

    # Objective tracked after the update: MSE + lambda * sum(|slope|)
    loss = np.mean((X_b.dot(theta_l1) - y) ** 2) + lambda_reg * np.sum(np.abs(theta_l1[1:]))
    loss_history_l1.append(loss)

# ==================== Plot the loss curves ====================
plt.figure(figsize=(10, 6))
for history, curve_label in (
        (loss_history_original, 'Original'),
        (loss_history_l2, 'L2 Regularization'),
        (loss_history_l1, 'L1 Regularization')):
    plt.plot(range(n_iterations), history, label=curve_label, linewidth=2)
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.title('Loss Comparison with Different Regularizations')
plt.legend()
plt.grid(True)
plt.show()

# Report the fitted parameters (as row vectors: [intercept, slope]).
for label, theta in (
        ("原始梯度下降参数:", theta_original),
        ("L2正则化参数:", theta_l2),
        ("L1正则化参数:", theta_l1)):
    print(label, theta.T)
