import numpy as np
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt

# --- Synthetic data -------------------------------------------------------
# Two features on very different scales (X1 in [0, 5), X2 in [0, 1000)) so
# the effect of standardization on gradient descent is visible.
np.random.seed(42)
X1 = 5 * np.random.rand(100, 1)
X2 = 1000 * np.random.rand(100, 1)
# Ground truth: y = 5 + 4*x1 + 3*x2 + uniform noise in [0, 1).
y = 5 + 4 * X1 + 3 * X2 + np.random.rand(100, 1)
X1_X2 = np.hstack((X1, X2))                        # raw feature matrix
X1_X2_c = np.hstack((np.ones((100, 1)), X1_X2))    # bias column prepended

# --- Hyperparameters ------------------------------------------------------
m = 1000                    # number of epochs
batch_size = 10             # samples per mini-batch
number = 100 // batch_size  # mini-batches per epoch

# --- Mini-batch gradient descent on the raw (unstandardized) features ----
# Because X2 reaches ~1000, the gradients are huge and the learning rate
# must be made tiny to keep the updates from diverging.
theta = np.random.rand(3, 1)  # [intercept, w1, w2], random init
lr = 0.0000000001  # tiny step size forced by the unscaled X2 magnitude

loss_history1 = []
for _ in range(m):
    # FIX: shuffle once per epoch, not once per batch. The original
    # reshuffled inside the inner loop, so each "batch" was an independent
    # random draw — samples could repeat or be skipped within an epoch —
    # and the O(n) shuffle was repeated `number` times per epoch.
    arr = np.arange(100)
    np.random.shuffle(arr)
    X_shuffled = X1_X2_c[arr]
    y_shuffled = y[arr]
    for i in range(number):
        X_batch = X_shuffled[i * batch_size:(i + 1) * batch_size]
        y_batch = y_shuffled[i * batch_size:(i + 1) * batch_size]
        # MSE gradient: X^T (X theta - y) / batch_size
        gradients = X_batch.T.dot(X_batch.dot(theta) - y_batch) / batch_size
        theta -= lr * gradients
    # Track full-dataset MSE once per epoch.
    loss1 = np.mean((X1_X2_c.dot(theta) - y) ** 2)
    loss_history1.append(loss1)
print("未标准化参数:", theta)

# --- Standardize features to zero mean / unit variance per column ---------
scaler = StandardScaler()
scaler.fit(X1_X2)
X1_X2_scaled = scaler.transform(X1_X2)
X1_X2_scaled_c = np.hstack((np.ones((100, 1)), X1_X2_scaled))

# --- Mini-batch gradient descent on the standardized features ------------
# With unit-scale features the problem is well-conditioned, so a much
# larger learning rate converges quickly.
theta_scaled = np.random.rand(3, 1)  # [intercept, w1, w2], random init
lr2 = 0.01  # standardized features tolerate a much larger step size

loss_history2 = []
for _ in range(m):
    # Shuffle once per epoch so the epoch is a disjoint partition into
    # mini-batches (the original reshuffled inside the batch loop).
    arr = np.arange(100)
    np.random.shuffle(arr)
    X_shuffled = X1_X2_scaled_c[arr]
    y_shuffled = y[arr]
    for i in range(number):
        X_batch = X_shuffled[i * batch_size:(i + 1) * batch_size]
        y_batch = y_shuffled[i * batch_size:(i + 1) * batch_size]
        # MSE gradient: X^T (X theta - y) / batch_size
        gradients = X_batch.T.dot(X_batch.dot(theta_scaled) - y_batch) / batch_size
        theta_scaled -= lr2 * gradients
    # BUG FIX: the original applied the parameters inverse-transformed to
    # the ORIGINAL feature scale to the SCALED design matrix, mixing two
    # coordinate systems and recording a meaningless loss. The loss on the
    # scaled data must use theta_scaled directly (this also removes the
    # wasted per-epoch inverse transform).
    loss2 = np.mean((X1_X2_scaled_c.dot(theta_scaled) - y) ** 2)
    loss_history2.append(loss2)
print("标准化参数:", theta_scaled)

# --- Map parameters learned on scaled features back to the original scale
# With x_scaled_j = (x_j - mean_j) / std_j the fitted model expands as:
#   y = t0 + sum_j t_j * (x_j - mean_j) / std_j
#     = (t0 - sum_j t_j * mean_j / std_j) + sum_j (t_j / std_j) * x_j
mean_col = scaler.mean_.reshape(-1, 1)
scale_col = scaler.scale_.reshape(-1, 1)
theta_scaled_original = np.zeros_like(theta_scaled)
theta_scaled_original[1:] = theta_scaled[1:] / scale_col
theta_scaled_original[0] = theta_scaled[0] - np.sum(theta_scaled[1:] * mean_col / scale_col)

print("=" * 60)
print("标准化参数转换原始尺度:")
print("截距:", theta_scaled_original[0][0])
print("X1系数:", theta_scaled_original[1][0])
print("X2系数:", theta_scaled_original[2][0])

# --- Plot the per-epoch training loss of both runs ------------------------
fig, ax = plt.subplots()
ax.plot(loss_history1, 'b-')  # raw features
ax.plot(loss_history2, 'r-')  # standardized features
ax.set_xlabel("Epoch")
ax.set_ylabel("Loss")
ax.set_title("Training Loss")
plt.show()
