"""
用五次多项式拟合sin(x)
sin(x) = a0 + a1*X + a2*X^2 + a3*X^3 +a4*X^4 + a5*X^5

五次多项式系数如下:
theta[0]: -7.561397254857108e-16
theta[1]: -3.2977488713862306
theta[2]: 4.019592872526443e-15
theta[3]: 5.774812294913357
theta[4]: -2.814744436510316e-15
theta[5]: -2.333922096981485

误差:
均方误差: 0.0000

"""
import math
import numpy as np
import matplotlib.pyplot as plt

# Training data: 200 evenly spaced samples of sin(x) over one full period.
x = np.linspace(0.0, 2.0 * np.pi, 200)
y = np.sin(x)

# Arithmetic-mean helper (hand-rolled; works on any iterable of numbers).
def mean(x):
    """Return the arithmetic mean of the values in *x*.

    Raises ZeroDivisionError on an empty input, mirroring the original
    behavior. Fix: the running counter was previously named ``len``,
    shadowing the builtin; renamed to ``count``.
    """
    total = 0.0
    count = 0
    for xi in x:
        total += xi
        count += 1
    return total / count


# Sample-variance helper (divisor n - 1).
def std(x):
    """Return the sample variance of *x* (ddof=1).

    NOTE(review): despite the name, no square root is taken, so this is
    the variance rather than the standard deviation. The normalization
    below divides by this value; "fixing" it here would change the
    normalized feature scale and require retuning the learning rate, so
    the behavior is deliberately preserved.
    """
    center = mean(x)
    total = 0.0
    for value in x:
        deviation = value - center
        total += deviation ** 2
    return total / (len(x) - 1)

# Standardize the inputs with the hand-rolled helpers above.
# NOTE(review): std() returns the sample variance (no square root), so
# this divides by the variance rather than the standard deviation —
# the features end up in roughly [-1, 1], which the learning rate below
# is tuned for.
x_mean = mean(x)
x_std = std(x)
x_normalized = (x - x_mean) / x_std


# Build the polynomial design matrix.
def poly_features(X, degree):
    """Return the m x (degree+1) design matrix [X^0, X^1, ..., X^degree]."""
    columns = [np.power(X, k) for k in range(degree + 1)]
    return np.column_stack(columns)


# Squared-error loss: J(theta) = 1/2 * sum((A @ theta - y)^2)
def cost_function(A, y, theta):
    """Return half the sum of squared residuals of the linear model."""
    residual = A.dot(theta) - y
    return 0.5 * np.sum(residual ** 2)

# Batch gradient descent: alpha is the learning rate, iters the step count.
def gradient_descent(A, y, theta, alpha, iters):
    """Run *iters* full-batch gradient steps on the squared-error cost.

    Returns the final theta and an array of the cost value recorded
    after each update.
    """
    m = len(y)
    step = alpha / m
    history = np.zeros(iters)

    for it in range(iters):
        residual = A.dot(theta) - y          # prediction error for current theta
        theta = theta - step * A.T.dot(residual)  # move against the gradient
        history[it] = cost_function(A, y, theta)  # cost *after* the update

    return theta, history

# Fit a degree-5 polynomial to the normalized inputs.
degree = 5
A = poly_features(x_normalized, degree)
initial_theta = np.zeros(degree + 1)
alpha = 0.1         # learning rate
num_iters = 200000  # at 200000 iterations the reported MSE rounds to 0; more just takes longer

# Run the optimizer.
theta, J = gradient_descent(A, y, initial_theta, alpha, num_iters)

# Evaluate the fitted polynomial on the design matrix (A @ theta).
y_fit = A.dot(theta)

# Fit-error statistics. Fix: the original rebound the name ``mean`` to a
# float here, clobbering the mean() helper defined above; use a dedicated
# name instead.
errors = y - y_fit
mse = np.mean(errors ** 2)

# Report the learned coefficients.
print("五次多项式系数如下:")
for i, coef in enumerate(theta):
    print(f"theta[{i}]: {coef}")

# Report the error statistics.
print("\n误差:")
print(f"均方误差: {mse:.4f}")

# Visualization: cost curve (left) and fit vs. ground truth (right).
fig = plt.figure(figsize=(10, 6))

# Left panel: loss value per iteration.
ax_cost = fig.add_subplot(1, 2, 1)  # (rows, columns, panel number)
ax_cost.plot(J)
ax_cost.set_xlabel('Number of iterations')
ax_cost.set_ylabel('Cost J')
ax_cost.set_title('Cost over iterations')

# Right panel: original data and the fitted curve.
ax_fit = fig.add_subplot(1, 2, 2)
ax_fit.plot(x, y, 'b-', label='Original sin(x)')
ax_fit.plot(x, y_fit, 'r--', label='Fitted Polynomial')
ax_fit.set_title('Sin(x) and its Polynomial Fit')
ax_fit.set_xlabel('x')
ax_fit.set_ylabel('sin(x) and Polynomial Fit')
ax_fit.legend()
ax_fit.grid(True)

# Render the figure.
plt.show()

