
"""
y = b + a*x
房价线性回归系数:
theta[0]: 22.53280632411073
theta[1]: 4.493445879544479

误差:
均值 (x_mean): 6.2846
样本方差 (x_std): 0.4937  （注：std() 未开平方，记录的是方差而非标准差）
损失值： 21.800275885584778

"""


import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# 读取CSV文件
file_path = r'D:\py_flask_2024_7_16\2024_8_9训练\机器学习\boston.csv'
data = pd.read_csv(file_path)

# 提取 'MEDV' 和 'RM' 列
y = data['MEDV']
x = data['RM']

# 求均值
def mean(x):
    """Return the arithmetic mean of the values in x.

    Fix: the original shadowed the builtin ``len`` with a local counter
    variable, which is confusing and error-prone; renamed to ``count``.

    Args:
        x: iterable of numbers; must yield at least one value.

    Returns:
        The sum of the values divided by their count.

    Raises:
        ZeroDivisionError: if x yields no values.
    """
    total = 0
    count = 0
    for value in x:
        total += value
        count += 1
    return total / count


# 求标准差
def std(x):
    """Return the sample standard deviation of x (Bessel-corrected, ddof=1).

    Bug fix: the original never took the square root, so it returned the
    sample *variance* — every downstream value labelled "standard
    deviation" was actually a variance.

    Args:
        x: sized iterable of numbers with at least two elements.

    Returns:
        sqrt(sum((xi - mean)^2) / (len(x) - 1)).
    """
    n = len(x)
    center = sum(x) / n  # same value mean(x) computes
    variance = sum((xi - center) ** 2 for xi in x) / (n - 1)
    return variance ** 0.5

x_mean = mean(x)
x_std = std(x)
x_normalized = (x - x_mean) / x_std

# 确保 x_normalized 是二维数组
x_normalized = x_normalized.values.reshape(-1, 1)

# 构建多项式特征矩阵
def poly_features(X, degree):
    """Build the polynomial design matrix for a 1-D feature.

    Column i holds x**i, so the result has the form
    [x^0, x^1, ..., x^degree] with shape (n_samples, degree + 1).
    """
    flat = np.asarray(X, dtype=float).ravel()  # accept (n,) or (n, 1) input
    power_columns = [flat ** p for p in range(degree + 1)]
    return np.column_stack(power_columns)

# 损失函数（均方误差） J(theta) = 1/2[H(theta) - y(i)]^2
def cost_function(A, y, theta):
    """Half mean-squared-error cost: J(theta) = (1/2m) * sum((A@theta - y)^2).

    Args:
        A: (m, k) design matrix.
        y: (m,) target vector.
        theta: (k,) parameter vector.

    Returns:
        Scalar cost value.
    """
    residuals = A.dot(theta) - y
    sample_count = len(y)
    return np.sum(np.square(residuals)) * (1 / (2 * sample_count))

# 梯度下降  alpha 学习率， iters 迭代次数
def gradient_descent(A, y, theta, alpha, iters):
    """Batch gradient descent for linear least squares.

    Args:
        A: (m, k) design matrix.
        y: (m,) target vector.
        theta: (k,) initial parameter vector.
        alpha: learning rate.
        iters: number of iterations to run.

    Returns:
        (theta, J_theta): final parameters and the per-iteration cost
        history. The cost is recorded *before* each update, so
        J_theta[0] is the cost at the initial theta.
    """
    m = len(y)
    J_theta = np.zeros(iters)
    for step in range(iters):
        residuals = A.dot(theta) - y
        # half-MSE cost at the current theta (same formula as cost_function)
        J_theta[step] = 1 / (2 * m) * np.sum(np.square(residuals))
        # simultaneous update: theta_j -= (alpha/m) * sum_i(residual_i * A[i, j])
        theta = theta - (alpha / m) * A.T.dot(residuals)
    return theta, J_theta


# 多项式度数
degree = 1
A = poly_features(x_normalized, degree)
initial_theta = np.zeros(degree + 1)
alpha = 0.1  # 学习率
num_iters = 20000  # 迭代次数

# 运行梯度下降
theta, J = gradient_descent(A, y.values, initial_theta, alpha, num_iters)

# 计算拟合值
y_fit = A.dot(theta)

# Report the fitted coefficients and feature statistics.
# Fixes: removed the redundant recomputation of x_mean / x_std /
# x_normalized (identical values were already computed before fitting),
# corrected the mislabelled "均方差" (the value printed is the feature
# *mean*, not a mean squared error), and fixed the "s_std" typo.
print("房价线性回归系数:")
for i, coef in enumerate(theta):
    print(f"theta[{i}]: {coef}")

# Feature statistics and the final training loss.
print("\n误差:")
print(f"均值 (x_mean): {x_mean:.4f}")
print(f"标准差 (x_std): {x_std:.4f}")
print("损失值：", J[-1])
# 创建一个图形窗口
plt.figure(figsize=(10, 6))

# 第一个子图：损失函数值
plt.subplot(1, 2, 1)
plt.plot(J)
plt.xlabel('Number of iterations')
plt.ylabel('Cost J')
plt.title('Cost over iterations')

# 第二个子图：原始数据和拟合曲线
plt.subplot(1, 2, 2)
plt.scatter(x, y, label='Original Data',s=6)
plt.plot(x, y_fit, label='Fitted Polynomial', color='red')
plt.title('House Prices and their Polynomial Fit')
plt.xlabel('House Size (RM)')
plt.ylabel('Median Value of Homes (MEDV)')
plt.legend()
plt.grid(True)

# 显示图形
plt.show()