#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File  : 波士顿房价预测.py
# @Author: dongguangwen
# @Date  : 2025-02-05 21:00
#  0.导包
# from sklearn.datasets import load_boston  # 1.2.0版本后不再使用该方法
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LinearRegression, SGDRegressor
from sklearn.metrics import mean_squared_error

#  1. Load the dataset
# boston = load_boston()
# print(boston)

import numpy as np
import pandas as pd

# Fetch the original Boston housing data from CMU StatLib and reassemble
# it, as recommended by scikit-learn after load_boston was removed in 1.2.
# The raw file stores each record on two physical lines: the first line
# holds 11 feature columns; the second holds the remaining 2 features
# followed by the target (median home value, MEDV).
data_url = "http://lib.stat.cmu.edu/datasets/boston"
# NOTE: raw string r"\s+" — the non-raw "\s+" is an invalid escape
# sequence and raises a SyntaxWarning on Python 3.12+ (SyntaxError later).
raw_df = pd.read_csv(data_url, sep=r"\s+", skiprows=22, header=None)
# Interleave: even rows give 11 features, odd rows give the last 2.
data = np.hstack([raw_df.values[::2, :], raw_df.values[1::2, :2]])
# Third column of each odd row is the regression target.
target = raw_df.values[1::2, 2]
# print(target)

#  2. Split into training and test sets (80/20, fixed seed for reproducibility)
x_train, x_test, y_train, y_test = train_test_split(
    data,
    target,
    test_size=0.2,
    random_state=22,
)

# 3. Standardize features: fit the scaler on the training split only and
# reuse the same statistics for the test split to avoid data leakage.
scaler = StandardScaler()
x_train = scaler.fit_transform(x_train)
x_test = scaler.transform(x_test)

# 4. Model training.
# Alternatives tried: the closed-form normal equation
# (LinearRegression(fit_intercept=True)) and SGDRegressor with default
# settings; here we use SGD with an explicit constant learning rate.
model = SGDRegressor(
    learning_rate='constant',  # fixed step size instead of the default schedule
    eta0=0.01,                 # the constant learning rate
    max_iter=1000,             # cap on passes over the training data
)

# Fit on the standardized training data.
model.fit(x_train, y_train)

# Report the learned parameters.
print("模型的权重系数为:\n", model.coef_)
print("模型的偏置为:\n", model.intercept_)

# 5. Predict on the held-out test set.
predictions = model.predict(x_test)
print(predictions)

# 6. Evaluate with mean squared error (lower is better).
print(mean_squared_error(y_test, predictions))

# Recorded sample output from a run using the normal-equation solver
# (LinearRegression): learned weights, intercept, test-set predictions,
# and the final mean squared error. Kept for comparison with SGD below.
"""
# 正规方程
模型的权重系数为:
 [-0.73088157  1.13214851 -0.14177415  0.86273811 -2.02555721  2.72118285
 -0.1604136  -3.36678479  2.5618082  -1.68047903 -1.67613468  0.91214657
 -3.79458347]
模型的偏置为:
 22.57970297029704
[27.99617259 31.37458822 21.16274236 32.97684211 19.85350998 19.20417092
 21.07981458 19.43179838 19.55995513 32.46251423 20.95643087 27.76018784
 15.39262695 19.6442985  36.52126959 18.64394966  8.93258608 18.30021084
 30.44252024 24.30768161 19.25674385 33.77398082 29.85656182 17.6870715
 34.76784961 26.40009819 34.60438567 27.32689821 19.24589917 14.83463871
 30.57090061 15.46969575 37.09516033  7.00967398 16.15634365 17.44040193
  7.35639245 20.22713119 40.76195302 28.77723574 25.15616291 17.83858135
 39.41405113  7.01259096 22.06502935 25.1857296  20.23571567 20.29713521
 17.46268122 26.21457012  8.54093689 27.39499407 30.85069122 16.66356032
  9.32950573 35.17417878 32.19041763 21.85580143 17.42877945 21.9784194
 23.49485953 23.98615371 19.99362404 38.29415052 24.6854627  20.00405044
 13.97439271  6.75563393 42.12286805 21.90181891 16.96618423 22.41014979
 40.43814159 21.39704278 36.54107982 27.02184938 20.86435197 20.20989601
 25.23282952 22.09034092 31.02297114 20.28759449 24.25986656 31.37575515
 26.70629065 21.09919693 28.92018399 21.82055488 26.31821902 19.69611634
 25.35744988 24.45514806 20.07329789 15.73996971 15.33732169 18.6504852
 24.6741861  16.51788689 21.16432291 26.46888071 21.00213732 18.01814407]
20.77068478427006
"""

# Recorded sample output from a run using stochastic gradient descent
# (SGDRegressor): same quantities as above. Note the higher MSE and the
# intercept reported as a one-element array rather than a scalar.
"""
# 随机梯度下降算法：
模型的权重系数为:
 [-0.5172976   0.95137208 -0.23323919  1.03209507 -2.13020256  2.44111009
 -0.2598023  -3.23907988  2.40472804 -1.59270879 -1.38577677  0.55456117
 -3.59007365]
模型的偏置为:
 [22.67154009]
[27.61149265 30.70383947 21.36379851 33.1487773  20.08861859 19.16563066
 21.2812805  19.87769701 19.97793062 31.20093147 21.05837443 27.46671746
 15.71790095 19.57906499 34.851142   19.39344698  8.86663057 18.73731462
 29.90300481 24.7097755  19.15231478 32.15945651 29.93724293 18.35274251
 33.87656227 26.21090753 33.83147649 27.15851117 19.38405904 15.61362006
 29.25598122 15.65872051 35.7204628  10.5560257  16.66190729 17.87252202
  8.84497037 20.34660702 40.29127548 28.561383   25.19354986 18.27151786
 38.74109213  8.11549644 22.23136278 25.12520746 20.52682587 20.18389189
 18.72934886 25.92915712  9.27542081 27.35987476 31.17183113 17.17450597
 11.21655541 33.53404799 31.65573358 22.12925904 17.6623487  22.2528026
 23.68092537 24.11990656 20.41126628 36.94831685 24.82210702 19.85520565
 15.10240605  8.08185614 40.89112794 22.44484618 18.81454746 22.69348308
 38.2793352  21.85699368 34.76814465 26.83896547 21.06698386 21.57964546
 25.44115578 21.91667507 30.50943302 20.70526758 25.21397059 30.40184677
 26.23370375 21.13042051 28.42313097 22.13599843 26.25009537 19.62197541
 25.04362408 25.1559717  19.93979049 17.77898349 16.69223756 19.05929633
 24.7070948  17.73182502 21.05240369 26.21523395 20.85695349 18.00639319]
23.518499247341182
"""
