{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "67cc8cdb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# datasets.load_boston 这里会报错：\"`load_boston` has been removed from scikit-learn since version 1.2.\"\n",
    "# scikit-learn 1.2 及以上版本已经移除了该数据集，所以这里为了做题，先卸载当前版本，安装一个低于 1.2 的版本"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "94b3fcc1",
   "metadata": {},
   "outputs": [],
   "source": [
    "# pip uninstall scikit-learn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "fc55769b",
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn import datasets    # 导入一份数据集，库中本来就有的案例数据\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import warnings\n",
    "warnings.filterwarnings('ignore')"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f94521b0",
   "metadata": {},
   "source": [
    "1.准备数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "b900c256",
   "metadata": {},
   "outputs": [],
   "source": [
    "boston = datasets.load_boston() # 导入波士顿房价数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "a53bcef7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'data': array([[6.3200e-03, 1.8000e+01, 2.3100e+00, ..., 1.5300e+01, 3.9690e+02,\n",
       "         4.9800e+00],\n",
       "        [2.7310e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9690e+02,\n",
       "         9.1400e+00],\n",
       "        [2.7290e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9283e+02,\n",
       "         4.0300e+00],\n",
       "        ...,\n",
       "        [6.0760e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,\n",
       "         5.6400e+00],\n",
       "        [1.0959e-01, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9345e+02,\n",
       "         6.4800e+00],\n",
       "        [4.7410e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,\n",
       "         7.8800e+00]]),\n",
       " 'target': array([24. , 21.6, 34.7, 33.4, 36.2, 28.7, 22.9, 27.1, 16.5, 18.9, 15. ,\n",
       "        18.9, 21.7, 20.4, 18.2, 19.9, 23.1, 17.5, 20.2, 18.2, 13.6, 19.6,\n",
       "        15.2, 14.5, 15.6, 13.9, 16.6, 14.8, 18.4, 21. , 12.7, 14.5, 13.2,\n",
       "        13.1, 13.5, 18.9, 20. , 21. , 24.7, 30.8, 34.9, 26.6, 25.3, 24.7,\n",
       "        21.2, 19.3, 20. , 16.6, 14.4, 19.4, 19.7, 20.5, 25. , 23.4, 18.9,\n",
       "        35.4, 24.7, 31.6, 23.3, 19.6, 18.7, 16. , 22.2, 25. , 33. , 23.5,\n",
       "        19.4, 22. , 17.4, 20.9, 24.2, 21.7, 22.8, 23.4, 24.1, 21.4, 20. ,\n",
       "        20.8, 21.2, 20.3, 28. , 23.9, 24.8, 22.9, 23.9, 26.6, 22.5, 22.2,\n",
       "        23.6, 28.7, 22.6, 22. , 22.9, 25. , 20.6, 28.4, 21.4, 38.7, 43.8,\n",
       "        33.2, 27.5, 26.5, 18.6, 19.3, 20.1, 19.5, 19.5, 20.4, 19.8, 19.4,\n",
       "        21.7, 22.8, 18.8, 18.7, 18.5, 18.3, 21.2, 19.2, 20.4, 19.3, 22. ,\n",
       "        20.3, 20.5, 17.3, 18.8, 21.4, 15.7, 16.2, 18. , 14.3, 19.2, 19.6,\n",
       "        23. , 18.4, 15.6, 18.1, 17.4, 17.1, 13.3, 17.8, 14. , 14.4, 13.4,\n",
       "        15.6, 11.8, 13.8, 15.6, 14.6, 17.8, 15.4, 21.5, 19.6, 15.3, 19.4,\n",
       "        17. , 15.6, 13.1, 41.3, 24.3, 23.3, 27. , 50. , 50. , 50. , 22.7,\n",
       "        25. , 50. , 23.8, 23.8, 22.3, 17.4, 19.1, 23.1, 23.6, 22.6, 29.4,\n",
       "        23.2, 24.6, 29.9, 37.2, 39.8, 36.2, 37.9, 32.5, 26.4, 29.6, 50. ,\n",
       "        32. , 29.8, 34.9, 37. , 30.5, 36.4, 31.1, 29.1, 50. , 33.3, 30.3,\n",
       "        34.6, 34.9, 32.9, 24.1, 42.3, 48.5, 50. , 22.6, 24.4, 22.5, 24.4,\n",
       "        20. , 21.7, 19.3, 22.4, 28.1, 23.7, 25. , 23.3, 28.7, 21.5, 23. ,\n",
       "        26.7, 21.7, 27.5, 30.1, 44.8, 50. , 37.6, 31.6, 46.7, 31.5, 24.3,\n",
       "        31.7, 41.7, 48.3, 29. , 24. , 25.1, 31.5, 23.7, 23.3, 22. , 20.1,\n",
       "        22.2, 23.7, 17.6, 18.5, 24.3, 20.5, 24.5, 26.2, 24.4, 24.8, 29.6,\n",
       "        42.8, 21.9, 20.9, 44. , 50. , 36. , 30.1, 33.8, 43.1, 48.8, 31. ,\n",
       "        36.5, 22.8, 30.7, 50. , 43.5, 20.7, 21.1, 25.2, 24.4, 35.2, 32.4,\n",
       "        32. , 33.2, 33.1, 29.1, 35.1, 45.4, 35.4, 46. , 50. , 32.2, 22. ,\n",
       "        20.1, 23.2, 22.3, 24.8, 28.5, 37.3, 27.9, 23.9, 21.7, 28.6, 27.1,\n",
       "        20.3, 22.5, 29. , 24.8, 22. , 26.4, 33.1, 36.1, 28.4, 33.4, 28.2,\n",
       "        22.8, 20.3, 16.1, 22.1, 19.4, 21.6, 23.8, 16.2, 17.8, 19.8, 23.1,\n",
       "        21. , 23.8, 23.1, 20.4, 18.5, 25. , 24.6, 23. , 22.2, 19.3, 22.6,\n",
       "        19.8, 17.1, 19.4, 22.2, 20.7, 21.1, 19.5, 18.5, 20.6, 19. , 18.7,\n",
       "        32.7, 16.5, 23.9, 31.2, 17.5, 17.2, 23.1, 24.5, 26.6, 22.9, 24.1,\n",
       "        18.6, 30.1, 18.2, 20.6, 17.8, 21.7, 22.7, 22.6, 25. , 19.9, 20.8,\n",
       "        16.8, 21.9, 27.5, 21.9, 23.1, 50. , 50. , 50. , 50. , 50. , 13.8,\n",
       "        13.8, 15. , 13.9, 13.3, 13.1, 10.2, 10.4, 10.9, 11.3, 12.3,  8.8,\n",
       "         7.2, 10.5,  7.4, 10.2, 11.5, 15.1, 23.2,  9.7, 13.8, 12.7, 13.1,\n",
       "        12.5,  8.5,  5. ,  6.3,  5.6,  7.2, 12.1,  8.3,  8.5,  5. , 11.9,\n",
       "        27.9, 17.2, 27.5, 15. , 17.2, 17.9, 16.3,  7. ,  7.2,  7.5, 10.4,\n",
       "         8.8,  8.4, 16.7, 14.2, 20.8, 13.4, 11.7,  8.3, 10.2, 10.9, 11. ,\n",
       "         9.5, 14.5, 14.1, 16.1, 14.3, 11.7, 13.4,  9.6,  8.7,  8.4, 12.8,\n",
       "        10.5, 17.1, 18.4, 15.4, 10.8, 11.8, 14.9, 12.6, 14.1, 13. , 13.4,\n",
       "        15.2, 16.1, 17.8, 14.9, 14.1, 12.7, 13.5, 14.9, 20. , 16.4, 17.7,\n",
       "        19.5, 20.2, 21.4, 19.9, 19. , 19.1, 19.1, 20.1, 19.9, 19.6, 23.2,\n",
       "        29.8, 13.8, 13.3, 16.7, 12. , 14.6, 21.4, 23. , 23.7, 25. , 21.8,\n",
       "        20.6, 21.2, 19.1, 20.6, 15.2,  7. ,  8.1, 13.6, 20.1, 21.8, 24.5,\n",
       "        23.1, 19.7, 18.3, 21.2, 17.5, 16.8, 22.4, 20.6, 23.9, 22. , 11.9]),\n",
       " 'feature_names': array(['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD',\n",
       "        'TAX', 'PTRATIO', 'B', 'LSTAT'], dtype='<U7'),\n",
       " 'DESCR': \".. _boston_dataset:\\n\\nBoston house prices dataset\\n---------------------------\\n\\n**Data Set Characteristics:**  \\n\\n    :Number of Instances: 506 \\n\\n    :Number of Attributes: 13 numeric/categorical predictive. Median Value (attribute 14) is usually the target.\\n\\n    :Attribute Information (in order):\\n        - CRIM     per capita crime rate by town\\n        - ZN       proportion of residential land zoned for lots over 25,000 sq.ft.\\n        - INDUS    proportion of non-retail business acres per town\\n        - CHAS     Charles River dummy variable (= 1 if tract bounds river; 0 otherwise)\\n        - NOX      nitric oxides concentration (parts per 10 million)\\n        - RM       average number of rooms per dwelling\\n        - AGE      proportion of owner-occupied units built prior to 1940\\n        - DIS      weighted distances to five Boston employment centres\\n        - RAD      index of accessibility to radial highways\\n        - TAX      full-value property-tax rate per $10,000\\n        - PTRATIO  pupil-teacher ratio by town\\n        - B        1000(Bk - 0.63)^2 where Bk is the proportion of black people by town\\n        - LSTAT    % lower status of the population\\n        - MEDV     Median value of owner-occupied homes in $1000's\\n\\n    :Missing Attribute Values: None\\n\\n    :Creator: Harrison, D. and Rubinfeld, D.L.\\n\\nThis is a copy of UCI ML housing dataset.\\nhttps://archive.ics.uci.edu/ml/machine-learning-databases/housing/\\n\\n\\nThis dataset was taken from the StatLib library which is maintained at Carnegie Mellon University.\\n\\nThe Boston house-price data of Harrison, D. and Rubinfeld, D.L. 'Hedonic\\nprices and the demand for clean air', J. Environ. Economics & Management,\\nvol.5, 81-102, 1978.   Used in Belsley, Kuh & Welsch, 'Regression diagnostics\\n...', Wiley, 1980.   N.B. 
Various transformations are used in the table on\\npages 244-261 of the latter.\\n\\nThe Boston house-price data has been used in many machine learning papers that address regression\\nproblems.   \\n     \\n.. topic:: References\\n\\n   - Belsley, Kuh & Welsch, 'Regression diagnostics: Identifying Influential Data and Sources of Collinearity', Wiley, 1980. 244-261.\\n   - Quinlan,R. (1993). Combining Instance-Based and Model-Based Learning. In Proceedings on the Tenth International Conference of Machine Learning, 236-243, University of Massachusetts, Amherst. Morgan Kaufmann.\\n\",\n",
       " 'filename': 'boston_house_prices.csv',\n",
       " 'data_module': 'sklearn.datasets.data'}"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "type(boston)  #sklearn.utils._bunch.Bunch相当于一个字典类型的数据\n",
    "boston  #这里已经把其他列和目标列给区分开来了，对应的data就是其他列，target就是目标列    feature_names是每列的名字"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "996d3e3d",
   "metadata": {},
   "outputs": [],
   "source": [
    "X = boston.data  #非目标列\n",
    "Y = boston.target #目标列"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "6e842fe5",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>CRIM</th>\n",
       "      <th>ZN</th>\n",
       "      <th>INDUS</th>\n",
       "      <th>CHAS</th>\n",
       "      <th>NOX</th>\n",
       "      <th>RM</th>\n",
       "      <th>AGE</th>\n",
       "      <th>DIS</th>\n",
       "      <th>RAD</th>\n",
       "      <th>TAX</th>\n",
       "      <th>PTRATIO</th>\n",
       "      <th>B</th>\n",
       "      <th>LSTAT</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.00632</td>\n",
       "      <td>18.0</td>\n",
       "      <td>2.31</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.538</td>\n",
       "      <td>6.575</td>\n",
       "      <td>65.2</td>\n",
       "      <td>4.0900</td>\n",
       "      <td>1.0</td>\n",
       "      <td>296.0</td>\n",
       "      <td>15.3</td>\n",
       "      <td>396.90</td>\n",
       "      <td>4.98</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.02731</td>\n",
       "      <td>0.0</td>\n",
       "      <td>7.07</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.469</td>\n",
       "      <td>6.421</td>\n",
       "      <td>78.9</td>\n",
       "      <td>4.9671</td>\n",
       "      <td>2.0</td>\n",
       "      <td>242.0</td>\n",
       "      <td>17.8</td>\n",
       "      <td>396.90</td>\n",
       "      <td>9.14</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.02729</td>\n",
       "      <td>0.0</td>\n",
       "      <td>7.07</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.469</td>\n",
       "      <td>7.185</td>\n",
       "      <td>61.1</td>\n",
       "      <td>4.9671</td>\n",
       "      <td>2.0</td>\n",
       "      <td>242.0</td>\n",
       "      <td>17.8</td>\n",
       "      <td>392.83</td>\n",
       "      <td>4.03</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.03237</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.18</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.458</td>\n",
       "      <td>6.998</td>\n",
       "      <td>45.8</td>\n",
       "      <td>6.0622</td>\n",
       "      <td>3.0</td>\n",
       "      <td>222.0</td>\n",
       "      <td>18.7</td>\n",
       "      <td>394.63</td>\n",
       "      <td>2.94</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.06905</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.18</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.458</td>\n",
       "      <td>7.147</td>\n",
       "      <td>54.2</td>\n",
       "      <td>6.0622</td>\n",
       "      <td>3.0</td>\n",
       "      <td>222.0</td>\n",
       "      <td>18.7</td>\n",
       "      <td>396.90</td>\n",
       "      <td>5.33</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "      CRIM    ZN  INDUS  CHAS    NOX     RM   AGE     DIS  RAD    TAX  \\\n",
       "0  0.00632  18.0   2.31   0.0  0.538  6.575  65.2  4.0900  1.0  296.0   \n",
       "1  0.02731   0.0   7.07   0.0  0.469  6.421  78.9  4.9671  2.0  242.0   \n",
       "2  0.02729   0.0   7.07   0.0  0.469  7.185  61.1  4.9671  2.0  242.0   \n",
       "3  0.03237   0.0   2.18   0.0  0.458  6.998  45.8  6.0622  3.0  222.0   \n",
       "4  0.06905   0.0   2.18   0.0  0.458  7.147  54.2  6.0622  3.0  222.0   \n",
       "\n",
       "   PTRATIO       B  LSTAT  \n",
       "0     15.3  396.90   4.98  \n",
       "1     17.8  396.90   9.14  \n",
       "2     17.8  392.83   4.03  \n",
       "3     18.7  394.63   2.94  \n",
       "4     18.7  396.90   5.33  "
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#把数据转成DataFrame类型\n",
    "df = pd.DataFrame(data=X,columns=boston.feature_names)\n",
    "df.head()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "80e3d6ca",
   "metadata": {},
   "source": [
    "2.拆分数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "16f121bf",
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
    "X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.33, random_state=42)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "74664abd",
   "metadata": {},
   "source": [
    "3.标准化"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "ea4fdea6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Number of training examples:339\n",
      "Number of testing examples:167\n",
      "X_train shape(339, 13)\n",
      "Y_train shape(339,)\n"
     ]
    }
   ],
   "source": [
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "# 定义规则\n",
    "transfer = StandardScaler()\n",
    "# 应用到数据上\n",
    "X_train = transfer.fit_transform(X_train)  #训练转换\n",
    "# X_test = transfer.fit_transform(X_test)   # 注意：这样写是错误的——会用测试集自身的均值/方差重新拟合，导致与训练集的缩放规则不一致（数据泄漏）\n",
    "X_test = transfer.transform(X_test)   #这里X_train用了训练转换，这里就可以直接用转换也行，因为它认为前面已经训练过了，就不需要训练了，就直接转换了\n",
    "# 转型    这里如果后续涉及不到对列名操作，可以不转型成dataframe\n",
    "# 如果没有转型，那么X_train、X_test都是array类型，因为我下面的操作不用直接操作列名，所以没有直接转型\n",
    "\n",
    "print('Number of training examples:'+str(X_train.shape[0]))  #看行数\n",
    "print('Number of testing examples:'+str(X_test.shape[0]))  #看行数\n",
    "print('X_train shape' + str(X_train.shape))\n",
    "print('Y_train shape' + str(Y_train.shape))"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "5027714c",
   "metadata": {},
   "source": [
    "4.定义模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "b3332c6f",
   "metadata": {},
   "outputs": [],
   "source": [
    "#用集成算法stacking建模\n",
    "from sklearn.ensemble import GradientBoostingRegressor as GBDT\n",
    "from sklearn.ensemble import ExtraTreesRegressor as ET\n",
    "from sklearn.ensemble import RandomForestRegressor as RF\n",
    "from sklearn.ensemble import AdaBoostRegressor as ADA\n",
    "\n",
    "model_num = 4   #指定stacking算法用多少种算法模型进行建模\n",
    "# 用到的算法模型\n",
    "models = [GBDT(n_estimators=100),\n",
    "         RF(n_estimators=100),\n",
    "         ET(n_estimators=100),\n",
    "         ADA(n_estimators=100)]\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "ba0f3dfd",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0.]])"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# numpy.zeros():在算法实现或数据分析中，经常需要初始化一个数组。使用numpy.zeros()可以快速创建一个全零数组，为后续的计算或赋值操作提供便利\n",
    "# shape：一个整数或者整数元组，指定了输出数组的形状。例如，shape=(3,)将创建一个长度为3的一维数组，shape=(2, 3)将创建一个2行3列的二维数组。\n",
    "# order：可选参数，指定数组在内存中的存储顺序，通常是'C'（按行存储）或'F'（按列存储）。在大多数情况下，我们不需要改变这个参数。\n",
    "# dtype：可选参数，用于指定数组中元素的数据类型。默认是float64，但可以根据需要设置为其他类型，如int、float32等\n",
    "X_train_stack = np.zeros((X_train.shape[0],len(models))) \n",
    "#创建一个初始化全为0的二维数组，行数：X_train.shape[0]，列数：len(models)  作用是把后面每个模型的预测结果加进来，方便作为第二层的训练数据\n",
    "# np.zeros((X_test.shape[0],len(models))) 总体的作用：为第二层准备数据\n",
    "X_test_stack = np.zeros((X_test.shape[0],len(models))) \n",
    "X_test_stack"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d1dc0776",
   "metadata": {},
   "source": [
    "5.交叉验证构建第一层数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "d57424a1",
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import KFold  #KFold:K折交叉验证器\n",
    "n_folds = 10 # 数据分成几份\n",
    "kf = KFold(n_splits=n_folds)  #做交叉验证的目的就是为了构建训练集和测试集\n",
    "count = 0\n",
    "# #kf.split 返回划分的索引   生成索引以将数据拆分为训练集和测试集。\n",
    "for i,model in enumerate(models):  # 第一层for循环把模型取出来  i：列表下标，用来控制下面数据写在哪一列中    model：下标对应模型\n",
    "    #X_test.shape[0]：生成一个与测试集数据大小相同的样本数，n_folds一共有10列\n",
    "    X_stack_test_n = np.zeros((X_test.shape[0],n_folds)) # (X_test.shape[0],n_folds)：(test样本数，10组索引)\n",
    "    #j：刚好对应n_folds的10列：0，1，..9 。也就意味着对X_train数据拆分成了10份，这个for循环就是用来做交叉验证拆分数据的\n",
    "    for j,(train_index,test_index) in enumerate(kf.split(X_train)): \n",
    "        #获取到每一份训练数据，拆分后目标列和其他列对应的索引肯定是相同的\n",
    "        tr_x = X_train[train_index] #从每份数据中根据索引取对应数据，而每份数据中有相同的索引很正常，因为拆分的时候可以不均衡拆\n",
    "        tr_y = Y_train[train_index]\n",
    "        model.fit(tr_x,tr_y) #训练\n",
    "        #生成stacking 训练数据集，把每一份模型预测的结果X_train_stack作为下一层的输入数据集\n",
    "        #而X_train[test_index]相当于预测了测试集数据，这里train_index、test_index相当于把数据拆分成了训练集和测试集\n",
    "        #test_index的类型是一个arry类型的，X_train_stack[test_index,i] = model.predict(X_train[test_index])相当于把测试集每一行数据预测的\n",
    "        #结果一一赋值给test_index中的每一个索引，这种写法要注意，直接是多值赋值给多个索引\n",
    "#         print(type(test_index))\n",
    "        X_train_stack[test_index,i] = model.predict(X_train[test_index])\n",
    "        X_stack_test_n[:,j] = model.predict(X_test)\n",
    "    #生成stacking 测试数据集：主要用来看后面评分的。注意 mean(axis=1) 是沿着列方向聚合，\n",
    "    #即对每一行的 n_folds 个预测取均值，结果长度等于测试集行数\n",
    "    #为什么要取均值呢？这里没办法用到10次的结果，为了保持和训练集的列数一致\n",
    "    X_test_stack[:,i] = X_stack_test_n.mean(axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "ff940a3c",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "X_train_stack shape(339, 4)\n",
      "X_test_stack shape(167, 4)\n"
     ]
    }
   ],
   "source": [
    "#查看构建的新数据集\n",
    "print('X_train_stack shape' + str(X_train_stack.shape))\n",
    "print('X_test_stack shape' + str(X_test_stack.shape))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "d4ad1dfd",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([22.7949254 , 31.74404569, 16.93430868, 25.5270771 , 17.93778628,\n",
       "       21.80365563, 20.18624698, 14.02958997, 20.51344974, 20.26626435,\n",
       "       19.9719267 , 20.67279599, 11.20546823, 21.67064571, 21.35030107,\n",
       "       22.76408278, 18.98004861, 11.37942105, 44.63972698, 17.4698268 ,\n",
       "       26.46337186, 26.57564437, 17.27028056, 20.85557037, 15.45008859,\n",
       "       18.49593368, 21.67885749, 17.36524011, 20.32348313, 20.57635883,\n",
       "       20.73164056, 25.50030658, 21.09425758, 19.88951706, 16.34934463,\n",
       "       17.18634031, 36.02473205, 20.99931012, 20.99487292, 24.84945609,\n",
       "       17.67975808, 29.07704865, 44.57158678, 21.39456175, 22.69921545,\n",
       "       15.47135874, 19.29013352, 25.82716858, 18.77533264, 27.42195715,\n",
       "       21.86519713, 33.22461719, 19.3099135 , 25.25803525, 46.01356443,\n",
       "       20.76027209, 16.0877076 , 33.15980787, 22.6586815 , 20.34495579,\n",
       "       24.00595209, 33.54092171, 30.26337169, 20.068881  , 26.77661375,\n",
       "       18.49245435, 15.39741842, 25.61254333, 29.37878576, 17.47282718,\n",
       "       21.32330168, 24.59126788, 11.35723619, 25.04990972, 21.76734158,\n",
       "       10.98989678, 20.86884454, 44.80666712, 11.74756145, 13.73466456,\n",
       "       22.37913753, 13.04708758, 21.86414665, 11.26273148, 21.31283763,\n",
       "       24.77558817, 18.18297491, 25.60536407, 25.57851324, 18.50894026,\n",
       "       22.01127133, 11.41823916, 20.15796929, 21.5250621 , 25.46586104,\n",
       "       20.65362075, 24.95203985, 12.63121613, 14.45735723, 13.12879688,\n",
       "       21.48141462, 22.57991811, 14.30755534, 20.90945831, 22.74921714,\n",
       "       13.497742  , 20.3292746 , 27.15665888, 21.48201932, 25.75836089,\n",
       "       11.29314676, 14.60663145, 26.18688092, 22.62831521, 31.22168046,\n",
       "       17.33585276, 37.73537151, 19.11497896, 20.95775476, 26.17157367,\n",
       "       21.77321806, 25.76067031, 11.58531528, 21.88061045, 26.77761357,\n",
       "       23.0904665 , 25.75863114, 34.75102889, 18.33608079, 44.59972911,\n",
       "       15.3848021 , 25.75412915, 20.63383278, 20.15672805, 13.76993838,\n",
       "       20.23260193, 21.85218782, 32.52585532, 29.44791779, 19.29055738,\n",
       "       19.64458145, 27.68665432, 21.85384984, 18.6916417 , 11.14961123,\n",
       "       21.64516666, 18.63875786, 14.08760141, 17.57014653, 42.56406401,\n",
       "       17.84934363, 17.75522226, 26.7031759 , 22.09386399, 21.85227754,\n",
       "       24.97641076, 18.04742829, 22.55820053, 34.56639592, 11.35092352,\n",
       "       22.25177903, 21.0698744 , 20.68280522, 25.56721604, 22.94352203,\n",
       "       21.93769017, 44.75142991])"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X_stack_test_n.mean(axis=1)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "9577bfc0",
   "metadata": {},
   "source": [
    "6.Stacking第二层建模。其实一层和二层并没有什么太大的联系，只是二层用了1层的数据，如果二层不用第一层的数据，就不叫stacking算法了"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "3b92a8eb",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<style>#sk-container-id-1 {color: black;background-color: white;}#sk-container-id-1 pre{padding: 0;}#sk-container-id-1 div.sk-toggleable {background-color: white;}#sk-container-id-1 label.sk-toggleable__label {cursor: pointer;display: block;width: 100%;margin-bottom: 0;padding: 0.3em;box-sizing: border-box;text-align: center;}#sk-container-id-1 label.sk-toggleable__label-arrow:before {content: \"▸\";float: left;margin-right: 0.25em;color: #696969;}#sk-container-id-1 label.sk-toggleable__label-arrow:hover:before {color: black;}#sk-container-id-1 div.sk-estimator:hover label.sk-toggleable__label-arrow:before {color: black;}#sk-container-id-1 div.sk-toggleable__content {max-height: 0;max-width: 0;overflow: hidden;text-align: left;background-color: #f0f8ff;}#sk-container-id-1 div.sk-toggleable__content pre {margin: 0.2em;color: black;border-radius: 0.25em;background-color: #f0f8ff;}#sk-container-id-1 input.sk-toggleable__control:checked~div.sk-toggleable__content {max-height: 200px;max-width: 100%;overflow: auto;}#sk-container-id-1 input.sk-toggleable__control:checked~label.sk-toggleable__label-arrow:before {content: \"▾\";}#sk-container-id-1 div.sk-estimator input.sk-toggleable__control:checked~label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-1 div.sk-label input.sk-toggleable__control:checked~label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-1 input.sk-hidden--visually {border: 0;clip: rect(1px 1px 1px 1px);clip: rect(1px, 1px, 1px, 1px);height: 1px;margin: -1px;overflow: hidden;padding: 0;position: absolute;width: 1px;}#sk-container-id-1 div.sk-estimator {font-family: monospace;background-color: #f0f8ff;border: 1px dotted black;border-radius: 0.25em;box-sizing: border-box;margin-bottom: 0.5em;}#sk-container-id-1 div.sk-estimator:hover {background-color: #d4ebff;}#sk-container-id-1 div.sk-parallel-item::after {content: \"\";width: 100%;border-bottom: 1px solid gray;flex-grow: 1;}#sk-container-id-1 
div.sk-label:hover label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-1 div.sk-serial::before {content: \"\";position: absolute;border-left: 1px solid gray;box-sizing: border-box;top: 0;bottom: 0;left: 50%;z-index: 0;}#sk-container-id-1 div.sk-serial {display: flex;flex-direction: column;align-items: center;background-color: white;padding-right: 0.2em;padding-left: 0.2em;position: relative;}#sk-container-id-1 div.sk-item {position: relative;z-index: 1;}#sk-container-id-1 div.sk-parallel {display: flex;align-items: stretch;justify-content: center;background-color: white;position: relative;}#sk-container-id-1 div.sk-item::before, #sk-container-id-1 div.sk-parallel-item::before {content: \"\";position: absolute;border-left: 1px solid gray;box-sizing: border-box;top: 0;bottom: 0;left: 50%;z-index: -1;}#sk-container-id-1 div.sk-parallel-item {display: flex;flex-direction: column;z-index: 1;position: relative;background-color: white;}#sk-container-id-1 div.sk-parallel-item:first-child::after {align-self: flex-end;width: 50%;}#sk-container-id-1 div.sk-parallel-item:last-child::after {align-self: flex-start;width: 50%;}#sk-container-id-1 div.sk-parallel-item:only-child::after {width: 0;}#sk-container-id-1 div.sk-dashed-wrapped {border: 1px dashed gray;margin: 0 0.4em 0.5em 0.4em;box-sizing: border-box;padding-bottom: 0.4em;background-color: white;}#sk-container-id-1 div.sk-label label {font-family: monospace;font-weight: bold;display: inline-block;line-height: 1.2em;}#sk-container-id-1 div.sk-label-container {text-align: center;}#sk-container-id-1 div.sk-container {/* jupyter's `normalize.less` sets `[hidden] { display: none; }` but bootstrap.min.css set `[hidden] { display: none !important; }` so we also need the `!important` here to be able to override the default hidden behavior on the sphinx rendered scikit-learn.org. 
See: https://github.com/scikit-learn/scikit-learn/issues/21755 */display: inline-block !important;position: relative;}#sk-container-id-1 div.sk-text-repr-fallback {display: none;}</style><div id=\"sk-container-id-1\" class=\"sk-top-container\"><div class=\"sk-text-repr-fallback\"><pre>LinearRegression()</pre><b>In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. <br />On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.</b></div><div class=\"sk-container\" hidden><div class=\"sk-item\"><div class=\"sk-estimator sk-toggleable\"><input class=\"sk-toggleable__control sk-hidden--visually\" id=\"sk-estimator-id-1\" type=\"checkbox\" checked><label for=\"sk-estimator-id-1\" class=\"sk-toggleable__label sk-toggleable__label-arrow\">LinearRegression</label><div class=\"sk-toggleable__content\"><pre>LinearRegression()</pre></div></div></div></div></div>"
      ],
      "text/plain": [
       "LinearRegression()"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.linear_model import LinearRegression #线性回归\n",
    "from sklearn.metrics import r2_score    #用于评分的算法\n",
    "# 线性回归简单理解就是：y=a1*x1+a2*x2+...+b    其中x是不同的属性（列），a是对应属性的系数\n",
    "#第二层的训练\n",
    "model_second = LinearRegression()\n",
    "model_second.fit(X=X_train_stack,y=Y_train)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "341ddc79",
   "metadata": {},
   "source": [
    "7.回归模型评估"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "5c76eeae",
   "metadata": {},
   "source": [
    "回归模型评估中有几个关键字：mae,mse,rmse:都是和误差大小相关的评估指标，所以越小越好\n",
    "r2_score：通常取值范围0-1（拟合很差时可能为负），越大越好"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "fc6eba8f",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "R2: 0.8853798998092478\n"
     ]
    }
   ],
   "source": [
    "#第一次模型评估\n",
    "pred = model_second.predict(X_test_stack)\n",
    "print(\"R2:\",r2_score(y_true=Y_test,y_pred=pred))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "9deec43b",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MAE:1.82\n",
      "Real:23.600000,Predict:23.665127\n",
      "Real:32.400000,Predict:32.012298\n",
      "Real:13.600000,Predict:15.400416\n",
      "Real:22.800000,Predict:23.706721\n",
      "Real:16.100000,Predict:16.571660\n",
      "Real:20.000000,Predict:21.870323\n",
      "Real:17.800000,Predict:18.543819\n",
      "Real:14.000000,Predict:14.392153\n",
      "Real:19.600000,Predict:20.987390\n",
      "Real:16.800000,Predict:20.188518\n",
      "Real:21.500000,Predict:20.958799\n",
      "Real:18.900000,Predict:19.879542\n",
      "Real:7.000000,Predict:8.929723\n",
      "Real:21.200000,Predict:21.716498\n",
      "Real:18.500000,Predict:20.063146\n",
      "Real:29.800000,Predict:22.553269\n",
      "Real:18.800000,Predict:19.915041\n",
      "Real:10.200000,Predict:8.763121\n",
      "Real:50.000000,Predict:45.740016\n",
      "Real:14.100000,Predict:15.740537\n",
      "Real:25.200000,Predict:24.080625\n",
      "Real:29.100000,Predict:25.289899\n",
      "Real:12.700000,Predict:13.565563\n",
      "Real:22.400000,Predict:21.633957\n",
      "Real:14.200000,Predict:15.564612\n",
      "Real:13.800000,Predict:15.823186\n",
      "Real:20.300000,Predict:21.981372\n",
      "Real:14.900000,Predict:13.476866\n",
      "Real:21.700000,Predict:20.586610\n",
      "Real:18.300000,Predict:20.909099\n",
      "Real:23.100000,Predict:20.886466\n",
      "Real:23.800000,Predict:23.636282\n",
      "Real:15.000000,Predict:18.702683\n",
      "Real:20.800000,Predict:21.263962\n",
      "Real:19.100000,Predict:14.657018\n",
      "Real:19.400000,Predict:16.719192\n",
      "Real:34.700000,Predict:33.980594\n",
      "Real:19.500000,Predict:19.099662\n",
      "Real:24.400000,Predict:21.755997\n",
      "Real:23.400000,Predict:23.600670\n",
      "Real:19.700000,Predict:17.496917\n",
      "Real:28.200000,Predict:29.778064\n",
      "Real:50.000000,Predict:45.369605\n",
      "Real:17.400000,Predict:20.204533\n",
      "Real:22.600000,Predict:22.338873\n",
      "Real:15.100000,Predict:14.635360\n",
      "Real:13.100000,Predict:15.492865\n",
      "Real:24.200000,Predict:24.376104\n",
      "Real:19.900000,Predict:17.899042\n",
      "Real:24.000000,Predict:27.590731\n",
      "Real:18.900000,Predict:21.159697\n",
      "Real:35.400000,Predict:35.639145\n",
      "Real:15.200000,Predict:15.743099\n",
      "Real:26.500000,Predict:25.506481\n",
      "Real:43.500000,Predict:44.623874\n",
      "Real:21.200000,Predict:21.697865\n",
      "Real:18.400000,Predict:15.724266\n",
      "Real:28.500000,Predict:32.431611\n",
      "Real:23.900000,Predict:22.902465\n",
      "Real:18.500000,Predict:18.322455\n",
      "Real:25.000000,Predict:23.555818\n",
      "Real:35.400000,Predict:34.881629\n",
      "Real:31.500000,Predict:30.198193\n",
      "Real:20.200000,Predict:19.613243\n",
      "Real:24.100000,Predict:24.996288\n",
      "Real:20.000000,Predict:18.659559\n",
      "Real:13.100000,Predict:13.784000\n",
      "Real:24.800000,Predict:23.384239\n",
      "Real:30.800000,Predict:28.635811\n",
      "Real:12.700000,Predict:15.206297\n",
      "Real:20.000000,Predict:21.181015\n",
      "Real:23.700000,Predict:25.047629\n",
      "Real:10.800000,Predict:11.119031\n",
      "Real:20.600000,Predict:20.938207\n",
      "Real:20.800000,Predict:22.589122\n",
      "Real:5.000000,Predict:7.207793\n",
      "Real:20.100000,Predict:20.283633\n",
      "Real:48.500000,Predict:45.927280\n",
      "Real:10.900000,Predict:11.433268\n",
      "Real:7.000000,Predict:11.461916\n",
      "Real:20.900000,Predict:21.794765\n",
      "Real:17.200000,Predict:11.500956\n",
      "Real:20.900000,Predict:19.037091\n",
      "Real:9.700000,Predict:10.631950\n",
      "Real:19.400000,Predict:20.057554\n",
      "Real:29.000000,Predict:27.709022\n",
      "Real:16.400000,Predict:15.348294\n",
      "Real:25.000000,Predict:23.870154\n",
      "Real:25.000000,Predict:24.836010\n",
      "Real:17.100000,Predict:18.050796\n",
      "Real:23.200000,Predict:22.162395\n",
      "Real:10.400000,Predict:8.014149\n",
      "Real:19.600000,Predict:19.554812\n",
      "Real:17.200000,Predict:18.896239\n",
      "Real:27.500000,Predict:28.107496\n",
      "Real:23.000000,Predict:20.055501\n",
      "Real:50.000000,Predict:29.962560\n",
      "Real:17.900000,Predict:10.946560\n",
      "Real:9.600000,Predict:12.571793\n",
      "Real:17.200000,Predict:12.969839\n",
      "Real:22.500000,Predict:20.710957\n",
      "Real:21.400000,Predict:23.211013\n",
      "Real:12.000000,Predict:12.605149\n",
      "Real:19.900000,Predict:20.106174\n",
      "Real:19.400000,Predict:20.013458\n",
      "Real:13.400000,Predict:11.228763\n",
      "Real:18.200000,Predict:19.186854\n",
      "Real:24.600000,Predict:26.115678\n",
      "Real:21.100000,Predict:20.253568\n",
      "Real:24.700000,Predict:23.632466\n",
      "Real:8.700000,Predict:8.421442\n",
      "Real:27.500000,Predict:14.293920\n",
      "Real:20.700000,Predict:21.809690\n",
      "Real:36.200000,Predict:24.074239\n",
      "Real:31.600000,Predict:32.273476\n",
      "Real:11.700000,Predict:14.615248\n",
      "Real:39.800000,Predict:42.359821\n",
      "Real:13.900000,Predict:15.358250\n",
      "Real:21.800000,Predict:21.251412\n",
      "Real:23.700000,Predict:24.354261\n",
      "Real:17.600000,Predict:18.718745\n",
      "Real:24.400000,Predict:24.469034\n",
      "Real:8.800000,Predict:8.222900\n",
      "Real:19.200000,Predict:20.261453\n",
      "Real:25.300000,Predict:23.447606\n",
      "Real:20.400000,Predict:22.097093\n",
      "Real:23.100000,Predict:23.726703\n",
      "Real:37.900000,Predict:38.287072\n",
      "Real:15.600000,Predict:16.625694\n",
      "Real:45.400000,Predict:46.496061\n",
      "Real:15.700000,Predict:17.729350\n",
      "Real:22.600000,Predict:22.908425\n",
      "Real:14.500000,Predict:18.495235\n",
      "Real:18.700000,Predict:19.591909\n",
      "Real:17.800000,Predict:13.948985\n",
      "Real:16.100000,Predict:18.864775\n",
      "Real:20.600000,Predict:20.840848\n",
      "Real:31.600000,Predict:30.572634\n",
      "Real:29.100000,Predict:29.038757\n",
      "Real:15.600000,Predict:16.703292\n",
      "Real:17.500000,Predict:17.410954\n",
      "Real:22.500000,Predict:24.679032\n",
      "Real:19.400000,Predict:20.110373\n",
      "Real:19.300000,Predict:18.582282\n",
      "Real:8.500000,Predict:7.685979\n",
      "Real:20.600000,Predict:21.676867\n",
      "Real:17.000000,Predict:19.021045\n",
      "Real:17.100000,Predict:13.280638\n",
      "Real:14.500000,Predict:14.311532\n",
      "Real:50.000000,Predict:43.836016\n",
      "Real:14.300000,Predict:15.003947\n",
      "Real:12.600000,Predict:15.716547\n",
      "Real:28.700000,Predict:26.660868\n",
      "Real:21.200000,Predict:21.053958\n",
      "Real:19.300000,Predict:21.197366\n",
      "Real:23.100000,Predict:21.508864\n",
      "Real:19.100000,Predict:17.119246\n",
      "Real:25.000000,Predict:25.582027\n",
      "Real:33.400000,Predict:35.020477\n",
      "Real:5.000000,Predict:9.034501\n",
      "Real:29.600000,Predict:23.505652\n",
      "Real:18.700000,Predict:18.969339\n",
      "Real:21.700000,Predict:21.062256\n",
      "Real:23.100000,Predict:23.568337\n",
      "Real:22.800000,Predict:23.927867\n",
      "Real:21.000000,Predict:19.863702\n",
      "Real:48.800000,Predict:48.259361\n"
     ]
    }
   ],
   "source": [
    "#多次模型评估\n",
    "from sklearn.metrics import mean_absolute_error\n",
    "\n",
    "print('MAE:%.2f'%mean_absolute_error(Y_test,pred))   #求平均误差绝对值，也就是看下面Y_test[i]真实值和预测值pred[i]之间误差的平均值\n",
    "#分别将真实值和预测值进行输出并观察比较   其实意义不大\n",
    "for i in range(len(Y_test)):\n",
    "    print('Real:%f,Predict:%f'%(Y_test[i],pred[i]))"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "bcade809",
   "metadata": {},
   "source": [
    "8.使用现成的stacking库来解决stacking算法，上面的都是一个个实现的"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "6905308a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# pip install mlxtend"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "19825bd8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<style>#sk-container-id-2 {color: black;background-color: white;}#sk-container-id-2 pre{padding: 0;}#sk-container-id-2 div.sk-toggleable {background-color: white;}#sk-container-id-2 label.sk-toggleable__label {cursor: pointer;display: block;width: 100%;margin-bottom: 0;padding: 0.3em;box-sizing: border-box;text-align: center;}#sk-container-id-2 label.sk-toggleable__label-arrow:before {content: \"▸\";float: left;margin-right: 0.25em;color: #696969;}#sk-container-id-2 label.sk-toggleable__label-arrow:hover:before {color: black;}#sk-container-id-2 div.sk-estimator:hover label.sk-toggleable__label-arrow:before {color: black;}#sk-container-id-2 div.sk-toggleable__content {max-height: 0;max-width: 0;overflow: hidden;text-align: left;background-color: #f0f8ff;}#sk-container-id-2 div.sk-toggleable__content pre {margin: 0.2em;color: black;border-radius: 0.25em;background-color: #f0f8ff;}#sk-container-id-2 input.sk-toggleable__control:checked~div.sk-toggleable__content {max-height: 200px;max-width: 100%;overflow: auto;}#sk-container-id-2 input.sk-toggleable__control:checked~label.sk-toggleable__label-arrow:before {content: \"▾\";}#sk-container-id-2 div.sk-estimator input.sk-toggleable__control:checked~label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-2 div.sk-label input.sk-toggleable__control:checked~label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-2 input.sk-hidden--visually {border: 0;clip: rect(1px 1px 1px 1px);clip: rect(1px, 1px, 1px, 1px);height: 1px;margin: -1px;overflow: hidden;padding: 0;position: absolute;width: 1px;}#sk-container-id-2 div.sk-estimator {font-family: monospace;background-color: #f0f8ff;border: 1px dotted black;border-radius: 0.25em;box-sizing: border-box;margin-bottom: 0.5em;}#sk-container-id-2 div.sk-estimator:hover {background-color: #d4ebff;}#sk-container-id-2 div.sk-parallel-item::after {content: \"\";width: 100%;border-bottom: 1px solid gray;flex-grow: 1;}#sk-container-id-2 
div.sk-label:hover label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-2 div.sk-serial::before {content: \"\";position: absolute;border-left: 1px solid gray;box-sizing: border-box;top: 0;bottom: 0;left: 50%;z-index: 0;}#sk-container-id-2 div.sk-serial {display: flex;flex-direction: column;align-items: center;background-color: white;padding-right: 0.2em;padding-left: 0.2em;position: relative;}#sk-container-id-2 div.sk-item {position: relative;z-index: 1;}#sk-container-id-2 div.sk-parallel {display: flex;align-items: stretch;justify-content: center;background-color: white;position: relative;}#sk-container-id-2 div.sk-item::before, #sk-container-id-2 div.sk-parallel-item::before {content: \"\";position: absolute;border-left: 1px solid gray;box-sizing: border-box;top: 0;bottom: 0;left: 50%;z-index: -1;}#sk-container-id-2 div.sk-parallel-item {display: flex;flex-direction: column;z-index: 1;position: relative;background-color: white;}#sk-container-id-2 div.sk-parallel-item:first-child::after {align-self: flex-end;width: 50%;}#sk-container-id-2 div.sk-parallel-item:last-child::after {align-self: flex-start;width: 50%;}#sk-container-id-2 div.sk-parallel-item:only-child::after {width: 0;}#sk-container-id-2 div.sk-dashed-wrapped {border: 1px dashed gray;margin: 0 0.4em 0.5em 0.4em;box-sizing: border-box;padding-bottom: 0.4em;background-color: white;}#sk-container-id-2 div.sk-label label {font-family: monospace;font-weight: bold;display: inline-block;line-height: 1.2em;}#sk-container-id-2 div.sk-label-container {text-align: center;}#sk-container-id-2 div.sk-container {/* jupyter's `normalize.less` sets `[hidden] { display: none; }` but bootstrap.min.css set `[hidden] { display: none !important; }` so we also need the `!important` here to be able to override the default hidden behavior on the sphinx rendered scikit-learn.org. 
See: https://github.com/scikit-learn/scikit-learn/issues/21755 */display: inline-block !important;position: relative;}#sk-container-id-2 div.sk-text-repr-fallback {display: none;}</style><div id=\"sk-container-id-2\" class=\"sk-top-container\"><div class=\"sk-text-repr-fallback\"><pre>StackingCVRegressor(cv=10, meta_regressor=LinearRegression(), random_state=42,\n",
       "                    regressors=(GradientBoostingRegressor(),\n",
       "                                RandomForestRegressor(), ExtraTreesRegressor(),\n",
       "                                AdaBoostRegressor()),\n",
       "                    use_features_in_secondary=True)</pre><b>In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. <br />On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.</b></div><div class=\"sk-container\" hidden><div class=\"sk-item sk-dashed-wrapped\"><div class=\"sk-label-container\"><div class=\"sk-label sk-toggleable\"><input class=\"sk-toggleable__control sk-hidden--visually\" id=\"sk-estimator-id-2\" type=\"checkbox\" ><label for=\"sk-estimator-id-2\" class=\"sk-toggleable__label sk-toggleable__label-arrow\">StackingCVRegressor</label><div class=\"sk-toggleable__content\"><pre>StackingCVRegressor(cv=10, meta_regressor=LinearRegression(), random_state=42,\n",
       "                    regressors=(GradientBoostingRegressor(),\n",
       "                                RandomForestRegressor(), ExtraTreesRegressor(),\n",
       "                                AdaBoostRegressor()),\n",
       "                    use_features_in_secondary=True)</pre></div></div></div><div class=\"sk-parallel\"><div class=\"sk-parallel-item\"><div class=\"sk-item\"><div class=\"sk-label-container\"><div class=\"sk-label sk-toggleable\"><input class=\"sk-toggleable__control sk-hidden--visually\" id=\"sk-estimator-id-3\" type=\"checkbox\" ><label for=\"sk-estimator-id-3\" class=\"sk-toggleable__label sk-toggleable__label-arrow\">meta_regressor: LinearRegression</label><div class=\"sk-toggleable__content\"><pre>LinearRegression()</pre></div></div></div><div class=\"sk-serial\"><div class=\"sk-item\"><div class=\"sk-estimator sk-toggleable\"><input class=\"sk-toggleable__control sk-hidden--visually\" id=\"sk-estimator-id-4\" type=\"checkbox\" ><label for=\"sk-estimator-id-4\" class=\"sk-toggleable__label sk-toggleable__label-arrow\">LinearRegression</label><div class=\"sk-toggleable__content\"><pre>LinearRegression()</pre></div></div></div></div></div></div></div></div></div></div>"
      ],
      "text/plain": [
       "StackingCVRegressor(cv=10, meta_regressor=LinearRegression(), random_state=42,\n",
       "                    regressors=(GradientBoostingRegressor(),\n",
       "                                RandomForestRegressor(), ExtraTreesRegressor(),\n",
       "                                AdaBoostRegressor()),\n",
       "                    use_features_in_secondary=True)"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from mlxtend.regressor import StackingCVRegressor #带CV的作用就是做交叉验证的次数  这是回归器\n",
    "from mlxtend.classifier import StackingCVClassifier   #分类器\n",
    "#定义集成模型\n",
    "model_1 = GBDT()\n",
    "model_1.fit(X_train,Y_train)\n",
    "\n",
    "model_2 = RF()\n",
    "model_2.fit(X_train,Y_train)\n",
    "\n",
    "model_3 = ET()\n",
    "model_3.fit(X_train,Y_train)\n",
    "\n",
    "model_4 = ADA()\n",
    "model_4.fit(X_train,Y_train)\n",
    "\n",
    "model_second = LinearRegression()\n",
    "model_second.fit(X=X_train_stack,y=Y_train)\n",
    "# # 模型 数据 cv\n",
    "# use_features_in_secondary: True表示meta_regressor除了接收各regressors的预测结果外，还同时接收原始特征X\n",
    "stack = StackingCVRegressor(regressors=(model_1,model_2,model_3,model_4),meta_regressor=model_second,cv=10,use_features_in_secondary=True,random_state=42)\n",
    "stack.fit(X_train,Y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "ad5800c0",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "R2: 0.8786081773709414\n"
     ]
    }
   ],
   "source": [
    "pred_stack = stack.predict(X_test)\n",
    "print('R2:',r2_score(Y_test,pred_stack))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "97baa4a9",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MAE:1.96\n",
      "Real:23.600000,Predict:24.367535\n",
      "Real:32.400000,Predict:33.034936\n",
      "Real:13.600000,Predict:14.192562\n",
      "Real:22.800000,Predict:24.155712\n",
      "Real:16.100000,Predict:17.279830\n",
      "Real:20.000000,Predict:22.189804\n",
      "Real:17.800000,Predict:18.209829\n",
      "Real:14.000000,Predict:13.780600\n",
      "Real:19.600000,Predict:23.093739\n",
      "Real:16.800000,Predict:20.914559\n",
      "Real:21.500000,Predict:20.980719\n",
      "Real:18.900000,Predict:18.824546\n",
      "Real:7.000000,Predict:5.244183\n",
      "Real:21.200000,Predict:21.403739\n",
      "Real:18.500000,Predict:19.740700\n",
      "Real:29.800000,Predict:24.851997\n",
      "Real:18.800000,Predict:21.610605\n",
      "Real:10.200000,Predict:8.204956\n",
      "Real:50.000000,Predict:47.195986\n",
      "Real:14.100000,Predict:15.368383\n",
      "Real:25.200000,Predict:25.700069\n",
      "Real:29.100000,Predict:27.450811\n",
      "Real:12.700000,Predict:12.214881\n",
      "Real:22.400000,Predict:21.733885\n",
      "Real:14.200000,Predict:15.815916\n",
      "Real:13.800000,Predict:16.390676\n",
      "Real:20.300000,Predict:22.500254\n",
      "Real:14.900000,Predict:12.088030\n",
      "Real:21.700000,Predict:19.630589\n",
      "Real:18.300000,Predict:21.144134\n",
      "Real:23.100000,Predict:22.015109\n",
      "Real:23.800000,Predict:23.339293\n",
      "Real:15.000000,Predict:18.460687\n",
      "Real:20.800000,Predict:22.898130\n",
      "Real:19.100000,Predict:15.987326\n",
      "Real:19.400000,Predict:15.781462\n",
      "Real:34.700000,Predict:33.088390\n",
      "Real:19.500000,Predict:19.059396\n",
      "Real:24.400000,Predict:21.402132\n",
      "Real:23.400000,Predict:24.399984\n",
      "Real:19.700000,Predict:18.204370\n",
      "Real:28.200000,Predict:32.226808\n",
      "Real:50.000000,Predict:47.853357\n",
      "Real:17.400000,Predict:19.564202\n",
      "Real:22.600000,Predict:22.537516\n",
      "Real:15.100000,Predict:15.168305\n",
      "Real:13.100000,Predict:14.200803\n",
      "Real:24.200000,Predict:24.914069\n",
      "Real:19.900000,Predict:19.071373\n",
      "Real:24.000000,Predict:27.479627\n",
      "Real:18.900000,Predict:20.172193\n",
      "Real:35.400000,Predict:36.676384\n",
      "Real:15.200000,Predict:14.291424\n",
      "Real:26.500000,Predict:25.525025\n",
      "Real:43.500000,Predict:44.533071\n",
      "Real:21.200000,Predict:22.844265\n",
      "Real:18.400000,Predict:16.499051\n",
      "Real:28.500000,Predict:35.016619\n",
      "Real:23.900000,Predict:22.486854\n",
      "Real:18.500000,Predict:17.640397\n",
      "Real:25.000000,Predict:22.521138\n",
      "Real:35.400000,Predict:34.357239\n",
      "Real:31.500000,Predict:31.517522\n",
      "Real:20.200000,Predict:20.024841\n",
      "Real:24.100000,Predict:21.979295\n",
      "Real:20.000000,Predict:17.949615\n",
      "Real:13.100000,Predict:13.980528\n",
      "Real:24.800000,Predict:24.014753\n",
      "Real:30.800000,Predict:30.099625\n",
      "Real:12.700000,Predict:15.772724\n",
      "Real:20.000000,Predict:21.121734\n",
      "Real:23.700000,Predict:28.135358\n",
      "Real:10.800000,Predict:11.252428\n",
      "Real:20.600000,Predict:18.923065\n",
      "Real:20.800000,Predict:23.768217\n",
      "Real:5.000000,Predict:4.145943\n",
      "Real:20.100000,Predict:20.197094\n",
      "Real:48.500000,Predict:47.267785\n",
      "Real:10.900000,Predict:11.174310\n",
      "Real:7.000000,Predict:10.015430\n",
      "Real:20.900000,Predict:21.470016\n",
      "Real:17.200000,Predict:12.925189\n",
      "Real:20.900000,Predict:18.829318\n",
      "Real:9.700000,Predict:11.233093\n",
      "Real:19.400000,Predict:20.293342\n",
      "Real:29.000000,Predict:27.987536\n",
      "Real:16.400000,Predict:15.267711\n",
      "Real:25.000000,Predict:23.627120\n",
      "Real:25.000000,Predict:26.994138\n",
      "Real:17.100000,Predict:17.307734\n",
      "Real:23.200000,Predict:23.187686\n",
      "Real:10.400000,Predict:6.319593\n",
      "Real:19.600000,Predict:18.870881\n",
      "Real:17.200000,Predict:15.767482\n",
      "Real:27.500000,Predict:33.333407\n",
      "Real:23.000000,Predict:19.244178\n",
      "Real:50.000000,Predict:30.865334\n",
      "Real:17.900000,Predict:11.471391\n",
      "Real:9.600000,Predict:10.872780\n",
      "Real:17.200000,Predict:12.532870\n",
      "Real:22.500000,Predict:20.880612\n",
      "Real:21.400000,Predict:24.247907\n",
      "Real:12.000000,Predict:13.594027\n",
      "Real:19.900000,Predict:19.886158\n",
      "Real:19.400000,Predict:20.108882\n",
      "Real:13.400000,Predict:12.244631\n",
      "Real:18.200000,Predict:18.981843\n",
      "Real:24.600000,Predict:25.598332\n",
      "Real:21.100000,Predict:20.156461\n",
      "Real:24.700000,Predict:22.491347\n",
      "Real:8.700000,Predict:6.452871\n",
      "Real:27.500000,Predict:14.052738\n",
      "Real:20.700000,Predict:20.659361\n",
      "Real:36.200000,Predict:25.314101\n",
      "Real:31.600000,Predict:32.768132\n",
      "Real:11.700000,Predict:13.555201\n",
      "Real:39.800000,Predict:41.565314\n",
      "Real:13.900000,Predict:14.139761\n",
      "Real:21.800000,Predict:21.627876\n",
      "Real:23.700000,Predict:25.045827\n",
      "Real:17.600000,Predict:18.071655\n",
      "Real:24.400000,Predict:24.169570\n",
      "Real:8.800000,Predict:2.254058\n",
      "Real:19.200000,Predict:20.014576\n",
      "Real:25.300000,Predict:24.118237\n",
      "Real:20.400000,Predict:22.060115\n",
      "Real:23.100000,Predict:23.105555\n",
      "Real:37.900000,Predict:39.295593\n",
      "Real:15.600000,Predict:13.249431\n",
      "Real:45.400000,Predict:45.808124\n",
      "Real:15.700000,Predict:18.285138\n",
      "Real:22.600000,Predict:21.761174\n",
      "Real:14.500000,Predict:17.074799\n",
      "Real:18.700000,Predict:19.179273\n",
      "Real:17.800000,Predict:13.302588\n",
      "Real:16.100000,Predict:20.429102\n",
      "Real:20.600000,Predict:21.139257\n",
      "Real:31.600000,Predict:31.481387\n",
      "Real:29.100000,Predict:29.037901\n",
      "Real:15.600000,Predict:15.439193\n",
      "Real:17.500000,Predict:16.547971\n",
      "Real:22.500000,Predict:24.620444\n",
      "Real:19.400000,Predict:20.870815\n",
      "Real:19.300000,Predict:18.676307\n",
      "Real:8.500000,Predict:4.519066\n",
      "Real:20.600000,Predict:23.209979\n",
      "Real:17.000000,Predict:16.972533\n",
      "Real:17.100000,Predict:13.188404\n",
      "Real:14.500000,Predict:13.435598\n",
      "Real:50.000000,Predict:42.307146\n",
      "Real:14.300000,Predict:14.607685\n",
      "Real:12.600000,Predict:15.388439\n",
      "Real:28.700000,Predict:24.610504\n",
      "Real:21.200000,Predict:20.883032\n",
      "Real:19.300000,Predict:21.147510\n",
      "Real:23.100000,Predict:20.431958\n",
      "Real:19.100000,Predict:16.949789\n",
      "Real:25.000000,Predict:29.021876\n",
      "Real:33.400000,Predict:33.388329\n",
      "Real:5.000000,Predict:4.233576\n",
      "Real:29.600000,Predict:23.898199\n",
      "Real:18.700000,Predict:18.727268\n",
      "Real:21.700000,Predict:21.303346\n",
      "Real:23.100000,Predict:23.202450\n",
      "Real:22.800000,Predict:24.931616\n",
      "Real:21.000000,Predict:19.389763\n",
      "Real:48.800000,Predict:47.817457\n"
     ]
    }
   ],
   "source": [
    "print('MAE:%.2f'%mean_absolute_error(Y_test,pred_stack))   #求平均误差绝对值，也就是看下面Y_test[i]真实值和预测值pred_stack[i]之间误差的平均值\n",
    "#分别将真实值和预测值进行输出并观察比较   其实意义不大\n",
    "for i in range(len(Y_test)):\n",
    "    print('Real:%f,Predict:%f'%(Y_test[i],pred_stack[i]))"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
