{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "参数调优\n",
    "采用5折交叉验证，分别用log似然损失和正确率，对Logistic回归模型的正则超参数调优"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
     "import numpy as np\n",
     "import pandas as pd\n",
     "import matplotlib.pyplot as plt\n",
     "import seaborn as sns\n",
     "import datetime\n",
     "from sklearn.preprocessing import MinMaxScaler\n",
     "\n",
     "# Split data into training and test sets\n",
     "from sklearn.model_selection import train_test_split\n",
     "# Standardization\n",
     "from sklearn.preprocessing import StandardScaler\n",
     "# Linear models\n",
     "from sklearn.linear_model import LinearRegression,LassoCV,RidgeCV,ElasticNetCV\n",
     "\n",
     "from sklearn.linear_model import LogisticRegression,LogisticRegressionCV\n",
     "from sklearn.model_selection import GridSearchCV\n",
     "from sklearn.model_selection import cross_val_score\n",
     "# Model evaluation\n",
     "from sklearn.metrics import mean_squared_error\n",
     "# Performance metric for regression models\n",
     "from sklearn.metrics import r2_score\n",
     "\n",
     "# For persisting the trained model\n",
     "import pickle\n",
     "\n",
     "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>pregnants</th>\n",
       "      <th>Plasma_glucose_concentration</th>\n",
       "      <th>blood_pressure</th>\n",
       "      <th>Triceps_skin_fold_thickness</th>\n",
       "      <th>serum_insulin</th>\n",
       "      <th>BMI</th>\n",
       "      <th>Diabetes_pedigree_function</th>\n",
       "      <th>Age</th>\n",
       "      <th>Target</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.639947</td>\n",
       "      <td>0.866045</td>\n",
       "      <td>-0.031990</td>\n",
       "      <td>0.670643</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>0.166619</td>\n",
       "      <td>0.468492</td>\n",
       "      <td>1.425995</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>-0.844885</td>\n",
       "      <td>-1.205066</td>\n",
       "      <td>-0.528319</td>\n",
       "      <td>-0.012301</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>-0.852200</td>\n",
       "      <td>-0.365061</td>\n",
       "      <td>-0.190672</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1.233880</td>\n",
       "      <td>2.016662</td>\n",
       "      <td>-0.693761</td>\n",
       "      <td>-0.012301</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>-1.332500</td>\n",
       "      <td>0.604397</td>\n",
       "      <td>-0.105584</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>-0.844885</td>\n",
       "      <td>-1.073567</td>\n",
       "      <td>-0.528319</td>\n",
       "      <td>-0.695245</td>\n",
       "      <td>-0.540642</td>\n",
       "      <td>-0.633881</td>\n",
       "      <td>-0.920763</td>\n",
       "      <td>-1.041549</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>-1.141852</td>\n",
       "      <td>0.504422</td>\n",
       "      <td>-2.679076</td>\n",
       "      <td>0.670643</td>\n",
       "      <td>0.316566</td>\n",
       "      <td>1.549303</td>\n",
       "      <td>5.484909</td>\n",
       "      <td>-0.020496</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.342981</td>\n",
       "      <td>-0.185948</td>\n",
       "      <td>0.133453</td>\n",
       "      <td>-0.012301</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>-0.997745</td>\n",
       "      <td>-0.818079</td>\n",
       "      <td>-0.275760</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>-0.250952</td>\n",
       "      <td>-1.435189</td>\n",
       "      <td>-1.851862</td>\n",
       "      <td>0.329171</td>\n",
       "      <td>-0.610145</td>\n",
       "      <td>-0.211799</td>\n",
       "      <td>-0.676133</td>\n",
       "      <td>-0.616111</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>1.827813</td>\n",
       "      <td>-0.218823</td>\n",
       "      <td>-0.031990</td>\n",
       "      <td>-0.012301</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>0.414047</td>\n",
       "      <td>-1.020427</td>\n",
       "      <td>-0.360847</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>-0.547919</td>\n",
       "      <td>2.476909</td>\n",
       "      <td>-0.197433</td>\n",
       "      <td>1.808882</td>\n",
       "      <td>4.660524</td>\n",
       "      <td>-0.284572</td>\n",
       "      <td>-0.947944</td>\n",
       "      <td>1.681259</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>1.233880</td>\n",
       "      <td>0.109925</td>\n",
       "      <td>1.953325</td>\n",
       "      <td>-0.012301</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>-0.022590</td>\n",
       "      <td>-0.724455</td>\n",
       "      <td>1.766346</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   pregnants  Plasma_glucose_concentration  blood_pressure  \\\n",
       "0   0.639947                      0.866045       -0.031990   \n",
       "1  -0.844885                     -1.205066       -0.528319   \n",
       "2   1.233880                      2.016662       -0.693761   \n",
       "3  -0.844885                     -1.073567       -0.528319   \n",
       "4  -1.141852                      0.504422       -2.679076   \n",
       "5   0.342981                     -0.185948        0.133453   \n",
       "6  -0.250952                     -1.435189       -1.851862   \n",
       "7   1.827813                     -0.218823       -0.031990   \n",
       "8  -0.547919                      2.476909       -0.197433   \n",
       "9   1.233880                      0.109925        1.953325   \n",
       "\n",
       "   Triceps_skin_fold_thickness  serum_insulin       BMI  \\\n",
       "0                     0.670643      -0.181541  0.166619   \n",
       "1                    -0.012301      -0.181541 -0.852200   \n",
       "2                    -0.012301      -0.181541 -1.332500   \n",
       "3                    -0.695245      -0.540642 -0.633881   \n",
       "4                     0.670643       0.316566  1.549303   \n",
       "5                    -0.012301      -0.181541 -0.997745   \n",
       "6                     0.329171      -0.610145 -0.211799   \n",
       "7                    -0.012301      -0.181541  0.414047   \n",
       "8                     1.808882       4.660524 -0.284572   \n",
       "9                    -0.012301      -0.181541 -0.022590   \n",
       "\n",
       "   Diabetes_pedigree_function       Age  Target  \n",
       "0                    0.468492  1.425995       1  \n",
       "1                   -0.365061 -0.190672       0  \n",
       "2                    0.604397 -0.105584       1  \n",
       "3                   -0.920763 -1.041549       0  \n",
       "4                    5.484909 -0.020496       1  \n",
       "5                   -0.818079 -0.275760       0  \n",
       "6                   -0.676133 -0.616111       1  \n",
       "7                   -1.020427 -0.360847       0  \n",
       "8                   -0.947944  1.681259       1  \n",
       "9                   -0.724455  1.766346       1  "
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Load the feature-engineered Pima Indians diabetes dataset.\n",
     "# Features appear pre-standardized upstream (describe() below shows\n",
     "# mean~0, std~1 per column); 'Target' is the binary class label.\n",
     "dpath='./FE_pima-indians-diabetes.csv'\n",
     "train=pd.read_csv(dpath, encoding='UTF-8')\n",
     "train.head(10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>pregnants</th>\n",
       "      <th>Plasma_glucose_concentration</th>\n",
       "      <th>blood_pressure</th>\n",
       "      <th>Triceps_skin_fold_thickness</th>\n",
       "      <th>serum_insulin</th>\n",
       "      <th>BMI</th>\n",
       "      <th>Diabetes_pedigree_function</th>\n",
       "      <th>Age</th>\n",
       "      <th>Target</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>count</th>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>7.680000e+02</td>\n",
       "      <td>768.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>mean</th>\n",
       "      <td>2.544261e-17</td>\n",
       "      <td>1.474515e-17</td>\n",
       "      <td>-3.321273e-17</td>\n",
       "      <td>-1.888680e-16</td>\n",
       "      <td>2.941802e-17</td>\n",
       "      <td>2.815312e-16</td>\n",
       "      <td>2.422108e-16</td>\n",
       "      <td>1.528002e-16</td>\n",
       "      <td>0.348958</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>std</th>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>1.000652e+00</td>\n",
       "      <td>0.476951</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>min</th>\n",
       "      <td>-1.141852e+00</td>\n",
       "      <td>-2.552931e+00</td>\n",
       "      <td>-4.002619e+00</td>\n",
       "      <td>-2.516429e+00</td>\n",
       "      <td>-1.467353e+00</td>\n",
       "      <td>-2.074783e+00</td>\n",
       "      <td>-1.189553e+00</td>\n",
       "      <td>-1.041549e+00</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25%</th>\n",
       "      <td>-8.448851e-01</td>\n",
       "      <td>-7.201630e-01</td>\n",
       "      <td>-6.937615e-01</td>\n",
       "      <td>-4.675972e-01</td>\n",
       "      <td>-2.220849e-01</td>\n",
       "      <td>-7.212087e-01</td>\n",
       "      <td>-6.889685e-01</td>\n",
       "      <td>-7.862862e-01</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>50%</th>\n",
       "      <td>-2.509521e-01</td>\n",
       "      <td>-1.530732e-01</td>\n",
       "      <td>-3.198993e-02</td>\n",
       "      <td>-1.230129e-02</td>\n",
       "      <td>-1.815412e-01</td>\n",
       "      <td>-2.258989e-02</td>\n",
       "      <td>-3.001282e-01</td>\n",
       "      <td>-3.608474e-01</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75%</th>\n",
       "      <td>6.399473e-01</td>\n",
       "      <td>6.112653e-01</td>\n",
       "      <td>6.297816e-01</td>\n",
       "      <td>3.291706e-01</td>\n",
       "      <td>-1.554775e-01</td>\n",
       "      <td>6.032562e-01</td>\n",
       "      <td>4.662269e-01</td>\n",
       "      <td>6.602056e-01</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>max</th>\n",
       "      <td>3.906578e+00</td>\n",
       "      <td>2.542658e+00</td>\n",
       "      <td>4.104082e+00</td>\n",
       "      <td>7.955377e+00</td>\n",
       "      <td>8.170442e+00</td>\n",
       "      <td>5.042397e+00</td>\n",
       "      <td>5.883565e+00</td>\n",
       "      <td>4.063716e+00</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          pregnants  Plasma_glucose_concentration  blood_pressure  \\\n",
       "count  7.680000e+02                  7.680000e+02    7.680000e+02   \n",
       "mean   2.544261e-17                  1.474515e-17   -3.321273e-17   \n",
       "std    1.000652e+00                  1.000652e+00    1.000652e+00   \n",
       "min   -1.141852e+00                 -2.552931e+00   -4.002619e+00   \n",
       "25%   -8.448851e-01                 -7.201630e-01   -6.937615e-01   \n",
       "50%   -2.509521e-01                 -1.530732e-01   -3.198993e-02   \n",
       "75%    6.399473e-01                  6.112653e-01    6.297816e-01   \n",
       "max    3.906578e+00                  2.542658e+00    4.104082e+00   \n",
       "\n",
       "       Triceps_skin_fold_thickness  serum_insulin           BMI  \\\n",
       "count                 7.680000e+02   7.680000e+02  7.680000e+02   \n",
       "mean                 -1.888680e-16   2.941802e-17  2.815312e-16   \n",
       "std                   1.000652e+00   1.000652e+00  1.000652e+00   \n",
       "min                  -2.516429e+00  -1.467353e+00 -2.074783e+00   \n",
       "25%                  -4.675972e-01  -2.220849e-01 -7.212087e-01   \n",
       "50%                  -1.230129e-02  -1.815412e-01 -2.258989e-02   \n",
       "75%                   3.291706e-01  -1.554775e-01  6.032562e-01   \n",
       "max                   7.955377e+00   8.170442e+00  5.042397e+00   \n",
       "\n",
       "       Diabetes_pedigree_function           Age      Target  \n",
       "count                7.680000e+02  7.680000e+02  768.000000  \n",
       "mean                 2.422108e-16  1.528002e-16    0.348958  \n",
       "std                  1.000652e+00  1.000652e+00    0.476951  \n",
       "min                 -1.189553e+00 -1.041549e+00    0.000000  \n",
       "25%                 -6.889685e-01 -7.862862e-01    0.000000  \n",
       "50%                 -3.001282e-01 -3.608474e-01    0.000000  \n",
       "75%                  4.662269e-01  6.602056e-01    1.000000  \n",
       "max                  5.883565e+00  4.063716e+00    1.000000  "
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train.describe()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<class 'pandas.core.frame.DataFrame'>\n",
      "RangeIndex: 768 entries, 0 to 767\n",
      "Data columns (total 9 columns):\n",
      "pregnants                       768 non-null float64\n",
      "Plasma_glucose_concentration    768 non-null float64\n",
      "blood_pressure                  768 non-null float64\n",
      "Triceps_skin_fold_thickness     768 non-null float64\n",
      "serum_insulin                   768 non-null float64\n",
      "BMI                             768 non-null float64\n",
      "Diabetes_pedigree_function      768 non-null float64\n",
      "Age                             768 non-null float64\n",
      "Target                          768 non-null int64\n",
      "dtypes: float64(8), int64(1)\n",
      "memory usage: 54.1 KB\n"
     ]
    }
   ],
   "source": [
    "train.info()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Data preparation: split the label column from the feature matrix\n",
     "y_train=train['Target']\n",
     "X_train=train.drop('Target', axis=1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "1、使用默认参数的LogisticRegression"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "logloss of each fold is: [0.48797856 0.53011593 0.4562292  0.422546   0.48392885]\n",
      "cv logloss is: 0.47615970944434044\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n",
      "  FutureWarning)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n",
      "  FutureWarning)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n",
      "  FutureWarning)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n",
      "  FutureWarning)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n",
      "  FutureWarning)\n"
     ]
    }
   ],
   "source": [
    "#初始化实例\n",
    "lr=LogisticRegression()\n",
    "\n",
    "#使用交叉验证用于评估模型性能和参数调优\n",
    "#分类任务中交叉验证缺省是采用StratifiedKFold\n",
    "#数据集比较大，这里采用5折交叉验证、评价标准是logloss，因此设置的参数是负logloss，即neg_log_loss（log似然损失）\n",
    "loss=cross_val_score(lr, X_train, y_train, cv=5, scoring='neg_log_loss')\n",
    "\n",
    "print(\"logloss of each fold is:\", -loss)#5折的logloss值\n",
    "print(\"cv logloss is:\", -loss.mean())#中值"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "2、正则化的LogisticRegression及参数调优\n",
    "LogisticRegression+GridSearchCV\n",
     "logistic回归的超参数调优有：C(正则系数，一般在log域(取log后的值)均匀设置候选参数)和正则函数penalty(L2/L1)\n",
    "目标函数为J=C*sum(logloss(f(xi), yi))+penalty\n",
    "\n",
     "在sklearn框架下，不同学习器的参数调整步骤相同：\n",
    "a、设置参数搜索范围\n",
    "b、生成学习器实例(参数设置)\n",
    "c、生成GridSearchCV实例(参数设置)\n",
    "d、调用GridSearchCV的fit方法"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "5折交叉验证、负logloss损失对Logistic回归模型的正则超参数调优"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fitting 5 folds for each of 14 candidates, totalling 70 fits\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] ... C=0.001, penalty=l1, score=-0.6931471805599453, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] ... C=0.001, penalty=l1, score=-0.6931471805599453, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] ... C=0.001, penalty=l1, score=-0.6931471805599453, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] ... C=0.001, penalty=l1, score=-0.6931471805599453, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] ... C=0.001, penalty=l1, score=-0.6931471805599453, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] ... C=0.001, penalty=l2, score=-0.6252332751246228, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] ... C=0.001, penalty=l2, score=-0.6338686001564596, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] .... C=0.001, penalty=l2, score=-0.625661213962723, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] ... C=0.001, penalty=l2, score=-0.6208734221572547, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] ... C=0.001, penalty=l2, score=-0.6326871610823708, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] .... C=0.01, penalty=l1, score=-0.6359066018003606, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] .... C=0.01, penalty=l1, score=-0.6446073600721886, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] .... C=0.01, penalty=l1, score=-0.6332810905191685, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] .... C=0.01, penalty=l1, score=-0.6302418522346175, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] .... C=0.01, penalty=l1, score=-0.6358010482921886, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] .... C=0.01, penalty=l2, score=-0.5120767708654068, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l2, score=-0.543227572433324, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] .... C=0.01, penalty=l2, score=-0.5067380443593053, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] .... C=0.01, penalty=l2, score=-0.4873125612462518, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] .... C=0.01, penalty=l2, score=-0.5253888382730234, total=   0.0s"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
      "[Parallel(n_jobs=1)]: Done   1 out of   1 | elapsed:    0.0s remaining:    0.0s\n",
      "[Parallel(n_jobs=1)]: Done   2 out of   2 | elapsed:    0.0s remaining:    0.0s\n",
      "[Parallel(n_jobs=1)]: Done   3 out of   3 | elapsed:    0.0s remaining:    0.0s\n",
      "[Parallel(n_jobs=1)]: Done   4 out of   4 | elapsed:    0.0s remaining:    0.0s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ..... C=0.1, penalty=l1, score=-0.4892224716813977, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ..... C=0.1, penalty=l1, score=-0.5248877105972539, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] .... C=0.1, penalty=l1, score=-0.45825081977044424, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ..... C=0.1, penalty=l1, score=-0.4333619289834262, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ..... C=0.1, penalty=l1, score=-0.4868677784871582, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] .... C=0.1, penalty=l2, score=-0.48350007746296986, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l2, score=-0.525149433695313, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ..... C=0.1, penalty=l2, score=-0.4604874856554894, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] .... C=0.1, penalty=l2, score=-0.42929826862168724, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l2, score=-0.485373395496449, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ....... C=1, penalty=l1, score=-0.4881580182662004, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ......... C=1, penalty=l1, score=-0.52916837903425, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ...... C=1, penalty=l1, score=-0.45561277478483986, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ...... C=1, penalty=l1, score=-0.42237817025550967, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ....... C=1, penalty=l1, score=-0.4845206026139884, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ....... C=1, penalty=l2, score=-0.4879785610109463, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ....... C=1, penalty=l2, score=-0.5301159331731353, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ....... C=1, penalty=l2, score=-0.4562291976119336, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ...... C=1, penalty=l2, score=-0.42254600245513574, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ...... C=1, penalty=l2, score=-0.48392885297055127, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ...... C=10, penalty=l1, score=-0.4890403645847581, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ....... C=10, penalty=l1, score=-0.531037194761332, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ...... C=10, penalty=l1, score=-0.4558874917831669, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ...... C=10, penalty=l1, score=-0.4219554110270119, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ..... C=10, penalty=l1, score=-0.48411054651070956, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ...... C=10, penalty=l2, score=-0.4890294879696647, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ...... C=10, penalty=l2, score=-0.5311363222164094, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ..... C=10, penalty=l2, score=-0.45595951416730635, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ...... C=10, penalty=l2, score=-0.4219795332800078, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ..... C=10, penalty=l2, score=-0.48408647346409145, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ..... C=100, penalty=l1, score=-0.4891446421140608, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ..... C=100, penalty=l1, score=-0.5312329141256065, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ..... C=100, penalty=l1, score=-0.4559277419448099, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ..... C=100, penalty=l1, score=-0.4219214776140511, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] .... C=100, penalty=l1, score=-0.48410916419748795, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] .... C=100, penalty=l2, score=-0.48914440233874235, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] ...... C=100, penalty=l2, score=-0.531246919864776, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] .... C=100, penalty=l2, score=-0.45593525922772243, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] .... C=100, penalty=l2, score=-0.42192490874569916, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] .... C=100, penalty=l2, score=-0.48410727360689465, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] .... C=1000, penalty=l1, score=-0.4891548729121637, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] .... C=1000, penalty=l1, score=-0.5312515938290424, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] ... C=1000, penalty=l1, score=-0.45593487097943697, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] .... C=1000, penalty=l1, score=-0.4219203572713143, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] ... C=1000, penalty=l1, score=-0.48411115676012545, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] .... C=1000, penalty=l2, score=-0.4891559997762149, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] .... C=1000, penalty=l2, score=-0.5312580709718574, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] .... C=1000, penalty=l2, score=-0.4559328627319946, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] ... C=1000, penalty=l2, score=-0.42191946802406416, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] ..... C=1000, penalty=l2, score=-0.484109407281101, total=   0.0s\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done  70 out of  70 | elapsed:    0.5s finished\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "GridSearchCV(cv=5, error_score='raise-deprecating',\n",
       "       estimator=LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,\n",
       "          intercept_scaling=1, max_iter=100, multi_class='warn',\n",
       "          n_jobs=None, penalty='l2', random_state=None, solver='liblinear',\n",
       "          tol=0.0001, verbose=0, warm_start=False),\n",
       "       fit_params=None, iid='warn', n_jobs=None,\n",
       "       param_grid={'penalty': ['l1', 'l2'], 'C': [0.001, 0.01, 0.1, 1, 10, 100, 1000]},\n",
       "       pre_dispatch='2*n_jobs', refit=True, return_train_score='warn',\n",
       "       scoring='neg_log_loss', verbose=5)"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Parameter search space.\n",
    "# Regularization penalties: L1 and L2 (both supported by the liblinear solver).\n",
    "penaltys=['l1', 'l2']\n",
    "Cs=[0.001, 0.01, 0.1, 1, 10, 100, 1000]\n",
    "# Grid of hyper-parameters to tune (2 penalties x 7 C values = 14 candidates).\n",
    "tuned_parameters=dict(penalty=penaltys, C=Cs)\n",
    "\n",
    "lr_penalty=LogisticRegression(solver='liblinear')\n",
    "# Scoring metric: negative log-loss (sklearn maximizes, so higher = lower log-loss).\n",
    "# 5-fold CV; verbose=5 prints per-fit progress.\n",
    "grid=GridSearchCV(lr_penalty, tuned_parameters, cv=5, scoring='neg_log_loss', verbose=5)\n",
    "\n",
    "# X_train / y_train are defined in an earlier cell.\n",
    "grid.fit(X_train, y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "最佳分数： 0.47602623014344775\n",
      "最佳参数： {'C': 1, 'penalty': 'l1'}\n"
     ]
    }
   ],
   "source": [
    "# Report tuning results; negate best_score_ because scoring='neg_log_loss',\n",
    "# so the printed value is the actual (positive) log-loss.\n",
    "print(\"最佳分数：\", -grid.best_score_)\n",
    "print(\"最佳参数：\", grid.best_params_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[ 0.40967259,  1.12852594, -0.09298017,  0.02475017, -0.0840696 ,\n",
       "         0.62914839,  0.27933685,  0.14381241]])"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Coefficients of the refit best estimator. L1 regularization can drive\n",
    "# coefficients exactly to zero, though none are zero at the selected C=1 here.\n",
    "grid.best_estimator_.coef_"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'mean_fit_time': array([0.00320029, 0.00400052, 0.00250039, 0.00270028, 0.00470066,\n",
       "        0.00300059, 0.00270038, 0.00290031, 0.00410061, 0.0030004 ,\n",
       "        0.0031004 , 0.00290036, 0.00320034, 0.0031003 ]),\n",
       " 'std_fit_time': array([6.78415850e-04, 1.54941964e-03, 1.16800773e-07, 2.45028582e-04,\n",
       "        1.16630213e-03, 1.16800773e-07, 2.44950714e-04, 2.00009367e-04,\n",
       "        1.01999184e-03, 3.16129500e-04, 2.00033216e-04, 3.74164125e-04,\n",
       "        4.00042573e-04, 4.89950043e-04]),\n",
       " 'mean_score_time': array([0.00220037, 0.0025003 , 0.00200024, 0.00190024, 0.0024003 ,\n",
       "        0.00210018, 0.00210028, 0.00200024, 0.00230026, 0.00190029,\n",
       "        0.00190024, 0.00200028, 0.00220037, 0.00230045]),\n",
       " 'std_score_time': array([2.44989633e-04, 4.47181182e-04, 1.16800773e-07, 2.00057051e-04,\n",
       "        5.83134718e-04, 2.00057051e-04, 4.90018206e-04, 1.16800773e-07,\n",
       "        2.44989633e-04, 1.99961740e-04, 2.00057108e-04, 1.78416128e-07,\n",
       "        2.44989633e-04, 2.44950714e-04]),\n",
       " 'param_C': masked_array(data=[0.001, 0.001, 0.01, 0.01, 0.1, 0.1, 1, 1, 10, 10, 100,\n",
       "                    100, 1000, 1000],\n",
       "              mask=[False, False, False, False, False, False, False, False,\n",
       "                    False, False, False, False, False, False],\n",
       "        fill_value='?',\n",
       "             dtype=object),\n",
       " 'param_penalty': masked_array(data=['l1', 'l2', 'l1', 'l2', 'l1', 'l2', 'l1', 'l2', 'l1',\n",
       "                    'l2', 'l1', 'l2', 'l1', 'l2'],\n",
       "              mask=[False, False, False, False, False, False, False, False,\n",
       "                    False, False, False, False, False, False],\n",
       "        fill_value='?',\n",
       "             dtype=object),\n",
       " 'params': [{'C': 0.001, 'penalty': 'l1'},\n",
       "  {'C': 0.001, 'penalty': 'l2'},\n",
       "  {'C': 0.01, 'penalty': 'l1'},\n",
       "  {'C': 0.01, 'penalty': 'l2'},\n",
       "  {'C': 0.1, 'penalty': 'l1'},\n",
       "  {'C': 0.1, 'penalty': 'l2'},\n",
       "  {'C': 1, 'penalty': 'l1'},\n",
       "  {'C': 1, 'penalty': 'l2'},\n",
       "  {'C': 10, 'penalty': 'l1'},\n",
       "  {'C': 10, 'penalty': 'l2'},\n",
       "  {'C': 100, 'penalty': 'l1'},\n",
       "  {'C': 100, 'penalty': 'l2'},\n",
       "  {'C': 1000, 'penalty': 'l1'},\n",
       "  {'C': 1000, 'penalty': 'l2'}],\n",
       " 'split0_test_score': array([-0.69314718, -0.62523328, -0.6359066 , -0.51207677, -0.48922247,\n",
       "        -0.48350008, -0.48815802, -0.48797856, -0.48904036, -0.48902949,\n",
       "        -0.48914464, -0.4891444 , -0.48915487, -0.489156  ]),\n",
       " 'split1_test_score': array([-0.69314718, -0.6338686 , -0.64460736, -0.54322757, -0.52488771,\n",
       "        -0.52514943, -0.52916838, -0.53011593, -0.53103719, -0.53113632,\n",
       "        -0.53123291, -0.53124692, -0.53125159, -0.53125807]),\n",
       " 'split2_test_score': array([-0.69314718, -0.62566121, -0.63328109, -0.50673804, -0.45825082,\n",
       "        -0.46048749, -0.45561277, -0.4562292 , -0.45588749, -0.45595951,\n",
       "        -0.45592774, -0.45593526, -0.45593487, -0.45593286]),\n",
       " 'split3_test_score': array([-0.69314718, -0.62087342, -0.63024185, -0.48731256, -0.43336193,\n",
       "        -0.42929827, -0.42237817, -0.422546  , -0.42195541, -0.42197953,\n",
       "        -0.42192148, -0.42192491, -0.42192036, -0.42191947]),\n",
       " 'split4_test_score': array([-0.69314718, -0.63268716, -0.63580105, -0.52538884, -0.48686778,\n",
       "        -0.4853734 , -0.4845206 , -0.48392885, -0.48411055, -0.48408647,\n",
       "        -0.48410916, -0.48410727, -0.48411116, -0.48410941]),\n",
       " 'mean_test_score': array([-0.69314718, -0.62766704, -0.63597526, -0.51497115, -0.47856607,\n",
       "        -0.47681232, -0.47602623, -0.4762194 , -0.47646707, -0.47649922,\n",
       "        -0.47652826, -0.47653284, -0.47653566, -0.47653626]),\n",
       " 'std_test_score': array([1.11022302e-16, 4.89097299e-03, 4.79089060e-03, 1.86921266e-02,\n",
       "        3.09289584e-02, 3.15479636e-02, 3.55964731e-02, 3.57224624e-02,\n",
       "        3.62956824e-02, 3.63084245e-02, 3.63674711e-02, 3.63697206e-02,\n",
       "        3.63734254e-02, 3.63758791e-02]),\n",
       " 'rank_test_score': array([14, 12, 13, 11, 10,  9,  1,  2,  3,  4,  5,  6,  7,  8]),\n",
       " 'split0_train_score': array([-0.69314718, -0.62745539, -0.63279271, -0.50890565, -0.46651483,\n",
       "        -0.46237618, -0.45927687, -0.45918946, -0.4591377 , -0.45913682,\n",
       "        -0.45913626, -0.45913625, -0.45913624, -0.45913624]),\n",
       " 'split1_train_score': array([-0.69314718, -0.6232271 , -0.62102867, -0.50035915, -0.4584199 ,\n",
       "        -0.45417418, -0.45114254, -0.45108423, -0.45103429, -0.4510337 ,\n",
       "        -0.45103317, -0.45103316, -0.45103315, -0.45103315]),\n",
       " 'split2_train_score': array([-0.69314718, -0.62777054, -0.64122227, -0.5128013 , -0.47459882,\n",
       "        -0.46996298, -0.46735813, -0.46727381, -0.46723246, -0.46723158,\n",
       "        -0.46723114, -0.46723113, -0.46723113, -0.46723113]),\n",
       " 'split3_train_score': array([-0.69314718, -0.6300762 , -0.64391789, -0.5193933 , -0.48292689,\n",
       "        -0.47815891, -0.47566808, -0.47558312, -0.47554385, -0.47554298,\n",
       "        -0.47554256, -0.47554255, -0.47554255, -0.47554255]),\n",
       " 'split4_train_score': array([-0.69314718, -0.62408812, -0.63418802, -0.50711554, -0.4669441 ,\n",
       "        -0.46359351, -0.46084077, -0.4607505 , -0.46070617, -0.46070497,\n",
       "        -0.4607045 , -0.46070449, -0.46070448, -0.46070448]),\n",
       " 'mean_train_score': array([-0.69314718, -0.62652347, -0.63462991, -0.50971499, -0.46988091,\n",
       "        -0.46565315, -0.46285728, -0.46277622, -0.46273089, -0.46273001,\n",
       "        -0.46272953, -0.46272951, -0.46272951, -0.46272951]),\n",
       " 'std_train_score': array([0.        , 0.00252359, 0.0079786 , 0.00629506, 0.00829176,\n",
       "        0.00802142, 0.00822794, 0.0082209 , 0.00822505, 0.00822498,\n",
       "        0.00822503, 0.00822503, 0.00822503, 0.00822503])}"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Full cross-validation results: per-fold train/test scores, fit/score times,\n",
    "# the parameter grid, and candidate rankings.\n",
    "grid.cv_results_"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "数据对比：\n",
    "默认参数的LogisticRegression的5折logloss均值是：0.47615970944434044，各折的logloss值是[0.48797856 0.53011593 0.4562292  0.422546   0.48392885]\n",
    "GridSearchCV的最佳分数是：0.47602623014344775，最佳参数是{'C': 1, 'penalty': 'l1'}\n",
    "可以看出两种方式得到的结果相差不大，约为0.0001"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEKCAYAAADjDHn2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzt3Xd8VFX+//HXZya9UBMw9CIoNZQICIoNEREDoQmKFOsqCujK2vZnAb8rWFjcFVcFRRQRsVAEBUFFBKUEpBdBpIQWWhJIT+b8/pgJhBCYtMnNJJ+nj3lk7p1zZ94XcD655ZwjxhiUUkqpy7FZHUAppVTZp8VCKaWUW1oslFJKuaXFQimllFtaLJRSSrmlxUIppZRbWiyUUkq5pcVCKaWUW1oslFJKueVjdYCSEhYWZho0aGB1DKWU8irr168/YYwJd9fOo8VCRHoAbwF2YJoxZkKe1/8N3ORaDAJqGGOquF4bBvzT9dorxpgZl/usBg0aEBsbW5LxlVKq3BOR/QVp57FiISJ2YApwKxAHrBORBcaY7TltjDFP5Gr/ONDW9bwa8CIQBRhgvWvb057Kq5RS6tI8ec2iA7DHGLPXGJMBzAZ6X6b9YOAz1/PbgKXGmFOuArEU6OHBrEoppS7Dk8WiNnAw13Kca91FRKQ+0BD4sbDbKqWU8jxPXrOQfNZdajz0QcCXxpjswmwrIg8BDwHUq1evKBmVUuVAZmYmcXFxpKWlWR2lzAoICKBOnTr4+voWaXtPFos4oG6u5TrA4Uu0HQSMzLPtjXm2XZ53I2PM+8D7AFFRUToxh1IVVFxcHKGhoTRo0ACR/H7XrNiMMZw8eZK4uDgaNmxYpPfw5GmodUATEWkoIn44C8KCvI1E5CqgKvBbrtVLgO4iUlVEqgLdXeuUUuoiaWlpVK9eXQvFJYgI1atXL9aRl8eOLIwxWSLyGM4veTvwoTFmm4iMA2KNMTmFYzAw2+Sass8Yc0pExuMsOADjjDGnPJVVKeX9tFBcXnH/fDzaz8IY8y3wbZ51L+RZfukS234IfOixcC7ZWVmsmzaKej1GU6vBVZ7+OKVUGXHXe86TGZ8/fK3FSbxDhR/u4/Bf22h+dB6+H93G7s2rrY6jlPJSISEh55736NGDKlWq0KtXr3zbjhw5kjZt2tC8eXMCAwNp06YNbdq04csvvyzUZ27YsIHFixcXK3dBVfhiUbdJJKcGzscg1Pwqht9/+cbqSEopLzd27Fg++eSTS74+ZcoUNm7cyLfffkvjxo3ZuHEjGzdupH///oX6HC0WpaxB82uQB5aSYK9O82UjWLXgA6sjKaW82C233EJoaGiRtt29eze33XYb7du3p2vXrvzxxx8AzJ49m5YtWxIZGclNN91Eamoq48aN49NPPy3SUUlhlZuBBIsrvM6VnB31Ewfeieba9X9nWcIxbrn3Wb1oppSXefmbbWw/nOS23fYjzjY51y4up3mtSrx4Z4tiZyuIhx56iGnTptG4cWNWrVrFY489xvfff8/LL7/M8uXLqVmzJgkJCQQGBvLCCy+wdetWJk+e7PFcWixyCakSToMnl7JjygC67Z3I0ilHuPFvk/H1sVsdTSlVASQkJLB69Wr69et3bl1WVhYAXbp0YejQoQwYMIC+ffuWejYtFnn4BoTQfMwCtrx3P7fGf8xP/z5O1MjphAYFWh1NKVUABT0CKIt3QxljCAsLY+PGjRe9NnXqVNasWcPChQuJjIxk8+bNpZpNr1nkQ+y+tHpkBtuufIibkr9j6797c+ykdvNQSnlW1apViYiIYO7cuQA4HA42bdoEwN69e+nUqRPjx4+natWqHDp0iNDQUM6cOVMq2bRYXIoILYa8zh9RL9ExYy3H3u7Bnv0HrE6llPIC119/PQMGDOCHH36gTp06LFlS8AEoZs+ezbvvvktkZCQtWrRg4cKFADzxxBO0atWKVq1a0a1b
N1q2bMnNN9/Mpk2baNu2rccvcEuujtNeLSoqynhq8qMDKz/jimWPcZCaJPT9nPatW3nkc5RSRbNjxw6aNWtWqG3K4mkoT8vvz0lE1htjotxtq9csCqDedYOJrxTOFV/fS/BX0fyUOJ2brr/R6lhKqWKoSEWiJOhpqAKq0bobjuHf4WeHdssGM3/+F5SXozKllHJHi0UhhDZoQ9CjP5HqV50eGx7hs4//R7ZDC4ZSqvzTYlFIAWENqDF6OSdCmnLX3ueY9c5LpGZku99QKaW8mBaLIrCFhFF79FIOh3fh3hOTmT/5MU6e0Rm6lFLllxaLovILpu4j84ir35dBKbNY9da97It3P8SAUqqMmH6H86EKRItFcdh9qTP8Q460fpTorO/Z+04/Nu49YnUqpZQFSnuI8rlz5/L6668XO3dB6a2zxSVCRN9XOVE5ght/eYENM3rzY59PuLmtTqSkVEU1duxYUlJSeO+99/J9fcqUKQDs27ePXr165Tu8BzjHhfLxyf9rOiYmpmTCFpAeWZSQsFtGcbbX+0TKn9Se25evflpjdSSllEWKM0T5ddddx/PPP0/Xrl15++23mT9/Ph07dqRt27Z0796d+Ph4AKZNm8aYMWMAGDJkCKNHj6Zz5840atTo3HAhJUmPLEpQpaiBpFUKo95n9xCyfDBTT73N/TG3Y7PpMOdKlZrvnoGjW9y3O+oaiK8g1y2uaAW3TyherkJISkpixYoVAJw+fZro6GhEhHfffZc333yTiRMnXrRNfHw8q1atYsuWLQwcOLDEjzz0yKKEBTS9Gd8HFlPJFwZsfoC3PppJepbeWquUKrhBgwade37gwAG6d+9Oq1atmDRpEtu2bct3mz59+iAitG7dmkOHDpV4Jj2y8ACf2pGEPPoDiVOjeWT/E/znneM89ODjVA70tTqaUuVfQY8Aco4oRizyXJYiCg4OPvd85MiRPPfcc/Ts2ZNly5YxYUL+++fv73/uuSdGl9AjCw+Rag2p8thPpFS9iidPjuOD/7zEoYRUq2MppbxMYmIitWvXxhjDjBkzLMuhxcKTgsOo9sgSkmpdx5Opb/PNf59g26EEq1MppTysOEOU5/XSSy8RExPDDTfcQM2aNUswZeHoEOWlITuTxM8fpvIfX/GZ6U6du//L9VddYXUqpcqNogxRXpZPQ3lKcYYo1yOL0mD3pfKgaZxt/yiD5XuSP72Xr9f+aXUqpSq2EYsqVKEoLi0WpcVmI+TOV0m7eTw9bGuptfAe3l2yQYc5V0p5BS0WpSyg6yiy+kwlyrabG1YN5V+f/0RmtsPqWEopdVlaLCzg02Yg9iFf0NjnJMN2PMgLH3xNcnqW1bGUUuqStFhYRK68Gb8HvqW6v4Oxh0bzwpTpxOsw50qpMkqLhZVqtSXwbz8QEFqNVxKf4/X//oc98WetTqVUhTBi8QhGLB5hdQyv4dFiISI9RGSXiOwRkWcu0WagiGwXkW0iMivX+mwR2eh6LPBkTktVa0TQ337AhDXl1YxX+fid/2PdvlNWp1JKFVLOEOUbN27k2muvpUWLFrRu3ZrPP//8orYlMUQ5wIYNG1i8eHGJ5HfHY8N9iIgdmALcCsQB60RkgTFme642TYBngS7GmNMiUiPXW6QaY9p4Kl+ZElKDoIcWkzrzbsYdfIc3PzhFfP8XuSOyltXJlFKFFBQUxMcff0yTJk04fPgw7du357bbbqNKlSrn2hR0iHJ3NmzYwNatW+nRo0eJZL8cTx5ZdAD2GGP2GmMygNlA7zxtHgSmGGNOAxhj4j2Yp2zzDyVw2FdkNOvL3+2zOf7FGKat2GN1KqVUITVt2pQmTZoAUKtWLWrUqMHx48cLvP3u3bu57bbbaN++PV27duWPP/4AYPbs2bRs2ZLIyEhuuukmUlNTGTduHJ9++mmRjkoKy5MDCdYGDuZajgM65mnTFEBEVgF24CVjTM4xVYCIxAJZwARjzDwPZi0bfPzwG/ABWYtrMnzt/1i4NJFX
Tk/g2TvbYNdhzpUqkIlrJ7Lz1E637XLaFOS6xdXVrubpDk8XOsvatWvJyMigcePGBd7moYceYtq0aTRu3JhVq1bx2GOP8f333/Pyyy+zfPlyatasSUJCAoGBgbzwwgts3bqVyZMnFzpbYXmyWOT37Za3B5oP0AS4EagD/CIiLY0xCUA9Y8xhEWkE/CgiW4wxF3R7FpGHgIcA6tWrV9L5rWGz4dNzAo5Ktei17P9RLfZRnkqYwKt3X0eAr93qdEqpAjpy5Aj33nsvM2bMwGYr2EmchIQEVq9eTb9+/c6ty8py3lbfpUsXhg4dyoABA+jbt69HMl+OJ4tFHFA313Id4HA+bVYbYzKBv0RkF87isc4YcxjAGLNXRJYDbYELioUx5n3gfXCODeWJnbCK7bpREFqDTvNGUuXPxxn53r94Y0R3qgb7WR1NqTKtoEcAOUcU03tML/EMSUlJ3HHHHbzyyit06tSpwNsZYwgLC8v3GsbUqVNZs2YNCxcuJDIyks2bN5dkZLc8ec1iHdBERBqKiB8wCMh7V9M84CYAEQnDeVpqr4hUFRH/XOu7ANupaCIHYbv7c5r6Huel42MYPeULDp5KsTqVUuoyMjIyiImJOXcUUBhVq1YlIiLi3LSoDoeDTZs2AbB37146derE+PHjqVq1KocOHSI0NJQzZ86U+D7kx2PFwhiTBTwGLAF2AHOMMdtEZJyIRLuaLQFOish24CdgrDHmJNAMiBWRTa71E3LfRVWhNOmGz30LuSIgm8kpT/P8lBlsjtNhzpUqq+bMmcOKFSv46KOPzt0SW5i7nWbPns27775LZGQkLVq0YOHChQA88cQTtGrVilatWtGtWzdatmzJzTffzKZNm2jbtq3HL3DrEOXe4uSfZM7oQ1bSMUZlP8nge0Zw89XWjW2vVFlSlCHKPXkaqqzSIcorguqN8X1wGT7hTfif/TUWfvJvZq05YHUqpbzW9B7TK1ShKC4tFt4ktCa+93+H1O/MJN//sW/Bv3hj8c7zw5xPv+P8hC5KKVWCtFh4m4BK2O/9CkfzGJ7z/YwqK1/iqc9/JyPLwbYjiWw7kmh1QqVUOaTFwhv5+GPr/yGmw8M84PMdXbc9zwPTV5Fs/K1OppQqp7RYeCubDbl9ItzyIr3tv/LQwWd4Mbk/JxyhVidTSpVDWiy8mQhc/yT0focu9h284fM//pXcm4SUDKuTKVXm7b93KPvvHWp1DK+hxaI8aHsPMng2V9nieM93EvNWux8XRylVskp7iPK5c+fy+uuvl1h+dzw53IcqTU27E+dTjwZZf3F69Sc4bnwNmw4+qFSpK8khyrOysvDxyf9rOiYmpuTDX4YeWZQjyRLMCapwa9r3rNxzwuo4SlVIxR2i/LrrruP555+na9euvP3228yfP5+OHTvStm1bunfvTny8cyaHadOmMWbMGACGDBnC6NGj6dy5M40aNTo3XEhJ0iMLylFPThHSbMG0ZB+v/LyUrk3vtjqRUqXu6L/+RfoO96di03Y62xTkuoV/s6u54rnnCp2lKEOUg3MgwhUrVgBw+vRpoqOjERHeffdd3nzzTSZOnHjRNvHx8axatYotW7YwcODAEj/y0GJRjrSIqAyOYDIPnaDRgS85nBBDrSqBVsdSqkIqyhDlOQYNGnTu+YEDBxg4cCBHjx4lPT2dpk2b5rtNnz59EBFat27NoUOHipU9PxW+WDiMgybzN3HVMTt4fmZCz7P5kNE0mjt3LGT6bzsZdXtbqxMpVaoKegSQc0RR/5OPSzxDUYcozxEcHHzu+ciRI3nuuefo2bMny5YtY8KECflu4+9/vp+VJ8b8q/DXLA4c30OqI51fG5af202Dr72fUEklIXYOGVkOq+MoVaEUZ4jy/CQmJlK7dm2MMcyYMaMEEhZNhS8WtbNCGPiLg6b7Mtl1apfVcYpnxCLno14nkis1plfm93y//ajVqZSqUIo7RHleL730EjExMdxwww3UrGndSNM6RDnwS5fW
hJ3M5JOnWvN/989GxPtvOXWs+i+2pf/kqfB3eWPkYKvjKOVRRRmi3JOnocoqHaK8mGaOjeRssI12czbz4/4frI5TImxt7iZbfGhxZC67j5XOTFpKeZP6n3xcoQpFcWmxAN7r8wkNn3yWFgfgu5njSc9OtzpS8QVXJ6vpHcTYVzL71z+sTqOU8nJaLFyqD7yL7LpX0OPbeGZu/sjqOCXCv8MIqkgyZzfOIzk9y+o4SnlUeTml7inF/fPRYuEivr7Uf/YFap+CPR+/y/GUgve4LLMa3kB6SF36OJYxf+Nhq9Mo5TEBAQGcPHlSC8YlGGM4efIkAQEBRX6PCt/PIreQm27E1r41fX7ezDsr3+DF7hf3kvQqNht+HYZz7Y/j+WDVrwzuULdcXLxXKq86deoQFxdXqGE1KpqAgADq1KlT5O21WOQiItR79gUc/fvj+9k3bG13Ly3DWlodq1ik7RAcP/2LqFML2XDgNtrXr2p1JKVKnK+vLw0bNrQ6Rrmmp6HyCGzZgqBePem1zvC/JeO8/7A29AocV3ZngH0Fs3/bY3UapZSX0mKRj1pP/h27zYfIr7fy3V/fWR2n2HyuGUF1SSR16yJOJZefnupKqdKjxSIfvrVqETZsONdvM3yxYAIpmSlWRyqeK7uRGRzBAPmBObEHrU6jlPJCWiwuIezhhzFVKnHHt8f5aKuXD11us+MbNZTr7VtY9ts6HA4vP7WmlCp1WiwuwR4SQsToMbQ4AL/PncqRs0esjlQ8bYcgwPVnl/Dzbr1jRClVOFosLqNK//7YGtRj8LIMJq990+o4xVOlHqbRzdzl8zOzfttrdRqllJfRYnEZ4utLrX88Q8QpQ/a879hwbIPVkYrFFjWMKzhJ9u5lHDzl5ddhlFKlSouFGyE33Yh/hygGroTJK/6Fw3jx/BBNbyc7KIxBth/5bO0Bq9MopbyIFgs3RISIp58hNMVBs0Xbmb9nvtWRis7HD3vbe7jF/js/rNtMela21YmUUl7Co8VCRHqIyC4R2SMiz1yizUAR2S4i20RkVq71w0Rkt+sxzJM53Qls0YJK0dH0ioVPfpzE2YyzVsYpnnbDsOPglrSlLN6qEyMppQrGY8VCROzAFOB2oDkwWESa52nTBHgW6GKMaQGMca2vBrwIdAQ6AC+KiKXjVNR4Ygx2mw89vj/J1C1TrYxSPNUbY+pfxxC/n5n12z6LwyilvIUnjyw6AHuMMXuNMRnAbKB3njYPAlOMMacBjDHxrvW3AUuNMadcry0Fengwq1u+ERGEjbiP67cZVi6bwcEk7+3cJu2HU8scw35wJTuPJlkdRynlBTxZLGoDub9R41zrcmsKNBWRVSKyWkR6FGLbUlf9wQeQqlUY8kMWb6x73eo4RdfsThwBVbjH5yc+Xa0XupVS7nmyWOQ3FnbersM+QBPgRmAwME1EqhRwW0TkIRGJFZHY0hia2B4SQs1Ro7j6QDZJP/7A6iOrPf6ZHuEbgC1yELfZY/lxw3bO6sRISik3PFks4oC6uZbrAHln4IkD5htjMo0xfwG7cBaPgmyLMeZ9Y0yUMSYqPDy8RMNfSpUBA/Bt1JDhP9t4/bcJZDm89Iu23TB8TCY9spcz9/dDVqdRSpVxniwW64AmItJQRPyAQcCCPG3mATcBiEgYztNSe4ElQHcRqeq6sN3dtc5y4uNDzbFjqXEii4bLd/PlH19aHaloajbH1LmG4f4/8+lv+7x/KHallEd5rFgYY7KAx3B+ye8A5hhjtonIOBGJdjVbApwUke3AT8BYY8xJY8wpYDzOgrMOGOdaVyaE3HgjQR07MniVjQ9X/5fE9ESrIxWJtBtGXUccIfGxxO4/bXUcpVQZJuXlN8qoqCgTGxtbap+Xum0b+/oPYH4ngUfu5ZkO+XYjKdsykjFvNGVBRjt+vHocbw1qa3UipVQpE5H1xpgod+20B3cRBbZoQeXoaHqtg2VrPuPPhD+tjlR4fsFIqwH0lDX8smUPJ86mW51I
KVVGabEohvAxo7HbfbjnZ3h93eveed6//TB8TTo9WcXn67y374hSyrO0WBSDb0QE1YePoNPWDI7GruSXQ79YHanwarWFK1rzQOAKZq3eT7ZOjKSUyocWi2Kq/uAD2KtX48EVvry2diKZ2ZlWRyq8dkNpkPUn1ZK2s3xXvPv2SqkKR4tFMdlDQgh//HEa/ZVG+Pp9zNo5y/1GZU3rgRifQEYE/szM1futTqOUKoO0WJSAKv3749eoEQ/+4s+0De9yMvWk1ZEKJ6Ay0iKGXqxi7R8HdWIkpdRFCl0sRMQmIpU8EcZbiY8PNcY+RdX4VDqvO8vbG9+2OlLhtR+GnyOFO+2r+XSNjhellLpQgYqFiMwSkUoiEgxsB3aJyFjPRvMuITfeSFCnTgz+1c63m79k56mdVkcqnLodIewqHgr+hTmxB0nL1ImRlFLnFfTIorkxJgnoA3wL1APu9VgqLyQi1PzHWPyTMxi01peJayd61620ItB+GI3SdxCesofvth6xOpFSqgwpaLHwFRFfnMVivjEmk3xGga3oApo3p3J0NN3XZrJ/1zqWHVhmdaTCaT0IY/fjoZCVzNShy5VSuRS0WLwH7AOCgRUiUh/QWXPyET5mNHabnQd/C+bN2DdJz/aiXtHB1ZFmd3KHWcHW/cfYflj/ipVSTgUqFsaY/xhjahtjehqn/bhGi1UX8o2IoNqI4bTZmETAH3F8vO1jqyMVTrthBGQlcadvLDPX6G20Simngl7gHu26wC0i8oGIbABu9nA2r1X9gQexV6/OqFWVmLr5feJTvKijW4ProWoDHgldybzfD5GU5oWdDJVSJa6gp6Huc13g7g6EAyOACR5L5eXsIcGEP/4YEXtOE7kzg7c2vGV1pIKz2aDdUBqnbKRmZhxzN+jESEqpgheLnGlOewLTjTGbyH/qU+VSpX9//Bo35uGVgSz6Yz6bj2+2OlLBtbkHxM7jVX5l5ur93nVXl1LKIwpaLNaLyPc4i8USEQkFHJ6L5f1yOuoFH00kZmsQE9dOxGG85I8s9Aq46nZ6Zv/EvvgE1vxVZuadUkpZpKDF4n7gGeAaY0wK4IfzVJS6jJAbbiCoUyf6rsxmT9wmFu1dZHWkgms3jICMU0QHbNTxopRSBb4bygHUAf4pIm8AnY0xXnRexRo5HfV8zqTy4O/VmLx+MimZXjLu0pW3QKU6PFJpFYu3HiX+TJrViZRSFiro3VATgNE4h/rYDowSkVc9Gay8CGjenMq9e9P51wTMkWN8sPUDqyMVjM0ObYfQOGktV5h45ujESEpVaAU9DdUTuNUY86Ex5kOgB3CH52KVL+FjRmMTG39ffwUzts3g8NnDVkcqmLZDEODvYWuZteYAWdlecs1FKVXiCjPqbJVczyuXdJDyzPeKK6g2YjiN1h6i8SEHk9ZPsjpSwVSpC1d24/asZRxLTObHnV7UX0QpVaIKWixeBX4XkY9EZAawHviX52KVPzkd9cb8VpUlfy0m9mis1ZEKpt1QAlKP0SdkBzN16HKlKqyCXuD+DOgEfO16XGuMme3JYOWNs6Pe41TZeZjuB6rw2rrXyHZ4wTDgV90OwTV4tNJKVvxxnH0nkq1OpJSywGWLhYi0y3kAEUAccBCo5VqnCqFK/374NW7MsOXCH8e3M2/PPKsjuWf3hTZ30+j0KiJsp5m1Vo8ulKqI3B1ZvHmZxxuejVb+5HTU8z18ghG7a/Of3//DmYwzVsdyr91QxGTzbMR6nRhJqQrqssXCGHPTZR46kGARhNxwA0HXduLWH0+TnnCK9ze/b3Uk96o3hgbX0z19KYkp6SzcrBMjKVXRFLSfRd98HreISA1PByxvnB31/oEkneUfOxozc8dM9id5QQ/p9sMJOHuQ/lX/1B7dSlVAhRnuYxpwj+sxFXgSWCUiOr1qIQU0a0bl3r1p9sNeap3x5Y11XnBG7+peEFiVRyutYuPBBLYeSrQ6kVKqFBW0WDiAZsaYfsaYfkBzIB3oCDzt
qXDlWfiY0YjdzjMb6rA8bjm/HvrV6kiX5xsAkYNpcPwnavkm69GFUhVMQYtFA2PMsVzL8UBTY8wpQGfHKYKcjnphK3fQOaEGr617jSxHltWxLq/dUMSRyfO1NzJv4yESU/WvXqmKoqDF4hcRWSgiw0RkGLAA51zcwUDCpTYSkR4isktE9ojIM/m8PlxEjovIRtfjgVyvZedav6CwO+YNqt//APbq1Xn0l0D+TNjDnF1zrI50eTWaQZ0OdEtdTFpmNl9viLM6kVKqlBS0WIwEpgNtgLbADGCkMSbZGJPvXNwiYgemALfjPG01WESa59P0c2NMG9djWq71qbnWRxd0h7xJTkc9v61/cs/xJkzZOIWEtEvW3rKh/TD8E//k7pqH+EQnRlKqwihoD24DrAR+BJYBK4z7b4kOwB5jzF5jTAYwG+hdnLDlUZX+/fC7sjG9lySSmnaGdza9Y3Wky2sRA/6VeDh0JXuPJ/PbnyetTqSUKgUFvXV2ILAW6A8MBNaISH83m9XG2ds7R5xrXV79RGSziHwpInVzrQ8QkVgRWS0ifQqS0xuJjw81x46FuCOMjWvNnF1z2HN6j9WxLs0vGFr1p97R76kbmM7MNXqhW6mKoKCnoZ7HOUveMGPMUJxHDf/PzTb5zdGd92jkG5wXz1vjPGKZkeu1esaYKOBuYLKINL7oA0QechWU2OPHjxdwV8qe4K5dCbq2E20X7iYsO5DX1r1Wtk/vtBuGZKXxz3rbWLLtGMeSdGIkpcq7ghYLmzEm9/jUJwuwbRyQ+0ihDnDBRA7GmJPGmHTX4lSgfa7XDrt+7gWW47xWQp7t3zfGRBljosLDwwu4K2VPTkc9k3SGf+5qxm9HfuPnuJ+tjnVptdpARCQ3JX9LtsPB7LU6MZJS5V1Bi8ViEVniuntpOLAI+NbNNuuAJiLSUET8gEE476I6R0Qici1GAztc66uKiL/reRjQBecMfeVWQLNmVO7Th5qLYmmfXZfX171ORnaG1bEurd0w/E5sZ1j908xau59MnRhJqXKtoBe4xwLvA62BSOB9Y8xlO+MZY7KAx4AlOIvAHGPMNhEZJyI5dzeNEpFtIrIJGAUMd61vBsS61v8ETDDGlOtiARA+ehRitzNmXTgHzhycU2gdAAAcgklEQVRg1o5ZVke6tFYDwDeIB4NXcCwpnR92HHO/jVLKa0mZPjdeCFFRUSY21ksmFLqM+Lfe4uT/3uXLp6JYFLSbhTELCQsMszpW/uY9itk+n268R0SNcGY+0NHqREqpQhKR9a7rw5flbj6LMyKSlM/jjIgklVxclaP6/Q9gDwtj4NJU0rPSePv3t62OdGnthiEZZ3m+/g5W7jnB3uNnrU6klPIQd0OUhxpjKuXzCDXGVCqtkBVJTkc9x6Zt/P3sdXy9+2t2nNxhdaz81e0A4Vdz/ZlF+NiET3XaVaXKrYJe4FalqEq/vvhd2ZiO8/4gzKcKE9ZOKJu30opAu2H4Hv2d+5ok80XsQVIzdGIkpcojLRZlUE5HvawDB/nn0WvYEL+BJfuXWB0rf5GDwO7HiMBfSErL4pvNh91vo5TyOlosyqjgrl0J7nwtteesIjLgSibFTiItqwx2fguqBs2iuWLfPFqE++rQ5UqVU1osyigRocbYsTiSkhi7vSFHko/w0baPrI6Vv/bDkLREnmnwB5vjEtl0sIwPhqiUKjQtFmVYTkc9/6+X0Te4Cx9u/ZCjyUetjnWxBtdDtUZcm7CQID+7Hl0oVQ5psSjjwseMBrudIStsZDuymbxhstWRLiYC7Ybic/A3HmiWzYJNh0lIKcO9z5VShabFoozzrVmT6veNIOv7n3jcvweL9i5iY/zGfNuOWDyCEYtHlHJCl8i7webDUP+fSc9y8OV6nRhJqfJEi4UXqHbf/djDwug690/CA8KYuHYiDlPGxmIKrQlNexC25ys61gvh0zUHcDjK4O2+Sqki0WLhBXI66qX/vol/pt/K1pNbWbh3odWxLtZ+
OKSc4O/19vDXiWRW/XnC6kRKqRKixcJLVOnXF/8mV1Jv5s9EVmnB5PWTSclMsTrWhRrfDJXrEnXqG6oF++mFbqXKES0WXkJ8fKgxdiyZBw7w9KFIjqceZ9qWae43LE02O7Qdgm3vch5sZWfp9mMcSUy1OpVSqgRosfAiwddfT3Dna/GfMZ++NbszY9sM4s6UsQvJbYeACHf7/owBPtOJkZQqF7RYeBERocY//kF2UhLD14dit9mZtH6S1bEuVLkOXNmNyjs/5+Ym1Zi99oBOjKRUOaDFwssEXH01lWNiSJ/9NSNr9Gfp/qWsO7rO6lgXajcMzhxhVL19xJ9JZ+l2nRhJKW+nxcILhY8eBXY7Ny06RK3gWkxcO5FsRxka7bXpbRBcg9bx86hdJZBPftML3Up5Oy0WXiino17y4u95Jrgvu07v4us9X1sd6zy7L7S9B9n9PQ+1DeC3vSfZE3/G6lRKqWLQYuGlcjrqNfxkBe1rtOO/G/5LliPL6ljntRsKxkF/28/42oWZq3ViJKW8mRYLL2UPCSZ81OOkbtjA08k3kJCewJHkI1bHOq9aI2jYleBts7ijZU2+Wh9HSkYZKmZKqULRYuHFqvR1dtTzff9z+jfsTXxKfNma86LdMEg4wCP14ziTnsWCjToxklLeSouFF8vdUW/E7lrYxMbBMwfLzhSsze6EwGo0jfuaq68I5ePf9pedbEqpQtFi4eWcHfU6k/r+DPqshcSMRIYvHs6uU7usjgY+/hA5GNm5iPvbhbL9SBK/68RISnklLRZeztlRbyzZSUn0WOegf6wvfyX+xcCFA3ll9SskpidaG7D9MHBkEs3PBOvESEp5LS0W5UBOR71KiZl02WXjm5hvGHTVIL7840vumHsHc3bNsa4fRvhVULcT/ptm0rdtbRZuPsLpZJ0YyVt1nN6PjtP7WR2j2MrLfkDp7YsWi3IifPQojECVU+lU9q/Msx2fZc6dc2hatSnjV49n0KJBbDi2wZpw7YbCyd08WP8oGVkOvlhfscaLKk9fTKri0mJRTvjWrMmZKn4En83iyEsv4UhOpmnVpnzQ/QPeuOENEtITGLZ4GM/88gzHkkt5+I0WfcC/EvX2fUGHBtWYuVonRlLK22ixKEcSq/qRVNmXhM/nsLdPDCnr1iEi3NbgNub3ns/DrR9m6b6l3DnvTqZtmUZGdimdDvILhlYDYPt8hrerzIFTKazYffyym9z13m/c9d5vpZNPKeWWFovyRISEsADqf/IxAPuHDuPYhIk40tII8g3isbaPMa/PPK6NuJa3NrxFzPwYVsStKJ1s7YdBVhq3Zq8gLMTPbY/ufX5vsM/vjdLJppRyS4tFORQUFUWjeXOpMuguTn30EX/17Ufq5s0A1A2ty1s3v8V73d7DJjZG/jCSkT+MZH+Sh+9SioiEiDb4bvyEu6Lq8OPOYxxK0ImRlPIWWizKKVtwMBEvvkjdD6bhSElh3+C7iZ88GZPhPPXUuXZnvo7+mqeinmL9sfX0md+Hf6//t2enam0/DI5tZWj9U86JkdboeFFKeQuPFgsR6SEiu0Rkj4g8k8/rw0XkuIhsdD0eyPXaMBHZ7XoM82TO8mL24y2Y/XiLC9aFdOlCowXzqRwdzcl33+OvgXeRtnMnAL52X4a1GMbCmIXc0fAOPtz6IXfOvZNFexd5pqd1y/7gG0TN3bO55eoazF53gIwsnRhJKW/gsWIhInZgCnA70BwYLCLN82n6uTGmjesxzbVtNeBFoCPQAXhRRKp6Kmt5Z69UiVqv/os677xD1okT/DVgICfefReT5RzYLywwjFeue4WZPWcSHhTOM788w7DFw9hxckfJBgmoBC37wpavGNq+OifOZrBk29GS/QyllEd48siiA7DHGLPXGJMBzAZ6F3Db24ClxphTxpjTwFKgh4dyVhihN99Eo28WUOnWbhyf/Bb7Bt9N+t69516PDI9k1h2zeLnzy+xP2s+gRYMY/9t4EtJKcIiOdsMh
M5nr0lZQt1ogn2iPbqW8gieLRW0gd++rONe6vPqJyGYR+VJE6hZmWxF5SERiRST2+PHL34qpnHyqVqX2pEnUnvQmmQcO8FdMX05+9BHG4TwdZBMbfZv05ZuYb7j76rv5avdX3DH3DmbvnF0y82XUiYLwZtg2zGBIx/qs/esUu47qxEhKlXWeLBaSz7q8J8K/ARoYY1oDy4AZhdgWY8z7xpgoY0xUeHh4scJWNJV69qTRwm8I7tKF+AkTOTB0GBkHz9fnSn6VeLrD03x555c0q9aM/1vzfwxaOIjYo7HF+2AR54XuwxsYVC8RPx8bn67RowulyjpPFos4oG6u5TrABRMaGGNOGmPSXYtTgfYF3VYVn094OHWmvE3Eq6+StnMne3v34fTs2Rdc3L6y6pVM7T6VSTdOIikjiRFLRvCPn//B0eRiXGtofRfY/am84zN6tYrg6w2HSE7XiZGUKss8WSzWAU1EpKGI+AGDgAW5G4hIRK7FaCDniuoSoLuIVHVd2O7uWqcuY3qP6UzvMb1Q24gIVWL60OibBQS1acPRl17m4P0PkHnkyAVtbq1/K/P7zOeRyEf48eCPRM+LZurmqaRnp1/m3S8hqBo0j4bNnzMkqiZn07OYt/FQ4d9HKVVqPFYsjDFZwGM4v+R3AHOMMdtEZJyIRLuajRKRbSKyCRgFDHdtewoYj7PgrAPGudYpD/GNiKDuB9O44qUXSdm4kb13RpMwd94FRxmBPoE82uZR5veZT5daXfjP7/8hZn4Myw8uL/yttu2GQVoibc8sp3lEJT7RiZGUKtM82s/CGPOtMaapMaaxMeb/XOteMMYscD1/1hjTwhgTaYy5yRizM9e2HxpjrnQ9CvfrsioSEaHqoEE0mjcX/6uv4sizzxI38jGy8tw8UDukNv++6d+8d+t7+Np8efzHx3nkh0fYl7iv4B/W4Dqo1hjZ8DFDOtVn59EzrN9/umR3SClVYrQHt7qIX7161J8xgxpPP03yypXsvTOapMWLL2rXuVZnvoz+krFRY9kUv4mYBTFMWj+J5Mxk9x8i4hy6/MCv9Kl7llB/H50YSakyTIuFypfY7VQfMZyGc7/Gt25dDo15gkNPPknW6Qt/+/e1+TK0xVC+ifmGXo16MX3rdHrN7cU3f37j/rRSm7vB5kPQlk/p174O3245ysmzRbgGopTyOC0W6rL8GzemwWezCB8zhqSly9h7ZzRnfvzponZhgWGM7zKeWT1nEREcwXMrn2Pod0PZfnL7pd88pAZcdTts+owhUTXJyHYwJzbOg3ujlCoqLRbKLfHxIexvD9Pwizn4VK9O3KOPcvjZ58g+c3FnulbhrZjZcybjOo/jwJkDDFo4iJd/e5nTaZe4HtFuOKSc5MrTv9CpUTU+XbOfbJ0YSakyR4uFKrCAq6+m4RdzqP63h0lcsIC90b1J/vXXi9rZxEZMkxgWxixkSPMhzNs9jzvm3sGsHbMu7gXe+CaoXBfWz2BIp/rEnU5lxR/aG1+pskaLhSoU8fOjxpgxNPhsFrbAQA7cdz9Hx43DkXzxRe1Qv1D+cc0/+DL6S1pUb8Gra19l4MKBrDu67nwjmx3a3gt7f6J7RDrhof46XpRSZZAWC1Ukga1b0/Drr6g2fDinP5vtnMY1Nv+hQBpXacz7t77P5Bsnk5KZwn1L7uOpn5863wu87RAQG36bZzL4mrr8tCue7IyQUtwbpZQ7WixUkdkCAqj5zNPnp3G9dyjHJr6GIy3torYiwi31b2Fe73k82uZRlh9czp1z7+S9Te+RHhIGV94KGz9lUFQtBEhJvKqU90YpdTlaLFSxXTCN6/Tpzmlct2zJt22ATwCPRD7Cgj4LuL7O9by98W16z+vNjw2jMGeOUCv+F7o1q0lqYlOMQ/95KlVW6P+NqkRcNI3roMHEv/XWuWlc86oVUotJN05iavepBNgDGL37Ex6pXYe9699nSKf6mOxA0s42KN2dUEpdko/VAVT5kjON67FXJ3Dy
f+9y9qfl1Jo4gYCr8j+t1CmiE19Ef8HnOz/nndhJ9Mvcw93xb2P3a8CZY9cS/fZKAnztBOY8/Oznl/1sBPq6lv3Otwnwu7B93jZ2W34j4CulLkeLhSpxOdO4ht7ajSMvvMhf/QcQPnIk1R+4H/G5+J+cr82XIc2HcHvV5vznq3588ufXhDS0Y840JyP4KpKzISsTstIMmdk4H1mGjCyDMTYMAggYm/MnAkbOPTc5640ANnxsNvzsdgJ8fPD39cHPbsffx4cAX+dPfx8fAlzLAT6+zud+PgT6+BDo60OArw+Bvr4E+foQ6OdDoN/558F+vgT6+hLo64PdZseGDeOaiiXTkQkGTM5/5vxPAIdxXPAacL5NnvYO48BhDA6HIdsYDM7lbIdx/jTZOFzPHefag8Ph/Ixs4yDbGOeyydnG+fOC9zeGbIfj3Pr0dD8Avtiywpkv199jzvNL9dzP+XO4+OULV+R93Vw8lc25Nuff81Kfmf/7pqUHAIYP13+f73beJC3dHymF33+kvIz0GRUVZWIvcTeOsk7W6dMcGz+epG+/I6B1a2pNeBX/Ro0uvcGMaLYm/sV9oZCqJ0mVKhBH2hVse3hpkbYVkfXGmCh37fTIQnlUzjSuod26cfTlcfwV05fwJ8ZQbehQxJZPNWg/jJZf3kc0NdkYEMgn963G5PymbBznnhtyfvvN54EDh8P581x712/LOdtmO86/R+7XL3gP1/Nsh4P0rCzSs7JJz8oiLSuLjKxs0rOzSM/MIj07m8xs52sZ2dlkZGeTmev5tuN7AKFptQaI2FwXCgWbCJLzwLVebM71OO8gsyGI2BDBtW3Oaznt5PxrrmWb5Ly3czvne7iWcXaatF/wug1b7veQXO+BLdf7CdN+/xyAh9sPPvdXlvuXWslZusRvujmvSz6/Cl/wPhe9LOffO297V+NL/XJ94Xs5Fyav+RCAJzvdf4mtvMek1R/gUwpHFlosVKmo1LMnQddcw5EXXiR+wkTOLvuBiFf/hV/duhc2vLoXBFbjlpQzbA4IItAn0JrAJajj9H4AzLvnJWuDlICZO52zBTzasafFSYrnf5veAWBo25stTlJ8UzZOKZXP0QN9VWp8wsOp886UPNO4fn7h+WYff2hzNx1TUnhobqp1YZVSF9BioUrVxdO4vsTBBx68YBpX2g3FZoPaNVKsC6qUuoBe4FaWMcaQ8PnnHHvtdcRup+bzz1G5d29EhFNPhFOtcgYEhUGlWhc+QnOe14ZKEeAfavWuXNa33ZoD0HPZZYZr9xLlZV/Ky35A8fdFL3CrMi9nGtfgzp05/NxzHHnmWc4sXUbEyy+xZU8VIsJSadq3FyQdhsRDcHAtpOYzFbt/pXwKSa5iUqk2BFbN76qpUqqAtFgoy+VM43rq4084/u9/s7fXnRibD3tSK9H0zrcubJyZCmeOOAvIBY9Dzp/xO+DMUfLeu49PAIS6CkfeI5WcIhNSwzkKrlLqIlosVJmQM41rSNfrOfzMs9TckkDaWcPBRx7FFhiILTgICQzEFhiELSjIuS4oEFtQUyQwElv1YOdyYCA2fz/EnMWWeRpJjUfOHIEzuQrLwTXOgpOdkTeEq6BE5DoyqXVhkQmNAB8/a/6QlLKQFgtVpuRM4/pr51YEpUHm0aOYlBQcKSk4UlNxpKZCdnbB39BudxaQwEBsQUFIUBC2wFbYgjpg87Vj8zGITzY2ycBGGraEFGyOM0j2ZmyZy53rfAw2H4errcFWqTq2ahFI5dr5n/IKjQB/HWJdlS9aLFSZIz4+JFYSEitB27lfX/CaMQaTkYEjJQWTmnq+iKSk4khJdq5LTcWRnFNcUs63TT5fcLKTzpCVmuLazrneXDS0erDrkZ8TiP0ENp+NiD3bVVByPfx8kKBAbMEhNLWl4gCOj+oNF3Vay3UdJW/nsbxt8l5zcbN8vuObXKKNm/W2i9s1kjQMcOK5oXizhuL8u/b2/QDnvmSVwnhnWiyUVxERxN8fm78/
VK1aou9tsrNxpKZhUnMKkKuY5C4454pLrnVnz+JIOonjbBKO5DNkJp/FJKfhOJmBIyMBR4Y/GOHEgT9KNK81/AE4fnCdm3ZlXXnZDwB/AqtkevxTtFioMildSr/nttjt2EOCIeRSRxNFU9hbG/O9nf1St7gXZn3udedG4jP5rjc48mzj/Pl9dAcAus9bnf/neonv+3QCvH8/IGdffLnSw5+jxUKpMia/cZNK+7bfS32aw/WKLaRy6YXxgPKyH3B+XzxNe3ArpZRyS48sVJk04e4GAMRYG0Mp5aJHFkoppdzSYqGUUsotj56GEpEewFuAHZhmjJlwiXb9gS+Aa4wxsSLSANgB7HI1WW2M+ZsnsyrlKS/f0wwA754Bwqm87Et52Q8ovX3xWLEQETswBbgViAPWicgCY8z2PO1CgVHAmjxv8acxpo2n8imllCo4T56G6gDsMcbsNcZkALOB3vm0Gw+8BuTtPquUUqqM8GSxqA0czLUc51p3joi0BeoaYxbms31DEfldRH4Wkevz+wAReUhEYkUk9vjx4yUWXCml1IU8WSzy6ylyrquoiNiAfwN/z6fdEaCeMaYt8CQwS0QqXfRmxrxvjIkyxkSFh4eXUGyllFJ5efICdxxQN9dyHeBwruVQoCWw3NVj9QpggYhEG2NigXQAY8x6EfkTaAroVHgVRPOIi343UEpZyJNHFuuAJiLSUET8gEHAgpwXjTGJxpgwY0wDY0wDYDUQ7bobKtx1gRwRaQQ0AfZ6MKtSSqnL8NiRhTEmS0QeA5bgvHX2Q2PMNhEZB8QaYxZcZvOuwDgRyQKygb8ZY/KZT1OVV9N7TLc6glIqF4/2szDGfAt8m2fdC5doe2Ou518BX3kym1JKqYLTHtxKKaXc0mKhlFLKLcl3ohUvFBUVZWJj9WYppZQqDBFZb4yJctdOjyyUUkq5pcVCKaWUW1oslFJKuaXFQimllFtaLJRSSrmlxUIppZRbWiyUUkq5pcVCKaWUW1oslFJKuVVuenCLyHFgfzHeIgw4UUJxrFRe9gN0X8qq8rIv5WU/oHj7Ut8Y43b2uHJTLIpLRGIL0uW9rCsv+wG6L2VVedmX8rIfUDr7oqehlFJKuaXFQimllFtaLM573+oAJaS87AfovpRV5WVfyst+QCnsi16zUEop5ZYeWSillHJLi4WLiIwXkc0islFEvheRWlZnKioReV1Edrr2Z66IVLE6U1GJyAAR2SYiDhHxujtXRKSHiOwSkT0i8ozVeYpDRD4UkXgR2Wp1luIQkboi8pOI7HD92xptdaaiEpEAEVkrIptc+/Kyxz5LT0M5iUglY0yS6/kooLkx5m8WxyoSEekO/GiMyRKRiQDGmKctjlUkItIMcADvAU8ZY7xmOkQRsQN/ALcCccA6YLAxZrulwYpIRLoCZ4GPjTEtrc5TVCISAUQYYzaISCiwHujjjX8vIiJAsDHmrIj4AiuB0caY1SX9WXpk4ZJTKFyCAa+tosaY740xWa7F1UAdK/MUhzFmhzFml9U5iqgDsMcYs9cYkwHMBnpbnKnIjDErgFNW5yguY8wRY8wG1/MzwA6gtrWpisY4nXUt+roeHvnu0mKRi4j8n4gcBO4BXrA6Twm5D/jO6hAVVG3gYK7lOLz0S6m8EpEGQFtgjbVJik5E7CKyEYgHlhpjPLIvFapYiMgyEdmaz6M3gDHmeWNMXeBT4DFr016eu31xtXkeyMK5P2VWQfbFS0k+67z2iLW8EZEQ4CtgTJ4zC17FGJNtjGmD8wxCBxHxyClCH0+8aVlljOlWwKazgEXAix6MUyzu9kVEhgG9gFtMGb8wVYi/F28TB9TNtVwHOGxRFpWL6/z+V8Cnxpivrc5TEowxCSKyHOgBlPhNCBXqyOJyRKRJrsVoYKdVWYpLRHoATwPRxpgUq/NUYOuAJiLSUET8gEHAAoszVXiui8IfADuMMZOszlMcIhKec7ejiAQC3fDQd5feDeUiIl8BV+G882Y/8Ddj
zCFrUxWNiOwB/IGTrlWrvfjOrhjgv0A4kABsNMbcZm2qghORnsBkwA58aIz5P4sjFZmIfAbciHOE02PAi8aYDywNVQQich3wC7AF5//vAM8ZY761LlXRiEhrYAbOf182YI4xZpxHPkuLhVJKKXf0NJRSSim3tFgopZRyS4uFUkopt7RYKKWUckuLhVJKKbe0WChVCCJy1n2ry27/pYg0cj0PEZH3RORP14ihK0Sko4j4uZ5XqE6zqmzTYqFUKRGRFoDdGLPXtWoazoH5mhhjWgDDgTDXoIM/AHdZElSpfGixUKoIxOl11xhWW0TkLtd6m4i84zpSWCgi34pIf9dm9wDzXe0aAx2BfxpjHACu0WkXudrOc7VXqkzQw1yliqYv0AaIxNmjeZ2IrAC6AA2AVkANnMNff+japgvwmet5C5y90bMv8f5bgWs8klypItAjC6WK5jrgM9eIn8eAn3F+uV8HfGGMcRhjjgI/5domAjhekDd3FZEM1+Q8SllOi4VSRZPf8OOXWw+QCgS4nm8DIkXkcv8P+gNpRcimVInTYqFU0awA7nJNPBMOdAXW4pzWsp/r2kVNnAPv5dgBXAlgjPkTiAVedo2Ciog0yZnDQ0SqA8eNMZmltUNKXY4WC6WKZi6wGdgE/Aj8w3Xa6Suc81hsxTlv+Bog0bXNIi4sHg8AVwB7RGQLMJXz813cBHjdKKiq/NJRZ5UqYSISYow56zo6WAt0McYcdc038JNr+VIXtnPe42vgWS+ef1yVM3o3lFIlb6FrQho/YLzriANjTKqIvIhzHu4Dl9rYNVHSPC0UqizRIwullFJu6TULpZRSbmmxUEop5ZYWC6WUUm5psVBKKeWWFgullFJuabFQSinl1v8HxGBcpzu+v7AAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "#plot CV误差曲线\n",
    "\n",
    "#获取测试数据的均值和方差\n",
    "test_means=-grid.cv_results_['mean_test_score']\n",
    "test_stds=grid.cv_results_['std_test_score']\n",
    "#获取训练数据的方差和均值\n",
    "train_means=-grid.cv_results_['mean_train_score']\n",
    "train_stds=grid.cv_results_['std_train_score']\n",
    "\n",
    "\n",
    "#plot results\n",
    "n_Cs=len(Cs)\n",
    "nmber_penaltys=len(penaltys)\n",
    "#数据变形处理reshape\n",
    "test_scores=np.array(test_means).reshape(n_Cs, nmber_penaltys)\n",
    "train_scores=np.array(train_means).reshape(n_Cs, nmber_penaltys)\n",
    "test_stds=np.array(test_stds).reshape(n_Cs, nmber_penaltys)\n",
    "train_stds=np.array(train_stds).reshape(n_Cs, nmber_penaltys)\n",
    "\n",
    "\n",
    "x_axis = np.log10(Cs)\n",
    "for i, value in enumerate(penaltys):\n",
    "    #pyplot.plot(log(Cs), test_scores[i], label= 'penalty:'   + str(value))\n",
    "    plt.errorbar(x_axis, test_scores[:,i], yerr=test_stds[:,i] ,label = penaltys[i] +' Test')\n",
    "    plt.errorbar(x_axis, train_scores[:,i], yerr=train_stds[:,i] ,label = penaltys[i] +' Train')\n",
    "    \n",
    "plt.legend()\n",
    "plt.xlabel('log(C)')\n",
    "plt.ylabel('logloss')\n",
    "plt.savefig('./LogisticGridSearchCV_c.png')\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "上图给出了L1正则和L2正则下、不同正则参数C对应的模型在训练集和测试集上的得分（score，此处为logloss损失）。\n",
    "C很小的时候，正则强度大，此时模型比较简单，在训练集和测试集上的表现都比较差，此时模型处于欠拟合状态。\n",
    "随着C的增加，训练集上的误差权重越来越大，使得模型越来越复杂，模型在训练集和测试集上会拟合得越来越好。\n",
    "可以看出在训练集上C越大（正则越弱）的模型性能越好；但在测试集上当C=1时性能最好（L1正则和L2正则均如此）"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Persist the best estimator to disk for later evaluation\n",
    "pickle.dump(grid.best_estimator_, open(\"./L1_org_pk1\", \"wb\"))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "5折交叉验证、正确率(accuracy)对Logistic回归模型的正则超参数调优"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fitting 5 folds for each of 14 candidates, totalling 70 fits\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] .... C=0.001, penalty=l1, score=0.6493506493506493, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] .... C=0.001, penalty=l1, score=0.6493506493506493, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] .... C=0.001, penalty=l1, score=0.6493506493506493, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] .... C=0.001, penalty=l1, score=0.6535947712418301, total=   0.0s\n",
      "[CV] C=0.001, penalty=l1 .............................................\n",
      "[CV] .... C=0.001, penalty=l1, score=0.6535947712418301, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] .... C=0.001, penalty=l2, score=0.7337662337662337, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] .... C=0.001, penalty=l2, score=0.7142857142857143, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] .... C=0.001, penalty=l2, score=0.7532467532467533, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] .... C=0.001, penalty=l2, score=0.7843137254901961, total=   0.0s\n",
      "[CV] C=0.001, penalty=l2 .............................................\n",
      "[CV] .... C=0.001, penalty=l2, score=0.7516339869281046, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l1, score=0.6818181818181818, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l1, score=0.6818181818181818, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l1, score=0.7337662337662337, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l1, score=0.7254901960784313, total=   0.0s\n",
      "[CV] C=0.01, penalty=l1 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l1, score=0.7124183006535948, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l2, score=0.7532467532467533, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l2, score=0.7077922077922078, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l2, score=0.7727272727272727, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] ...... C=0.01, penalty=l2, score=0.803921568627451, total=   0.0s\n",
      "[CV] C=0.01, penalty=l2 ..............................................\n",
      "[CV] ..... C=0.01, penalty=l2, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l1, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l1, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l1, score=0.7727272727272727, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l1, score=0.7843137254901961, total=   0.0s\n",
      "[CV] C=0.1, penalty=l1 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l1, score=0.7581699346405228, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l2, score=0.7662337662337663, total=   0.0s"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.\n",
      "[Parallel(n_jobs=1)]: Done   1 out of   1 | elapsed:    0.0s remaining:    0.0s\n",
      "[Parallel(n_jobs=1)]: Done   2 out of   2 | elapsed:    0.0s remaining:    0.0s\n",
      "[Parallel(n_jobs=1)]: Done   3 out of   3 | elapsed:    0.0s remaining:    0.0s\n",
      "[Parallel(n_jobs=1)]: Done   4 out of   4 | elapsed:    0.0s remaining:    0.0s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l2, score=0.7467532467532467, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l2, score=0.7922077922077922, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l2, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=0.1, penalty=l2 ...............................................\n",
      "[CV] ...... C=0.1, penalty=l2, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ........ C=1, penalty=l1, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ........ C=1, penalty=l1, score=0.7467532467532467, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ........ C=1, penalty=l1, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ........ C=1, penalty=l1, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=1, penalty=l1 .................................................\n",
      "[CV] ........ C=1, penalty=l1, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ........ C=1, penalty=l2, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ........ C=1, penalty=l2, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ........ C=1, penalty=l2, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ........ C=1, penalty=l2, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=1, penalty=l2 .................................................\n",
      "[CV] ........ C=1, penalty=l2, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ....... C=10, penalty=l1, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ....... C=10, penalty=l1, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ....... C=10, penalty=l1, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ....... C=10, penalty=l1, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=10, penalty=l1 ................................................\n",
      "[CV] ....... C=10, penalty=l1, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ....... C=10, penalty=l2, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ....... C=10, penalty=l2, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ....... C=10, penalty=l2, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ....... C=10, penalty=l2, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=10, penalty=l2 ................................................\n",
      "[CV] ....... C=10, penalty=l2, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ...... C=100, penalty=l1, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ...... C=100, penalty=l1, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ...... C=100, penalty=l1, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ...... C=100, penalty=l1, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=100, penalty=l1 ...............................................\n",
      "[CV] ...... C=100, penalty=l1, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] ...... C=100, penalty=l2, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] ...... C=100, penalty=l2, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] ...... C=100, penalty=l2, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] ...... C=100, penalty=l2, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=100, penalty=l2 ...............................................\n",
      "[CV] ...... C=100, penalty=l2, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] ..... C=1000, penalty=l1, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] ..... C=1000, penalty=l1, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] ..... C=1000, penalty=l1, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] ..... C=1000, penalty=l1, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=1000, penalty=l1 ..............................................\n",
      "[CV] ..... C=1000, penalty=l1, score=0.7712418300653595, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] ..... C=1000, penalty=l2, score=0.7597402597402597, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] ..... C=1000, penalty=l2, score=0.7402597402597403, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] ..... C=1000, penalty=l2, score=0.7857142857142857, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] ..... C=1000, penalty=l2, score=0.7973856209150327, total=   0.0s\n",
      "[CV] C=1000, penalty=l2 ..............................................\n",
      "[CV] ..... C=1000, penalty=l2, score=0.7712418300653595, total=   0.0s\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done  70 out of  70 | elapsed:    0.4s finished\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "GridSearchCV(cv=5, error_score='raise-deprecating',\n",
       "       estimator=LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,\n",
       "          intercept_scaling=1, max_iter=100, multi_class='warn',\n",
       "          n_jobs=None, penalty='l2', random_state=None, solver='liblinear',\n",
       "          tol=0.0001, verbose=0, warm_start=False),\n",
       "       fit_params=None, iid='warn', n_jobs=None,\n",
       "       param_grid={'penalty': ['l1', 'l2'], 'C': [0.001, 0.01, 0.1, 1, 10, 100, 1000]},\n",
       "       pre_dispatch='2*n_jobs', refit=True, return_train_score='warn',\n",
       "       scoring='accuracy', verbose=5)"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Set up the hyperparameter search space\n",
    "# Regularization penalties: L1 and L2\n",
    "penaltys2=['l1', 'l2']\n",
    "Cs2=[0.001, 0.01, 0.1, 1, 10, 100, 1000]\n",
    "# Parameter grid for tuning\n",
    "tuned_parameters2=dict(penalty=penaltys2, C=Cs2)\n",
    "\n",
    "lr_penalty2=LogisticRegression(solver='liblinear')\n",
    "# Scoring metric: accuracy (the earlier search used negative log-loss)\n",
    "grid2=GridSearchCV(lr_penalty2, tuned_parameters2, cv=5, scoring='accuracy', verbose=5)\n",
    "\n",
    "grid2.fit(X_train, y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "最佳分数： 0.7747395833333334\n",
      "最佳参数： {'C': 0.1, 'penalty': 'l2'}\n"
     ]
    }
   ],
   "source": [
    "# Report the best cross-validated accuracy and the corresponding parameters\n",
    "print(\"最佳分数：\", grid2.best_score_)\n",
    "print(\"最佳参数：\", grid2.best_params_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[ 0.36167135,  0.99773092, -0.0648661 ,  0.05966521, -0.04513553,\n",
       "         0.54600449,  0.25990383,  0.15544357]])"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Coefficients of the best estimator (regularization may shrink some toward zero)\n",
    "grid2.best_estimator_.coef_"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:125: FutureWarning: You are accessing a training score ('split0_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:125: FutureWarning: You are accessing a training score ('split1_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:125: FutureWarning: You are accessing a training score ('split2_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:125: FutureWarning: You are accessing a training score ('split3_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:125: FutureWarning: You are accessing a training score ('split4_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:125: FutureWarning: You are accessing a training score ('mean_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:125: FutureWarning: You are accessing a training score ('std_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "{'mean_fit_time': array([0.00400057, 0.00290055, 0.00250034, 0.00290041, 0.00270042,\n",
       "        0.00390043, 0.00350037, 0.00350037, 0.00350046, 0.00300045,\n",
       "        0.00330029, 0.00310049, 0.00340047, 0.00290031]),\n",
       " 'std_fit_time': array([0.00176099, 0.00049002, 0.00031628, 0.00019994, 0.00024511,\n",
       "        0.00049001, 0.00054777, 0.0006326 , 0.00063249, 0.00054773,\n",
       "        0.00060005, 0.00048986, 0.00058326, 0.00020001]),\n",
       " 'mean_score_time': array([0.00200024, 0.0013001 , 0.00110006, 0.00110016, 0.0013001 ,\n",
       "        0.00140028, 0.00160022, 0.00130019, 0.00140018, 0.0013001 ,\n",
       "        0.00110025, 0.00140014, 0.00110016, 0.00110021]),\n",
       " 'std_score_time': array([0.00063249, 0.00024499, 0.00020003, 0.00019999, 0.00024499,\n",
       "        0.00048984, 0.00048994, 0.00040017, 0.00037422, 0.00024499,\n",
       "        0.00020006, 0.0002001 , 0.00019999, 0.00019996]),\n",
       " 'param_C': masked_array(data=[0.001, 0.001, 0.01, 0.01, 0.1, 0.1, 1, 1, 10, 10, 100,\n",
       "                    100, 1000, 1000],\n",
       "              mask=[False, False, False, False, False, False, False, False,\n",
       "                    False, False, False, False, False, False],\n",
       "        fill_value='?',\n",
       "             dtype=object),\n",
       " 'param_penalty': masked_array(data=['l1', 'l2', 'l1', 'l2', 'l1', 'l2', 'l1', 'l2', 'l1',\n",
       "                    'l2', 'l1', 'l2', 'l1', 'l2'],\n",
       "              mask=[False, False, False, False, False, False, False, False,\n",
       "                    False, False, False, False, False, False],\n",
       "        fill_value='?',\n",
       "             dtype=object),\n",
       " 'params': [{'C': 0.001, 'penalty': 'l1'},\n",
       "  {'C': 0.001, 'penalty': 'l2'},\n",
       "  {'C': 0.01, 'penalty': 'l1'},\n",
       "  {'C': 0.01, 'penalty': 'l2'},\n",
       "  {'C': 0.1, 'penalty': 'l1'},\n",
       "  {'C': 0.1, 'penalty': 'l2'},\n",
       "  {'C': 1, 'penalty': 'l1'},\n",
       "  {'C': 1, 'penalty': 'l2'},\n",
       "  {'C': 10, 'penalty': 'l1'},\n",
       "  {'C': 10, 'penalty': 'l2'},\n",
       "  {'C': 100, 'penalty': 'l1'},\n",
       "  {'C': 100, 'penalty': 'l2'},\n",
       "  {'C': 1000, 'penalty': 'l1'},\n",
       "  {'C': 1000, 'penalty': 'l2'}],\n",
       " 'split0_test_score': array([0.64935065, 0.73376623, 0.68181818, 0.75324675, 0.75974026,\n",
       "        0.76623377, 0.75974026, 0.75974026, 0.75974026, 0.75974026,\n",
       "        0.75974026, 0.75974026, 0.75974026, 0.75974026]),\n",
       " 'split1_test_score': array([0.64935065, 0.71428571, 0.68181818, 0.70779221, 0.74025974,\n",
       "        0.74675325, 0.74675325, 0.74025974, 0.74025974, 0.74025974,\n",
       "        0.74025974, 0.74025974, 0.74025974, 0.74025974]),\n",
       " 'split2_test_score': array([0.64935065, 0.75324675, 0.73376623, 0.77272727, 0.77272727,\n",
       "        0.79220779, 0.78571429, 0.78571429, 0.78571429, 0.78571429,\n",
       "        0.78571429, 0.78571429, 0.78571429, 0.78571429]),\n",
       " 'split3_test_score': array([0.65359477, 0.78431373, 0.7254902 , 0.80392157, 0.78431373,\n",
       "        0.79738562, 0.79738562, 0.79738562, 0.79738562, 0.79738562,\n",
       "        0.79738562, 0.79738562, 0.79738562, 0.79738562]),\n",
       " 'split4_test_score': array([0.65359477, 0.75163399, 0.7124183 , 0.77124183, 0.75816993,\n",
       "        0.77124183, 0.77124183, 0.77124183, 0.77124183, 0.77124183,\n",
       "        0.77124183, 0.77124183, 0.77124183, 0.77124183]),\n",
       " 'mean_test_score': array([0.65104167, 0.74739583, 0.70703125, 0.76171875, 0.76302083,\n",
       "        0.77473958, 0.77213542, 0.77083333, 0.77083333, 0.77083333,\n",
       "        0.77083333, 0.77083333, 0.77083333, 0.77083333]),\n",
       " 'std_test_score': array([0.00207782, 0.02321726, 0.02172389, 0.03153907, 0.01483053,\n",
       "        0.01836721, 0.01800398, 0.01992491, 0.01992491, 0.01992491,\n",
       "        0.01992491, 0.01992491, 0.01992491, 0.01992491]),\n",
       " 'rank_test_score': array([14, 12, 13, 11, 10,  1,  2,  3,  3,  3,  3,  3,  3,  3]),\n",
       " 'split0_train_score': array([0.6514658 , 0.75081433, 0.71335505, 0.76710098, 0.76547231,\n",
       "        0.77035831, 0.77035831, 0.77198697, 0.77361564, 0.77361564,\n",
       "        0.77361564, 0.77361564, 0.77361564, 0.77361564]),\n",
       " 'split1_train_score': array([0.6514658 , 0.75732899, 0.71335505, 0.77361564, 0.7752443 ,\n",
       "        0.78338762, 0.77850163, 0.77850163, 0.77850163, 0.77850163,\n",
       "        0.77850163, 0.77850163, 0.77850163, 0.77850163]),\n",
       " 'split2_train_score': array([0.6514658 , 0.74592834, 0.70032573, 0.76058632, 0.77361564,\n",
       "        0.77035831, 0.7752443 , 0.77198697, 0.7752443 , 0.77361564,\n",
       "        0.77361564, 0.77361564, 0.77361564, 0.77361564]),\n",
       " 'split3_train_score': array([0.6504065 , 0.73333333, 0.70243902, 0.75609756, 0.75934959,\n",
       "        0.76422764, 0.76747967, 0.76747967, 0.76747967, 0.76747967,\n",
       "        0.76747967, 0.76747967, 0.76747967, 0.76747967]),\n",
       " 'split4_train_score': array([0.6504065 , 0.74634146, 0.70569106, 0.76260163, 0.77560976,\n",
       "        0.77723577, 0.78211382, 0.77886179, 0.77886179, 0.77886179,\n",
       "        0.77886179, 0.77886179, 0.77886179, 0.77886179]),\n",
       " 'mean_train_score': array([0.65104208, 0.74674929, 0.70703318, 0.76400042, 0.76985832,\n",
       "        0.77311353, 0.77473955, 0.77376341, 0.77474061, 0.77441487,\n",
       "        0.77441487, 0.77441487, 0.77441487, 0.77441487]),\n",
       " 'std_train_score': array([0.00051895, 0.00786489, 0.00543745, 0.00596881, 0.00641678,\n",
       "        0.00658318, 0.00530451, 0.00434145, 0.00413207, 0.0041437 ,\n",
       "        0.0041437 , 0.0041437 , 0.0041437 , 0.0041437 ])}"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Full cross-validation results for inspection\n",
    "grid2.cv_results_"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "数据对比：\n",
    "默认参数的LogisticRegression的5折logloss均值是： 0.47615970944434044，5折的logloss值是[0.48797856 0.53011593 0.4562292  0.422546   0.48392885]\n",
    "GridSearchCV的最佳分数(负logloss损失参数下)是： 0.47602552188807734，最佳参数是{'C': 1, 'penalty': 'l1'}\n",
    "GridSearchCV的最佳分数(准确率参数下)是：0.7747395833333334  最佳参数： {'C': 0.1, 'penalty': 'l2'}\n",
    "可以看出默认的LogisticRegression与GridSearchCV(负logloss损失前提下)调优得到的最佳分数相差不是很大，仅相差约0.0001\n",
    "注意准确率与logloss量纲不同，不能直接与前两者的分数比较；在准确率指标下GridSearchCV调优得到的最优参数是C=0.1和L2正则"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEKCAYAAADjDHn2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzt3Xl4FeXZx/HvnZM9BAghQSAsYZUdJBAUwaIoiIqgFcG6dkHfV6xai2uLiq1LrbVvW1tUXHCpqLhRRBBERSlLANk32QlbQkgIIYQs537/OAcMMZCTZTJZ7s91nYuZOc+c8xvUczvPM/OMqCrGGGPM2QS5HcAYY0zNZ8XCGGNMmaxYGGOMKZMVC2OMMWWyYmGMMaZMViyMMcaUydFiISLDRWSziGwVkQdLeb+1iHwpIt+JyBoRGVHsvYf8+20WkWFO5jTGGHN24tR9FiLiAbYAlwKpQAowTlU3FGvzEvCdqv5LRLoCs1W1rX/5HaA/0AKYD3RS1SJHwhpjjDkrJ88s+gNbVXW7quYD04GrS7RRoKF/uRGwz798NTBdVU+o6g5gq//zjDHGuCDYwc9uCewptp4KJJdo8xjwuYjcBUQBQ4vtu6TEvi3P9mVNmzbVtm3bViKuMcbUPytWrDikqnFltXOyWEgp20r2eY0DXlfV50TkfOBNEeke4L6IyHhgPEDr1q1Zvnx5JSMbY0z9IiK7AmnnZDdUKtCq2HoCP3QznfQL4D0AVV0MhANNA9wXVX1JVZNUNSkurszCaIwxpoKcLBYpQEcRSRSRUGAsMLNEm93AJQAi0gVfsUj3txsrImEikgh0BJY5mNUYY8xZONYNpaqFIjIBmAt4gFdVdb2ITAaWq+pM4D7gZRG5F183063quzxrvYi8B2wACoE77UooY4xxj2OXzla3pKQktTELY4wpHxFZoapJZbWzO7iNMcaUyYqFMcaYMlmxMMYYUyYrFsYYY8pkxaIOuW3Obdw25za3Y1SJunQsxtQFViyMMcaUyYqFMcaYMlmxMMYYUyYrFsY4rC6NvyS/di3Jr13rdoxKqyvHAdV3LE7OOmuMATbsz3Y7gjGVZmcWxhhjymTFwhhjTJmsWBhjjCmTFQtjjDFlsmJhjDGmTFYsjDHGlMmKhTHGmDJZsTDGGFMmKxbGGGPK5GixEJHhIrJZRLaKyIOlvP+8iKzyv7aISFax9/4kIutFZKOI/E1ExMmsxhhjzsyx6T5ExAO8AFwKpAIpIjJTVTecbKOq9xZrfxfQx798ATAQ6Ol/+1vgIuArp/IaY4w5MyfPLPoDW1V1u6rmA9OBq8/Sfhzwjn9ZgXAgFAgDQoCDDmY1xhhzFk5OJNgS2FNsPRVILq2hiLQBEoEFAKq6WES+BPYDAvxDVTeWst94YDxA69atqzS8MVWlTcE2tyMYU2lOFovSxhj0DG3HAjNUtQhARDoAXYAE//vzRGSwqi487cNUXwJeAkhKSjrTZ5taaOzf1/sWhrubwxjj42SxSAVaFVtPAPadoe1Y4M5i66OBJaqaAyAinwEDgIWl7GuMqSaPvu0/wa/lj+eoK8cB1XcsTo5ZpAAdRSRRRELxFYSZJRuJSGcgBlhcbPNu4CIRCRaREHyD2z/qhjJ1V25+Ebn5RW7HqBK3vpfPre/lux3DmEpxrFioaiEwAZiL74f+PVVdLyKTRWRksabjgOmqWrwbaQawDVgLrAZWq+p/nMpqjDHm7Bx9Up6qzgZml9g2qcT6Y6XsVwTc7mQ2Y4wxgbPHqpoaySPHyQ2DIm8RniCP23EqzuslLKQIj0cho/ZfFRUZXuhbyNwFEuR7BXl+WC7t9aP37f7a2siKhakxirxFpBxMYfb22Xx2XRB5ocJTb51H0/CmxEfGn3o1i2p2+npkM6JCotwLfuKo78czcydk+f88+crazSXJeb52fz8voI9TBfUCKqgX1P8nxZeLvYcX1Cv+
/QS0+Hopn3OGfYt//qn3SuzT3t9ZvOenF1XiL0x++FPkh3UpuZ1S3j9TmzN9XunLnU6c8B3H1aVezV+rdDpxAm+o8xeDWrEwrlJV1mes59PtnzJn5xwOHT9EVEgU3XYrCRlKs1+NJy03jbTcNHYf3c3yA8s4lneU4CII8kKw1/dnw6BI4sJiiQ9tQlxoE2JDGhMb0pgmIY1oEtyImOBooj1RBBV50aIitKAQigrRwkK0sAgtLAD/di0qsV6QD7lZ6LFMNDcTzT0Cudlo3lE0LwcK8v0/sP4fa4JRTzgaFIZKe46nH0K9QnjTZr7PLPKeehVfx5/tjBeYOylIEE8Q4vGAJ8i/HAQez6llCQri+GHfvbER8c1P/hP84c9TubXYH1piW/GDK7HtZPtS9yu+rfjnnOX78ZaSyfennvAVjoLDx0r5y6hd9IQgHisWpo7acWQHs3fMZvb22ew+upuQoBCGxg7kKmlP+x0nOPjJ64QUQkjKm2hhIRT6fsTxes/wiUf9r52lvpvtf1WIKCIgQer7n9cgfD+kwcFIcAMkNBQJCYPwCCQ0AgkNh2APEhxCkMfD8YOLUKBRt/MQjwcJCQZPMBIcjAR7fJ/jX/ft518PCfb9WAeHIP7tpe4XEuz7kQ/2v1d8+dR68c/wICEhviwn14MCu9Zl9tCuAIyY+1VF/zZrhFPHMX+dy0kq7+SxdHT4e6xYmGpz8NhB5uycw6fbP2Xj4Y1E5wlX5XRgcPoAmm05RMGmBeCdz5GQEBA4FgFtRo0q9Ue15I9lyR9VDQoiR/PIKswmqzCHzPzDZOTu53BeGhn5hzlUmE265pEd5KUoCN/LA0UC4eIlFogNDqNpWEOaRcYTH92S+Jj2NGt6LvFNu9MkKr7MsZTj+UWs3J3J25Of4ERQGF1G/ILwkCDCQzyEhXgID/Yt+17+5WBPsTY/bAvxCDaXpnGTFQvjqCMnjjBv1zxm75jN5u0pnLvHy4j0WB7Y04TIXemgG5HQUEJ69aLRHXcQ2b8fEb16MeeqvgAkP/JwYF+kCjlpp40ZNMrcScuTYwnZezmtG8MTBjFtyG3UirRGzUiLbMzBsEjSPEGkUUjaiSzSctPYlnuQQ8e+pyhnE+z/4tTuwRJMbEQszSJ/GD9pHNaUnGNR7M8I5ft9HjalCgWFIUjzoYR6C/jyq614K9hbECT8UFiCixWckKDTCszJwhMWXLIIlXi/RGEKP+2zPIQFBxEUZMXJ/MCKhalyxwuP8/Wer1mw+kOyly7h3F2F3LQ3hHMO+q6kkfCjRPTpTeTI64nq14/wnj0JCgsr+4Pzj0HW7tMHkIsPLBfknt4+ujk0bgNtL4SYtv5XG9+fDc6BoCAigbb+15kUeYvIyMsgLTeNg7kHT42hpB49wPbDe1mWuZGjhd+ikvfDThEQ3hFiPA1okJFDeKHS6NwevnENBa8qXvWN2Zy+fPI9PdWuePuT63mq5JbcP0/x5vnbeP3bAvxnVhoR341YEiQEiRAk4B3dHAEmvXpVJT7ZfUWjfWMutf04wHcsDXIiGOHw91ixAG6b47tP/rXhr7mcpPYq8BawbM1nrJ//HoUr19BpZwG3Hfa/GRFOVN8kIm/oR2S/fkR074aEhp718yLDC2kZnwsf/OqHwnAs7fRGIVG+H/4m7aD9xacXg8atISSiSo7NE+QhPjKeUBqz72BTtu5oztIdGWzYl41XIdQTRK9WjTivTSQdWhQRH5PHkQJ/cTl2kLU73qHAA43CGlVJnnLxF5gi7w9FyOuFolPLvj+LlFPLpb1/ctvxwkJACAtqUP3HUoXyCwsACK3lxwG+YwkrcP4s0IpFHVLdk++dSE1l04IP2PvNPKLW7SA+08sgID8ihKBevYi7cChR/fsT3rWrbyA1UIX5JHXNICqiEHYv8RWATsOKnR34X5Gxjl6zn5FzgmU7DrN0x2GWbM9g88GjqEJYcBB9Wjfmros7ktyuCee1jiE85MzjF7N/9zYAIx6e
4ljW6pL82rUALLr1nTJa1mx15TjAdyxHGzr/PVYsTEBUlYI9e8hNSeHAt1+Qm5JCxKEcQoG4cMjo3IzsMRfSdeh1NOja3XcFTkUt+ScNIgtJWR9Lv/fXVtkxlCXtaB5Ltx9m6Y4Mlm4/zPdpOQBEhHjo2yaGK3o0J7ldLL1aNSIsOPDje/xnXQAc7yYwxklWLEypVJX8HTvJTUkhNyWFo8uWoGmHADgSCZtaBXH8Jx1o95OrGDhoHA3Coqvmi7P3w8JnOZgRTnpmeNV85hnsP3L8tOKw/ZDvmvuoUA9JbZsw+ryWJCfG0qNlI0KD7XH1pn6zYmEAf3HYupVj/uKQu3w5Rem+4pATHcLqhEI29AlCe3clecA1jEwcRpPwJlUfZP6jUFTAxh2Nq/yj9xzOZemOwyzdnsHSHYfZfdg3IB4dHkz/tk0Y278VyYmxdGvRkGCPFQdjirNiUU+p18uJLVvIXVasOGRmAlDQtBHbEsP4JimYda2UBu3bM6LdFdyXeDktG7R0LtTuJbDmXRj0W3K/rlxfsqqyKyP31FnD0h2H2Zt1HIDGkSH0b9uEWy5oS3JiE7o0b4jHLhM15qysWNQTWlRE3sZNp7qVcleswHvkCADBLVtwNKkTy5sf55PoreyJzqFFg0aMaPdL7ki8nE4xnZwP6C2C2b+Fhgkw6De8vu0DIPB+flVlW/qxYsUhg4PZvvl/YqNCSW7XhPGD25Hcrgmd4qPtHgJjysmKRR2lBQXkbdhAbkoKx1JSOL5iJd4c34BtSJvWNBh6CQc7NWV+k318nLOIo/kriAmLYVjba7ii3RX0iutVvXcMr3gdDqyFn74GoWVPCuj1Kt+n5Zx25nAox1cc4qPDSG4XS3JiEwa0a0L7uAZ297MxlWTFog7xFHqJOlrA7l/8ktzvvkNzfX3yoe3a0fCKK4jsl8TeDo34+OhSPtv5GWm5aURmR3JJ60sY0W4Eyc2TCQkKqf7guYdhwRPQdhB0G11qE69X2Xgg+9RZw7Idh8nM9V0r36JROIM6NiU5sQnJ7WJpGxtpxcGYKmbFog44sXUrGVNfocWuYwhQmJZG41GjiOzfj8ikJPaGHuPjHbOZvf1Fdi7dSXBQMINaDmJiv4lclHAREcFVc/NahS34A+Rlw+XPnLpvQlU4lt+ClxduP1UcsvN8d4C3ahLBJV2a+c8cYkmIibDiYIzDrFjUYrkrvyNj6lRyFixAIiLIaRRCdqNQhv5nJum56Xyycw6zl9zFuox1CEK/c/pxa7dbGdpmqDt3E5dm/xpY8Rr0+xU063Zq89pDt5F3tD0b9m4ksWkUI3o0J7ldE5ITY2nR2OXiZkw95GixEJHhwP8BHmCqqj5d4v3ngSH+1UggXlUb+99rDUwFWuGb4maEqu50Mm9toKrkfP01GS9P5fiKFXgaNaLpnXcSc+PP+PTWy1jTqojpn/+SZfuXoShdY7vy26TfMrztcJpFNXM7/ulU4bP7ISIGhjx0avOna/aTd7Q9kTFr+fJ/7qFZQ2fvtzDGlM2xYiEiHuAF4FIgFUgRkZmquuFkG1W9t1j7u4A+xT7iDeCPqjpPRBoAZ3qQQb2gBQVkz55NxtRXOPH99wS3aE6zhx+m0bXXsCpnE39d/2c+G5lHQTC0ztnPHb3u4PLEy0lslOh29DNb+z7sXgxX/c1XMPBNsTHpk3UEh6fTIG65FQpjaggnzyz6A1tVdTuAiEwHrgY2nKH9OOBRf9uuQLCqzgNQ1RwHc9Zo3txcsmZ8QMbrr1G4bz9hHTvS4pmnyR+SzEe7PuXjz8eyM3snUSFR9N3lod+OYH71xqya34d/4ih8/nto0Qf63HRq82P/2UB2XgGNWn2DiBuPjDPGlMbJYtES2FNsPRUo9YG3ItIGSAQW+Dd1ArJE5EP/9vnAg6pa5FzcmqUwM5PMt94m8+23KcrKIiKpL/G//x0r28Hz2z7im48fpUiLOC/+PH7R4xdc1uYyvhn1E4CaXygAFv4Z
cg7A2LfB/5S2OesO8J/V+7jv0k68tTvL5YDGmOKcLBal/WKd6X8VxwIzihWDYGAQvm6p3cC7wK3AK6d9gch4YDxA69atK5+4BijYu5eM16eRNWMGevw4DS6+mPxxV/BR5GZmbnuCQ7sPERsey83dbmZ0h9E1u5vpTA5thcUvQO+fQUISAJnH8vndx+vo1qIhd/ykPW+94XJGY8xpnCwWqfgGp09KAPadoe1Y4M4S+35XrAvrY2AAJYqFqr4EvASQlJRUq/ss8jZvIeOVqWR/OhtEiBoxnPXDOzL9xCJWbn8Aj3gYlDCI0R1GMyhhkDv3Q1QFVZjzoO9ZE0MfO7V58qwNZOXm88bP+xNi8zIZU+M4WSxSgI4ikgjsxVcQbijZSEQ6AzHA4hL7xohInKqmAxcDyx3M6gpV5fiKFWS8PJWcr79GIiPwXjucT/sH8WH2Qo7tmUObhm2457x7GNl+JHGRcW5Hrrwtc2DrPLjsj9AgHoD5Gw7y0Xd7ufuSjnRtUQ0T8xtjys2xYqGqhSIyAZiL79LZV1V1vYhMBpar6kx/03HAdFXVYvsWichvgS/E1wG/AnjZqazVTb1ecr780nf566pVSExj9o4dzKud97M2fy7hWeFc1vYyRncYTd9mfQMeg5h+l+8+hWFOhq+MgjzfWUXTzpB8OwBHcgt4+KO1nHtONHcO6eByQGPMmTh6n4WqzgZml9g2qcT6Y2fYdx7Q07FwLtD8fI7M+pSMV14hf9s2is5pypKxXZnaejvHPP+le3R3ft/x91yeeDnRoVX0fIiaZPHffY9Hvelj8Pi60Z74dAMZx/J59dZ+9swIY2owu4O7GhTlHCPr/fc5PG0ahQcOcLR1LB9e14TP2mUSHaGMbjeW0R1HV8/srm45kgrf/AW6XAXtffdhfrk5jRkrUpkwpAPdW55+R3nb/N+6kdIYcwZWLBxUePgwh998k8y3/403O5s9HRrx5hgPq9sd4fyWF/BMx9Fc3OpiQj2hbkd13ue/A/X6xiqA7LwCHvpgLZ2aNeCuS6z7yZiazooFMPbv630Lw6vm8/JTUzn86mtkzpiBFuTzXedQZozykNupEaM63MIzHa6mRYMWVfNltcGOb2D9R/CThyCmDQBPfrqRtKN5vHjTwHI9z9oY4w4rFlUob9MmDrz4L3LnzsMr8FV3+Oz8MLr2uZSJHUczoPkAgqSe9csXFcJnD0Dj1jDwbgAWbklnesoe7rioPb1aVf3jU2uars3tCi9T+1mxqCRV5diyZex44S8EL1vD8VCY10/YOLQDlyZdz9uJV9A4vO7/IJ7R8lcgbT1c/xaERJBzopCHPlxL+7go7hna0e10xpgAWbGoIPV6SZ39IfunvED01gPkRMIXQ8IJvvZKruo9lvtju9aOaTecdOwQfPlHaDcEzr0SgKdmb2TfkePMuOMCwkOs+8mY2sKKRTnl5x1jxbS/oP/+mJiDuRxrDIuvS6T9uF9yX6fL3X+QUE3yxeOQf+zUQ43+u/UQby/dza8GJdK3TYzb6Ywx5WDFIkDb9q5j9dRnaT5rOY2PetndPJjdEy5hwA2/YUiTdm7Hq3n2roSVb8L5d0JcZ46dKOT+D9aQ2DSK+y7r7Ha6avXa8NfcjlBllt72gdsRqkRdOQ6ovmOxYnEWuQW5zP9uBmnTXqP3twfocgJSOzdBb7uRi6/6JSGeWjo/k9O8Xpg9EaLi4KIHAPjTnE3szTrOe7efb91PxtRCVixKUFVWp69m3n/fJPL9eVy4qoD2XshM7kTLCRPpknSh2xFrvtXvwN7lMOpfEN6QJdszmLZ4F7cNbEu/tk3cTmeMqQArFn45Ycq09dNY+tU79J2/mxGbFPUEoZcPocP/TiSsXS2cCtwNeUdg/qOQ0A96juV4fhEPfLCG1k0imTisfnU/GVOX1PtikZabxrQBeUhhEVfd9wx37VSKIsNodNv1NLv1F4TEx7sdsXb5+k++q6B+9j4EBfHs3A3sysjlnV8N
IDI08H/d3r39fAdDGmPKq94Xi4hDOdz8eSFt0hViY4i77zZixo7FE10HJ/JzWvpmWDoFzrsZWvRh+c7DvPbfHdw0oA3nt491O50xphLqfbFocE4CzY4GkREXzAXzvyQoLMztSLWTKnx2P4RGwSWTyCso4v4Za2jZOIIHLz/X7XTGmEqq98VCQkNJaxkJYIWiMjb+B7Z/BZc/C1FN+cvsjWw/dIy3f5lMVFi9/9fMmFqvnk1UZByRnwtzH4H4bpD0c1buzmTqN9sZ1781Azs0dTudMaYK2P/ymcpb9H9wZDfc+il5XmHi+6s5p2E4D4+w7idj6go7szCVk7kLFv0Vul0DbS/k/774nm3px3jq2p5Eh9tNi8bUFVYsTOXMfRgkCC77A6v3ZPHi19sYk5TARZ3i3E5mjKlCjhYLERkuIptFZKuIPFjK+8+LyCr/a4uIZJV4v6GI7BWRfziZ01TQtgWwaRYMuo8TUecwccZq4qPDeeSKrm4nM8ZUMcfGLETEA7wAXAqkAikiMlNVN5xso6r3Fmt/F9CnxMc8AXztVEZTCUUF8NmDEJMIF9zFPxZsZcvBHF67tR+NIqz7yZi6xskzi/7AVlXdrqr5wHTg6rO0Hwe8c3JFRPoCzYDPHcxoKmrpi3BoMwx/mnUH8/jnV9u45ryWDDnX7ng3pi5ysli0BPYUW0/1b/sREWkDJAIL/OtBwHPAxLN9gYiMF5HlIrI8PT29SkKbABw9CF89DR0vI7/9ZUycsYbYqFAevbKb28mMMQ5xsliU9pg4PUPbscAMVS3yr/8vMFtV95yhve/DVF9S1SRVTYqLswHVajP/MSjMg2FP8c+vtrJxfzZ/HN2DRpHW/WRMXeXkfRapQKti6wnAvjO0HQvcWWz9fGCQiPwv0AAIFZEcVf3RIHlVmH6X7/+Ihznx4XXNnmWw+t8w8B42FsTzjwXfcnXvFlzatZnbyYwxDnKyWKQAHUUkEdiLryDcULKRiHQGYoDFJ7ep6s+KvX8rkORUoTDl4C3yPdQoujkFF97HxKmraRwZwmNXWfeTMXWdY91QqloITADmAhuB91R1vYhMFpGRxZqOA6ar6pm6qExN8d2bsH8VXPoELy4+yLq92fxhVHdiokLdTmaMcZij032o6mxgdoltk0qsP1bGZ7wOvF7F0Ux5Hc+ELyZD6wvYEj+Mv727iCt6Nmd49+ZuJzPGVAO7g9sE5sun4HgmhcOeZuKMNTQID2bySOt+Mqa+sGJhynZwPaRMhaSf8/L3DVideoTJV3cjtoFN6W5MfWHFwpydKsy+H8Ibsb3HPTw/fwvDu53DFT2s+8mY+sSmKDdnt/5D2PUt3iue575Zu4kM9fDEqO6IlHYbjTGmrrIzC3Nm+cfg89/DOT15NXcQ3+3O4vGR3YiLtu4nY+qbchcLEQkSkYZOhDE1zDfPQfZe9l4wmWfnbWVol2aM7NXC7VTGGBcEVCxE5N/+6cKjgA3AZhE567xNppbL2Ab//TvaYwz3/DeMsOAgnhxt3U/G1FeBnll0VdVsYBS++yZaAzc5lsq4b+7D4Anl3ZhfkbIzk0ev6kZ8w3C3UxljXBLoAHeIiITgKxb/UNUCEbE7rmuY14a/VjUftOVz2DKHzIG/47EvMxjSOY5rzit1wmBjTD0R6JnFi8BOIApY6J9SPNupUMZFhSdgzoNobEfu3JZMSFAQT17Tw7qfjKnnAioWqvo3VW2pqiPUZxcwxOFsppyuf3Ex17+4uOyGZ7Pkn3B4G1+0vZf/7jzK76/sSvNGEVUT0BhTawU6wH23f4BbROQVEVkJXOxwNlPdsvfB18+S224Yv06JZXCnOK5LSnA7lTGmBgi0G+rn/gHuy4A44DbgacdSGXfMm4R6C3k4dxxBIjxl3U/GGL9Ai8XJX4wRwGuquprSn4Rnaqtd/4W177O+7S18vDOUh0acS8vG1v1kjPEJtFisEJHP8RWLuSISDXidi2WqlbcIZt9PYXRLbt06iAva
x3JD/9ZupzLG1CCBXjr7C6A3sF1Vc0UkFl9XlKkLlr8KB9fyr9jfkauhPHNtT+t+MsacJqBioapeEUkAbvD/iHytqv9xNFk1qrL7E2qjYxmw4A8cjO3Pc3u7MPnqc2nVJNLtVMaYGiagYiEiTwP9gLf9m34tIheo6kOOJTPVY8ET6Imj3H78epITY7kxuY3biYwxNVCgYxYjgEtV9VVVfRUYDlxR1k4iMlxENovIVhF5sJT3nxeRVf7XFhHJ8m/vLSKLRWS9iKwRkevLc1AmQPtXoyteZ16DkWz2JvCnn/YkKMi6n4wxP1ae51k0Bg77lxuV1VhEPMALwKVAKpAiIjNVdcPJNqp6b7H2dwF9/Ku5wM2q+r2ItMA3wD5XVbPKkdecjf+hRidCY/ht+ggmXtmZNrFRbqcyxtRQgRaLp4DvRORLfJfMDgbK6oLqD2xV1e0AIjIduBrfrLWlGQc8CqCqW05uVNV9IpKG7/4OKxZVZc17sGcJT3IHndsmcOsFbd1OZIypwQId4H5HRL7CN24hwAOqeqCM3VoCe4qtpwLJpTX0zzWVCCwo5b3+QCiwLZCsJgAnjqLzJrEztDPv5Q7ms5/2su4nY8xZnbVYiMh5JTal+v9sISItVHXl2XYvZduZZqodC8xQ1aIS398ceBO4RVV/dF+HiIwHxgO0bm33BQTs6z8hOQe498T/ct/lXUhsat1PxpizK+vM4rmzvKecfX6oVKBVsfUEYN8Z2o4F7iy+wf80vk+B36nqklIDqL4EvASQlJRkU6YH4tD36JJ/8QlDkFZJ/PzCRLcTGWNqgbMWC1WtzMyyKUBHEUkE9uIrCDeUbCQinYEYYHGxbaHAR8Abqvp+JTKY4lTRzx4gj1CeKbyeN3/aC491PxljAhDofRbXlLL5CLBWVdNK20dVC0VkAjAX8ACvqup6EZkMLFfVmf6m44Dpqlr8zGAMvkH0WBG51b/tVlVdFUhecwabZyPbvuDZgpu4+dL+dIhv4HYiY0wtUZ7pPs4HvvSv/wRYAnQSkcmq+mZpO6nqbHyPYS2+bVKJ9cdK2e8t4K0As5lAFByn6LMH2UkCq875Ke8Nsu4nY0zgAr0pzwt0UdVrVfVaoCtwAt/VTQ84Fc5Uof/+A88YB/rPAAAaj0lEQVSR3TxeeCtPXdeXYE+g/+iNMSbwYtFWVQ8WW08DOqnqYaCg6mOZKpW1h6KFf+bTov70v3gUnc+JdjuRMaaWCbQb6hsRmQWcHGz+Kb5ncUdhN8rVePmfPYy3yMv7Te7g5Yvaux3HGFMLBVos7gSuAS7Ed//ENOAD/6C0PYu7Jtv+NaGbZ/J84XU8MPZSQqz7yRhTAYHewa0i8i2Qj+/+imUlrl4yNVFRATkf30emN47gQb+mS/OGbicyxtRSgV46OwZ4FvgK35nF30VkoqrOcDCbKadJGRP9S98CkLtoCg2yv+e5Bg/z0CXd3QtmjKn1Au2GegTod/KeChGJA+YDVixqqpx05KunWejtybU33E5osHU/GWMqLtBfkKASN99llGNf44J9HzxIcNFxtp33O7onNHY7jjGmlgv0zGKOiMwF3vGvX0+Jm+1MzZGzbSktdszg/bDR3HDlULfjGGPqgEAHuCeKyLXAQHxjFi+p6keOJjMVo8rhGXeTq43pMvYJwoI9bicyxtQBAT8pT1U/AD5wMIupAlmFwXQrWMesDo9yZbtWZe9gjDEBKOt5Fkcp/RkUgu+KWrsWswY54fVwrncb64LPZej1v3Y7jjGmDilrinKbF6IWOZbvpbHkkHbFnwkPLc/j1Y0x5uzsiqY6Yt03nzAoaA0L6UOX8wa5HccYU8dYsagDjmVnErvgPnZrPI1DisrewRhjysmKRR2w7o3f0Mx7iK1BiYQG/ehR5cYYU2lWLGq59Ys+JfnQhyxrNoZmIbluxzHG1FFWLGqx3JwjNJ5/L6lyDr1uec7tOMaYOszRYiEiw0Vks4hsFZEHS3n/eRFZ5X9tEZGs
Yu/dIiLf+1+3OJmztlr7xn201IMcueyvRETZhWvGGOc4dn2liHiAF4BLgVQgRURmquqGk21U9d5i7e8C+viXmwCPAkn47vNY4d8306m8tc3GpXPpd3AGS+OuJfn8y92OY4yp45y8GL8/sFVVtwOIyHTgamDDGdqPw1cgAIYB8/yPbUVE5gHD+WFuqnrt+LGjNJhzDweC4ul+y19ObZ8c+ywA77oVzBhTZznZDdUS2FNsPdW/7UdEpA2QCCwo77710eo3JtJK93H4kj8TFW0zyhpjnOdksZBStp3p6XpjgRmqevImgYD2FZHxIrJcRJanp6dXMGbtsillPv0PTGdp7Ci6XzjS7TjGmHrCyWKRChSfyS4B2HeGtmM5vYspoH1V9SVVTVLVpLi4uErGrfnyjh8j4rO7SZOmdLvlr27HMcbUI04WixSgo4gkikgovoIws2QjEekMxACLi22eC1wmIjEiEgNc5t9Wr333xoO08aaSPuRPNGgY43YcY0w94tgAt6oWisgEfD/yHuBVVV0vIpOB5ap6snCMA6arqhbb97CIPIGv4ABMPjnYXV9tWfkV/fe9ybImV9L/omvcjmOMqWccnZpUVWdT4ol6qjqpxPpjZ9j3VeBVx8LVIifycgmdNYFD0oRzb/mb23GMMfWQ3cFdC6x882Haevdw4KJnaNg41u04xph6yIpFDbd11Tf0S53GskaX02vIdW7HMcbUU1YsarD8E3l4Zk4gUxrR+Za/ux3HGFOPWbGowVa89TsSvTvZe+FTNGpS9y8NNsbUXFYsaqhta5eQtPtVUhpeSu+h49yOY4yp56xY1EAF+Sfg4//hiETT8eYX3I5jjDFWLGqi5W9Pon3Rdnaf/wcaN23mdhxjjHH2PgtTfjs2pNB358usaDiEvsNucjuOMbVCQUEBqamp5OXluR2lxgoPDychIYGQkJAK7W/FogYpLMin8MM7yJEoEm+y7idjApWamkp0dDRt27ZFpLR5SOs3VSUjI4PU1FQSExMr9BnWDVWDpPz7cToWbmVH/8k0ibcZ2Y0JVF5eHrGxsVYozkBEiI2NrdSZlxWLGmLXphX03T6FlVGD6TviNrfjGFPrlLdQXP/iYq5/cXHZDeuIyhZSKxY1QFFhIXkz/odjEkHrm/7pdhxjTAU0aNDg1PLw4cNp3LgxV155Zalt77zzTnr37k3Xrl2JiIigd+/e9O7dmxkzZpTrO1euXMmcOXMqlTtQNmZRA6RMf4IBhZtZnvQsSee0KnsHY0yNNnHiRHJzc3nxxRdLff+FF3xjkjt37uTKK69k1apVFfqelStXsm7dOoYPH17hrIGyMwuX7fl+Nb2/f4HvIi+g74hfuh3HGFMFLrnkEqKjoyu07/fff8+wYcPo27cvgwcPZsuWLQBMnz6d7t2706tXL4YMGcLx48eZPHkyb7/9doXOSsrLzixcVFRYSM57d9BQQml14xQkyGq3MZX1+H/Ws2FfdpntNuz3tQlk3KJri4Y8elW3SmcLxPjx45k6dSrt27dn0aJFTJgwgc8//5zHH3+cr776imbNmpGVlUVERASTJk1i3bp1/PWvzj8504qFi1Lee4oBBRtI6fMU/Vq0qfTnvXv7+VWQyhjjlqysLJYsWcK11157althYSEAAwcO5Oabb+a6667jmmuq/wFoVixckrp1Hb02/43VkckkXXWH23GMqTMCPQM4eUZRk/4nS1Vp2rRpqWMYL7/8MkuXLmXWrFn06tWLNWvWVGs26/dwgbeoiOz37qBAgml+44vW/WSMASAmJobmzZvz0UcfAeD1elm9ejUA27dvZ8CAATzxxBPExMSwd+9eoqOjOXr0aLVks18pF6S8/ye65q9lU6+HiG9ZsbspjTE116BBg7juuuv44osvSEhIYO7cuQHvO336dKZMmUKvXr3o1q0bs2bNAuDee++lR48e9OjRg6FDh9K9e3cuvvhiVq9eTZ8+fWr3ALeIDAf+D/AAU1X16VLajAEe
AxRYrao3+Lf/CbgCX0GbB9ytqupk3uqwb8cmemx8njURSfS7eoLbcYwxVSQnJ+fU8jfffBPQPm3btmXdunWnbWvXrl2pxWXmzJk/2hYXF8fy5cvLmbRiHCsWIuIBXgAuBVKBFBGZqaobirXpCDwEDFTVTBGJ92+/ABgI9PQ3/Ra4CPjKqbzVQb1FZL4znoYEEf8z634yxk01aayiNnDy16o/sFVVt6tqPjAduLpEm18BL6hqJoCqpvm3KxAOhAJhQAhw0MGs1SLlg7/QLX8163vczzmtOrgdxxhjAuZksWgJ7Cm2nurfVlwnoJOILBKRJf5uK1R1MfAlsN//mquqG0t+gYiMF5HlIrI8PT3dkYOoKvt3babbuj+zNqwP/a+5x+04xhhTLk4Wi9JmrSo55hAMdAR+AowDpopIYxHpAHQBEvAVmItFZPCPPkz1JVVNUtWkuLia+4xq9Xo59O87EJTYcdb9ZIypfZz81UoFik90lADsK6XNJ6paoKo7gM34isdoYImq5qhqDvAZMMDBrI5K+ehv9DixkrVdf0uLtp3djmOMMeXmZLFIATqKSKKIhAJjgZLD+R8DQwBEpCm+bqntwG7gIhEJFpEQfIPbP+qGqg0Opm6jy5qnWR/ak34/vc/tOMaYk167wvcyAXGsWKhqITABmIvvh/49VV0vIpNFZKS/2VwgQ0Q24BujmKiqGcAMYBuwFliN75La/ziV1Snq9XLgrdvx4KXx2JcI8njcjmSMcUh1T1H+0Ucf8eyzz1Y6d6Acvc9CVWcDs0tsm1RsWYHf+F/F2xQBtzuZrTosn/lP+uWlsOTc+xnQrovbcYwx1aSqpigvLCwkOLj0n+nRo0dXTdgA2UirQ9L37aTzqifZENKd/mMedDuOMaYaVWaK8gsvvJBHHnmEwYMH849//INPPvmE5ORk+vTpw2WXXUZamu8Og6lTp3LPPb4rK2+88UbuvvtuLrjgAtq1a3dqupCqZBMJOkC9Xva+dQfnaj7R10+x7idjqtNnD8KBtWW3O+CfiC+QcYtzesDlP5qAwjHZ2dksXLgQgMzMTEaOHImIMGXKFJ577jmeeeaZH+2TlpbGokWLWLt2LWPGjKnyMw8rFg5YMeslknIXs6TTbxjQoYfbcYwxtczYsWNPLe/evZsxY8Zw4MABTpw4QadOnUrdZ9SoUYgIPXv2ZO/evVWeyYpFFTt0YDcdV05mU3AX+l3/iNtxjKl/Aj0DOHlGcdunzmWpoKioqFPLd955Jw8//DAjRoxg/vz5PP106ccXFhZ2atmJafRszKIKqdfLnjf/h3DNJ+K6KXjOMDBljDGBOnLkCC1btkRVmTZtmms5rFhUoZWfvUqfY9/yXfv/oU3n3m7HMca4pDJTlJf02GOPMXr0aC666CKaNWtWhSnLR+rArN8AJCUlaXVN1VuajIOpBP3rfNKDz6HdA4sIDgl1LYsx9c3GjRvp0qWcl6fX4G4op5T29yQiK1Q1qax9rZ+kiux683/prrkcuXaKFQpjaoN6VCSqgnVDVYGVc17nvJyvWZl4O2279HU7jjHGVDkrFpWUmb6fNksmsdXTnr43POp2HGOMcYQVi0ra9sadRGsOQaP/RUhoWNk7GGNMLWTFohK++/wtko5+wYo2v6Rd92S34xhjjGOsWFTQkYyDtPrvI2zzJJJ04xNuxzHGlNNtc27jtjm3uR2j1rBiUUFb3phAIz0KV//Tup+MMaemKF+1ahXnn38+3bp1o2fPnrz77rs/alsVU5QDrFy5kjlz5lRJ/rLYpbMVsHrBdPod+ZwlrX7BgJ4XuB3HGFODREZG8sYbb9CxY0f27dtH3759GTZsGI0bNz7VJtApysuycuVK1q1bx/Dhw6sk+9nYmUU5Hck8RPOFD7EjqA3n3fSk23GMMTVMp06d6NixIwAtWrQgPj6e9PT0gPf//vvvGTZsGH379mXw4MFs2bIFgOnTp9O9e3d69erFkCFDOH78OJMnT+btt9+u
0FlJedmZRTltnnYX52kWR66aRmhYuNtxjDElPLPsGTYd3lRmu5NtAhm3OLfJuTzQ/4FyZ1m2bBn5+fm0b98+4H3Gjx/P1KlTad++PYsWLWLChAl8/vnnPP7443z11Vc0a9aMrKwsIiIimDRpEuvWreOvf/1rubOVlxWLcljz5Qz6Z81mcctbOL/PYLfjGGNqsP3793PTTTcxbdo0goIC68TJyspiyZIlXHvttae2FRYWAjBw4EBuvvlmrrvuOq655hpHMp+No8VCRIYD/wd4gKmq+qO5dUVkDPAYoPietX2Df3trYCrQyv/eCFXd6WTeszl65DDNvr6fXUGt6HPTU27FMMaUIdAzgJNnFK8Nf63KM2RnZ3PFFVfwhz/8gQEDBgS8n6rStGnTUscwXn75ZZYuXcqsWbPo1asXa9asqcrIZXJszEJEPMALwOVAV2CciHQt0aYj8BAwUFW7AfcUe/sN4FlV7QL0B9KcyhqIDdPupqkeJm/E3wiPiCp7B2NMvZSfn8/o0aNPnQWUR0xMDM2bNz/1WFSv18vq1asB2L59OwMGDOCJJ54gJiaGvXv3Eh0dzdGjR6v8GErj5AB3f2Crqm5X1XxgOnB1iTa/Al5Q1UwAVU0D8BeVYFWd59+eo6q5DmY9q7ULPyH58ExSmt9A56SL3YphjKkF3nvvPRYuXMjrr79+6pLY8lztNH36dKZMmUKvXr3o1q0bs2bNAuDee++lR48e9OjRg6FDh9K9e3cuvvhiVq9eTZ8+fWr1AHdLYE+x9VSg5G3OnQBEZBG+rqrHVHWOf3uWiHwIJALzgQdVtcjBvKXKyc6k6YL72CMt6H3zn6r7640xtUROTg4AN954IzfeeGNA+7Rt25Z169adtq1du3alPv9i5syZP9oWFxdHdT2awcliIaVsK/nwjGCgI/ATIAH4RkS6+7cPAvoAu4F3gVuBV077ApHxwHiA1q1bV13yYta/8Rv66SG2jHifVpENHPkOY0z1c2Ksoi5zshsqFd/g9EkJwL5S2nyiqgWqugPYjK94pALf+buwCoGPgfNKfoGqvqSqSaqaFBcXV+UHsG7Rf0g+9CHLzrmec/tfWuWfb4wxtYWTxSIF6CgiiSISCowFSp5HfQwMARCRpvi6n7b7940RkZMV4GJgg4NZfyQ35wgx8+8jVZrT6+Y/V+dXG2NMjeNYsfCfEUwA5gIbgfdUdb2ITBaRkf5mc4EMEdkAfAlMVNUM/9jEb4EvRGQtvi6tl53KWpo10+6jpR4k+7LniYiKrs6vNsaYGsfR+yxUdTYwu8S2ScWWFfiN/1Vy33lATyfzncmGJXPonzaDpfE/Jfn8y92IYIwxNYrNDVXC8WNHaTj3Hg4ExdPjlr+4HccY45BdN93MrptudjtGrWHFooTVb0wkQfdzeOhzRDZo5HYcY0wtUd1TlH/00Uc8++yzVZa/LDY3VDGbls2j/4HpLG06iuSBV7kdxxhTC1XlFOWFhYUEB5f+Mz169OiqD38WViz88nJziJxzN2nSlG63OD+DozGmburUqdOp5eJTlBcvFmdz4YUXctFFF/HNN99wzTXXkJiYyJNPPkl+fj5xcXG89dZbxMfHM3Xq1FMzzt54443ExsaSkpLCgQMHeO6556q8mFixAK5/cTFXpk3hJu9e1l78Ouc0jHE7kjGmgg48+SQnNpY9RXneJl+bQMYtwrqcyzkPP1zuLBWZohx8ExEuXLgQgMzMTEaOHImIMGXKFJ577jmeeeaZH+2TlpbGokWLWLt2LWPGjLFi4YQWx9ZzQ+EnLGtyJf0HV++pnTGmbqrIFOUnjR079tTy7t27GTNmDAcOHODEiROnnbkUN2rUKESEnj17snfv3kplL029LxYn8nL5ddazpEtjzr3lb27HMcZUUqBnACfPKNq8+UaVZ6joFOUnRUX9MLP1nXfeycMPP8yIESOYP38+Tz/9oyc9ABAWFnZq2XdXQtWq91dDZRzwzXW4NqgzDRvHupzG
GFPbVWaK8tIcOXKEli1boqpMmzatChJWTL0/s2jRtjNjm09Fg4Kx2Z+MMZV1coryjIwMXn/9dYBT05VXxGOPPcbo0aNJSEigf//+7N+/vwrTBk6cOF1xQ1JSklZ0qt7rX1wMwLu3n1+VkYwx1WTjxo106dKlXPs42Q1VU5X29yQiK1Q1qax96/2ZBViRMKY+qk9FoirU+zELY4wxZbNiYYwxpkxWLIwxdUJdGX91SmX/fqxYGGNqvfDwcDIyMqxgnIGqkpGRQXh4eIU/wwa4jTG1XkJCAqmpqaSnp7sdpcYKDw8nISGhwvtbsTDG1HohISEkJia6HaNOs24oY4wxZbJiYYwxpkxWLIwxxpSpzkz3ISLpwK5KfERT4FAVxXFTXTkOsGOpqerKsdSV44DKHUsbVY0rq1GdKRaVJSLLA5kfpaarK8cBdiw1VV05lrpyHFA9x2LdUMYYY8pkxcIYY0yZrFj84CW3A1SRunIcYMdSU9WVY6krxwHVcCw2ZmGMMaZMdmZhjDGmTFYs/ETkCRFZIyKrRORzEWnhdqaKEpFnRWST/3g+EpHGbmeqKBG5TkTWi4hXRGrdlSsiMlxENovIVhF50O08lSEir4pImoiscztLZYhIKxH5UkQ2+v/dutvtTBUlIuEiskxEVvuP5XHHvsu6oXxEpKGqZvuXfw10VdU7XI5VISJyGbBAVQtF5BkAVX3A5VgVIiJdAC/wIvBbVa3Ys3NdICIeYAtwKZAKpADjVHWDq8EqSEQGAznAG6ra3e08FSUizYHmqrpSRKKBFcCo2vjPRUQEiFLVHBEJAb4F7lbVJVX9XXZm4XeyUPhFAbW2iqrq56pa6F9dAlR8qkmXqepGVd3sdo4K6g9sVdXtqpoPTAeudjlThanqQuCw2zkqS1X3q+pK//JRYCPQ0t1UFaM+Of7VEP/Lkd8uKxbFiMgfRWQP8DNgktt5qsjPgc/cDlFPtQT2FFtPpZb+KNVVItIW6AMsdTdJxYmIR0RWAWnAPFV15FjqVbEQkfkisq6U19UAqvqIqrYC3gYmuJv27Mo6Fn+bR4BCfMdTYwVyLLWUlLKt1p6x1jUi0gD4ALinRM9CraKqRaraG18PQn8RcaSLsF49z0JVhwbY9N/Ap8CjDsaplLKORURuAa4ELtEaPjBVjn8utU0q0KrYegKwz6Usphh///4HwNuq+qHbeaqCqmaJyFfAcKDKL0KoV2cWZyMiHYutjgQ2uZWlskRkOPAAMFJVc93OU4+lAB1FJFFEQoGxwEyXM9V7/kHhV4CNqvoXt/NUhojEnbzaUUQigKE49NtlV0P5icgHQGd8V97sAu5Q1b3upqoYEdkKhAEZ/k1LavGVXaOBvwNxQBawSlWHuZsqcCIyAvgr4AFeVdU/uhypwkTkHeAn+GY4PQg8qqqvuBqqAkTkQuAbYC2+/94BHlbV2e6lqhgR6QlMw/fvVxDwnqpOduS7rFgYY4wpi3VDGWOMKZMVC2OMMWWyYmGMMaZMViyMMcaUyYqFMcaYMlmxMKYcRCSn7FZn3X+GiLTzLzcQkRdFZJt/xtCFIpIsIqH+5Xp106yp2axYGFNNRKQb4FHV7f5NU/FNzNdRVbsBtwJN/ZMOfgFc70pQY0phxcKYChCfZ/1zWK0Vkev924NE5J/+M4VZIjJbRH7q3+1nwCf+du2BZOB3quoF8M9O+6m/7cf+9sbUCHaaa0zFXAP0Bnrhu6M5RUQWAgOBtkAPIB7f9Nev+vcZCLzjX+6G7270ojN8/jqgnyPJjakAO7MwpmIuBN7xz/h5EPga34/7hcD7qupV1QPAl8X2aQ6kB/Lh/iKS7384jzGus2JhTMWUNv342bYDHAfC/cvrgV4icrb/BsOAvApkM6bKWbEwpmIWAtf7HzwTBwwGluF7rOW1/rGLZvgm3jtpI9ABQFW3AcuBx/2zoCIiHU8+w0NEYoF0VS2orgMy5mysWBhTMR8Ba4DVwALgfn+30wf4nmOxDt9z
w5cCR/z7fMrpxeOXwDnAVhFZC7zMD8+7GALUullQTd1ls84aU8VEpIGq5vjPDpYBA1X1gP95A1/61880sH3yMz4EHqrFzx83dYxdDWVM1ZvlfyBNKPCE/4wDVT0uIo/iew737jPt7H9Q0sdWKExNYmcWxhhjymRjFsYYY8pkxcIYY0yZrFgYY4wpkxULY4wxZbJiYYwxpkxWLIwxxpTp/wEYHOWY8qzwtwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot the cross-validation score curves for the second grid search (grid2):\n",
    "# mean score with std-dev error bars, on train and test folds, per penalty type.\n",
    "\n",
    "# Mean/std of the test-fold scores from the fitted GridSearchCV object\n",
    "test_means2 = grid2.cv_results_['mean_test_score']\n",
    "test_stds2 = grid2.cv_results_['std_test_score']\n",
    "# Mean/std of the training-fold scores\n",
    "train_means2 = grid2.cv_results_['mean_train_score']\n",
    "train_stds2 = grid2.cv_results_['std_train_score']\n",
    "\n",
    "# Reshape the flat cv_results_ arrays to (n_Cs, n_penalties) so that\n",
    "# column i holds the curve for penalty penaltys2[i]\n",
    "n_Cs2 = len(Cs2)\n",
    "number_penaltys2 = len(penaltys2)\n",
    "test_scores2 = np.array(test_means2).reshape(n_Cs2, number_penaltys2)\n",
    "train_scores2 = np.array(train_means2).reshape(n_Cs2, number_penaltys2)\n",
    "test_stds2 = np.array(test_stds2).reshape(n_Cs2, number_penaltys2)\n",
    "train_stds2 = np.array(train_stds2).reshape(n_Cs2, number_penaltys2)\n",
    "\n",
    "# C spans orders of magnitude, so plot against log10(C)\n",
    "x_axis2 = np.log10(Cs2)\n",
    "for i, penalty in enumerate(penaltys2):\n",
    "    plt.errorbar(x_axis2, test_scores2[:, i], yerr=test_stds2[:, i], label=penalty + ' Test')\n",
    "    plt.errorbar(x_axis2, train_scores2[:, i], yerr=train_stds2[:, i], label=penalty + ' Train')\n",
    "\n",
    "plt.legend()\n",
    "plt.xlabel('log(C)')\n",
    "# Bug fix: grid2 is scored by accuracy (see the markdown below: 正确率/score),\n",
    "# the previous 'logloss' label was copied from the log-loss grid search\n",
    "plt.ylabel('accuracy')\n",
    "plt.savefig('./LogisticGridSearchCV_c2.png')\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "上图给出了L1正则和L2正则下、不同正则参数C对应的模型在训练集和测试集上的正确率（score）。\n",
    "C很小的时候，正则约束较强，此时模型比较简单，在训练集和测试集上的表现都比较差，此时模型属于欠拟合状态。\n",
    "随着C的增加，训练误差在目标函数中的权重越来越大，使得模型越来越复杂，模型在训练集和测试集上拟合得越来越好。\n",
    "可以看出在训练集上C越大（正则越少）的模型性能越好；但在测试集上当C=0.1时性能最好（L1正则和L2正则均是）"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
