{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Pima Indians Diabetes Data Set----logistic回归\n",
    "\n",
    "数据说明：\n",
    "Pima Indians Diabetes Data Set（皮马印第安人糖尿病数据集） 根据现有的医疗信息预测5年内皮马印第安人糖尿病发作的概率。   \n",
    "\n",
    "数据集共9个字段: \n",
    "0列为怀孕次数；\n",
    "1列为口服葡萄糖耐量试验中2小时后的血浆葡萄糖浓度；\n",
    "2列为舒张压（单位:mm Hg）\n",
    "3列为三头肌皮褶厚度（单位：mm）\n",
    "4列为2小时血清胰岛素（单位：mu U/ml）\n",
    "5列为体重指数（体重（公斤）/ 身高（米）^2）\n",
    "6列为糖尿病家系作用\n",
    "7列为年龄\n",
    "8列为分类变量（0或1）\n",
    "\n",
    "数据链接：https://archive.ics.uci.edu/ml/datasets/Pima+Indians+Diabetes"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# 首先 import 必要的模块\n",
    "import pandas as pd \n",
    "import numpy as np\n",
    "\n",
    "from sklearn.model_selection import GridSearchCV  #模型参数调优\n",
    "\n",
    "#评价指标为logloss\n",
    "from sklearn.metrics import log_loss\n",
    "\n",
    "import matplotlib.pyplot as plt\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "数据文件路径和文件名"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>pregnants</th>\n",
       "      <th>Plasma_glucose_concentration</th>\n",
       "      <th>blood_pressure</th>\n",
       "      <th>Triceps_skin_fold_thickness</th>\n",
       "      <th>serum_insulin</th>\n",
       "      <th>BMI</th>\n",
       "      <th>Diabetes_pedigree_function</th>\n",
       "      <th>Age</th>\n",
       "      <th>Target</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.639947</td>\n",
       "      <td>0.866045</td>\n",
       "      <td>-0.031990</td>\n",
       "      <td>0.670643</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>0.166619</td>\n",
       "      <td>0.468492</td>\n",
       "      <td>1.425995</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>-0.844885</td>\n",
       "      <td>-1.205066</td>\n",
       "      <td>-0.528319</td>\n",
       "      <td>-0.012301</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>-0.852200</td>\n",
       "      <td>-0.365061</td>\n",
       "      <td>-0.190672</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1.233880</td>\n",
       "      <td>2.016662</td>\n",
       "      <td>-0.693761</td>\n",
       "      <td>-0.012301</td>\n",
       "      <td>-0.181541</td>\n",
       "      <td>-1.332500</td>\n",
       "      <td>0.604397</td>\n",
       "      <td>-0.105584</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>-0.844885</td>\n",
       "      <td>-1.073567</td>\n",
       "      <td>-0.528319</td>\n",
       "      <td>-0.695245</td>\n",
       "      <td>-0.540642</td>\n",
       "      <td>-0.633881</td>\n",
       "      <td>-0.920763</td>\n",
       "      <td>-1.041549</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>-1.141852</td>\n",
       "      <td>0.504422</td>\n",
       "      <td>-2.679076</td>\n",
       "      <td>0.670643</td>\n",
       "      <td>0.316566</td>\n",
       "      <td>1.549303</td>\n",
       "      <td>5.484909</td>\n",
       "      <td>-0.020496</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   pregnants  Plasma_glucose_concentration  blood_pressure  \\\n",
       "0   0.639947                      0.866045       -0.031990   \n",
       "1  -0.844885                     -1.205066       -0.528319   \n",
       "2   1.233880                      2.016662       -0.693761   \n",
       "3  -0.844885                     -1.073567       -0.528319   \n",
       "4  -1.141852                      0.504422       -2.679076   \n",
       "\n",
       "   Triceps_skin_fold_thickness  serum_insulin       BMI  \\\n",
       "0                     0.670643      -0.181541  0.166619   \n",
       "1                    -0.012301      -0.181541 -0.852200   \n",
       "2                    -0.012301      -0.181541 -1.332500   \n",
       "3                    -0.695245      -0.540642 -0.633881   \n",
       "4                     0.670643       0.316566  1.549303   \n",
       "\n",
       "   Diabetes_pedigree_function       Age  Target  \n",
       "0                    0.468492  1.425995       1  \n",
       "1                   -0.365061 -0.190672       0  \n",
       "2                    0.604397 -0.105584       1  \n",
       "3                   -0.920763 -1.041549       0  \n",
       "4                    5.484909 -0.020496       1  "
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#input data\n",
    "train = pd.read_csv(\"FE_pima-indians-diabetes.csv\")\n",
    "train.head()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 准备数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "y_train = train['Target']   \n",
    "X_train = train.drop([\"Target\"], axis=1)\n",
    "\n",
    "#保存特征名字以备后用（可视化）\n",
    "feat_names = X_train.columns \n",
    "\n",
    "#sklearn的学习器大多支持稀疏数据输入，使用稀疏数据模型训练会快很多\n",
    "#查看一个学习器是否支持稀疏数据，可以看fit函数是否支持: X: {array-like, sparse matrix}.\n",
    "#可自行用timeit比较稠密数据和稀疏数据的训练时间\n",
    "from scipy.sparse import csr_matrix\n",
    "X_train = csr_matrix(X_train)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 默认参数的Logistic Regression"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn.linear_model import LogisticRegression\n",
    "lr = LogisticRegression()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "logloss of each fold is:  [ 0.48797856  0.53011593  0.4562292   0.422546    0.48392885]\n",
      "cv logloss is: 0.476159709444\n"
     ]
    }
   ],
   "source": [
    "# 交叉验证用于评估模型性能和进行参数调优（模型选择）\n",
    "#分类任务中交叉验证缺省是采用StratifiedKFold\n",
    "#数据集比较大，采用5折交叉验证\n",
    "from sklearn.model_selection import cross_val_score\n",
    "loss = cross_val_score(lr, X_train, y_train, cv=5, scoring='neg_log_loss')\n",
    "#%timeit loss_sparse = cross_val_score(lr, X_train_sparse, y_train, cv=5, scoring='neg_log_loss')\n",
    "print ('logloss of each fold is: ',-loss)\n",
    "print ('cv logloss is:', -loss.mean())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Logistic Regression + GridSearchCV\n",
    "logistic回归的需要调整超参数有：C（正则系数，一般在log域（取log后的值）均匀设置候选参数）和正则函数penalty（L2/L1） 目标函数为：J = C* sum(logloss(f(xi), yi)) + penalty\n",
    "\n",
    "在sklearn框架下，不同学习器的参数调整步骤相同：\n",
    "\n",
    "设置参数搜索范围\n",
    "生成学习器实例（参数设置）\n",
    "生成GridSearchCV的实例（参数设置）\n",
    "调用GridSearchCV的fit方法"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fitting 5 folds for each of 14 candidates, totalling 70 fits\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=4)]: Done  10 tasks      | elapsed:  1.6min\n",
      "[Parallel(n_jobs=4)]: Done  70 out of  70 | elapsed:  1.6min finished\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "GridSearchCV(cv=5, error_score='raise',\n",
       "       estimator=LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,\n",
       "          intercept_scaling=1, max_iter=100, multi_class='ovr', n_jobs=1,\n",
       "          penalty='l2', random_state=None, solver='liblinear', tol=0.0001,\n",
       "          verbose=0, warm_start=False),\n",
       "       fit_params=None, iid=True, n_jobs=4,\n",
       "       param_grid={'C': [0.001, 0.01, 0.1, 1, 10, 100, 1000], 'penalty': ['l1', 'l2']},\n",
       "       pre_dispatch='2*n_jobs', refit=True, return_train_score='warn',\n",
       "       scoring='neg_log_loss', verbose=5)"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.model_selection import GridSearchCV  # GridSearchCV 网格搜索\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "\n",
    "#需要调优的参数\n",
    "# 请尝试将L1正则和L2正则分开，并配合合适的优化求解算法（slover）\n",
    "#tuned_parameters = {'penalty':['l1','l2'],\n",
    "#                   'C': [0.001, 0.01, 0.1, 1, 10, 100, 1000]\n",
    "#                   }\n",
    "penaltys = ['l1','l2']\n",
    "Cs = [ 0.001,0.01,0.1, 1, 10, 100, 1000]\n",
    "tuned_parameters = dict(penalty = penaltys, C = Cs)\n",
    "\n",
    "lr_penalty= LogisticRegression(solver='liblinear')\n",
    "# n_jobs 线程数和自身机器核数相对应，-1表示全部 verbose 可视化打印\n",
    "grid= GridSearchCV(lr_penalty, tuned_parameters,cv=5, scoring='neg_log_loss',n_jobs = 4,verbose=5)\n",
    "grid.fit(X_train,y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.476026746289\n",
      "{'C': 1, 'penalty': 'l1'}\n"
     ]
    }
   ],
   "source": [
    "# examine the best model\n",
    "print(-grid.best_score_)\n",
    "print(grid.best_params_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "#df=pd.DataFrame({\"columns\":list(feat_names),\"coefficient\":list(grid.best_estimator_.coef_[0])})\n",
    "#df.sort_values(by=['coefficient'],ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# plot CV误差曲线\n",
    "test_means = grid.cv_results_[ 'mean_test_score' ]\n",
    "test_stds = grid.cv_results_[ 'std_test_score' ]\n",
    "train_means = grid.cv_results_[ 'mean_train_score' ]\n",
    "train_stds = grid.cv_results_[ 'std_train_score' ]\n",
    "\n",
    "# plot results\n",
    "n_Cs = len(Cs)\n",
    "number_penaltys = len(penaltys)\n",
    "test_scores = np.array(test_means).reshape(n_Cs,number_penaltys)\n",
    "train_scores = np.array(train_means).reshape(n_Cs,number_penaltys)\n",
    "test_stds = np.array(test_stds).reshape(n_Cs,number_penaltys)\n",
    "train_stds = np.array(train_stds).reshape(n_Cs,number_penaltys)\n",
    "\n",
    "x_axis = np.log10(Cs)\n",
    "for i, value in enumerate(penaltys):\n",
    "    #pyplot.plot(log(Cs), test_scores[i], label= 'penalty:'   + str(value))\n",
    "    plt.errorbar(x_axis, -test_scores[:,i], yerr=test_stds[:,i] ,label = penaltys[i] +' Test')\n",
    "    plt.errorbar(x_axis, -train_scores[:,i], yerr=train_stds[:,i] ,label = penaltys[i] +' Train')\n",
    "    \n",
    "plt.legend()\n",
    "plt.xlabel( 'log(C)' )                                                                                                      \n",
    "plt.ylabel( 'logloss' )\n",
    "plt.savefig('LogisticGridSearchCV_C.png' )\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 上图给出了L1正则和L2正则下，不同正则参数C对应的模型在训练集和测试集上的logloss\n",
    "可以看出在训练集上C越大（正则越少）的模型性能越好，但在测试集上当C=1时性能最好（L1正则）"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 换正确率做评价指标"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.774739583333\n",
      "{'C': 0.1, 'penalty': 'l2'}\n"
     ]
    }
   ],
   "source": [
    "penaltys = ['l1','l2']\n",
    "Cs = [ 0.001,0.01,0.1, 1, 10, 100, 1000]\n",
    "tuned_parameters = dict(penalty = penaltys, C = Cs)\n",
    "\n",
    "lr_penalty= LogisticRegression(solver='liblinear')\n",
    "# 缺省scoring为正确率\n",
    "grid= GridSearchCV(lr_penalty, tuned_parameters,cv=5, scoring='accuracy')\n",
    "grid.fit(X_train,y_train)\n",
    "\n",
    "print(grid.best_score_)\n",
    "print(grid.best_params_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "E:\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('mean_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "E:\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('std_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEKCAYAAADjDHn2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xl8FfW9//HXJyEh7EtYZCdAUBYB\nNYKKG7hRtbhXaLVqb9Xeiteft+72umC1VW+Xe2+9ty61VmtFBRcUFHHfEUS2sAYQCCCEnRAgJPn8\n/pgDHkIghySTyfJ+Ph7nkZk535nzmYjnk+/nO/Mdc3dEREQOJSnqAEREpOZTshARkXIpWYiISLmU\nLEREpFxKFiIiUi4lCxERKVeoycLMRpjZIjPLMbM7yni/q5l9YGbfmNkcMzs37r07Y/stMrNzwoxT\nREQOzcK6z8LMkoHFwFlALjAdGO3u8+PaPAF84+7/Z2Z9gcnu3j22/AIwGOgIvAv0dvfiUIIVEZFD\nCrNnMRjIcfdl7l4IjAMuKNXGgeax5RbAmtjyBcA4d9/t7suBnNjxREQkAg1CPHYnYFXcei4wpFSb\n+4B3zOxGoAlwZty+X5bat9OhPqxNmzbevXv3SoQrIlL/fP311xvcvW157cJMFlbGttI1r9HAM+7+\nezM7EXjOzPonuC9mdh1wHUDXrl2ZMWNGJUMWEalfzGxFIu3CLEPlAl3i1jvzfZlpr38BXgJw9y+A\nNKBNgvvi7k+4e5a7Z7VtW25iFBGRCgozWUwHMs0sw8xSgVHAxFJtVgJnAJhZH4JkkRdrN8rMGppZ\nBpAJfBVirCIicgihlaHcvcjMxgBTgGTgaXfPNrOxwAx3nwj8CnjSzG4mKDNd7cHlWdlm9hIwHygC\nbtCVUCIi0Qnt0tnqlpWV5RqzEJG99uzZQ25uLrt27Yo6lBohLS2Nzp07k5KSst92M/va3bPK2z/M\nAW4Rkcjk5ubSrFkzunfvjllZ18zUH+7Oxo0byc3NJSMjo0LH0HQfIlIn7dq1i/T09HqfKADMjPT0\n9Er1spQsRKTOUqL4XmV/F0oWIiIxlz/+BZc//kXUYdRIShZ1yd/OC151QV06F6m3mjZtum95xIgR\ntGzZkvPPP7/MtjfccAODBg2ib9++NGrUiEGDBjFo0CDGjx9/WJ85c+ZM3n777UrFXRYNcIuIVINb\nb72VgoICHn/88TLff+yxxwD49ttvOf/885k1a1aFPmfmzJnMmzePESNGVDjWsqhnISJSDc444wya\nNWtWoX2XLFnCOeecw3HHHcepp57K4sWLARg3bhz9+/dn4MCBDBs2jJ07dzJ27Fief/75CvVKDkU9\nCxGp8+5/I5v5a7aV227+2qBNIuMWfTs2594f9qt0bIm47rrreOqpp+jZsyefffYZY8aM4Z133uH+\n++/nww8/pH379mzZsoVGjRpxzz33MG/ePP70pz9VaQxKFiJh2zv2cs2kaOOoAtkPnQxAv7s+jTiS\nyikpLAAgKbVxxJGUb8uWLXz55Zdccskl+7YVFRUBMHToUK64/BIuOv8cRv3shlDjULIQCVn22q0A\nVM/foFKW0j2AnWsXAtCow1H7bd/bo3jx+hOrJ7AEuDtt2rQpcwzjySef5ONJLzL53Q8ZOHAgc+bM\nCS0OjVmIiNRgrVq1okOHDrz66qsAlJSUMHv2bACWLVvG4OMGce9tN9GqVStWr15Ns2bN2L59e5XH\noWQhIlINTjnlFC677DLee+89OnfuzJQpUxLed9y4cfzlL39h4MCB9OvXjzfffBOAm2++meOHj+T4\n4SM588wz6d+/P8OHD2f27Nkcc8wxGuAWEakN8vPz9y1/8sknCe3TvXt35s2bt9+2Hj16lJlcJk6c\neEBJrW3btqE8CE7JQkQkpiaNVdQ0Kk
OJiEi5lCxERKRcShYiIlIuJQsRESmXkoWIyF6a7figQk0W\nZjbCzBaZWY6Z3VHG+380s1mx12Iz2xL33iNmlm1mC8zsv01PMRGRWmbvFOWzZs3ixBNPpF+/fgwY\nMIAXX3zxgLb1dopyM0sGHgPOAnKB6WY20d3n723j7jfHtb8ROCa2fBIwFBgQe/tT4DTgw7DiFREJ\nS+PGjXn22WfJzMxkzZo1HHfccZxzzjm0bNlyX5v6PEX5YCDH3Ze5eyEwDrjgEO1HAy/Elh1IA1KB\nhkAKsC7EWEVEQtO7d28yMzMB6NixI+3atSMvLy/h/Q81RXnWsB8y5MwLa/UU5Z2AVXHrucCQshqa\nWTcgA3gfwN2/MLMPgLWAAX929wVl7HcdcB1A165dqzR4EalD3roDvpu7bzU1NusspWed/S42EV8i\n4xZHHA0/+N1hh/LVV19RWFhIz549E97nUFOUv/XS32jftg27Gx1Ra6coL2uMwQ/SdhQw3t2LAcys\nF9AH6Bx7f6qZneruH+93MPcngCcAsrKyDnZsEZEaYe3atVx55ZX8/e9/JykpscJOeVOUX/tvd9T6\nKcpzgS5x652BNQdpOwqIP9OLgC/dPR/AzN4CTgA+LmNfEZFDK9UDKDzIFOVhPntk27ZtnHfeefzm\nN7/hhBNOSHi/+jBF+XQg08wyzCyVICFMLN3IzI4EWgHxj6ZaCZxmZg3MLIVgcPuAMpTUXdlrt+57\nDoRIbVdYWMhFF13ET3/6Uy677LLD2rfOT1Hu7kXAGGAKwRf9S+6ebWZjzWxkXNPRwDh3jy8jjQeW\nAnOB2cBsd38jrFhFRML00ksv8fHHH/PMM8/suyT2cK52qvNTlLv7ZGByqW33lFq/r4z9ioHrw4xN\nRCRse6cov+KKK7jiiisS2kdTlIscBvMSkimGkmJISo46HKkv6sBz0sOiZCE1R0kxfPsJzH2ZI/cs\nIJkSeKANNG0PzTpA846xnx2gWcf9fzZsFnX0InWakoVEyx3WzIS542HeBMhfB6nN2JbUgl3WiA5D\nfwLb1sL2NbBxaZBMdpUx8J3aLJY84pNKqeTStJ16KfWMu6OZggL7DwsfPiULicaGJTD35eC1aRkk\np0Lvc+DoyyDzbNY8ehYAHYb/+sB9Cwtg+1rYtqbsn8s/gfzvoKRo//0sOeilHDSpxH42bFoNvwAJ\nW1paGhs3biQ9Pb3eJwx3Z+PGjaSlpVX4GEoWUn22rQl6D3NfhrWzwZIg41Q45Vdw1PnQqGX5x4Dg\nrtv0nsHrYEpKYEde0CPZ2zPZtvb7pLIxJ0gqu8vopTRsXka5q1RSadK23F7KzsJiZq7czKRdJ7OT\nVDpNWURaShJpKck0TEkmrUGwHLxiyw2S49p8vy0l2er9F97h6ty5M7m5uWVOq7Fn63cApGyp/ffy\nJnouaWlpdO7c+ZBtDkXJQsJVsAkWTAzKTN9+Cjh0Og5G/A76XQTNjgjnc5OSoFn74NXxmIO3250P\n278rI6nEfm74KHg/mFwg7vgN4sZSgqSyu3F7lu1uzuwtjfh0fSoff9eAbcUNSWIoqRRR+GEOJRX8\nbkoyvk8sDeISTkrSfglmb+Jp2KB0Eir1fqnElLbfsZJp2CCJpKTanZxSUlLIyMgo873sh64FoM9d\nn1ZnSKGornNRspCqV1gAi98KEsSSqVCyB9IzYdhd0P+SQ/cIqlvDptCwF7TpdfA2JcWQv/6A3knh\nltXk562kZPkcGu9+l8a+kz4E89SMAkiBPY2bU7SnEDejUce+uAclgRJ3SvYtU+b6/st728SWi5yS\nPaX2LwEnbp8SP+j8OvF2x16l+1hmkGRBjyYptpy0ZxeGs/g3x1fs911DJBfuBKj15wHBuWwgwV55\nJShZQKi3+NcbxXtg2YdBiWnhJCjMD0o4J/wiGIc4YkDw7VMbJSVD8w5saZDOV1s6MW39JqYt38j8\nNd
socUhNTmJglxac3DWNoe0K6d9sB2k7g+SSsm0tO2e8iFGCNWq1b8K06hpm35tkit0pKfk+kRSX\nfJ+wSkpi78cSTlnvl7hTVAJ79mzGgQYNWlTTGYSjqDBIo7X9PCA4lyIL/6tcyUIqrqQEcr8KEkT2\nq1CwEdJaBr2Hoy+DbkODclAttTF/N18t38S05Zv4ctlGFq3bjjs0bJDEMV1bcuPwTIb0aM2xXVuR\nlnLwr/9Vs78EoN8VE6or9H2MIDFVVXLKfuhkAPrd8W4VHTEadeU8IDiXthSV37CSlCzk8K3Ljl3J\nNAG2roQGjeDIHwQJotcZ0KBh1BFWyPrtu5i2LOg1TFu2iSXrg7tvG6Ukc1y3Vpx3dAeG9EhnYJcW\nNGygS3ClflGykMRsXgHzxgfjEOvnB5eh9hwOw38NR51bK2+KW7t1537JYdmGHQA0SU0mq3trLjq2\nE0My0jm6UwtSG9TeHpJIVVCykIPLz4P5rwW9iFXTgm1dToBz/zO4kqlJm2jjO0yrNhUwbfkmpi3b\nyLTlm1i5KXgATrO0Bgzu3ppRg7swJCOdfh2b0yBZyUEknpKF7G/39mCAeu7LsPSD4JLRdv3gjHuD\nsYhW3aKOMCHuzoqNBft6DdOWb2L1luAKmJaNUxjcvTVXndSdIRmt6dOhOcm1/DJRkbApWQgU7Yac\nd4MEsehtKNoJLbrC0Jvg6Euhfb+oIyyXu7M0b0dcctjIum27AUhvksqQHq257tQeDOnRmt7tmtX6\newhEqpuSRX1VUgwrPgsSxPzXg/mWGqfDMVcEA9VdBtfoS11LSpwl6/P36zlsyA+SQ7tmDRnSI50h\nGa05oUdrerZtqrufRSpJyaI+cQ+m2Zj7Msx7JbjJLLVpMNXG0ZdBj9MgOSXqKMtUUuIs+G7bvl7D\nV8s3sblgDwAdW6RxSmYbhmS0ZkiPdLqnN1ZyEKliShb1wcalwVVMc1+GjUsgKQUyz4ajH4TeI4K5\nlmqYEoelJUfw+cfL9iWHbbuCa8m7tG7EGX3ax3oO6XRu1UjJQSRkShZ11fbvgt7D3JeDKcAx6H4y\nnHQj9B0JjVpFHeEh/X7nSD4s6geTF5DRpgnnHt2BIT1aMyQjnY4tG0Udnki9E2qyMLMRwH8R3ED6\nlLv/rtT7fwSGxVYbA+3cvWXsva7AU0AXwIFz3f3bMOOt9UqKoGAD/H0kLP8YcOgwCM5+EPpfHMyW\nWgtMmrOWD4v6cVHqNO645S7aN6/4tMoiUjVCSxZmlgw8BpwF5ALTzWyiu8/f28bdb45rfyMQPz3o\ns8CD7j7VzJoCJWHFWqu5w8ovYOZzwdQbXhK8Trs9uJKpTWbUER6Wjfm7uef1eWQmreWahh/QvvnY\nqEMSEcLtWQwGctx9GYCZjQMuAOYfpP1o4N5Y275AA3efCuDu+SHGWTttXwez/wnf/CN4NkNqM2jS\nLpg2+/qPavSVTIdy3xvz2bZrD2MbvUmy1f5nDYjUFWEmi07Aqrj1XGBIWQ3NrBuQAbwf29Qb2GJm\nr8S2vwvc4V76oQL1THERLHkHvnkOFk8JbpjreiKc/O/Q70J4/kdBu1qaKN6e9x1vzF7Dr87qTfdp\nG6IOR0TihJksyvrGOtifiqOA8XHJoAFwCkFZaiXwInA18Nf9PsDsOuA6gK5du1Y+4ppqQ06QIGa/\nEDyjukk7OGkMHHNlrSszHczmHYX8+rV59OvYnF+c3pPF06KOSETihZkscgkGp/fqDKw5SNtRwA2l\n9v0mroT1GnACpZKFuz8BPAGQlZVVt2oWhTuCm+VmPgcrPw8m7ut9TnDTXObZNfZ+iIoa++Z8thQU\n8uzPBpOieZlEapwwk8V0INPMMoDVBAnhx6UbmdmRQCvgi1L7tjKztu6eBwwHZoQYa83gDqtnwjfP\nBtN/F26H1j3hzPtg4OjwHkEasXfnr+PVb1Zz0xmZ9O3YPOpwRKQM
oSULdy8yszHAFIJLZ59292wz\nGwvMcPeJsaajgXHu7nH7FpvZLcB7Ftxt9TXwZFixRm7HRpjzYlBqWj8/eD5EvwuDMlO3k2rtGEQi\nthbs4a5X53LUEc24YdghHm0qIpEK9T4Ld58MTC617Z5S6/cdZN+pwIDQgotaSTEs+yAoMy2aDMWF\n0PFYOP+PweyuabX/cY+JeGDSfDbuKOTpq4/XMyNEajDdwV3dNq+AWc/DN8/DttzgTuqsf4Fjr6wV\ns7tWpQ8WrWf817mMGdaL/p32T45j0x8FgisbRCR6ShbVYc8uWPhmUGZa9lGwrecwOPsBOOq8WvsY\n0srYtmsPd06YS+/2TbnxDJWfRGo6JYswfTc3KDPNfQl2bg6eEXH6HTDox9CyDl/qm4CHJi1g/fZd\nPH7lUD3PWqQWULKoaju3BM+qnvkcrJ0FyanBFODHXgkZp0OS6vIfL85j3PRV/OK0ngzs0jLqcELX\nr0P9GH+Suk3Joiq4w7efBmWm+a9D0S5o3x9GPAwDfgSNW0cdYY2Rv7uIO1+ZS8+2Tfh/Z9aNGwpF\n6gMli8rYtjY2WP0P2LwcGjYPSkzHXAkdj6nTl7xW1G8nL2DN1p2M/8VJpKWo/CRSWyhZHK7iPbD4\n7aDMlDM1mOG128nBWESfkTXyQUI1xec5G3h+2kquPSWD47rV7OdpiMj+lCwSlbc4uLN69jjYkQdN\nj4ChNwW9iPSeUUdX4+3YXcRtE+aQ0aYJvzr7yKjDqV7XTIo6girT765Pow6hStSV84DqOxcli0PZ\nnQ/ZrwZjEaumxeZnGhEMVvc6C5L160vUI28vZPWWnbx0/YkqP4nUQvq2K80dcqfDzGeDRFGYD+m9\n4Mz7Y/MztY86wlrny2Ub+fsXK7hmaHeO767BfpHaSMlir+I98Pmfg15E3kJIaQz9LgrKTF1P0GB1\nBe0sLOb2CXPo2roxt55Tz8pPInWIksW2tZC3AAo2Qe406JQFP/wv6HcxpGkG1Mp6dMoiVmws4IVr\nT6BxauL/3F68/sQQoxKRw6Vk0ahl8OyIZh3gylegXZ+oI6ozZny7ib99vpwrT+jGiT3Tow5HRCpB\nySKlEXQ8LigzKVFUmV17irlt/Bw6tWzEHT84KupwRKSSlCxA4xEh+MPUxSzbsIPnfz6EJg31z0yk\nttNERVLlZq7czFOfLGP04K4M7dUm6nBEpAooWUiV2rWnmFtfns0RzdO461yVn0TqCtUHpEr913tL\nWJq3g7//bDDN0lKiDkdEqoh6FlJlZq/awuMfLeVHWZ05rXfbqMMRkSoUarIwsxFmtsjMcszsjjLe\n/6OZzYq9FpvZllLvNzez1Wb25zDjlMrbXVTMreNn065ZGnef1zfqcESkioVWhjKzZOAx4CwgF5hu\nZhPdff7eNu5+c1z7G4FjSh3mAeCjsGKUqvPn93NYvC6fv119PC0aqfwkUteE2bMYDOS4+zJ3LwTG\nARccov1o4IW9K2Z2HNAeeCfEGKUKzFu9lf/9cCkXH9uJYUe1izocEQlBmMmiE7Aqbj03tu0AZtYN\nyADej60nAb8Hbj3UB5jZdWY2w8xm5OXlVUnQcngKi0q4dfwc0pukcu/5/aIOR0RCEmayKOtONz9I\n21HAeHcvjq3/Epjs7qsO0j44mPsT7p7l7llt22pANQr/+2EOC9Zu48GLjqZFY5WfROqqMC+dzQW6\nxK13BtYcpO0o4Ia49ROBU8zsl0BTINXM8t39gEFyic6Ctdv48/s5XDCoI2f11dTtInVZmMliOpBp\nZhnAaoKE8OPSjczsSKAV8MXebe7+k7j3rwaylChqlj3FJdw6fjYtG6dw3w9VfhKp60IrQ7l7ETAG\nmAIsAF5y92wzG2tmI+OajgbGufvBSlRSAz3+0VLmrd7Gby7sT6smqVGHIyIhC/UObnefDEwute2e\nUuv3lXOMZ4Bnqjg0qYTF67bz
3+/lcN6ADozo3yHqcESkGugObjksRcUl3PrybJqmNWDsSJWfROoL\nzQ0lh+XJT5YzO3crf/7xMaQ3bRh1OCJSTdSzkITlrM/nj+8uZkS/IzjvaJWfROoTJQtJSHGJc+v4\n2TROTeaBC/tjemCUSL2SULIws9ZhByI129OfLueblVu4f2Q/2jZT+Umkvkm0ZzHNzF42s3NNf1LW\nO8vy8vnPdxZxZp/2jBzYMepwRCQCiSaL3sATwJVAjpk9ZGa9wwtLaoqSEuf2CXNo2CCJhy5S+Umk\nvkooWXhgqruPBn4OXAV8ZWYfmdmJoUYokfr7F98y/dvN3PvDfrRrnhZ1OCISkYQunTWzdOAKgp7F\nOuBGYCIwCHiZYMZYido1k6r0cCs27uDhtxcy7Mi2XHxsmRMGi0g9keh9Fl8AzwEXuntu3PYZZvaX\nqg9LolZS4tw2fg4pSUk8dPHRKj+J1HOJJosjDzZ3k7s/XIXxSCVc/ngwF+OL11e+Mvj8tBVMW76J\nRy4ZQIcWjSp9PBGp3RJNFm3M7DagH7CvcO3uw0OJSiK1alMBv31rIaf2bstlWZ2jDkdEaoBEr4Z6\nHlhIMDZxP/AtwRTkUse4O3e8MockM36r8pOIxCSaLNLd/a/AHnf/yN1/BpwQYlwSkRe+WsVnORu5\n89yj6NRS5ScRCSRahtoT+7nWzM4jeOKd6hN1zOotO3lo8gJO6pnOjwd3jTocEalBEk0WvzGzFsCv\ngP8BmgM3hxaVVDt3544Jcyhx5+FLBqj8JCL7SShZuPubscWtwLDwwolIFd+fUBu9PCOXT5ZsYOwF\n/ejSunHU4YhIDZPoTXl/Aw64dDY2diG13NqtO3ngzfkMyWjNFUO6RR2OiNRAiQ5wvwlMir3eIyhD\n5Ze3k5mNMLNFZpZjZneU8f4fzWxW7LXYzLbEtg8ysy/MLNvM5pjZ5YmfkhwOd+euV+ZSVOI8cukA\nkpJUfhKRAyVahpoQv25mLwDvHmofM0sGHgPOAnKB6WY20d3nxx335rj2NwLHxFYLgJ+6+xIz6wh8\nbWZT3H1LIvFK4l6ZuZoPFuVxz/l96ZbeJOpwRKSGqujDjzKB8i6XGQzkuPsydy8ExgEXHKL9aOAF\nAHdf7O5LYstrgPVA2wrGKgexbtsu7n8jm+O7t+Lqk7pHHY6I1GCJjllsZ/8xi++A28vZrROwKm49\nFxhykON3I7jh7/0y3hsMpAJLE4lVEuPu3P3qPHYXlfDIpQNVfhKRQ0q0DNWsAscu69unzPmlgFHA\neHcv3u8AZh0IJjC8yt1LDvgAs+uA6wC6dtV9AYdj4uw1vLtgHXef24eMNio/icihJfpY1Yti91ns\nXW9pZheWs1su0CVuvTPBzXxlGUWsBBX3Gc0JBtR/7e5flrWTuz/h7lnuntW2rapUicrbvpt7J2Zz\nTNeW/OxkzS4vIuVLdMziXnffunclNtB8bzn7TAcyzSzDzFIJEsLE0o3M7EigFcE06Hu3pQKvAs+6\n+8sJxigJcHf+47V5FBQW8+ilA0lW+UlEEpBosiir3SFLWO5eBIwBpgALgJfcPdvMxprZyLimo4Fx\npaZA/xFwKnB13KW1gxKMVQ5h0ty1vJ39HTef2Zte7ZpGHY6I1BKJTvcxw8z+QHAprBM8Ke/r8nZy\n98nA5FLb7im1fl8Z+/0D+EeCsUmCNubv5p7XsxnYuQXXnqLyk4gkLtGexY1AIfAi8BKwE7ghrKAk\nHPdMzCZ/VxGPXDqQBskVvWpaROqjRK+G2gEccAe21B5vz1vLpDlrueXs3hx5REUubhOR+izRq6Gm\nmlnLuPVWZjYlvLCkKm3eUcivX5tHv47Nuf60nlGHIyK1UKK1iDbxU224+2agXTghSVW7741sthTs\n4T8vG0iKyk8iUgGJfnOUmNm+u97MrDsHv8FOapCp89fx+qw1jBneiz4dmkcdjojUUoleDXU38K
mZ\nfRRbP5XYndNSc20pKOSuV+dy1BHN+OXpvaIOR0RqsUQHuN82syyCBDELeJ3giiipwca+OZ9NOwr5\n29XHk9pA5ScRqbhEJxL8OXATwZQds4ATCO64Hh5eaFIZ7y9cxyszV3Pj8F7079Si/B1ERA4h0T83\nbwKOB1a4+zCC507khRaVVMrWnXu465V59G7flDHDVX4SkcpLNFnscvddAGbW0N0XAkeGF5ZUxoOT\n5pOXv5v/vGwgDRskRx2OiNQBiQ5w58bus3gNmGpmmzn4DLISoS0FhUxbvol/Pb0nAzq3LH8HEZEE\nJDrAfVFs8T4z+wBoAbwdWlRSIUUlzvINBfRq15SbzsiMOhwRqUMS7Vns4+4fld9KorBqUwGFxSU8\ncukA0lJUfhKRqqPrKeuIz3I2sH77bjq0SOPYrq2iDkdE6hglizpgx+4ibp8wh7SUJDq3bBR1OCJS\nBylZ1AEPv72Q1Vt20qNNE5L05DsRCYGSRS33xdKNPPvFCq45KYNmaSlRhyMidZSSRS1WUBiUn7ql\nN+bWc3Tbi4iEJ9RkYWYjzGyRmeWY2QEPTzKzP8Y9Y3uxmW2Je+8qM1sSe10VZpy11aNTFrFyUwEP\nXzKARqm6+klEwnPYl84mysySCZ7ZfRaQC0w3s4nuPn9vG3e/Oa79jQTTiGBmrYF7gSyCqdC/ju27\nOax4a5vp327imc+/5aoTu3FCj/SowxGROi7MnsVgIMfdl7l7ITAOuOAQ7UcDL8SWzwGmuvumWIKY\nCowIMdZaZWdhMbeNn0PnVo24bcRRUYcjIvVAmMmiE7Aqbj03tu0AZtYNyADeP9x966M/TF3E8g07\nePjiATRpGFrnUERknzCTRVnXcB7s6XqjgPHuXnw4+5rZdWY2w8xm5OXVj0lwv16xmb9+upyfDOnK\nSb3aRB2OiNQTYSaLXKBL3HpnDj754Ci+L0ElvK+7P+HuWe6e1bZt20qGW/Pt2lPMbeNn06FFI+48\nt0/U4YhIPRJmspgOZJpZhpmlEiSEiaUbmdmRQCuChyntNQU428xamVkr4OzYtnrtT+8uYWneDn57\n8dE0VflJRKpRaN847l5kZmMIvuSTgafdPdvMxgIz3H1v4hgNjHN3j9t3k5k9QJBwAMa6+6awYq0N\nZq/awhMfL+XyrC6c2rvu96JEpGYJ9c9Td58MTC617Z5S6/cdZN+ngadDC64W2V1UzC0vz6Z98zTu\nPl/lJxGpfqpl1AL/814OS9bn87drjqe5pvQQkQhouo8abt7qrfzfR0u55NjODDuyXdThiEg9pWRR\ngxUWlXDLy7NJb5LKPef3jTocEanHVIaqwR77IIeF323nqZ9m0aKxyk8iEh31LGqo+Wu28dgHOVw4\nqCNn9m0fdTgiUs8pWdRAe4piLFiaAAAN4klEQVSD8lPLxqnc+8N+UYcjIqIyVE30lw+XMn/tNv5y\nxXG0apIadTgiIupZ1DSLvtvOf7+/hPMHdGBE/yOiDkdEBFCyqFGKiku4dfxsmqelcP9IlZ9EpOZQ\nGaoGeeKTZczJ3cpjPz6W9KYNow5HRGQf9SxqiJz12/nT1CX8oP8RnDegQ9ThiIjsR8miBigucW55\neQ5NGiYz9oL+UYcjInIAlaFqgL9+uoxZq7bwX6MG0baZyk8iUvOoZxGxZXn5/P6dxZzVtz0jB3aM\nOhwRkTIpWUSouMS5bfwc0lKSefDC/piV9TRZEZHoqQwVoWc+/5YZKzbzhx8NpF3ztEof78XrT6yC\nqEREDqSeRUS+3bCDR6csZPhR7bjomE5RhyMickhKFhEoKXFumzCHlOQkHrroaJWfRKTGU7KIwD+m\nreCr5Zv4j/P7ckSLypefRETCFmqyMLMRZrbIzHLM7I6DtPmRmc03s2wz+2fc9kdi2xaY2X9bHfnz\ne9WmAn731kJO7d2Wy47rHHU4IiIJCW2A28ySgceAs4BcYL
qZTXT3+XFtMoE7gaHuvtnM2sW2nwQM\nBQbEmn4KnAZ8GFa81cHduX3CHJLM+N3FKj+JSO0RZs9iMJDj7svcvRAYB1xQqs21wGPuvhnA3dfH\ntjuQBqQCDYEUYF2IsVaLf361ks+XbuSuc/vQsWWjqMMREUlYmMmiE7Aqbj03ti1eb6C3mX1mZl+a\n2QgAd/8C+ABYG3tNcfcFpT/AzK4zsxlmNiMvLy+Uk6gquZsLeGjSAk7u1YbRg7tEHY6IyGEJM1mU\nVWPxUusNgEzgdGA08JSZtTSzXkAfoDNBghluZqcecDD3J9w9y92z2rZtW6XBVyV3585X5uLAb1V+\nEpFaKMxkkQvE/wndGVhTRpvX3X2Puy8HFhEkj4uAL909393zgbeAE0KMNVQvzVjFJ0s2cOcPjqJL\n68ZRhyMictjCTBbTgUwzyzCzVGAUMLFUm9eAYQBm1oagLLUMWAmcZmYNzCyFYHD7gDJUbbB2605+\n8+YCTujRmp8M6RZ1OCIiFRJasnD3ImAMMIXgi/4ld882s7FmNjLWbAqw0czmE4xR3OruG4HxwFJg\nLjAbmO3ub4QVa1j2lp+KSpxHLhlIUpLKTyJSO4U6N5S7TwYml9p2T9yyA/8ee8W3KQauDzO26jBh\n5mo+XJTHvT/sS9d0lZ9EpPbSHdwhWbdtF2PfyGZw99ZcdWL3qMMREakUJYsQuDt3vzqX3UUlPHzp\nAJWfRKTWU7IIweuz1vDugvXces6RZLRpEnU4IiKVpmRRxdZv38V9b2RzbNeWXDM0I+pwRESqhJJF\nFXJ3/uO1eRQUFvPIpQNJVvlJROoIJYsq9OactUzJXse/n9WbXu2aRh2OiEiVUbKoIhvyd3PvxGwG\ndm7Bz09W+UlE6hYliypy7+vZ5O8q4tHLBtIgWb9WEalb9K1WBd6au5ZJc9dy05mZ9G7fLOpwRESq\nnJJFJW3aUch/vD6P/p2ac92pPaIOR0QkFKFO91Ef3Dcxm6079/DcvwwhReUnEamj9O1WCe9kf8fE\n2WsYMyyTPh2aRx2OiEholCwqaEtBIXe/No8+HZrzy2E9ow5HRCRUKkNV0Ng357N5RyHPXHO8yk8i\nUufpW64C3l+4jldmruaXp/ekX8cWUYcjIhI6JYvDtHXnHu58ZS5Htm/GmOGZUYcjIlItVIY6TA9O\nms+G/EKe/GkWqQ2Ua0WkftC33WH4cNF6XpqRy/Wn9mBA55ZRhyMiUm1CTRZmNsLMFplZjpndcZA2\nPzKz+WaWbWb/jNve1czeMbMFsfe7hxlrebbvCspPvdo15d/OUPlJROqX0MpQZpYMPAacBeQC081s\norvPj2uTCdwJDHX3zWbWLu4QzwIPuvtUM2sKlIQVayIemryQddt2MeFfTyItJTnKUEREql2YPYvB\nQI67L3P3QmAccEGpNtcCj7n7ZgB3Xw9gZn2BBu4+NbY9390LQoz1kD5dsoEXvlrJtaf04JiuraIK\nQ0QkMmEmi07Aqrj13Ni2eL2B3mb2mZl9aWYj4rZvMbNXzOwbM3s01lOpdvm7i7h9whx6tGnCzWf1\njiIEEZHIhZksynpMnJdabwBkAqcDo4GnzKxlbPspwC3A8UAP4OoDPsDsOjObYWYz8vLyqi7yOA+/\ntZA1W3fy6GUDVH4SkXorzGSRC3SJW+8MrCmjzevuvsfdlwOLCJJHLvBNrIRVBLwGHFv6A9z9CXfP\ncvestm3bVvkJfL50A899uYKfDc3guG6tq/z4IiK1RZjJYjqQaWYZZpYKjAImlmrzGjAMwMzaEJSf\nlsX2bWVmezPAcGA+1aigsIg7Jsyle3pjbjn7yOr8aBGRGie0ZBHrEYwBpgALgJfcPdvMxprZyFiz\nKcBGM5sPfADc6u4b3b2YoAT1npnNJShpPRlWrGV55O1FrNxUwMOXDKBRqspPIlK/mXvpYYTaKSsr\ny2fMmFElx/pq+SYuf+
ILrjqxO/eN7FclxxQRqYnM7Gt3zyqvne7gLmVnYTG3jZ9N51aNuG2Eyk8i\nIqC5oQ7w+3cW8e3GAv557RAap+rXIyIC6lns5+sVm/jrZ8v5yZCunNSzTdThiIjUGEoWMbv2FHPr\n+Dl0bNGIO8/tE3U4IiI1iuoswOWPf8HKTQWs3bqL5/5lME0b6tciIhJPPQuCKT3Wbt3FqOO7cEpm\n1d/cJyJS29X7ZLG7qJhleTtITU7irvNUfhIRKUu9Txbrt+0GIKNNY5qnpUQcjYhIzVTvi/NdWjfm\n6E7NMStr3kMREQH1LACUKEREylHvexYAL15/YtQhiIjUaOpZiIhIuZQsRESkXEoWIiJSLiULEREp\nl5KFiIiUS8lCRETKpWQhIiLlUrIQEZFyKVmIiEi5zN2jjqFKmFkesKISh2gDbKiicKJUV84DdC41\nVV05l7pyHlC5c+nm7uU+m6HOJIvKMrMZ7p4VdRyVVVfOA3QuNVVdOZe6ch5QPeeiMpSIiJRLyUJE\nRMqlZPG9J6IOoIrUlfMAnUtNVVfOpa6cB1TDuWjMQkREyqWehYiIlEvJIsbMHjCzOWY2y8zeMbOO\nUcdUUWb2qJktjJ3Pq2bWMuqYKsrMLjOzbDMrMbNad+WKmY0ws0VmlmNmd0QdT2WY2dNmtt7M5kUd\nS2WYWRcz+8DMFsT+bd0UdUwVZWZpZvaVmc2Oncv9oX2WylABM2vu7ttiy/8G9HX3X0QcVoWY2dnA\n++5eZGYPA7j77RGHVSFm1gcoAR4HbnH3GRGHlDAzSwYWA2cBucB0YLS7z480sAoys1OBfOBZd+8f\ndTwVZWYdgA7uPtPMmgFfAxfWxv8uFjwTuom755tZCvApcJO7f1nVn6WeRczeRBHTBKi1WdTd33H3\notjql0DnKOOpDHdf4O6Loo6jggYDOe6+zN0LgXHABRHHVGHu/jGwKeo4Ksvd17r7zNjydmAB0Cna\nqCrGA/mx1ZTYK5TvLiWLOGb2oJmtAn4C3BN1PFXkZ8BbUQdRT3UCVsWt51JLv5TqKjPrDhwDTIs2\nkoozs2QzmwWsB6a6eyjnUq+ShZm9a2bzynhdAODud7t7F+B5YEy00R5aeecSa3M3UERwPjVWIudS\nS1kZ22ptj7WuMbOmwATg/5WqLNQq7l7s7oMIKgiDzSyUEmGDMA5aU7n7mQk2/ScwCbg3xHAqpbxz\nMbOrgPOBM7yGD0wdxn+X2iYX6BK33hlYE1EsEidW358APO/ur0QdT1Vw9y1m9iEwAqjyixDqVc/i\nUMwsM251JLAwqlgqy8xGALcDI929IOp46rHpQKaZZZhZKjAKmBhxTPVebFD4r8ACd/9D1PFUhpm1\n3Xu1o5k1As4kpO8uXQ0VY2YTgCMJrrxZAfzC3VdHG1XFmFkO0BDYGNv0ZS2+susi4H+AtsAWYJa7\nnxNtVIkzs3OBPwHJwNPu/mDEIVWYmb0AnE4ww+k64F53/2ukQVWAmZ0MfALMJfj/HeAud58cXVQV\nY2YDgL8T/PtKAl5y97GhfJaShYiIlEdlKBERKZeShYiIlEvJQkREyqVkISIi5VKyEBGRcilZiBwG\nM8svv9Uh9x9vZj1iy03N7HEzWxqbMfRjMxtiZqmx5Xp106zUbEoWItXEzPoBye6+LLbpKYKJ+TLd\nvR9wNdAmNunge8DlkQQqUgYlC5EKsMCjsTms5prZ5bHtSWb2v7GewptmNtnMLo3t9hPg9Vi7nsAQ\n4NfuXgIQm512Uqzta7H2IjWCurkiFXMxMAgYSHBH83Qz+xgYCnQHjgbaEUx//XRsn6HAC7HlfgR3\noxcf5PjzgONDiVykAtSzEKmYk4EXYjN+rgM+IvhyPxl42d1L3P074IO4fToAeYkcPJZECmMP5xGJ\nnJKFSMWUNf34obYD7ATSYsvZwEAzO9T/gw2BXRWITaTKKVmIVMzHwOWxB8+0BU4FviJ4
rOUlsbGL\n9gQT7+21AOgF4O5LgRnA/bFZUDGzzL3P8DCzdCDP3fdU1wmJHIqShUjFvArMAWYD7wO3xcpOEwie\nYzGP4Lnh04CtsX0msX/y+DlwBJBjZnOBJ/n+eRfDgFo3C6rUXZp1VqSKmVlTd8+P9Q6+Aoa6+3ex\n5w18EFs/2MD23mO8AtxZi58/LnWMroYSqXpvxh5Ikwo8EOtx4O47zexegudwrzzYzrEHJb2mRCE1\niXoWIiJSLo1ZiIhIuZQsRESkXEoWIiJSLiULEREpl5KFiIiUS8lCRETK9f8BvW68gvogpi4AAAAA\nSUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# plot CV误差曲线\n",
    "test_means = grid.cv_results_[ 'mean_test_score' ]\n",
    "test_stds = grid.cv_results_[ 'std_test_score' ]\n",
    "train_means = grid.cv_results_[ 'mean_train_score' ]\n",
    "train_stds = grid.cv_results_[ 'std_train_score' ]\n",
    "\n",
    "# plot results\n",
    "n_Cs = len(Cs)\n",
    "number_penaltys = len(penaltys)\n",
    "test_scores = np.array(test_means).reshape(n_Cs,number_penaltys)\n",
    "#train_scores = np.array(train_means).reshape(n_Cs,number_penaltys)\n",
    "test_stds = np.array(test_stds).reshape(n_Cs,number_penaltys)\n",
    "#train_stds = np.array(train_stds).reshape(n_Cs,number_penaltys)\n",
    "\n",
    "x_axis = np.log10(Cs)\n",
    "for i, value in enumerate(penaltys):\n",
    "    #pyplot.plot(log(Cs), test_scores[i], label= 'penalty:'   + str(value))\n",
    "   # plt.errorbar(x_axis, -test_scores[:,i], yerr=test_stds[:,i] ,label = penaltys[i] +' Test')\n",
    "    \n",
    "    plt.errorbar(x_axis, test_scores[:,i], yerr=test_stds[:,i] ,label = penaltys[i] +' Test')\n",
    "    #plt.errorbar(x_axis, -train_scores[:,i], yerr=train_stds[:,i] ,label = penaltys[i] +' Train')\n",
    "    \n",
    "plt.legend()\n",
    "plt.xlabel( 'log(C)' )                                                                                                      \n",
    "plt.ylabel( 'accuracy' )\n",
    "plt.savefig('LogisticGridSearchCV_C_accuracy.png' )\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "l2\n",
      "0.1\n"
     ]
    }
   ],
   "source": [
    "print(grid.best_params_['penalty'])\n",
    "print(grid.best_params_['C'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[ 0.36167135,  0.99773092, -0.0648661 ,  0.05966521, -0.04513553,\n",
       "         0.54600449,  0.25990383,  0.15544357]])"
      ]
     },
     "execution_count": 57,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "grid.best_estimator_.coef_"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 保存模型，用于后续测试"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "import pickle\n",
    "\n",
    "pickle.dump(grid.best_estimator_, open(\"Diabetes_L1_org.pkl\", 'wb'))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "anaconda-cloud": {},
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
